Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-24 07:06:41 +00:00)

Commit 2de76cd82d: Merge branch 'master' into 3589.more-web-tests-python-3
@@ -91,6 +91,9 @@ workflows:
      - "build-porting-depgraph":
          <<: *DOCKERHUB_CONTEXT

+      - "typechecks":
+          <<: *DOCKERHUB_CONTEXT
+
  images:
    # Build the Docker images used by the ci jobs. This makes the ci jobs
    # faster and takes various spurious failures out of the critical path.

@@ -475,6 +478,18 @@ jobs:
            . /tmp/venv/bin/activate
            ./misc/python3/depgraph.sh

+  typechecks:
+    docker:
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:18.04-py3"
+
+    steps:
+      - "checkout"
+      - run:
+          name: "Validate Types"
+          command: |
+            /tmp/venv/bin/tox -e typechecks
+
  build-image: &BUILD_IMAGE
    # This is a template for a job to build a Docker image that has as much of
    # the setup as we can manage already done and baked in. This cuts down on
mypy.ini (new file, +3 lines)
@@ -0,0 +1,3 @@
+[mypy]
+ignore_missing_imports = True
+plugins=mypy_zope:plugin
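For context (this note and sketch are not part of the commit): ignore_missing_imports = True stops mypy from erroring on third-party packages without type stubs, while plugins=mypy_zope:plugin loads the mypy-zope plugin so mypy can follow the zope.interface declarations used throughout this diff. A minimal illustration of the kind of code that plugin checks, using hypothetical IThing/Thing names:

    from zope.interface import Interface, implementer

    class IThing(Interface):
        def poke():
            """zope interfaces declare methods without an explicit self."""

    @implementer(IThing)
    class Thing(object):
        def poke(self):
            return "poked"  # mypy-zope can verify Thing against IThing
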
newsfragments/3399.feature (new file, +1 line)
@@ -0,0 +1 @@
+Added 'typechecks' environment for tox running mypy and performing static typechecks.
@@ -14,7 +14,9 @@ __all__ = [

__version__ = "unknown"
try:
-    from allmydata._version import __version__
+    # type ignored as it fails in CI
+    # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
+    from allmydata._version import __version__  # type: ignore
except ImportError:
    # We're running in a tree that hasn't run update_version, and didn't
    # come with a _version.py, so we don't know what our version is.

@@ -24,7 +26,9 @@ except ImportError:
full_version = "unknown"
branch = "unknown"
try:
-    from allmydata._version import full_version, branch
+    # type ignored as it fails in CI
+    # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
+    from allmydata._version import full_version, branch  # type: ignore
except ImportError:
    # We're running in a tree that hasn't run update_version, and didn't
    # come with a _version.py, so we don't know what our full version or
@@ -57,6 +57,10 @@ class CRSEncoder(object):

        return defer.succeed((shares, desired_share_ids))

+    def encode_proposal(self, data, desired_share_ids=None):
+        raise NotImplementedError()
+

@implementer(ICodecDecoder)
class CRSDecoder(object):

@@ -568,7 +568,7 @@ class DirectoryNode(object):
        d = self.get_child_and_metadata(childnamex)
        return d

-    def set_uri(self, namex, writecap, readcap, metadata=None, overwrite=True):
+    def set_uri(self, namex, writecap, readcap=None, metadata=None, overwrite=True):
        precondition(isinstance(writecap, (bytes, type(None))), writecap)
        precondition(isinstance(readcap, (bytes, type(None))), readcap)

@@ -1,6 +1,5 @@
import six
import heapq, traceback, array, stat, struct
-from types import NoneType
from stat import S_IFREG, S_IFDIR
from time import time, strftime, localtime

@@ -267,7 +266,7 @@ def _attrs_to_metadata(attrs):


def _direntry_for(filenode_or_parent, childname, filenode=None):
-    precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+    precondition(isinstance(childname, (unicode, type(None))), childname=childname)

    if childname is None:
        filenode_or_parent = filenode

@@ -672,7 +671,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
        self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" %
                 (parent, childname, filenode, metadata), level=OPERATIONAL)

-        precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+        precondition(isinstance(childname, (unicode, type(None))), childname=childname)
        precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode)
        precondition(not self.closed, sftpfile=self)

@@ -1194,7 +1193,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore)
        self.log(request, level=OPERATIONAL)

-        _assert(isinstance(userpath, str) and isinstance(direntry, (str, NoneType)),
+        _assert(isinstance(userpath, str) and isinstance(direntry, (str, type(None))),
                userpath=userpath, direntry=direntry)

        files = []

@@ -1219,7 +1218,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
    def _remove_heisenfile(self, userpath, parent, childname, file_to_remove):
        if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY)

-        _assert(isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)),
+        _assert(isinstance(userpath, str) and isinstance(childname, (unicode, type(None))),
                userpath=userpath, childname=childname)

        direntry = _direntry_for(parent, childname)

@@ -1246,7 +1245,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                 (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata),
                 level=NOISY)

-        _assert((isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)) and
+        _assert((isinstance(userpath, str) and isinstance(childname, (unicode, type(None))) and
                 (metadata is None or 'no-write' in metadata)),
                userpath=userpath, childname=childname, metadata=metadata)

@@ -1979,7 +1978,7 @@ class SFTPServer(service.MultiService):

    def __init__(self, client, accountfile, accounturl,
                 sftp_portstr, pubkey_file, privkey_file):
-        precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)
+        precondition(isinstance(accountfile, (unicode, type(None))), accountfile)
        precondition(isinstance(pubkey_file, unicode), pubkey_file)
        precondition(isinstance(privkey_file, unicode), privkey_file)
        service.MultiService.__init__(self)
@@ -19,7 +19,7 @@ from twisted.protocols import basic
from allmydata.interfaces import IImmutableFileNode, ICheckable
from allmydata.uri import LiteralFileURI

-@implementer(IImmutableFileNode, ICheckable)
+
class _ImmutableFileNodeBase(object):

    def get_write_uri(self):

@@ -56,6 +56,7 @@ class _ImmutableFileNodeBase(object):
        return not self == other


+@implementer(IImmutableFileNode, ICheckable)
class LiteralFileNode(_ImmutableFileNodeBase):

    def __init__(self, filecap):
@@ -141,7 +141,7 @@ class CHKCheckerAndUEBFetcher(object):


@implementer(interfaces.RICHKUploadHelper)
-class CHKUploadHelper(Referenceable, upload.CHKUploader):
+class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warner/foolscap#78
    """I am the helper-server -side counterpart to AssistedUploader. I handle
    peer selection, encoding, and share pushing. I read ciphertext from the
    remote AssistedUploader.

@@ -499,10 +499,13 @@ class LocalCiphertextReader(AskUntilSuccessMixin):
        # ??. I'm not sure if it makes sense to forward the close message.
        return self.call("close")

+    # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3561
+    def set_upload_status(self, upload_status):
+        raise NotImplementedError


@implementer(interfaces.RIHelper, interfaces.IStatsProducer)
-class Helper(Referenceable):
+class Helper(Referenceable): # type: ignore # warner/foolscap#78
    """
    :ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which
        have been started but not finished, a mapping from storage index to the
@@ -386,6 +386,9 @@ class PeerSelector(object):
        )
        return self.happiness_mappings

+    def add_peers(self, peerids=None):
+        raise NotImplementedError
+

class _QueryStatistics(object):

@@ -1424,7 +1427,7 @@ class LiteralUploader(object):
        return self._status

@implementer(RIEncryptedUploadable)
-class RemoteEncryptedUploadable(Referenceable):
+class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolscap#78

    def __init__(self, encrypted_uploadable, upload_status):
        self._eu = IEncryptedUploadable(encrypted_uploadable)
@@ -681,7 +681,7 @@ class IURI(Interface):
        passing into init_from_string."""


-class IVerifierURI(Interface, IURI):
+class IVerifierURI(IURI):
    def init_from_string(uri):
        """Accept a string (as created by my to_string() method) and populate
        this instance with its data. I am not normally called directly,

@@ -748,7 +748,7 @@ class IProgress(Interface):
        "Current amount of progress (in percentage)"
    )

-    def set_progress(self, value):
+    def set_progress(value):
        """
        Sets the current amount of progress.

@@ -756,7 +756,7 @@ class IProgress(Interface):
        set_progress_total.
        """

-    def set_progress_total(self, value):
+    def set_progress_total(value):
        """
        Sets the total amount of expected progress

@@ -859,12 +859,6 @@ class IPeerSelector(Interface):
        peer selection begins.
        """

-    def confirm_share_allocation(peerid, shnum):
-        """
-        Confirm that an allocated peer=>share pairing has been
-        successfully established.
-        """
-
    def add_peers(peerids=set):
        """
        Update my internal state to include the peers in peerids as

@@ -1824,11 +1818,6 @@ class IEncoder(Interface):
        willing to receive data.
        """

-    def set_size(size):
-        """Specify the number of bytes that will be encoded. This must be
-        peformed before get_serialized_params() can be called.
-        """
-
    def set_encrypted_uploadable(u):
        """Provide a source of encrypted upload data. 'u' must implement
        IEncryptedUploadable.
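An aside on the set_progress(self, value) -> set_progress(value) changes above (this note and sketch are not part of the commit): methods declared on a zope.interface.Interface describe the caller-visible signature and so take no self; implementations still define self as usual. A condensed sketch, where the Progress class is a hypothetical implementation:

    from zope.interface import Interface, implementer

    class IProgress(Interface):
        def set_progress(value):
            """Interface methods are declared without 'self'."""

    @implementer(IProgress)
    class Progress(object):
        def __init__(self):
            self.value = 0.0

        def set_progress(self, value):  # the implementation does take self
            self.value = value
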
@@ -178,9 +178,9 @@ class IntroducerClient(service.Service, Referenceable):
        kwargs["facility"] = "tahoe.introducer.client"
        return log.msg(*args, **kwargs)

-    def subscribe_to(self, service_name, cb, *args, **kwargs):
+    def subscribe_to(self, service_name, callback, *args, **kwargs):
        obs = self._local_subscribers.setdefault(service_name, ObserverList())
-        obs.subscribe(lambda key_s, ann: cb(key_s, ann, *args, **kwargs))
+        obs.subscribe(lambda key_s, ann: callback(key_s, ann, *args, **kwargs))
        self._maybe_subscribe()
        for index,(ann,key_s,when) in list(self._inbound_announcements.items()):
            precondition(isinstance(key_s, bytes), key_s)
@@ -73,7 +73,7 @@ class IIntroducerClient(Interface):
    publish their services to the rest of the world, and I help them learn
    about services available on other nodes."""

-    def publish(service_name, ann, signing_key=None):
+    def publish(service_name, ann, signing_key):
        """Publish the given announcement dictionary (which must be
        JSON-serializable), plus some additional keys, to the world.

@@ -83,8 +83,7 @@ class IIntroducerClient(Interface):
        the signing_key, if present, otherwise it is derived from the
        'anonymous-storage-FURL' key.

-        If signing_key= is set to an instance of SigningKey, it will be
-        used to sign the announcement."""
+        signing_key (a SigningKey) will be used to sign the announcement."""

    def subscribe_to(service_name, callback, *args, **kwargs):
        """Call this if you will eventually want to use services with the
@@ -15,6 +15,12 @@ from past.builtins import long
from six import ensure_text

import time, os.path, textwrap
+
+try:
+    from typing import Any, Dict, Union
+except ImportError:
+    pass
+
from zope.interface import implementer
from twisted.application import service
from twisted.internet import defer

@@ -147,10 +153,12 @@ class IntroducerService(service.MultiService, Referenceable):
    name = "introducer"
    # v1 is the original protocol, added in 1.0 (but only advertised starting
    # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
-    VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
+    # TODO: reconcile bytes/str for keys
+    VERSION = {
+        #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
        b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
        b"application-version": allmydata.__full_version__.encode("utf-8"),
-        }
+        } # type: Dict[Union[bytes, str], Any]

    def __init__(self):
        service.MultiService.__init__(self)
@@ -564,7 +564,7 @@ class MutableFileNode(object):
        return d


-    def upload(self, new_contents, servermap):
+    def upload(self, new_contents, servermap, progress=None):
        """
        I overwrite the contents of the best recoverable version of this
        mutable file with new_contents, using servermap instead of

@@ -951,7 +951,7 @@ class MutableFileVersion(object):
        return self._servermap.size_of_version(self._version)


-    def download_to_data(self, fetch_privkey=False, progress=None):
+    def download_to_data(self, fetch_privkey=False, progress=None): # type: ignore # fixme
        """
        I return a Deferred that fires with the contents of this
        readable object as a byte string.

@@ -1205,3 +1205,7 @@ class MutableFileVersion(object):
                             self._servermap,
                             mode=mode)
        return u.update()
+
+    # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3562
+    def get_servermap(self):
+        raise NotImplementedError
@@ -23,6 +23,11 @@ from base64 import b32decode, b32encode
from errno import ENOENT, EPERM
from warnings import warn

+try:
+    from typing import Union
+except ImportError:
+    pass
+
import attr

# On Python 2 this will be the backported package.

@@ -273,6 +278,11 @@ def _error_about_old_config_files(basedir, generated_files):
        raise e


+def ensure_text_and_abspath_expanduser_unicode(basedir):
+    # type: (Union[bytes, str]) -> str
+    return abspath_expanduser_unicode(ensure_text(basedir))
+
+
@attr.s
class _Config(object):
    """

@@ -300,8 +310,8 @@ class _Config(object):
    config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser))
    portnum_fname = attr.ib()
    _basedir = attr.ib(
-        converter=lambda basedir: abspath_expanduser_unicode(ensure_text(basedir)),
-    )
+        converter=ensure_text_and_abspath_expanduser_unicode,
+    ) # type: str
    config_path = attr.ib(
        validator=attr.validators.optional(
            attr.validators.instance_of(FilePath),

@@ -927,7 +937,6 @@ class Node(service.MultiService):
    """
    NODETYPE = "unknown NODETYPE"
    CERTFILE = "node.pem"
-    GENERATED_FILES = []

    def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider):
        """
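A note on the "# type: (Union[bytes, str]) -> str" line above (illustration only, not taken from the commit): because this code still runs on Python 2, annotations are written as mypy type comments rather than Python 3 function annotations, and the typing import is wrapped in try/except so the module still imports where typing is unavailable. A minimal standalone sketch of the same pattern, with a hypothetical helper name:

    try:
        from typing import Union
    except ImportError:
        pass  # typing is only needed by mypy, not at runtime

    def to_text(value):
        # type: (Union[bytes, str]) -> str
        """Hypothetical helper: decode bytes to text, pass text through."""
        if isinstance(value, bytes):
            return value.decode("utf-8")
        return value
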
@@ -1,5 +1,10 @@
from __future__ import print_function

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
from twisted.python import usage
from allmydata.scripts.common import BaseOptions

@@ -79,8 +84,8 @@ def do_admin(options):


subCommands = [
-    ["admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"],
-    ]
+    ("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"),
+    ] # type: SubCommands

dispatch = {
    "admin": do_admin,
@@ -1,6 +1,12 @@
from __future__ import print_function

import os.path, re, fnmatch
+
+try:
+    from allmydata.scripts.types_ import SubCommands, Parameters
+except ImportError:
+    pass
+
from twisted.python import usage
from allmydata.scripts.common import get_aliases, get_default_nodedir, \
                                     DEFAULT_ALIAS, BaseOptions

@@ -19,7 +25,7 @@ class FileStoreOptions(BaseOptions):
         "This overrides the URL found in the --node-directory ."],
        ["dir-cap", None, None,
         "Specify which dirnode URI should be used as the 'tahoe' alias."]
-        ]
+        ] # type: Parameters

    def postOptions(self):
        self["quiet"] = self.parent["quiet"]

@@ -455,25 +461,25 @@ class DeepCheckOptions(FileStoreOptions):
    Optionally repair any problems found."""

subCommands = [
-    ["mkdir", None, MakeDirectoryOptions, "Create a new directory."],
-    ["add-alias", None, AddAliasOptions, "Add a new alias cap."],
-    ["create-alias", None, CreateAliasOptions, "Create a new alias cap."],
-    ["list-aliases", None, ListAliasesOptions, "List all alias caps."],
-    ["ls", None, ListOptions, "List a directory."],
-    ["get", None, GetOptions, "Retrieve a file from the grid."],
-    ["put", None, PutOptions, "Upload a file into the grid."],
-    ["cp", None, CpOptions, "Copy one or more files or directories."],
-    ["unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."],
-    ["mv", None, MvOptions, "Move a file within the grid."],
-    ["ln", None, LnOptions, "Make an additional link to an existing file or directory."],
-    ["backup", None, BackupOptions, "Make target dir look like local dir."],
-    ["webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."],
-    ["manifest", None, ManifestOptions, "List all files/directories in a subtree."],
-    ["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."],
-    ["check", None, CheckOptions, "Check a single file or directory."],
-    ["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."],
-    ["status", None, TahoeStatusCommand, "Various status information."],
-    ]
+    ("mkdir", None, MakeDirectoryOptions, "Create a new directory."),
+    ("add-alias", None, AddAliasOptions, "Add a new alias cap."),
+    ("create-alias", None, CreateAliasOptions, "Create a new alias cap."),
+    ("list-aliases", None, ListAliasesOptions, "List all alias caps."),
+    ("ls", None, ListOptions, "List a directory."),
+    ("get", None, GetOptions, "Retrieve a file from the grid."),
+    ("put", None, PutOptions, "Upload a file into the grid."),
+    ("cp", None, CpOptions, "Copy one or more files or directories."),
+    ("unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."),
+    ("mv", None, MvOptions, "Move a file within the grid."),
+    ("ln", None, LnOptions, "Make an additional link to an existing file or directory."),
+    ("backup", None, BackupOptions, "Make target dir look like local dir."),
+    ("webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."),
+    ("manifest", None, ManifestOptions, "List all files/directories in a subtree."),
+    ("stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."),
+    ("check", None, CheckOptions, "Check a single file or directory."),
+    ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."),
+    ("status", None, TahoeStatusCommand, "Various status information."),
+    ] # type: SubCommands

def mkdir(options):
    from allmydata.scripts import tahoe_mkdir
@@ -4,6 +4,12 @@ import os, sys, urllib, textwrap
import codecs
from os.path import join

+try:
+    from typing import Optional
+    from .types_ import Parameters
+except ImportError:
+    pass
+
from yaml import (
    safe_dump,
)

@@ -41,8 +47,8 @@ class BaseOptions(usage.Options):
    def opt_version(self):
        raise usage.UsageError("--version not allowed on subcommands")

-    description = None
-    description_unwrapped = None
+    description = None # type: Optional[str]
+    description_unwrapped = None # type: Optional[str]

    def __str__(self):
        width = int(os.environ.get('COLUMNS', '80'))

@@ -65,7 +71,7 @@ class BasedirOptions(BaseOptions):
    optParameters = [
        ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]"
         % quote_local_unicode_path(_default_nodedir)],
-    ]
+    ] # type: Parameters

    def parseArgs(self, basedir=None):
        # This finds the node-directory option correctly even if we are in a subcommand.

@@ -102,7 +108,7 @@ class NoDefaultBasedirOptions(BasedirOptions):

    optParameters = [
        ["basedir", "C", None, "Specify which Tahoe base directory should be used."],
-    ]
+    ] # type: Parameters

    # This is overridden in order to ensure we get a "Wrong number of arguments."
    # error when more than one argument is given.
@@ -3,6 +3,11 @@ from __future__ import print_function
import os
import json

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
from twisted.internet import reactor, defer
from twisted.python.usage import UsageError
from twisted.python.filepath import (

@@ -492,10 +497,10 @@ def create_introducer(config):


subCommands = [
-    ["create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."],
-    ["create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."],
-    ["create-introducer", None, CreateIntroducerOptions, "Create an introducer node."],
-    ]
+    ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."),
+    ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."),
+    ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."),
+    ] # type: SubCommands

dispatch = {
    "create-node": create_node,
@@ -1,5 +1,10 @@
from __future__ import print_function

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
from future.utils import bchr

# do not import any allmydata modules at this level. Do that from inside

@@ -1053,8 +1058,8 @@ def do_debug(options):


subCommands = [
-    ["debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."],
-    ]
+    ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."),
+    ] # type: SubCommands

dispatch = {
    "debug": do_debug,
@@ -4,6 +4,11 @@ import os, sys
from six.moves import StringIO
import six

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
from twisted.python import usage
from twisted.internet import defer, task, threads

@@ -40,8 +45,8 @@ _control_node_dispatch = {
}

process_control_commands = [
-    ["run", None, tahoe_run.RunOptions, "run a node without daemonizing"],
-    ]
+    ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"),
+    ] # type: SubCommands


class Options(usage.Options):

@@ -98,7 +103,7 @@ class Options(usage.Options):

create_dispatch = {}
for module in (create_node,):
-    create_dispatch.update(module.dispatch)
+    create_dispatch.update(module.dispatch) # type: ignore

def parse_options(argv, config=None):
    if not config:
@@ -2,6 +2,11 @@ from __future__ import print_function

import json

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
from twisted.python import usage
from twisted.internet import defer, reactor

@@ -103,7 +108,7 @@ def invite(options):
subCommands = [
    ("invite", None, InviteOptions,
     "Invite a new node to this grid"),
-    ]
+    ] # type: SubCommands

dispatch = {
    "invite": invite,
src/allmydata/scripts/types_.py (new file, +12 lines)
@@ -0,0 +1,12 @@
+from typing import List, Tuple, Type, Sequence, Any
+from allmydata.scripts.common import BaseOptions
+
+
+# Historically, subcommands were implemented as lists, but due to a
+# [designed contraint in mypy](https://stackoverflow.com/a/52559625/70170),
+# a Tuple is required.
+SubCommand = Tuple[str, None, Type[BaseOptions], str]
+
+SubCommands = List[SubCommand]
+
+Parameters = List[Sequence[Any]]
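To show how these aliases are consumed (a condensed illustration based on the option-table changes elsewhere in this diff; ExampleOptions is a hypothetical stand-in for a real options class): a module builds its subcommand table from tuples and attaches the alias as a type comment, which keeps the annotation valid on Python 2.

    try:
        from allmydata.scripts.types_ import SubCommands
    except ImportError:
        pass

    from allmydata.scripts.common import BaseOptions

    class ExampleOptions(BaseOptions):
        """Hypothetical subcommand options class, for illustration only."""

    subCommands = [
        ("example", None, ExampleOptions, "An illustrative subcommand."),
        ] # type: SubCommands
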
@@ -23,7 +23,7 @@ from allmydata.interfaces import IStatsProducer
@implementer(IStatsProducer)
class CPUUsageMonitor(service.MultiService):
    HISTORY_LENGTH = 15
-    POLL_INTERVAL = 60
+    POLL_INTERVAL = 60 # type: float

    def __init__(self):
        service.MultiService.__init__(self)
@@ -19,7 +19,7 @@ import os, time, struct
try:
    import cPickle as pickle
except ImportError:
-    import pickle
+    import pickle # type: ignore
from twisted.internet import reactor
from twisted.application import service
from allmydata.storage.common import si_b2a
@@ -202,7 +202,7 @@ class ShareFile(object):


@implementer(RIBucketWriter)
-class BucketWriter(Referenceable):
+class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78

    def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary):
        self.ss = ss

@@ -301,7 +301,7 @@ class BucketWriter(Referenceable):


@implementer(RIBucketReader)
-class BucketReader(Referenceable):
+class BucketReader(Referenceable): # type: ignore # warner/foolscap#78

    def __init__(self, ss, sharefname, storage_index=None, shnum=None):
        self.ss = ss
@@ -581,7 +581,7 @@ class StorageServer(service.MultiService, Referenceable):
        for share in six.viewvalues(shares):
            share.add_or_renew_lease(lease_info)

-    def slot_testv_and_readv_and_writev(
+    def slot_testv_and_readv_and_writev( # type: ignore # warner/foolscap#78
            self,
            storage_index,
            secrets,
@@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
import os, sys, httplib, binascii
import urllib, json, random, time, urlparse

+try:
+    from typing import Dict
+except ImportError:
+    pass
+
# Python 2 compatibility
from future.utils import PY2
if PY2:

@@ -49,13 +54,13 @@ if sys.argv[1] == "--stats":
    DELAY = 10
    MAXSAMPLES = 6
    totals = []
-    last_stats = {}
+    last_stats = {} # type: Dict[str, float]
    while True:
-        stats = {}
+        stats = {} # type: Dict[str, float]
        for sf in statsfiles:
            for line in open(sf, "r").readlines():
-                name, value = line.split(":")
-                value = int(value.strip())
+                name, str_value = line.split(":")
+                value = int(str_value.strip())
                if name not in stats:
                    stats[name] = 0
                stats[name] += float(value)
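Aside on the "name, value" to "name, str_value" rename above (not part of the commit): re-binding one variable first to a str and then to an int is something mypy flags, so the string gets its own name and value is an int from the start. A tiny self-contained sketch of the same idea (the sample line is made up):

    line = "cpu_percentage: 12"          # hypothetical stats line
    name, str_value = line.split(":")    # str_value stays a str
    value = int(str_value.strip())       # value is only ever an int
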
@@ -508,13 +508,13 @@ if __name__ == '__main__':
    mode = "upload"
    if len(sys.argv) > 1:
        mode = sys.argv[1]
-    if sys.maxint == 2147483647:
+    if sys.maxsize == 2147483647:
        bits = "32"
-    elif sys.maxint == 9223372036854775807:
+    elif sys.maxsize == 9223372036854775807:
        bits = "64"
    else:
        bits = "?"
-    print("%s-bit system (sys.maxint=%d)" % (bits, sys.maxint))
+    print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize))
    # put the logfile and stats.out in _test_memory/ . These stick around.
    # put the nodes and other files in _test_memory/test/ . These are
    # removed each time we run.
@@ -406,7 +406,7 @@ class DummyProducer(object):
        pass

@implementer(IImmutableFileNode)
-class FakeCHKFileNode(object):
+class FakeCHKFileNode(object): # type: ignore # incomplete implementation
    """I provide IImmutableFileNode, but all of my data is stored in a
    class-level dictionary."""

@@ -544,7 +544,7 @@ def create_chk_filenode(contents, all_contents):


@implementer(IMutableFileNode, ICheckable)
-class FakeMutableFileNode(object):
+class FakeMutableFileNode(object): # type: ignore # incomplete implementation
    """I provide IMutableFileNode, but all of my data is stored in a
    class-level dictionary."""

@@ -68,7 +68,7 @@ class Marker(object):

fireNow = partial(defer.succeed, None)

-@implementer(IRemoteReference)
+@implementer(IRemoteReference) # type: ignore # warner/foolscap#79
class LocalWrapper(object):
    """
    A ``LocalWrapper`` presents the remote reference interface to a local

@@ -213,9 +213,12 @@ class NoNetworkServer(object):
        return _StorageServer(lambda: self.rref)
    def get_version(self):
        return self.rref.version
+    def start_connecting(self, trigger_cb):
+        raise NotImplementedError
+

@implementer(IStorageBroker)
-class NoNetworkStorageBroker(object):
+class NoNetworkStorageBroker(object): # type: ignore # missing many methods
    def get_servers_for_psi(self, peer_selection_index):
        def _permuted(server):
            seed = server.get_permutation_seed()

@@ -259,7 +262,7 @@ def create_no_network_client(basedir):
        return defer.succeed(client)


-class _NoNetworkClient(_Client):
+class _NoNetworkClient(_Client): # type: ignore # tahoe-lafs/ticket/3573
    """
    Overrides all _Client networking functionality to do nothing.
    """
@@ -47,8 +47,9 @@ class RIDummy(RemoteInterface):
    """


-@implementer(IFoolscapStoragePlugin)
+# type ignored due to missing stubs for Twisted
+# https://twistedmatrix.com/trac/ticket/9717
+@implementer(IFoolscapStoragePlugin) # type: ignore
@attr.s
class DummyStorage(object):
    name = attr.ib()

@@ -107,7 +108,7 @@ class GetCounter(Resource, object):

@implementer(RIDummy)
@attr.s(frozen=True)
-class DummyStorageServer(object):
+class DummyStorageServer(object): # type: ignore # warner/foolscap#78
    get_anonymous_storage_server = attr.ib()

    def remote_just_some_method(self):

@@ -116,7 +117,7 @@ class DummyStorageServer(object):

@implementer(IStorageServer)
@attr.s
-class DummyStorageClient(object):
+class DummyStorageClient(object): # type: ignore # incomplete implementation
    get_rref = attr.ib()
    configuration = attr.ib()
    announcement = attr.ib()
@@ -62,7 +62,7 @@ class FakeClient(object):


@implementer(IServer)
-class FakeServer(object):
+class FakeServer(object): # type: ignore # incomplete implementation

    def get_name(self):
        return "fake name"

@@ -75,7 +75,7 @@ class FakeServer(object):


@implementer(ICheckResults)
-class FakeCheckResults(object):
+class FakeCheckResults(object): # type: ignore # incomplete implementation

    def __init__(self, si=None,
                 healthy=False, recoverable=False,

@@ -106,7 +106,7 @@ class FakeCheckResults(object):


@implementer(ICheckAndRepairResults)
-class FakeCheckAndRepairResults(object):
+class FakeCheckAndRepairResults(object): # type: ignore # incomplete implementation

    def __init__(self, si=None,
                 repair_attempted=False,
@@ -1561,7 +1561,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase):
                          kids, fn.get_writekey(), deep_immutable=True)

@implementer(IMutableFileNode)
-class FakeMutableFile(object):
+class FakeMutableFile(object): # type: ignore # incomplete implementation
    counter = 0
    def __init__(self, initial_contents=b""):
        data = self._get_initial_contents(initial_contents)

@@ -1622,7 +1622,7 @@ class FakeNodeMaker(NodeMaker):
    def create_mutable_file(self, contents=b"", keysize=None, version=None):
        return defer.succeed(FakeMutableFile(contents))

-class FakeClient2(_Client):
+class FakeClient2(_Client): # type: ignore # tahoe-lafs/ticket/3573
    def __init__(self):
        self.nodemaker = FakeNodeMaker(None, None, None,
                                       None, None,
@@ -19,6 +19,12 @@ from functools import (
)
import attr

+try:
+    from typing import List
+    from allmydata.introducer.client import IntroducerClient
+except ImportError:
+    pass
+
from twisted.internet import defer
from twisted.trial import unittest
from twisted.application import service

@@ -125,7 +131,7 @@ class FakeCHKCheckerAndUEBFetcher(object):
        ))

class FakeClient(service.MultiService):
-    introducer_clients = []
+    introducer_clients = [] # type: List[IntroducerClient]
    DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                   "happy": 75,
                                   "n": 100,
@@ -564,7 +564,7 @@ class TestMissingPorts(unittest.TestCase):
        config = config_from_string(self.basedir, "portnum", config_data)
        with self.assertRaises(PortAssignmentRequired):
            _tub_portlocation(config, None, None)
-    test_listen_on_zero_with_host.todo = native_str(
+    test_listen_on_zero_with_host.todo = native_str( # type: ignore
        "https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3563"
    )

@@ -16,6 +16,7 @@ from testtools.matchers import (
BLACKLIST = {
    "allmydata.test.check_load",
    "allmydata.windows.registry",
+    "allmydata.scripts.types_",
}

@@ -44,7 +44,7 @@ class Python3PortingEffortTests(SynchronousTestCase):
            ),
        ),
    )
-    test_finished_porting.todo = native_str(
+    test_finished_porting.todo = native_str( # type: ignore
        "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed",
    )

@@ -9,18 +9,15 @@ from twisted.python.failure import Failure
from twisted.internet.error import ProcessDone, ProcessTerminated
from allmydata.util import deferredutil

-conch_interfaces = None
-sftp = None
-sftpd = None
-
try:
    from twisted.conch import interfaces as conch_interfaces
    from twisted.conch.ssh import filetransfer as sftp
    from allmydata.frontends import sftpd
except ImportError as e:
+    conch_interfaces = sftp = sftpd = None # type: ignore
    conch_unavailable_reason = e
else:
-    conch_unavailable_reason = None
+    conch_unavailable_reason = None # type: ignore

from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError
from allmydata.mutable.common import NotWriteableError
@@ -105,7 +105,8 @@ from allmydata.interfaces import (

SOME_FURL = "pb://abcde@nowhere/fake"

-class NativeStorageServerWithVersion(NativeStorageServer):
+
+class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore # tahoe-lafs/ticket/3573
    def __init__(self, version):
        # note: these instances won't work for anything other than
        # get_available_space() because we don't upcall

@@ -569,7 +570,7 @@ class SpyEndpoint(object):
        return d


-@implementer(IConnectionHintHandler)
+@implementer(IConnectionHintHandler) # type: ignore # warner/foolscap#78
@attr.s
class SpyHandler(object):
    """
@@ -189,7 +189,7 @@ class FakeHistory(object):
    def list_all_helper_statuses(self):
        return []

-class FakeDisplayableServer(StubServer):
+class FakeDisplayableServer(StubServer): # type: ignore # tahoe-lafs/ticket/3573
    def __init__(self, serverid, nickname, connected,
                 last_connect_time, last_loss_time, last_rx_time):
        StubServer.__init__(self, serverid)

@@ -255,7 +255,7 @@ class FakeStorageServer(service.MultiService):
    def on_status_changed(self, cb):
        cb(self)

-class FakeClient(_Client):
+class FakeClient(_Client): # type: ignore # tahoe-lafs/ticket/3573
    def __init__(self):
        # don't upcall to Client.__init__, since we only want to initialize a
        # minimal subset
@@ -22,6 +22,11 @@ from past.builtins import unicode, long

import re

+try:
+    from typing import Type
+except ImportError:
+    pass
+
from zope.interface import implementer
from twisted.python.components import registerAdapter

@@ -489,7 +494,7 @@ class MDMFVerifierURI(_BaseURI):
        return self


-@implementer(IURI, IDirnodeURI)
+@implementer(IDirnodeURI)
class _DirectoryBaseURI(_BaseURI):
    def __init__(self, filenode_uri=None):
        self._filenode_uri = filenode_uri

@@ -536,7 +541,7 @@ class _DirectoryBaseURI(_BaseURI):
        return self._filenode_uri.get_storage_index()


-@implementer(IDirectoryURI)
+@implementer(IURI, IDirectoryURI)
class DirectoryURI(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2:'

@@ -555,7 +560,7 @@ class DirectoryURI(_DirectoryBaseURI):
        return ReadonlyDirectoryURI(self._filenode_uri.get_readonly())


-@implementer(IReadonlyDirectoryURI)
+@implementer(IURI, IReadonlyDirectoryURI)
class ReadonlyDirectoryURI(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2-RO:'

@@ -574,6 +579,7 @@ class ReadonlyDirectoryURI(_DirectoryBaseURI):
        return self


+@implementer(IURI, IDirnodeURI)
class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):
    def __init__(self, filenode_uri=None):
        if filenode_uri:

@@ -611,7 +617,7 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
        return None


-@implementer(IDirectoryURI)
+@implementer(IURI, IDirectoryURI)
class MDMFDirectoryURI(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2-MDMF:'

@@ -633,7 +639,7 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
        return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap())


-@implementer(IReadonlyDirectoryURI)
+@implementer(IURI, IReadonlyDirectoryURI)
class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2-MDMF-RO:'

@@ -671,7 +677,7 @@ def wrap_dirnode_cap(filecap):
        raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__)


-@implementer(IVerifierURI)
+@implementer(IURI, IVerifierURI)
class MDMFDirectoryURIVerifier(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2-MDMF-Verifier:'

@@ -696,12 +702,12 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
        return self


-@implementer(IVerifierURI)
+@implementer(IURI, IVerifierURI)
class DirectoryURIVerifier(_DirectoryBaseURI):

    BASE_STRING=b'URI:DIR2-Verifier:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
-    INNER_URI_CLASS=SSKVerifierURI
+    INNER_URI_CLASS=SSKVerifierURI # type: Type[IVerifierURI]

    def __init__(self, filenode_uri=None):
        if filenode_uri:
@@ -311,7 +311,7 @@ def precondition_abspath(path):

_getfullpathname = None
try:
-    from nt import _getfullpathname
+    from nt import _getfullpathname # type: ignore
except ImportError:
    pass

@@ -14,6 +14,12 @@ if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import time

+try:
+    from typing import List
+except ImportError:
+    pass
+
from twisted.internet import task

class TimeoutError(Exception):

@@ -23,7 +29,7 @@ class PollComplete(Exception):
    pass

class PollMixin(object):
-    _poll_should_ignore_these_errors = []
+    _poll_should_ignore_these_errors = [] # type: List[Exception]

    def poll(self, check_f, pollinterval=0.01, timeout=1000):
        # Return a Deferred, then call check_f periodically until it returns
@@ -6,6 +6,11 @@ Can eventually be merged back into allmydata.web.common.

from past.builtins import unicode

+try:
+    from typing import Optional
+except ImportError:
+    pass
+
from twisted.web import resource, http

from allmydata.util import abbreviate

@@ -55,7 +60,7 @@ class MultiFormatResource(resource.Resource, object):
    format if nothing else is given as the ``formatDefault``.
    """
    formatArgument = "t"
-    formatDefault = None
+    formatDefault = None # type: Optional[str]

    def render(self, req):
        """
@@ -61,7 +61,16 @@ class IToken(ICredentials):
    pass


-@implementer(IToken)
+# Workaround for Shoobx/mypy-zope#26, where without suitable
+# stubs for twisted classes (ICredentials), IToken does not
+# appear to be an Interface. The proper fix appears to be to
+# create stubs for twisted
+# (https://twistedmatrix.com/trac/ticket/9717). For now,
+# bypassing the inline decorator syntax works around the issue.
+_itoken_impl = implementer(IToken)
+
+
+@_itoken_impl
@attr.s
class Token(object):
    proposed_token = attr.ib(type=bytes)
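The comment block above explains the trick; stated generally (a sketch only, with hypothetical names, not taken from the commit): zope.interface.implementer(...) returns an ordinary callable class decorator, so it can be bound to a name and applied with @name, which sidesteps the mypy-zope issue with the inline form while producing the same decorated class.

    from zope.interface import Interface, implementer
    import attr

    class IExample(Interface):
        pass

    _example_impl = implementer(IExample)  # the decorator, held in a variable

    @_example_impl
    @attr.s
    class Example(object):
        value = attr.ib(default=0)
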
@@ -217,7 +217,12 @@ def initialize():
    # Instead it "mangles" or escapes them using \x7F as an escape character, which we
    # unescape here.
    def unmangle(s):
-        return re.sub(u'\\x7F[0-9a-fA-F]*\\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)
+        return re.sub(
+            u'\\x7F[0-9a-fA-F]*\\;',
+            # type ignored for 'unichr' (Python 2 only)
+            lambda m: unichr(int(m.group(0)[1:-1], 16)), # type: ignore
+            s,
+        )

    try:
        argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
tox.ini (12 changed lines)
@@ -7,7 +7,7 @@
twisted = 1

[tox]
-envlist = codechecks,py27,py36,pypy27
+envlist = typechecks,codechecks,py27,py36,pypy27
minversion = 2.4

[testenv]

@@ -112,6 +112,16 @@ commands =
    # file. See pyproject.toml for legal <change type> values.
    python -m towncrier.check --pyproject towncrier.pyproject.toml

+
+[testenv:typechecks]
+skip_install = True
+deps =
+    mypy
+    git+https://github.com/Shoobx/mypy-zope
+    git+https://github.com/warner/foolscap
+commands = mypy src
+
+
[testenv:draftnews]
passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
# see comment in [testenv] about "certifi"