Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Commit 5349f35a0b: Merge remote-tracking branch 'origin/master' into 3875-http-storage-furls
default.nix | 14
@@ -86,24 +86,10 @@ mach-nix.buildPythonPackage rec {
   # There are some reasonable defaults so we only need to specify certain
   # packages where the default configuration runs into some issue.
   providers = {
-    # Through zfec 1.5.5 the wheel has an incorrect runtime dependency
-    # declared on argparse, not available for recent versions of Python 3.
-    # Force mach-nix to use the sdist instead. This allows us to apply a
-    # patch that removes the offending declaration.
-    zfec = "sdist";
   };
 
   # Define certain overrides to the way Python dependencies are built.
   _ = {
-    # Apply the argparse declaration fix to zfec sdist.
-    zfec.patches = with pkgs; [
-      (fetchpatch {
-        name = "fix-argparse.patch";
-        url = "https://github.com/tahoe-lafs/zfec/commit/c3e736a72cccf44b8e1fb7d6c276400204c6bc1e.patch";
-        sha256 = "1md9i2fx1ya7mgcj9j01z58hs3q9pj4ch5is5b5kq4v86cf6x33x";
-      })
-    ];
-
     # Remove a click-default-group patch for a test suite problem which no
     # longer applies because the project apparently no longer has a test suite
     # in its source distribution.
@@ -350,6 +350,9 @@ Because of the simple types used throughout
 and the equivalence described in `RFC 7049`_
 these examples should be representative regardless of which of these two encodings is chosen.
 
+For CBOR messages, any sequence that is semantically a set (i.e. no repeated values allowed, order doesn't matter, and elements are hashable in Python) should be sent as a set.
+Tag 6.258 is used to indicate sets in CBOR; see `the CBOR registry <https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml>`_ for more details.
+
 HTTP Design
 ~~~~~~~~~~~
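Editor's aside (not part of the commit): the set convention added above can be exercised directly with the cbor2 library this code already uses. A minimal sketch, assuming a cbor2 version that implements semantic tag 258 for Python sets:

# Aside, not from the diff: round-trip a Python set through CBOR tag 258.
from cbor2 import CBORTag, dumps, loads

share_numbers = {1, 5, 7}

# cbor2 serializes Python sets with semantic tag 258 by default ...
encoded = dumps(share_numbers)

# ... which is the same wire form as wrapping a list in CBORTag(258, ...).
encoded_explicit = dumps(CBORTag(258, sorted(share_numbers)))

# Decoding tag 258 yields a set again, so element order is not significant.
assert loads(encoded) == share_numbers
assert loads(encoded_explicit) == share_numbers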
newsfragments/3802.minor | 0 (new file)
newsfragments/3828.feature | 8 (new file)
@@ -0,0 +1,8 @@
The implementation of SDMF and MDMF (mutables) now requires RSA keys to be exactly 2048 bits, aligning them with the specification.

Some code existed to allow tests to shorten this, and it is conceptually possible that a modified client produced mutables with different key sizes. However, the spec says that they must be 2048 bits. If you happen to have a capability with a key size other than 2048 bits, you may use Tahoe-LAFS 1.17.1 or earlier to read the content.
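Editor's aside (not part of the release note): the 2048-bit rule can be illustrated with the `cryptography` package that the code further down in this diff relies on; the helper name `check_mutable_key_size` is hypothetical.

from cryptography.hazmat.primitives.asymmetric import rsa


def check_mutable_key_size(private_key: rsa.RSAPrivateKey) -> None:
    # Mirrors the key-size check added in allmydata/crypto/rsa.py below.
    if private_key.key_size != 2048:
        raise ValueError("Private Key must be 2048 bits")


# A freshly generated 2048-bit key passes; a 1024-bit key is rejected.
check_mutable_key_size(rsa.generate_private_key(public_exponent=65537, key_size=2048))
try:
    check_mutable_key_size(rsa.generate_private_key(public_exponent=65537, key_size=1024))
except ValueError as err:
    print(err)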
newsfragments/3883.minor | 0 (new file)
newsfragments/3889.minor | 0 (new file)
@@ -41,10 +41,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "6c4b9f1a2fd761e2d384ef86cff0d208ca27fdca",
-        "sha256": "1yl5gj0mzczhl1j8sl8iqpwa1jzsgr12fdszw9rq13cdig2a2r5f",
+        "rev": "838eefb4f93f2306d4614aafb9b2375f315d917f",
+        "sha256": "1bm8cmh1wx4h8b4fhbs75hjci3gcrpi7k1m1pmiy3nc0gjim9vkg",
         "type": "tarball",
-        "url": "https://github.com/nixos/nixpkgs/archive/6c4b9f1a2fd761e2d384ef86cff0d208ca27fdca.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/838eefb4f93f2306d4614aafb9b2375f315d917f.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "pypi-deps-db": {
@@ -53,10 +53,10 @@
         "homepage": "",
         "owner": "DavHau",
         "repo": "pypi-deps-db",
-        "rev": "0f6de8bf1f186c275af862ec9667abb95aae8542",
-        "sha256": "1ygw9pywyl4p25hx761d1sbwl3qjhm630fa36gdf6b649im4mx8y",
+        "rev": "76b8f1e44a8ec051b853494bcf3cc8453a294a6a",
+        "sha256": "18fgqyh4z578jjhk26n1xi2cw2l98vrqp962rgz9a6wa5yh1nm4x",
         "type": "tarball",
-        "url": "https://github.com/DavHau/pypi-deps-db/archive/0f6de8bf1f186c275af862ec9667abb95aae8542.tar.gz",
+        "url": "https://github.com/DavHau/pypi-deps-db/archive/76b8f1e44a8ec051b853494bcf3cc8453a294a6a.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }
setup.py | 3
@@ -135,7 +135,8 @@ install_requires = [
     "klein",
     "werkzeug",
     "treq",
-    "cbor2"
+    "cbor2",
+    "pycddl",
 ]
 
 setup_requires = [
@@ -168,29 +168,12 @@ class SecretHolder(object):
 
 class KeyGenerator(object):
     """I create RSA keys for mutable files. Each call to generate() returns a
-    single keypair. The keysize is specified first by the keysize= argument
-    to generate(), then with a default set by set_default_keysize(), then
-    with a built-in default of 2048 bits."""
-    def __init__(self):
-        self.default_keysize = 2048
+    single keypair."""
 
-    def set_default_keysize(self, keysize):
-        """Call this to override the size of the RSA keys created for new
-        mutable files which don't otherwise specify a size. This will affect
-        all subsequent calls to generate() without a keysize= argument. The
-        default size is 2048 bits. Test cases should call this method once
-        during setup, to cause me to create smaller keys, so the unit tests
-        run faster."""
-        self.default_keysize = keysize
-
-    def generate(self, keysize=None):
+    def generate(self):
         """I return a Deferred that fires with a (verifyingkey, signingkey)
-        pair. I accept a keysize in bits (2048 bit keys are standard, smaller
-        keys are used for testing). If you do not provide a keysize, I will
-        use my default, which is set by a call to set_default_keysize(). If
-        set_default_keysize() has never been called, I will create 2048 bit
-        keys."""
-        keysize = keysize or self.default_keysize
+        pair. The returned key will be 2048 bit"""
+        keysize = 2048
         # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
         # secs
         signer, verifier = rsa.create_signing_keypair(keysize)
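Editor's aside (not part of the commit): a hedged sketch of how a caller consumes the simplified API. The import path follows this file (allmydata/client.py); the wrapper function make_keypair is illustrative.

from twisted.internet import defer

from allmydata.client import KeyGenerator


@defer.inlineCallbacks
def make_keypair():
    # generate() no longer takes a keysize; it always yields a 2048-bit pair.
    keygen = KeyGenerator()
    verifying_key, signing_key = yield keygen.generate()
    defer.returnValue((verifying_key, signing_key))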
@@ -993,9 +976,6 @@ class _Client(node.Node, pollmixin.PollMixin):
             helper_furlfile = self.config.get_private_path("helper.furl").encode(get_filesystem_encoding())
             self.tub.registerReference(self.helper, furlFile=helper_furlfile)
 
-    def set_default_mutable_keysize(self, keysize):
-        self._key_generator.set_default_keysize(keysize)
-
     def _get_tempdir(self):
         """
         Determine the path to the directory where temporary files for this node
@@ -1096,8 +1076,8 @@ class _Client(node.Node, pollmixin.PollMixin):
     def create_immutable_dirnode(self, children, convergence=None):
         return self.nodemaker.create_immutable_directory(children, convergence)
 
-    def create_mutable_file(self, contents=None, keysize=None, version=None):
-        return self.nodemaker.create_mutable_file(contents, keysize,
+    def create_mutable_file(self, contents=None, version=None):
+        return self.nodemaker.create_mutable_file(contents,
                                                   version=version)
 
     def upload(self, uploadable, reactor=None):
@@ -77,6 +77,14 @@ def create_signing_keypair_from_string(private_key_der):
         password=None,
         backend=default_backend(),
     )
+    if not isinstance(priv_key, rsa.RSAPrivateKey):
+        raise ValueError(
+            "Private Key did not decode to an RSA key"
+        )
+    if priv_key.key_size != 2048:
+        raise ValueError(
+            "Private Key must be 2048 bits"
+        )
     return priv_key, priv_key.public_key()
 
 
@@ -126,12 +126,12 @@ class NodeMaker(object):
             return self._create_dirnode(filenode)
         return None
 
-    def create_mutable_file(self, contents=None, keysize=None, version=None):
+    def create_mutable_file(self, contents=None, version=None):
         if version is None:
             version = self.mutable_file_default
         n = MutableFileNode(self.storage_broker, self.secret_holder,
                             self.default_encoding_parameters, self.history)
-        d = self.key_generator.generate(keysize)
+        d = self.key_generator.generate()
         d.addCallback(n.create_with_keys, contents, version=version)
         d.addCallback(lambda res: n)
         return d
@@ -1,54 +1,39 @@
 """
 Ported to Python 3.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
 
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-    from time import clock as process_time
-else:
-    from time import process_time
+from collections import deque
+from time import process_time
 import time
+from typing import Deque, Tuple
 
 from twisted.application import service
 from twisted.application.internet import TimerService
 from zope.interface import implementer
-from foolscap.api import eventually
 
 from allmydata.util import log, dictutil
 from allmydata.interfaces import IStatsProducer
 
 @implementer(IStatsProducer)
 class CPUUsageMonitor(service.MultiService):
-    HISTORY_LENGTH = 15
-    POLL_INTERVAL = 60  # type: float
+    HISTORY_LENGTH: int = 15
+    POLL_INTERVAL: float = 60
+    initial_cpu: float = 0.0
 
     def __init__(self):
         service.MultiService.__init__(self)
-        # we don't use process_time() here, because the constructor is run by
-        # the twistd parent process (as it loads the .tac file), whereas the
-        # rest of the program will be run by the child process, after twistd
-        # forks. Instead, set self.initial_cpu as soon as the reactor starts
-        # up.
-        self.initial_cpu = 0.0 # just in case
-        eventually(self._set_initial_cpu)
-        self.samples = []
+        self.samples: Deque[Tuple[float, float]] = deque([], self.HISTORY_LENGTH + 1)
         # we provide 1min, 5min, and 15min moving averages
         TimerService(self.POLL_INTERVAL, self.check).setServiceParent(self)
 
-    def _set_initial_cpu(self):
+    def startService(self):
         self.initial_cpu = process_time()
+        return super().startService()
 
     def check(self):
         now_wall = time.time()
         now_cpu = process_time()
         self.samples.append( (now_wall, now_cpu) )
-        while len(self.samples) > self.HISTORY_LENGTH+1:
-            self.samples.pop(0)
 
     def _average_N_minutes(self, size):
         if len(self.samples) < size+1:
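Editor's aside (not part of the commit): the switch from a plain list to a bounded deque is what lets check() drop the manual trimming loop, since a deque built with a maxlen silently discards its oldest entries. A small standalone illustration:

from collections import deque

HISTORY_LENGTH = 15
samples = deque([], HISTORY_LENGTH + 1)        # same construction as above

for i in range(100):
    samples.append((float(i), float(i) / 2))   # (wall-clock, CPU-time) pairs

assert len(samples) == HISTORY_LENGTH + 1      # never grows past the bound
assert samples[0] == (84.0, 42.0)              # oldest surviving sample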
@@ -10,6 +10,7 @@ import attr
 
 # TODO Make sure to import Python version?
 from cbor2 import loads, dumps
+from pycddl import Schema
 from collections_extended import RangeMap
 from werkzeug.datastructures import Range, ContentRange
 from twisted.web.http_headers import Headers
@@ -53,18 +54,69 @@ class ClientException(Exception):
         self.code = code
 
 
-def _decode_cbor(response):
+# Schemas for server responses.
+#
+# Tags are of the form #6.nnn, where the number is documented at
+# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258
+# indicates a set.
+_SCHEMAS = {
+    "get_version": Schema(
+        """
+        message = {'http://allmydata.org/tahoe/protocols/storage/v1' => {
+            'maximum-immutable-share-size' => uint
+            'maximum-mutable-share-size' => uint
+            'available-space' => uint
+            'tolerates-immutable-read-overrun' => bool
+            'delete-mutable-shares-with-zero-length-writev' => bool
+            'fills-holes-with-zero-bytes' => bool
+            'prevents-read-past-end-of-share-data' => bool
+            }
+            'application-version' => bstr
+        }
+        """
+    ),
+    "allocate_buckets": Schema(
+        """
+        message = {
+          already-have: #6.258([* uint])
+          allocated: #6.258([* uint])
+        }
+        """
+    ),
+    "immutable_write_share_chunk": Schema(
+        """
+        message = {
+          required: [* {begin: uint, end: uint}]
+        }
+        """
+    ),
+    "list_shares": Schema(
+        """
+        message = #6.258([* uint])
+        """
+    ),
+}
+
+
+def _decode_cbor(response, schema: Schema):
     """Given HTTP response, return decoded CBOR body."""
+
+    def got_content(data):
+        schema.validate_cbor(data)
+        return loads(data)
+
     if response.code > 199 and response.code < 300:
         content_type = get_content_type(response.headers)
         if content_type == CBOR_MIME_TYPE:
             # TODO limit memory usage
             # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3872
-            return treq.content(response).addCallback(loads)
+            return treq.content(response).addCallback(got_content)
         else:
             raise ClientException(-1, "Server didn't send CBOR")
     else:
-        return fail(ClientException(response.code, response.phrase))
+        return treq.content(response).addCallback(
+            lambda data: fail(ClientException(response.code, response.phrase, data))
+        )
 
 
 @attr.s
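Editor's aside (not part of the commit): a sketch of the validation step got_content() performs, using the pycddl and cbor2 calls that appear in this diff; the example payload mirrors the "allocate_buckets" schema above.

from cbor2 import dumps, loads
from pycddl import Schema

ALLOCATE_BUCKETS = Schema(
    """
    message = {
      already-have: #6.258([* uint])
      allocated: #6.258([* uint])
    }
    """
)

# cbor2 encodes Python sets with tag 258, which satisfies the #6.258(...) rule.
good = dumps({"already-have": {1, 2}, "allocated": {3}})
ALLOCATE_BUCKETS.validate_cbor(good)   # should pass without raising
body = loads(good)

bad = dumps({"allocated": [3]})        # missing key, and a plain list, not a set
try:
    ALLOCATE_BUCKETS.validate_cbor(bad)
except Exception as err:               # pycddl raises its ValidationError here
    print("rejected:", err)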
@@ -263,7 +315,7 @@ class StorageClientGeneral(object):
         """
         url = self._client.relative_url("/v1/version")
         response = yield self._client.request("GET", url)
-        decoded_response = yield _decode_cbor(response)
+        decoded_response = yield _decode_cbor(response, _SCHEMAS["get_version"])
         returnValue(decoded_response)
 
 
@@ -321,7 +373,7 @@ class StorageClientImmutables(object):
             upload_secret=upload_secret,
             message_to_serialize=message,
         )
-        decoded_response = yield _decode_cbor(response)
+        decoded_response = yield _decode_cbor(response, _SCHEMAS["allocate_buckets"])
         returnValue(
             ImmutableCreateResult(
                 already_have=decoded_response["already-have"],
@@ -393,7 +445,7 @@ class StorageClientImmutables(object):
             raise ClientException(
                 response.code,
             )
-        body = yield _decode_cbor(response)
+        body = yield _decode_cbor(response, _SCHEMAS["immutable_write_share_chunk"])
         remaining = RangeMap()
         for chunk in body["required"]:
             remaining.set(True, chunk["begin"], chunk["end"])
@@ -446,7 +498,7 @@ class StorageClientImmutables(object):
             url,
         )
         if response.code == http.OK:
-            body = yield _decode_cbor(response)
+            body = yield _decode_cbor(response, _SCHEMAS["list_shares"])
             returnValue(set(body))
         else:
             raise ClientException(response.code)
@@ -32,7 +32,7 @@ from cryptography.x509 import load_pem_x509_certificate
 
 # TODO Make sure to use pure Python versions?
 from cbor2 import dumps, loads
-
+from pycddl import Schema, ValidationError as CDDLValidationError
 from .server import StorageServer
 from .http_common import (
     swissnum_auth_header,
@@ -104,8 +104,8 @@ def _authorization_decorator(required_secrets):
         try:
             secrets = _extract_secrets(authorization, required_secrets)
         except ClientSecretsException:
-            request.setResponseCode(400)
-            return b""
+            request.setResponseCode(http.BAD_REQUEST)
+            return b"Missing required secrets"
         return f(self, request, secrets, *args, **kwargs)
 
     return route
@@ -233,6 +233,25 @@ class _HTTPError(Exception):
         self.code = code
 
 
+# CDDL schemas.
+#
+# Tags are of the form #6.nnn, where the number is documented at
+# https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml. Notably, #6.258
+# indicates a set.
+_SCHEMAS = {
+    "allocate_buckets": Schema("""
+    message = {
+      share-numbers: #6.258([* uint])
+      allocated-size: uint
+    }
+    """),
+    "advise_corrupt_share": Schema("""
+    message = {
+      reason: tstr
+    }
+    """)
+}
+
 class HTTPServer(object):
     """
     A HTTP interface to the storage server.
@@ -247,6 +266,12 @@ class HTTPServer(object):
             request.setResponseCode(failure.value.code)
             return b""
 
+    @_app.handle_errors(CDDLValidationError)
+    def _cddl_validation_error(self, request, failure):
+        """Handle CDDL validation errors."""
+        request.setResponseCode(http.BAD_REQUEST)
+        return str(failure.value).encode("utf-8")
+
     def __init__(
         self, storage_server, swissnum
     ):  # type: (StorageServer, bytes) -> None
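Editor's aside (not part of the commit): the handle_errors hook above is klein's generic mechanism for mapping an exception type to a response; a minimal standalone sketch, with a made-up error class and route standing in for the real ones:

from klein import Klein
from twisted.web import http


class BadInput(Exception):
    """Stand-in for CDDLValidationError."""


app = Klein()


@app.route("/echo")
def echo(request):
    raise BadInput("payload failed validation")


@app.handle_errors(BadInput)
def bad_input(request, failure):
    # Same shape as _cddl_validation_error above: respond 400 plus the message.
    request.setResponseCode(http.BAD_REQUEST)
    return str(failure.value).encode("utf-8")

# app.run("localhost", 8080)  # would serve the route for manual testing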
@@ -286,7 +311,7 @@ class HTTPServer(object):
             # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3861
             raise _HTTPError(http.NOT_ACCEPTABLE)
 
-    def _read_encoded(self, request) -> Any:
+    def _read_encoded(self, request, schema: Schema) -> Any:
         """
         Read encoded request body data, decoding it with CBOR by default.
         """
@@ -294,7 +319,10 @@ class HTTPServer(object):
         if content_type == CBOR_MIME_TYPE:
             # TODO limit memory usage, client could send arbitrarily large data...
             # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3872
-            return loads(request.content.read())
+            message = request.content.read()
+            schema.validate_cbor(message)
+            result = loads(message)
+            return result
         else:
             raise _HTTPError(http.UNSUPPORTED_MEDIA_TYPE)
 
@@ -316,7 +344,7 @@ class HTTPServer(object):
     def allocate_buckets(self, request, authorization, storage_index):
         """Allocate buckets."""
         upload_secret = authorization[Secrets.UPLOAD]
-        info = self._read_encoded(request)
+        info = self._read_encoded(request, _SCHEMAS["allocate_buckets"])
 
         # We do NOT validate the upload secret for existing bucket uploads.
         # Another upload may be happening in parallel, with a different upload
@@ -426,7 +454,7 @@ class HTTPServer(object):
         """
         List shares for the given storage index.
         """
-        share_numbers = list(self._storage_server.get_buckets(storage_index).keys())
+        share_numbers = set(self._storage_server.get_buckets(storage_index).keys())
         return self._send_encoded(request, share_numbers)
 
     @_authorized_route(
@@ -516,7 +544,7 @@ class HTTPServer(object):
         except KeyError:
             raise _HTTPError(http.NOT_FOUND)
 
-        info = self._read_encoded(request)
+        info = self._read_encoded(request, _SCHEMAS["advise_corrupt_share"])
         bucket.advise_corrupt_share(info["reason"].encode("utf-8"))
         return b""
 
@@ -133,8 +133,6 @@ from subprocess import (
     PIPE,
 )
 
-TEST_RSA_KEY_SIZE = 522
-
 EMPTY_CLIENT_CONFIG = config_from_string(
     "/dev/null",
     "tub.port",
@@ -34,7 +34,6 @@ from twisted.python.filepath import (
 )
 
 from .common import (
-    TEST_RSA_KEY_SIZE,
     SameProcessStreamEndpointAssigner,
 )
 
@@ -736,7 +735,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             c = yield client.create_client(basedirs[0])
             c.setServiceParent(self.sparent)
             self.clients.append(c)
-            c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
 
             with open(os.path.join(basedirs[0],"private","helper.furl"), "r") as f:
                 helper_furl = f.read()
@@ -754,7 +752,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
             c = yield client.create_client(basedirs[i])
             c.setServiceParent(self.sparent)
             self.clients.append(c)
-            c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
         log.msg("STARTING")
         yield self.wait_for_connections()
         log.msg("CONNECTED")
@@ -838,7 +835,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
         def _stopped(res):
             new_c = yield client.create_client(self.getdir("client%d" % num))
             self.clients[num] = new_c
-            new_c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
             new_c.setServiceParent(self.sparent)
         d.addCallback(_stopped)
         d.addCallback(lambda res: self.wait_for_connections())
@@ -877,7 +873,6 @@ class SystemTestMixin(pollmixin.PollMixin, testutil.StallMixin):
 
         c = yield client.create_client(basedir.path)
         self.clients.append(c)
-        c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
         self.numclients += 1
         if add_to_sparent:
             c.setServiceParent(self.sparent)
src/allmydata/test/data/pycryptopp-rsa-1024-priv.txt | 1 (new file)
@@ -0,0 +1 @@
+MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAJLEAfZueLuT4vUQ1+c8ZM9dJ/LA29CYgA5toaMklQjbVQ2Skywvw1wEkRjhMpjQAx5+lpLTE2xCtqtfkHooMRNnquOxoh0o1Xya60jUHze7VB5QMV7BMKeUTff1hQqpIgw/GLvJRtar53cVY+SYf4SXx2/slDbVr8BI3DPwdeNtAgERAoGABzHD3GTJrteQJRxu+cQ3I0NPwx2IQ/Nlplq1GZDaIQ/FbJY+bhZrdXOswnl4cOcPNjNhu+c1qHGznv0ntayjCGgJ9dDySGqknDau+ezZcBO1JrIpPOABS7MVMst79mn47vB2+t8w5krrBYahAVp/L5kY8k+Pr9AU+L9mbevFW9MCQQDA+bAeMRNBfGc4gvoVV8ecovE1KRksFDlkaDVEOc76zNW6JZazHhQF/zIoMkV81rrg5UBntw3WR3R8A3l9osgDAkEAwrLQICJ3zjsJBt0xEkCBv9tK6IvSIc7MUQIc4J2Y1hiSjqsnTRACRy3UMsODfx/Lg7ITlDbABCLfv3v4D39jzwJBAKpFuYQNLxuqALlkgk8RN6hTiYlCYYE/BXa2TR4U4848RBy3wTSiEarwO1Ck0+afWZlCwFuDZo/kshMSH+dTZS8CQQC3PuIAIHDCGXHoV7W200zwzmSeoba2aEfTxcDTZyZvJi+VVcqi4eQGwbioP4rR/86aEQNeUaWpijv/g7xK0j/RAkBbt2U9bFFcja10KIpgw2bBxDU/c67h4+38lkrBUnM9XVBZxjbtQbnkkeAfOgQDiq3oBDBrHF3/Q8XM0CzZJBWS
src/allmydata/test/data/pycryptopp-rsa-32768-priv.txt | 1 (new file)
File diff suppressed because one or more lines are too long
@@ -26,7 +26,6 @@ from allmydata.mutable.common import \
      NotEnoughServersError
 from allmydata.mutable.publish import MutableData
 from allmydata.storage.common import storage_index_to_dir
-from ..common import TEST_RSA_KEY_SIZE
 from ..no_network import GridTestMixin
 from .. import common_util as testutil
 from ..common_util import DevNullDictionary
@@ -219,7 +218,7 @@ class Problems(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
         # use #467 static-server-selection to disable permutation and force
         # the choice of server for share[0].
 
-        d = nm.key_generator.generate(TEST_RSA_KEY_SIZE)
+        d = nm.key_generator.generate()
         def _got_key(keypair):
             (pubkey, privkey) = keypair
             nm.key_generator = SameKeyGenerator(pubkey, privkey)
@@ -25,7 +25,6 @@ from allmydata.storage_client import StorageFarmBroker
 from allmydata.mutable.layout import MDMFSlotReadProxy
 from allmydata.mutable.publish import MutableData
 from ..common import (
-    TEST_RSA_KEY_SIZE,
     EMPTY_CLIENT_CONFIG,
 )
 
@@ -287,7 +286,7 @@ def make_storagebroker_with_peers(peers):
     return storage_broker
 
 
-def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE):
+def make_nodemaker(s=None, num_peers=10):
     """
     Make a ``NodeMaker`` connected to some number of fake storage servers.
 
@@ -298,20 +297,20 @@ def make_nodemaker(s=None, num_peers=10, keysize=TEST_RSA_KEY_SIZE):
     the node maker.
     """
     storage_broker = make_storagebroker(s, num_peers)
-    return make_nodemaker_with_storage_broker(storage_broker, keysize)
+    return make_nodemaker_with_storage_broker(storage_broker)
 
 
-def make_nodemaker_with_peers(peers, keysize=TEST_RSA_KEY_SIZE):
+def make_nodemaker_with_peers(peers):
     """
     Make a ``NodeMaker`` connected to the given storage servers.
 
     :param list peers: The storage servers to associate with the node maker.
     """
     storage_broker = make_storagebroker_with_peers(peers)
-    return make_nodemaker_with_storage_broker(storage_broker, keysize)
+    return make_nodemaker_with_storage_broker(storage_broker)
 
 
-def make_nodemaker_with_storage_broker(storage_broker, keysize):
+def make_nodemaker_with_storage_broker(storage_broker):
     """
     Make a ``NodeMaker`` using the given storage broker.
 
@@ -319,8 +318,6 @@ def make_nodemaker_with_storage_broker(storage_broker, keysize):
     """
     sh = client.SecretHolder(b"lease secret", b"convergence secret")
     keygen = client.KeyGenerator()
-    if keysize:
-        keygen.set_default_keysize(keysize)
     nodemaker = NodeMaker(storage_broker, sh, None,
                           None, None,
                           {"k": 3, "n": 10}, SDMF_VERSION, keygen)
@@ -61,7 +61,6 @@ from allmydata.storage_client import (
     _StorageServer,
 )
 from .common import (
-    TEST_RSA_KEY_SIZE,
     SameProcessStreamEndpointAssigner,
 )
 
@@ -393,7 +392,6 @@ class NoNetworkGrid(service.MultiService):
 
         if not c:
             c = yield create_no_network_client(clientdir)
-            c.set_default_mutable_keysize(TEST_RSA_KEY_SIZE)
 
         c.nodeid = clientid
         c.short_nodeid = b32encode(clientid).lower()[:8]
@@ -60,6 +60,28 @@ class TestRegression(unittest.TestCase):
         # The public key corresponding to `RSA_2048_PRIV_KEY`.
         RSA_2048_PUB_KEY = b64decode(f.read().strip())
 
+    with RESOURCE_DIR.child('pycryptopp-rsa-1024-priv.txt').open('r') as f:
+        # Created using `pycryptopp`:
+        #
+        #     from base64 import b64encode
+        #     from pycryptopp.publickey import rsa
+        #     priv = rsa.generate(1024)
+        #     priv_str = b64encode(priv.serialize())
+        #     pub_str = b64encode(priv.get_verifying_key().serialize())
+        RSA_TINY_PRIV_KEY = b64decode(f.read().strip())
+        assert isinstance(RSA_TINY_PRIV_KEY, native_bytes)
+
+    with RESOURCE_DIR.child('pycryptopp-rsa-32768-priv.txt').open('r') as f:
+        # Created using `pycryptopp`:
+        #
+        #     from base64 import b64encode
+        #     from pycryptopp.publickey import rsa
+        #     priv = rsa.generate(32768)
+        #     priv_str = b64encode(priv.serialize())
+        #     pub_str = b64encode(priv.get_verifying_key().serialize())
+        RSA_HUGE_PRIV_KEY = b64decode(f.read().strip())
+        assert isinstance(RSA_HUGE_PRIV_KEY, native_bytes)
+
     def test_old_start_up_test(self):
         """
         This was the old startup test run at import time in `pycryptopp.cipher.aes`.
@@ -232,6 +254,22 @@ class TestRegression(unittest.TestCase):
         priv_key, pub_key = rsa.create_signing_keypair_from_string(self.RSA_2048_PRIV_KEY)
         rsa.verify_signature(pub_key, self.RSA_2048_SIG, b'test')
 
+    def test_decode_tiny_rsa_keypair(self):
+        '''
+        An unreasonably small RSA key is rejected ("unreasonably small"
+        means less than 2048 bits)
+        '''
+        with self.assertRaises(ValueError):
+            rsa.create_signing_keypair_from_string(self.RSA_TINY_PRIV_KEY)
+
+    def test_decode_huge_rsa_keypair(self):
+        '''
+        An unreasonably _large_ RSA key is rejected ("unreasonably large"
+        means 32768 or more bits)
+        '''
+        with self.assertRaises(ValueError):
+            rsa.create_signing_keypair_from_string(self.RSA_HUGE_PRIV_KEY)
+
     def test_encrypt_data_not_bytes(self):
         '''
         only bytes can be encrypted
@@ -17,7 +17,7 @@ from allmydata.util import pollmixin
 import allmydata.test.common_util as testutil
 
 class FasterMonitor(CPUUsageMonitor):
-    POLL_INTERVAL = 0.1
+    POLL_INTERVAL = 0.01
 
 
 class CPUUsage(unittest.TestCase, pollmixin.PollMixin, testutil.StallMixin):
@@ -36,9 +36,9 @@ class CPUUsage(unittest.TestCase, pollmixin.PollMixin, testutil.StallMixin):
         def _poller():
             return bool(len(m.samples) == m.HISTORY_LENGTH+1)
         d = self.poll(_poller)
-        # pause one more second, to make sure that the history-trimming code
-        # is exercised
-        d.addCallback(self.stall, 1.0)
+        # pause a couple more intervals, to make sure that the history-trimming
+        # code is exercised
+        d.addCallback(self.stall, FasterMonitor.POLL_INTERVAL * 2)
         def _check(res):
             s = m.get_stats()
             self.failUnless("cpu_monitor.1min_avg" in s)
@@ -18,6 +18,8 @@ from base64 import b64encode
 from contextlib import contextmanager
 from os import urandom
 
+from cbor2 import dumps
+from pycddl import ValidationError as CDDLValidationError
 from hypothesis import assume, given, strategies as st
 from fixtures import Fixture, TempDir
 from treq.testing import StubTreq
@@ -31,7 +33,7 @@ from werkzeug import routing
 from werkzeug.exceptions import NotFound as WNotFound
 
 from .common import SyncTestCase
-from ..storage.http_common import get_content_type
+from ..storage.http_common import get_content_type, CBOR_MIME_TYPE
 from ..storage.common import si_b2a
 from ..storage.server import StorageServer
 from ..storage.http_server import (
@@ -239,6 +241,12 @@ class TestApp(object):
         else:
             return "BAD: {}".format(authorization)
 
+    @_authorized_route(_app, set(), "/v1/version", methods=["GET"])
+    def bad_version(self, request, authorization):
+        """Return version result that violates the expected schema."""
+        request.setHeader("content-type", CBOR_MIME_TYPE)
+        return dumps({"garbage": 123})
+
 
 def result_of(d):
     """
@@ -257,15 +265,15 @@ def result_of(d):
     )
 
 
-class RoutingTests(SyncTestCase):
+class CustomHTTPServerTests(SyncTestCase):
     """
-    Tests for the HTTP routing infrastructure.
+    Tests that use a custom HTTP server.
     """
 
     def setUp(self):
         if PY2:
             self.skipTest("Not going to bother supporting Python 2")
-        super(RoutingTests, self).setUp()
+        super(CustomHTTPServerTests, self).setUp()
         # Could be a fixture, but will only be used in this test class so not
         # going to bother:
         self._http_server = TestApp()
@@ -277,8 +285,8 @@ class RoutingTests(SyncTestCase):
 
     def test_authorization_enforcement(self):
         """
-        The requirement for secrets is enforced; if they are not given, a 400
-        response code is returned.
+        The requirement for secrets is enforced by the ``_authorized_route``
+        decorator; if they are not given, a 400 response code is returned.
         """
         # Without secret, get a 400 error.
         response = result_of(
@@ -298,6 +306,14 @@ class RoutingTests(SyncTestCase):
         self.assertEqual(response.code, 200)
         self.assertEqual(result_of(response.content()), b"GOOD SECRET")
 
+    def test_client_side_schema_validation(self):
+        """
+        The client validates returned CBOR message against a schema.
+        """
+        client = StorageClientGeneral(self.client)
+        with self.assertRaises(CDDLValidationError):
+            result_of(client.get_version())
+
 
 class HttpTestFixture(Fixture):
     """
@@ -413,6 +429,36 @@ class GenericHTTPAPITests(SyncTestCase):
         )
         self.assertEqual(version, expected_version)
 
+    def test_server_side_schema_validation(self):
+        """
+        Ensure that schema validation is happening: invalid CBOR should result
+        in bad request response code (error 400).
+
+        We don't bother checking every single request, the API on the
+        server-side is designed to require a schema, so it validates
+        everywhere. But we check at least one to ensure we get correct
+        response code on bad input, so we know validation happened.
+        """
+        upload_secret = urandom(32)
+        lease_secret = urandom(32)
+        storage_index = urandom(16)
+        url = self.http.client.relative_url(
+            "/v1/immutable/" + _encode_si(storage_index)
+        )
+        message = {"bad-message": "missing expected keys"}
+
+        response = result_of(
+            self.http.client.request(
+                "POST",
+                url,
+                lease_renew_secret=lease_secret,
+                lease_cancel_secret=lease_secret,
+                upload_secret=upload_secret,
+                message_to_serialize=message,
+            )
+        )
+        self.assertEqual(response.code, http.BAD_REQUEST)
+
 
 class ImmutableHTTPAPITests(SyncTestCase):
     """