Merge pull request #616 from tahoe-lafs/3031-replace-pycryptopp

Fixes ticket:3031 (replace pycryptopp with cryptography).

Commit: ede9fc7b31
@@ -68,6 +68,8 @@ compile the dependencies yourself (instead of using ``--find-links`` to take
 advantage of the pre-compiled ones we host), you'll also need to install
 Xcode and its command-line tools.
 
+**Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater.
+
 Python 2.7
 ----------
 
@@ -121,6 +123,9 @@ On Debian/Ubuntu-derived systems, the necessary packages are ``python-dev``,
 RPM-based system (like Fedora) these may be named ``python-devel``, etc,
 instead, and cam be installed with ``yum`` or ``rpm``.
 
+**Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater.
+
+
 Install the Latest Tahoe-LAFS Release
 =====================================
 
@@ -284,6 +289,8 @@ Similar errors about ``openssl/crypto.h`` indicate that you are missing the
 OpenSSL development headers (``libssl-dev``). Likewise ``ffi.h`` means you
 need ``libffi-dev``.
 
+**Note** that Tahoe-LAFS depends on `openssl 1.1.1c` or greater.
+
 Using Tahoe-LAFS
 ================
 
@@ -39,16 +39,16 @@ virtualenv.
 
 The ``.deb`` packages, of course, rely solely upon other ``.deb`` packages.
 For reference, here is a list of the debian package names that provide Tahoe's
-dependencies as of the 1.9 release:
+dependencies as of the 1.14.0 release:
 
 * python
 * python-zfec
-* python-pycryptopp
 * python-foolscap
 * python-openssl (needed by foolscap)
 * python-twisted
 * python-nevow
 * python-mock
+* python-cryptography
 * python-simplejson
 * python-setuptools
 * python-support (for Debian-specific install-time tools)
@@ -44,7 +44,7 @@ arguments. "``tahoe --help``" might also provide something useful.
 Running "``tahoe --version``" will display a list of version strings, starting
 with the "allmydata" module (which contains the majority of the Tahoe-LAFS
 functionality) and including versions for a number of dependent libraries,
-like Twisted, Foolscap, pycryptopp, and zfec. "``tahoe --version-and-path``"
+like Twisted, Foolscap, cryptography, and zfec. "``tahoe --version-and-path``"
 will also show the path from which each library was imported.
 
 On Unix systems, the shell expands filename wildcards (``'*'`` and ``'?'``)
@@ -211,14 +211,7 @@ Dependencies
 
 The Tahoe-LAFS SFTP server requires the Twisted "Conch" component (a "conch"
 is a twisted shell, get it?). Many Linux distributions package the Conch code
-separately: debian puts it in the "python-twisted-conch" package. Conch
-requires the "pycrypto" package, which is a Python+C implementation of many
-cryptographic functions (the debian package is named "python-crypto").
-
-Note that "pycrypto" is different than the "pycryptopp" package that
-Tahoe-LAFS uses (which is a Python wrapper around the C++ -based Crypto++
-library, a library that is frequently installed as /usr/lib/libcryptopp.a, to
-avoid problems with non-alphanumerics in filenames).
+separately: debian puts it in the "python-twisted-conch" package.
 
 Immutable and Mutable Files
 ===========================
@@ -272,22 +272,3 @@ that size, assume that they have been corrupted and are not retrievable from the
 Tahoe storage grid. Tahoe v1.1 clients will refuse to upload files larger than
 12 GiB with a clean failure. A future release of Tahoe will remove this
 limitation so that larger files can be uploaded.
-
-
-=== pycryptopp defect resulting in data corruption ===
-
-Versions of pycryptopp earlier than pycryptopp-0.5.0 had a defect
-which, when compiled with some compilers, would cause AES-256
-encryption and decryption to be computed incorrectly. This could
-cause data corruption. Tahoe v1.0 required, and came with a bundled
-copy of, pycryptopp v0.3.
-
-==== how to manage it ====
-
-You can detect whether pycryptopp-0.3 has this failure when it is
-compiled by your compiler. Run the unit tests that come with
-pycryptopp-0.3: unpack the "pycryptopp-0.3.tar" file that comes in the
-Tahoe v1.0 {{{misc/dependencies}}} directory, cd into the resulting
-{{{pycryptopp-0.3.0}}} directory, and execute {{{python ./setup.py
-test}}}. If the tests pass, then your compiler does not trigger this
-failure.
@@ -546,16 +546,15 @@ The "restrictions dictionary" is a table which establishes an upper bound on
 how this authority (or any attenuations thereof) may be used. It is
 effectively a set of key-value pairs.
 
-A "signing key" is an EC-DSA192 private key string, as supplied to the
-pycryptopp SigningKey() constructor, and is 12 bytes long. A "verifying key"
-is an EC-DSA192 public key string, as produced by pycryptopp, and is 24 bytes
-long. A "key identifier" is a string which securely identifies a specific
-signing/verifying keypair: for long RSA keys it would be a secure hash of the
-public key, but since ECDSA192 keys are so short, we simply use the full
-verifying key verbatim. A "key hint" is a variable-length prefix of the key
-identifier, perhaps zero bytes long, used to help a recipient reduce the
-number of verifying keys that it must search to find one that matches a
-signed message.
+A "signing key" is an EC-DSA192 private key string and is 12 bytes
+long. A "verifying key" is an EC-DSA192 public key string, and is 24
+bytes long. A "key identifier" is a string which securely identifies a
+specific signing/verifying keypair: for long RSA keys it would be a
+secure hash of the public key, but since ECDSA192 keys are so short,
+we simply use the full verifying key verbatim. A "key hint" is a
+variable-length prefix of the key identifier, perhaps zero bytes long,
+used to help a recipient reduce the number of verifying keys that it
+must search to find one that matches a signed message.
 
 ==== Authority Chains ====
 
@@ -77,9 +77,9 @@ If you're planning to hack on the source code, you might want to add
 Dependencies
 ------------
 
-Tahoe-LAFS depends upon several packages that use compiled C code, such as
-zfec, pycryptopp, and others. This code must be built separately for each
-platform (Windows, OS-X, and different flavors of Linux).
+Tahoe-LAFS depends upon several packages that use compiled C code
+(such as zfec). This code must be built separately for each platform
+(Windows, OS-X, and different flavors of Linux).
 
 Pre-compiled "wheels" of all Tahoe's dependencies are hosted on the
 tahoe-lafs.org website in the ``deps/`` directory. The ``--find-links=``
@@ -146,8 +146,7 @@ print_py_pkg_ver('mock')
 print_py_pkg_ver('Nevow', 'nevow')
 print_py_pkg_ver('pyasn1')
 print_py_pkg_ver('pycparser')
-print_py_pkg_ver('pycrypto', 'Crypto')
-print_py_pkg_ver('pycryptopp')
+print_py_pkg_ver('cryptography')
 print_py_pkg_ver('pyflakes')
 print_py_pkg_ver('pyOpenSSL', 'OpenSSL')
 print_py_pkg_ver('six')
@@ -15,7 +15,6 @@
 
 # allmydata-tahoe: 1.10.0.post185.dev0 [2249-deps-and-osx-packaging-1: 76ac53846042d9a4095995be92af66cdc09d5ad0-dirty] (/Applications/tahoe.app/src)
 # foolscap: 0.7.0 (/Applications/tahoe.app/support/lib/python2.7/site-packages/foolscap-0.7.0-py2.7.egg)
-# pycryptopp: 0.6.0.1206569328141510525648634803928199668821045408958 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pycryptopp-0.6.0.1206569328141510525648634803928199668821045408958-py2.7-macosx-10.9-intel.egg)
 # zfec: 1.4.24 (/Applications/tahoe.app/support/lib/python2.7/site-packages/zfec-1.4.24-py2.7-macosx-10.9-intel.egg)
 # Twisted: 13.0.0 (/Applications/tahoe.app/support/lib/python2.7/site-packages/Twisted-13.0.0-py2.7-macosx-10.9-intel.egg)
 # Nevow: 0.11.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages/Nevow-0.11.1-py2.7.egg)
@@ -23,7 +22,6 @@
 # python: 2.7.5 (/usr/bin/python)
 # platform: Darwin-13.4.0-x86_64-i386-64bit (None)
 # pyOpenSSL: 0.13 (/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python)
-# pycrypto: 2.6.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pycrypto-2.6.1-py2.7-macosx-10.9-intel.egg)
 # pyasn1: 0.1.7 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pyasn1-0.1.7-py2.7.egg)
 # mock: 1.0.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages)
 # setuptools: 0.6c16dev6 (/Applications/tahoe.app/support/lib/python2.7/site-packages/setuptools-0.6c16dev6.egg)

newsfragments/3031.other (new file, 1 line):

Replaced pycryptopp with cryptography.

setup.py:

@@ -292,6 +292,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
           "static/css/*.css",
           ]
       },
+      include_package_data=True,
       setup_requires=setup_requires,
       entry_points = { 'console_scripts': [ 'tahoe = allmydata.scripts.runner:run' ] },
       **setup_args
@@ -42,8 +42,8 @@ install_requires = [
     # * foolscap >= 0.12.6 has an i2p.sam_endpoint() that takes kwargs
     "foolscap >= 0.12.6",
 
-    # pycryptopp-0.6.0 includes ed25519
-    "pycryptopp >= 0.6.0",
+    # cryptography>2.3 because of CVE-2018-10903
+    'cryptography >= 2.3',
 
     "service-identity", # this is needed to suppress complaints about being unable to verify certs
     "characteristic >= 14.0.0", # latest service-identity depends on this version
@@ -117,7 +117,6 @@ install_requires = [
 package_imports = [
     # package name       module name
     ('foolscap', 'foolscap'),
-    ('pycryptopp', 'pycryptopp'),
     ('zfec', 'zfec'),
     ('Twisted', 'twisted'),
     ('Nevow', 'nevow'),
@@ -8,17 +8,17 @@ from twisted.internet import reactor, defer
 from twisted.application import service
 from twisted.application.internet import TimerService
 from twisted.python.filepath import FilePath
-from pycryptopp.publickey import rsa
 
 import allmydata
+from allmydata.crypto import rsa, ed25519
+from allmydata.crypto.util import remove_prefix
 from allmydata.storage.server import StorageServer
 from allmydata import storage_client
 from allmydata.immutable.upload import Uploader
 from allmydata.immutable.offloaded import Helper
 from allmydata.control import ControlServer
 from allmydata.introducer.client import IntroducerClient
-from allmydata.util import (hashutil, base32, pollmixin, log, keyutil, idlib,
-                            yamlutil)
+from allmydata.util import (hashutil, base32, pollmixin, log, idlib, yamlutil)
 from allmydata.util.encodingutil import (get_filesystem_encoding,
                                          from_utf8_or_none)
 from allmydata.util.abbreviate import parse_abbreviated_size
@@ -155,8 +155,7 @@ class KeyGenerator(object):
         keysize = keysize or self.default_keysize
         # RSA key generation for a 2048 bit key takes between 0.8 and 3.2
         # secs
-        signer = rsa.generate(keysize)
-        verifier = signer.get_verifying_key()
+        signer, verifier = rsa.create_signing_keypair(keysize)
         return defer.succeed( (verifier, signer) )
 
 class Terminator(service.Service):
@@ -479,17 +478,20 @@ class _Client(node.Node, pollmixin.PollMixin):
         # we only create the key once. On all subsequent runs, we re-use the
         # existing key
         def _make_key():
-            sk_vs,vk_vs = keyutil.make_keypair()
-            return sk_vs+"\n"
-        sk_vs = self.config.get_or_create_private_config("node.privkey", _make_key)
-        sk,vk_vs = keyutil.parse_privkey(sk_vs.strip())
-        self.config.write_config_file("node.pubkey", vk_vs+"\n")
-        self._node_key = sk
+            private_key, _ = ed25519.create_signing_keypair()
+            return ed25519.string_from_signing_key(private_key) + "\n"
+
+        private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
+        private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
+        public_key_str = ed25519.string_from_verifying_key(public_key)
+        self.config.write_config_file("node.pubkey", public_key_str + "\n")
+        self._node_private_key = private_key
+        self._node_public_key = public_key
 
     def get_long_nodeid(self):
         # this matches what IServer.get_longname() says about us elsewhere
-        vk_bytes = self._node_key.get_verifying_key_bytes()
-        return "v0-"+base32.b2a(vk_bytes)
+        vk_string = ed25519.string_from_verifying_key(self._node_public_key)
+        return remove_prefix(vk_string, "pub-")
 
     def get_long_tubid(self):
         return idlib.nodeid_b2a(self.nodeid)
@@ -510,7 +512,8 @@ class _Client(node.Node, pollmixin.PollMixin):
         else:
             # otherwise, we're free to use the more natural seed of our
             # pubkey-based serverid
-            vk_bytes = self._node_key.get_verifying_key_bytes()
+            vk_string = ed25519.string_from_verifying_key(self._node_public_key)
+            vk_bytes = remove_prefix(vk_string, ed25519.PUBLIC_KEY_PREFIX)
             seed = base32.b2a(vk_bytes)
             self.config.write_config_file("permutation-seed", seed+"\n")
         return seed.strip()
@@ -581,7 +584,7 @@ class _Client(node.Node, pollmixin.PollMixin):
             "permutation-seed-base32": self._init_permutation_seed(ss),
         }
         for ic in self.introducer_clients:
-            ic.publish("storage", ann, self._node_key)
+            ic.publish("storage", ann, self._node_private_key)
 
     def init_client(self):
         helper_furl = self.config.get_config("client", "helper.furl", None)

src/allmydata/crypto/__init__.py (new file, 8 lines):

"""
Helper functions for cryptography-related operations inside Tahoe

For the most part, these functions use and return objects that are
documented in the `cryptography` library -- however, code inside Tahoe
should only use these functions and not rely on features of any
objects that `cryptography` documents.
"""

src/allmydata/crypto/aes.py (new file, 180 lines):

"""
Helper functions for cryptograhpy-related operations inside Tahoe
using AES

These functions use and return objects that are documented in the
`cryptography` library -- however, code inside Tahoe should only use
functions from allmydata.crypto.aes and not rely on features of any
objects that `cryptography` documents.
"""

import six

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import (
    Cipher,
    algorithms,
    modes,
    CipherContext,
)
from zope.interface import (
    Interface,
    directlyProvides,
)


DEFAULT_IV = b'\x00' * 16


class IEncryptor(Interface):
    """
    An object which can encrypt data.

    Create one using :func:`create_encryptor` and use it with
    :func:`encrypt_data`
    """


class IDecryptor(Interface):
    """
    An object which can decrypt data.

    Create one using :func:`create_decryptor` and use it with
    :func:`decrypt_data`
    """


def create_encryptor(key, iv=None):
    """
    Create and return a new object which can do AES encryptions with
    the given key and initialization vector (IV). The default IV is 16
    zero-bytes.

    :param bytes key: the key bytes, should be 128 or 256 bits (16 or
        32 bytes)

    :param bytes iv: the Initialization Vector consisting of 16 bytes,
        or None for the default (which is 16 zero bytes)

    :returns: an object suitable for use with :func:`encrypt_data` (an
        :class:`IEncryptor`)
    """
    cryptor = _create_cryptor(key, iv)
    directlyProvides(cryptor, IEncryptor)
    return cryptor


def encrypt_data(encryptor, plaintext):
    """
    AES-encrypt `plaintext` with the given `encryptor`.

    :param encryptor: an instance of :class:`IEncryptor` previously
        returned from `create_encryptor`

    :param bytes plaintext: the data to encrypt

    :returns: bytes of ciphertext
    """

    _validate_cryptor(encryptor, encrypt=True)
    if not isinstance(plaintext, six.binary_type):
        raise ValueError('Plaintext must be bytes')

    return encryptor.update(plaintext)


def create_decryptor(key, iv=None):
    """
    Create and return a new object which can do AES decryptions with
    the given key and initialization vector (IV). The default IV is 16
    zero-bytes.

    :param bytes key: the key bytes, should be 128 or 256 bits (16 or
        32 bytes)

    :param bytes iv: the Initialization Vector consisting of 16 bytes,
        or None for the default (which is 16 zero bytes)

    :returns: an object suitable for use with :func:`decrypt_data` (an
        :class:`IDecryptor` instance)
    """
    cryptor = _create_cryptor(key, iv)
    directlyProvides(cryptor, IDecryptor)
    return cryptor


def decrypt_data(decryptor, plaintext):
    """
    AES-decrypt `plaintext` with the given `decryptor`.

    :param decryptor: an instance of :class:`IDecryptor` previously
        returned from `create_decryptor`

    :param bytes plaintext: the data to decrypt

    :returns: bytes of ciphertext
    """

    _validate_cryptor(decryptor, encrypt=False)
    if not isinstance(plaintext, six.binary_type):
        raise ValueError('Plaintext must be bytes')

    return decryptor.update(plaintext)


def _create_cryptor(key, iv):
    """
    Internal helper.

    See :func:`create_encryptor` or :func:`create_decryptor`.
    """
    key = _validate_key(key)
    iv = _validate_iv(iv)
    cipher = Cipher(
        algorithms.AES(key),
        modes.CTR(iv),
        backend=default_backend()
    )
    return cipher.encryptor()


def _validate_cryptor(cryptor, encrypt=True):
    """
    raise ValueError if `cryptor` is not a valid object
    """
    klass = IEncryptor if encrypt else IDecryptor
    name = "encryptor" if encrypt else "decryptor"
    if not isinstance(cryptor, CipherContext):
        raise ValueError(
            "'{}' must be a CipherContext".format(name)
        )
    if not klass.providedBy(cryptor):
        raise ValueError(
            "'{}' must be created with create_{}()".format(name, name)
        )


def _validate_key(key):
    """
    confirm `key` is suitable for AES encryption, or raise ValueError
    """
    if not isinstance(key, six.binary_type):
        raise TypeError('Key must be bytes')
    if len(key) not in (16, 32):
        raise ValueError('Key must be 16 or 32 bytes long')
    return key


def _validate_iv(iv):
    """
    Returns a suitable initialiation vector. If `iv` is `None`, a
    default is returned. If `iv` is not a suitable initialization
    vector an error is raised. `iv` is returned if it valid.
    """
    if iv is None:
        return DEFAULT_IV
    if not isinstance(iv, six.binary_type):
        raise TypeError('IV must be bytes')
    if len(iv) != 16:
        raise ValueError('IV must be 16 bytes long')
    return iv
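
For review context, a minimal usage sketch of the new AES helpers follows (illustrative only, not part of this commit; the key value below is an arbitrary example):

    from allmydata.crypto import aes

    key = b'\x01' * 32                      # 16- or 32-byte keys are accepted
    encryptor = aes.create_encryptor(key)   # AES-CTR with the default all-zero IV
    ciphertext = aes.encrypt_data(encryptor, b"hello world")

    decryptor = aes.create_decryptor(key)   # same key and IV recovers the plaintext
    assert aes.decrypt_data(decryptor, ciphertext) == b"hello world"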

src/allmydata/crypto/ed25519.py (new file, 190 lines):

'''
Ed25519 keys and helpers.

Key Formatting
--------------

- in base32, keys are 52 chars long (both signing and verifying keys)
- in base62, keys is 43 chars long
- in base64, keys is 43 chars long

We can't use base64 because we want to reserve punctuation and preserve
cut-and-pasteability. The base62 encoding is shorter than the base32 form,
but the minor usability improvement is not worth the documentation and
specification confusion of using a non-standard encoding. So we stick with
base32.
'''

import six

from cryptography.exceptions import (
    InvalidSignature,
)
from cryptography.hazmat.primitives.asymmetric.ed25519 import (
    Ed25519PrivateKey,
    Ed25519PublicKey,
)
from cryptography.hazmat.primitives.serialization import (
    Encoding,
    PrivateFormat,
    NoEncryption,
    PublicFormat,
)

from allmydata.crypto.util import remove_prefix
from allmydata.crypto.error import BadSignature

from allmydata.util.base32 import (
    a2b,
    b2a,
)

PRIVATE_KEY_PREFIX = b'priv-v0-'
PUBLIC_KEY_PREFIX = b'pub-v0-'


def create_signing_keypair():
    """
    Creates a new ed25519 keypair.

    :returns: 2-tuple of (private_key, public_key)
    """
    private_key = Ed25519PrivateKey.generate()
    return private_key, private_key.public_key()


def verifying_key_from_signing_key(private_key):
    """
    :returns: the public key associated to the given `private_key`
    """
    _validate_private_key(private_key)
    return private_key.public_key()


def sign_data(private_key, data):
    """
    Sign the given data using the given private key

    :param private_key: the private part returned from
        `create_signing_keypair` or from
        `signing_keypair_from_string`

    :param bytes data: the data to sign

    :returns: bytes representing the signature
    """

    _validate_private_key(private_key)
    if not isinstance(data, six.binary_type):
        raise ValueError('data must be bytes')
    return private_key.sign(data)


def string_from_signing_key(private_key):
    """
    Encode a private key to a string of bytes

    :param private_key: the private part returned from
        `create_signing_keypair` or from
        `signing_keypair_from_string`

    :returns: byte-string representing this key
    """
    _validate_private_key(private_key)
    raw_key_bytes = private_key.private_bytes(
        Encoding.Raw,
        PrivateFormat.Raw,
        NoEncryption(),
    )
    return PRIVATE_KEY_PREFIX + b2a(raw_key_bytes)


def signing_keypair_from_string(private_key_bytes):
    """
    Load a signing keypair from a string of bytes (which includes the
    PRIVATE_KEY_PREFIX)

    :returns: a 2-tuple of (private_key, public_key)
    """

    if not isinstance(private_key_bytes, six.binary_type):
        raise ValueError('private_key_bytes must be bytes')

    private_key = Ed25519PrivateKey.from_private_bytes(
        a2b(remove_prefix(private_key_bytes, PRIVATE_KEY_PREFIX))
    )
    return private_key, private_key.public_key()


def verify_signature(public_key, alleged_signature, data):
    """
    :param public_key: a verifying key

    :param bytes alleged_signature: the bytes of the alleged signature

    :param bytes data: the data which was allegedly signed

    :raises: BadSignature if the signature is bad
    :returns: None (or raises an exception).
    """

    if not isinstance(alleged_signature, six.binary_type):
        raise ValueError('alleged_signature must be bytes')

    if not isinstance(data, six.binary_type):
        raise ValueError('data must be bytes')

    _validate_public_key(public_key)
    try:
        public_key.verify(alleged_signature, data)
    except InvalidSignature:
        raise BadSignature()


def verifying_key_from_string(public_key_bytes):
    """
    Load a verifying key from a string of bytes (which includes the
    PUBLIC_KEY_PREFIX)

    :returns: a public_key
    """
    if not isinstance(public_key_bytes, six.binary_type):
        raise ValueError('public_key_bytes must be bytes')

    return Ed25519PublicKey.from_public_bytes(
        a2b(remove_prefix(public_key_bytes, PUBLIC_KEY_PREFIX))
    )


def string_from_verifying_key(public_key):
    """
    Encode a public key to a string of bytes

    :param public_key: the public part of a keypair

    :returns: byte-string representing this key
    """
    _validate_public_key(public_key)
    raw_key_bytes = public_key.public_bytes(
        Encoding.Raw,
        PublicFormat.Raw,
    )
    return PUBLIC_KEY_PREFIX + b2a(raw_key_bytes)


def _validate_public_key(public_key):
    """
    Internal helper. Verify that `public_key` is an appropriate object
    """
    if not isinstance(public_key, Ed25519PublicKey):
        raise ValueError('public_key must be an Ed25519PublicKey')
    return None


def _validate_private_key(private_key):
    """
    Internal helper. Verify that `private_key` is an appropriate object
    """
    if not isinstance(private_key, Ed25519PrivateKey):
        raise ValueError('private_key must be an Ed25519PrivateKey')
    return None
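
An illustrative round-trip through the new ed25519 helpers (a sketch for reviewers, not part of this commit; the message bytes are arbitrary):

    from allmydata.crypto import ed25519

    signing_key, verifying_key = ed25519.create_signing_keypair()
    sig = ed25519.sign_data(signing_key, b"some announcement")
    ed25519.verify_signature(verifying_key, sig, b"some announcement")  # raises BadSignature on mismatch

    # keys round-trip through the prefixed base32 string form
    key_str = ed25519.string_from_signing_key(signing_key)              # starts with PRIVATE_KEY_PREFIX
    signing_key2, verifying_key2 = ed25519.signing_keypair_from_string(key_str)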

src/allmydata/crypto/error.py (new file, 15 lines):

"""
Exceptions raise by allmydata.crypto.* modules
"""


class BadSignature(Exception):
    """
    An alleged signature did not match
    """


class BadPrefixError(Exception):
    """
    A key did not start with the required prefix
    """

src/allmydata/crypto/rsa.py (new file, 188 lines):

"""
Helper functions for cryptography-related operations inside Tahoe
using RSA public-key encryption and decryption.

In cases where these functions happen to use and return objects that
are documented in the `cryptography` library, code outside this module
should only use functions from allmydata.crypto.rsa and not rely on
features of any objects that `cryptography` documents.

That is, the public and private keys are opaque objects; DO NOT depend
on any of their methods.
"""


from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa, padding
from cryptography.hazmat.primitives.serialization import load_der_private_key, load_der_public_key, \
    Encoding, PrivateFormat, PublicFormat, NoEncryption

from allmydata.crypto.error import BadSignature


# This is the value that was used by `pycryptopp`, and we must continue to use it for
# both backwards compatibility and interoperability.
#
# The docs for `cryptography` suggest to use the constant defined at
# `cryptography.hazmat.primitives.asymmetric.padding.PSS.MAX_LENGTH`, but this causes old
# signatures to fail to validate.
RSA_PSS_SALT_LENGTH = 32

RSA_PADDING = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=RSA_PSS_SALT_LENGTH,
)



def create_signing_keypair(key_size):
    """
    Create a new RSA signing (private) keypair from scratch. Can be used with
    `sign_data` function.

    :param int key_size: length of key in bits

    :returns: 2-tuple of (private_key, public_key)
    """
    # Tahoe's original use of pycryptopp would use cryptopp's default
    # public_exponent, which is 17
    #
    # Thus, we are using 17 here as well. However, there are other
    # choices; see this for more discussion:
    # https://security.stackexchange.com/questions/2335/should-rsa-public-exponent-be-only-in-3-5-17-257-or-65537-due-to-security-c
    #
    # Another popular choice is 65537. See:
    # https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/#cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key
    # https://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
    priv_key = rsa.generate_private_key(
        public_exponent=17,
        key_size=key_size,
        backend=default_backend()
    )
    return priv_key, priv_key.public_key()


def create_signing_keypair_from_string(private_key_der):
    """
    Create an RSA signing (private) key from previously serialized
    private key bytes.

    :param bytes private_key_der: blob as returned from `der_string_from_signing_keypair`

    :returns: 2-tuple of (private_key, public_key)
    """
    priv_key = load_der_private_key(
        private_key_der,
        password=None,
        backend=default_backend(),
    )
    return priv_key, priv_key.public_key()


def der_string_from_signing_key(private_key):
    """
    Serializes a given RSA private key to a DER string

    :param private_key: a private key object as returned from
        `create_signing_keypair` or `create_signing_keypair_from_string`

    :returns: bytes representing `private_key`
    """
    _validate_private_key(private_key)
    return private_key.private_bytes(
        encoding=Encoding.DER,
        format=PrivateFormat.PKCS8,
        encryption_algorithm=NoEncryption(),
    )


def der_string_from_verifying_key(public_key):
    """
    Serializes a given RSA public key to a DER string.

    :param public_key: a public key object as returned from
        `create_signing_keypair` or `create_signing_keypair_from_string`

    :returns: bytes representing `public_key`
    """
    _validate_public_key(public_key)
    return public_key.public_bytes(
        encoding=Encoding.DER,
        format=PublicFormat.SubjectPublicKeyInfo,
    )


def create_verifying_key_from_string(public_key_der):
    """
    Create an RSA verifying key from a previously serialized public key

    :param bytes public_key_der: a blob as returned by `der_string_from_verifying_key`

    :returns: a public key object suitable for use with other
        functions in this module
    """
    pub_key = load_der_public_key(
        public_key_der,
        backend=default_backend(),
    )
    return pub_key


def sign_data(private_key, data):
    """
    :param private_key: the private part of a keypair returned from
        `create_signing_keypair_from_string` or `create_signing_keypair`

    :param bytes data: the bytes to sign

    :returns: bytes which are a signature of the bytes given as `data`.
    """
    _validate_private_key(private_key)
    return private_key.sign(
        data,
        RSA_PADDING,
        hashes.SHA256(),
    )

def verify_signature(public_key, alleged_signature, data):
    """
    :param public_key: a verifying key, returned from `create_verifying_key_from_string` or `create_verifying_key_from_private_key`

    :param bytes alleged_signature: the bytes of the alleged signature

    :param bytes data: the data which was allegedly signed
    """
    _validate_public_key(public_key)
    try:
        public_key.verify(
            alleged_signature,
            data,
            RSA_PADDING,
            hashes.SHA256(),
        )
    except InvalidSignature:
        raise BadSignature()


def _validate_public_key(public_key):
    """
    Internal helper. Checks that `public_key` is a valid cryptography
    object
    """
    if not isinstance(public_key, rsa.RSAPublicKey):
        raise ValueError(
            "public_key must be an RSAPublicKey"
        )


def _validate_private_key(private_key):
    """
    Internal helper. Checks that `public_key` is a valid cryptography
    object
    """
    if not isinstance(private_key, rsa.RSAPrivateKey):
        raise ValueError(
            "private_key must be an RSAPrivateKey"
        )
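
A sketch of how the new RSA helpers fit together (illustrative only, not part of this commit; 2048 is just an example key size):

    from allmydata.crypto import rsa

    priv, pub = rsa.create_signing_keypair(2048)
    sig = rsa.sign_data(priv, b"mutable file contents")
    rsa.verify_signature(pub, sig, b"mutable file contents")   # raises BadSignature on mismatch

    # DER serialization round-trips, as used for mutable-file keys
    priv2, pub2 = rsa.create_signing_keypair_from_string(rsa.der_string_from_signing_key(priv))
    pub3 = rsa.create_verifying_key_from_string(rsa.der_string_from_verifying_key(pub))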

src/allmydata/crypto/util.py (new file, 24 lines):

"""
Utilities used by allmydata.crypto modules
"""

from allmydata.crypto.error import BadPrefixError


def remove_prefix(s_bytes, prefix):
    """
    :param bytes s_bytes: a string of bytes whose prefix is removed

    :param bytes prefix: the bytes to remove from the beginning of `s_bytes`

    Removes `prefix` from `s_bytes` and returns the new bytes or
    raises `BadPrefixError` if `s_bytes` did not start with the
    `prefix` specified.

    :returns: `s_bytes` with `prefix` removed from the front.
    """
    if s_bytes.startswith(prefix):
        return s_bytes[len(prefix):]
    raise BadPrefixError(
        "did not see expected '{}' prefix".format(prefix)
    )
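
For completeness, the behaviour of `remove_prefix` (illustrative only, not part of this commit):

    from allmydata.crypto.util import remove_prefix

    remove_prefix(b"pub-v0-abcd", b"pub-v0-")   # -> b"abcd"
    remove_prefix(b"abcd", b"pub-v0-")          # raises BadPrefixError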
@@ -6,6 +6,7 @@ from twisted.internet import defer
 from foolscap.api import fireEventually
 import json
 
+from allmydata.crypto import aes
 from allmydata.deep_stats import DeepStats
 from allmydata.mutable.common import NotWriteableError
 from allmydata.mutable.filenode import MutableFileNode
@@ -22,7 +23,6 @@ from allmydata.util.assertutil import precondition
 from allmydata.util.netstring import netstring, split_netstring
 from allmydata.util.consumer import download_to_data
 from allmydata.uri import wrap_dirnode_cap
-from pycryptopp.cipher.aes import AES
 from allmydata.util.dictutil import AuxValueDict
 
 from eliot import (
@@ -214,8 +214,8 @@ def _encrypt_rw_uri(writekey, rw_uri):
 
     salt = hashutil.mutable_rwcap_salt_hash(rw_uri)
     key = hashutil.mutable_rwcap_key_hash(salt, writekey)
-    cryptor = AES(key)
-    crypttext = cryptor.process(rw_uri)
+    encryptor = aes.create_encryptor(key)
+    crypttext = aes.encrypt_data(encryptor, rw_uri)
     mac = hashutil.hmac(key, salt + crypttext)
     assert len(mac) == 32
     return salt + crypttext + mac
@@ -331,8 +331,8 @@ class DirectoryNode(object):
         salt = encwrcap[:16]
         crypttext = encwrcap[16:-32]
         key = hashutil.mutable_rwcap_key_hash(salt, self._node.get_writekey())
-        cryptor = AES(key)
-        plaintext = cryptor.process(crypttext)
+        encryptor = aes.create_decryptor(key)
+        plaintext = aes.decrypt_data(encryptor, crypttext)
         return plaintext
 
     def _create_and_validate_node(self, rw_uri, ro_uri, name):
@@ -7,12 +7,12 @@ from twisted.internet import defer
 
 from allmydata import uri
 from twisted.internet.interfaces import IConsumer
+from allmydata.crypto import aes
 from allmydata.interfaces import IImmutableFileNode, IUploadResults
 from allmydata.util import consumer
 from allmydata.check_results import CheckResults, CheckAndRepairResults
 from allmydata.util.dictutil import DictOfSets
 from allmydata.util.happinessutil import servers_of_happiness
-from pycryptopp.cipher.aes import AES
 
 # local imports
 from allmydata.immutable.checker import Checker
@@ -201,8 +201,9 @@ class DecryptingConsumer(object):
         offset_big = offset // 16
         offset_small = offset % 16
         iv = binascii.unhexlify("%032x" % offset_big)
-        self._decryptor = AES(readkey, iv=iv)
-        self._decryptor.process("\x00"*offset_small)
+        self._decryptor = aes.create_decryptor(readkey, iv)
+        # this is just to advance the counter
+        aes.decrypt_data(self._decryptor, b"\x00" * offset_small)
 
     def set_download_status_read_event(self, read_ev):
         self._read_ev = read_ev
@@ -219,7 +220,7 @@ class DecryptingConsumer(object):
         self._consumer.unregisterProducer()
     def write(self, ciphertext):
         started = now()
-        plaintext = self._decryptor.process(ciphertext)
+        plaintext = aes.decrypt_data(self._decryptor, ciphertext)
         if self._read_ev:
             elapsed = now() - started
             self._read_ev.update(0, elapsed, 0)
@@ -5,6 +5,7 @@ from twisted.internet import defer
 from twisted.application import service
 from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually
 
+from allmydata.crypto import aes
 from allmydata.util.hashutil import file_renewal_secret_hash, \
      file_cancel_secret_hash, bucket_renewal_secret_hash, \
      bucket_cancel_secret_hash, plaintext_hasher, \
@@ -23,7 +24,6 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
      NoServersError, InsufficientVersionError, UploadUnhappinessError, \
      DEFAULT_MAX_SEGMENT_SIZE, IProgress, IPeerSelector
 from allmydata.immutable import layout
-from pycryptopp.cipher.aes import AES
 
 from six.moves import cStringIO as StringIO
 from happiness_upload import share_placement, calculate_happiness
@@ -946,8 +946,7 @@ class EncryptAnUploadable(object):
 
         d = self.original.get_encryption_key()
         def _got(key):
-            e = AES(key)
-            self._encryptor = e
+            self._encryptor = aes.create_encryptor(key)
 
             storage_index = storage_index_hash(key)
             assert isinstance(storage_index, str)
@@ -957,7 +956,7 @@ class EncryptAnUploadable(object):
             self._storage_index = storage_index
             if self._status:
                 self._status.set_storage_index(storage_index)
-            return e
+            return self._encryptor
         d.addCallback(_got)
         return d
 
@@ -1064,11 +1063,11 @@ class EncryptAnUploadable(object):
         self._plaintext_hasher.update(chunk)
         self._update_segment_hash(chunk)
         # TODO: we have to encrypt the data (even if hash_only==True)
-        # because pycryptopp's AES-CTR implementation doesn't offer a
-        # way to change the counter value. Once pycryptopp acquires
+        # because the AES-CTR implementation doesn't offer a
+        # way to change the counter value. Once it acquires
         # this ability, change this to simply update the counter
-        # before each call to (hash_only==False) _encryptor.process()
-        ciphertext = self._encryptor.process(chunk)
+        # before each call to (hash_only==False) encrypt_data
+        ciphertext = aes.encrypt_data(self._encryptor, chunk)
         if hash_only:
             self.log(" skipping encryption", level=log.NOISY)
         else:
@@ -1,4 +1,3 @@
-
 import time
 from zope.interface import implementer
 from twisted.application import service
@@ -10,7 +9,7 @@ from allmydata.introducer.common import sign_to_foolscap, unsign_from_foolscap,\
      get_tubid_string_from_ann
 from allmydata.util import log, yamlutil, connection_status
 from allmydata.util.rrefutil import add_version_to_remote_reference
-from allmydata.util.keyutil import BadSignatureError
+from allmydata.crypto.error import BadSignature
 from allmydata.util.assertutil import precondition
 
 class InvalidCacheError(Exception):
@@ -239,7 +238,7 @@ class IntroducerClient(service.Service, Referenceable):
             ann, key_s = unsign_from_foolscap(ann_t)
             # key is "v0-base32abc123"
             precondition(isinstance(key_s, str), key_s)
-        except BadSignatureError:
+        except BadSignature:
             self.log("bad signature on inbound announcement: %s" % (ann_t,),
                      parent=lp, level=log.WEIRD, umid="ZAU15Q")
             # process other announcements that arrived with the bad one
@@ -1,7 +1,9 @@
 import re
 import json
-from allmydata.util import keyutil, base32, rrefutil
+from allmydata.crypto.util import remove_prefix
+from allmydata.crypto import ed25519
+from allmydata.util import base32, rrefutil
 
 
 def get_tubid_string_from_ann(ann):
     return get_tubid_string(str(ann.get("anonymous-storage-FURL")
@ -13,34 +15,50 @@ def get_tubid_string(furl):
|
|||||||
return m.group(1).lower()
|
return m.group(1).lower()
|
||||||
|
|
||||||
|
|
||||||
def sign_to_foolscap(ann, sk):
|
def sign_to_foolscap(announcement, signing_key):
|
||||||
|
"""
|
||||||
|
:param signing_key: a (private) signing key, as returned from
|
||||||
|
e.g. :func:`allmydata.crypto.ed25519.signing_keypair_from_string`
|
||||||
|
|
||||||
|
:returns: 3-tuple of (msg, sig, vk) where msg is a UTF8 JSON
|
||||||
|
serialization of the `announcement` (bytes), sig is bytes (a
|
||||||
|
signature of msg) and vk is the verifying key bytes
|
||||||
|
"""
|
||||||
# return (bytes, sig-str, pubkey-str). A future HTTP-based serialization
|
# return (bytes, sig-str, pubkey-str). A future HTTP-based serialization
|
||||||
# will use JSON({msg:b64(JSON(msg).utf8), sig:v0-b64(sig),
|
# will use JSON({msg:b64(JSON(msg).utf8), sig:v0-b64(sig),
|
||||||
# pubkey:v0-b64(pubkey)}) .
|
# pubkey:v0-b64(pubkey)}) .
|
||||||
msg = json.dumps(ann).encode("utf-8")
|
msg = json.dumps(announcement).encode("utf-8")
|
||||||
sig = "v0-"+base32.b2a(sk.sign(msg))
|
sig = b"v0-" + base32.b2a(
|
||||||
vk_bytes = sk.get_verifying_key_bytes()
|
ed25519.sign_data(signing_key, msg)
|
||||||
ann_t = (msg, sig, "v0-"+base32.b2a(vk_bytes))
|
)
|
||||||
|
verifying_key_string = ed25519.string_from_verifying_key(
|
||||||
|
ed25519.verifying_key_from_signing_key(signing_key)
|
||||||
|
)
|
||||||
|
ann_t = (msg, sig, remove_prefix(verifying_key_string, b"pub-"))
|
||||||
return ann_t
|
return ann_t
|
||||||
|
|
||||||
|
|
||||||
class UnknownKeyError(Exception):
|
class UnknownKeyError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def unsign_from_foolscap(ann_t):
|
def unsign_from_foolscap(ann_t):
|
||||||
(msg, sig_vs, claimed_key_vs) = ann_t
|
(msg, sig_vs, claimed_key_vs) = ann_t
|
||||||
if not sig_vs or not claimed_key_vs:
|
if not sig_vs or not claimed_key_vs:
|
||||||
raise UnknownKeyError("only signed announcements recognized")
|
raise UnknownKeyError("only signed announcements recognized")
|
||||||
if not sig_vs.startswith("v0-"):
|
if not sig_vs.startswith(b"v0-"):
|
||||||
raise UnknownKeyError("only v0- signatures recognized")
|
raise UnknownKeyError("only v0- signatures recognized")
|
||||||
if not claimed_key_vs.startswith("v0-"):
|
if not claimed_key_vs.startswith(b"v0-"):
|
||||||
raise UnknownKeyError("only v0- keys recognized")
|
raise UnknownKeyError("only v0- keys recognized")
|
||||||
claimed_key = keyutil.parse_pubkey("pub-"+claimed_key_vs)
|
|
||||||
sig_bytes = base32.a2b(keyutil.remove_prefix(sig_vs, "v0-"))
|
claimed_key = ed25519.verifying_key_from_string(b"pub-" + claimed_key_vs)
|
||||||
claimed_key.verify(sig_bytes, msg)
|
sig_bytes = base32.a2b(remove_prefix(sig_vs, b"v0-"))
|
||||||
|
ed25519.verify_signature(claimed_key, sig_bytes, msg)
|
||||||
key_vs = claimed_key_vs
|
key_vs = claimed_key_vs
|
||||||
ann = json.loads(msg.decode("utf-8"))
|
ann = json.loads(msg.decode("utf-8"))
|
||||||
return (ann, key_vs)
|
return (ann, key_vs)
|
||||||
|
|
||||||
|
|
||||||
class SubscriberDescriptor(object):
|
class SubscriberDescriptor(object):
|
||||||
"""This describes a subscriber, for status display purposes. It contains
|
"""This describes a subscriber, for status display purposes. It contains
|
||||||
the following attributes:
|
the following attributes:
|
||||||
|
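For reference, the signing helpers rewritten in this hunk round-trip like this (a sketch only; the module path `allmydata.introducer.common` and the announcement dict are assumptions based on the hunk context, not part of this diff):

    from allmydata.crypto import ed25519
    from allmydata.introducer.common import sign_to_foolscap, unsign_from_foolscap

    # create_signing_keypair() returns (private/signing key, public/verifying key)
    signing_key, verifying_key = ed25519.create_signing_keypair()
    announcement = {"service-name": "storage",
                    "anonymous-storage-FURL": "pb://tubid@127.0.0.1:1234/swissnum"}

    # sign_to_foolscap() serializes the dict to UTF-8 JSON and signs it
    ann_t = sign_to_foolscap(announcement, signing_key)
    # unsign_from_foolscap() verifies the signature and returns the decoded dict
    ann, key_vs = unsign_from_foolscap(ann_t)
    assert ann == announcement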
@@ -229,7 +229,7 @@ class IntroducerService(service.MultiService, Referenceable):
         self._debug_counts["inbound_message"] += 1
         self.log("introducer: announcement published: %s" % (ann_t,),
                  umid="wKHgCw")
-        ann, key = unsign_from_foolscap(ann_t) # might raise BadSignatureError
+        ann, key = unsign_from_foolscap(ann_t) # might raise BadSignature
         service_name = str(ann["service-name"])

         index = (service_name, key)
@@ -1,9 +1,11 @@

 import random

 from zope.interface import implementer
 from twisted.internet import defer, reactor
 from foolscap.api import eventually
+from allmydata.crypto import aes
+from allmydata.crypto import rsa
 from allmydata.interfaces import IMutableFileNode, ICheckable, ICheckResults, \
      NotEnoughSharesError, MDMF_VERSION, SDMF_VERSION, IMutableUploadable, \
      IMutableFileVersion, IWriteable
@@ -12,8 +14,6 @@ from allmydata.util.assertutil import precondition
 from allmydata.uri import WriteableSSKFileURI, ReadonlySSKFileURI, \
                           WriteableMDMFFileURI, ReadonlyMDMFFileURI
 from allmydata.monitor import Monitor
-from pycryptopp.cipher.aes import AES
 from allmydata.mutable.publish import Publish, MutableData,\
                                       TransformingUploadable
 from allmydata.mutable.common import MODE_READ, MODE_WRITE, MODE_CHECK, UnrecoverableFileError, \
@@ -129,8 +129,8 @@ class MutableFileNode(object):
         """
         (pubkey, privkey) = keypair
         self._pubkey, self._privkey = pubkey, privkey
-        pubkey_s = self._pubkey.serialize()
-        privkey_s = self._privkey.serialize()
+        pubkey_s = rsa.der_string_from_verifying_key(self._pubkey)
+        privkey_s = rsa.der_string_from_signing_key(self._privkey)
         self._writekey = hashutil.ssk_writekey_hash(privkey_s)
         self._encprivkey = self._encrypt_privkey(self._writekey, privkey_s)
         self._fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey_s)
@@ -160,13 +160,13 @@ class MutableFileNode(object):
         return contents(self)

     def _encrypt_privkey(self, writekey, privkey):
-        enc = AES(writekey)
-        crypttext = enc.process(privkey)
+        encryptor = aes.create_encryptor(writekey)
+        crypttext = aes.encrypt_data(encryptor, privkey)
         return crypttext

     def _decrypt_privkey(self, enc_privkey):
-        enc = AES(self._writekey)
-        privkey = enc.process(enc_privkey)
+        decryptor = aes.create_decryptor(self._writekey)
+        privkey = aes.decrypt_data(decryptor, enc_privkey)
         return privkey

     def _populate_pubkey(self, pubkey):
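The AES migration in these hunks follows one pattern throughout: pycryptopp's stateful `AES(key).process(data)` becomes a wrapper object plus a module-level call. A minimal sketch of the private-key encryption shown above (the key and plaintext bytes are illustrative, not taken from the code):

    import os
    from allmydata.crypto import aes

    writekey = os.urandom(16)
    privkey_der = b"example DER-encoded private key"      # illustrative only

    encryptor = aes.create_encryptor(writekey)             # was: AES(writekey)
    encprivkey = aes.encrypt_data(encryptor, privkey_der)  # was: enc.process(privkey)

    # the cipher is a stream cipher, so a fresh decryptor keyed the same way
    # recovers the plaintext
    decryptor = aes.create_decryptor(writekey)
    assert aes.decrypt_data(decryptor, encprivkey) == privkey_der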
@@ -4,15 +4,16 @@ from itertools import count
 from zope.interface import implementer
 from twisted.internet import defer
 from twisted.python import failure
+from allmydata.crypto import aes
+from allmydata.crypto import rsa
 from allmydata.interfaces import IPublishStatus, SDMF_VERSION, MDMF_VERSION, \
                                  IMutableUploadable
 from allmydata.util import base32, hashutil, mathutil, log
 from allmydata.util.dictutil import DictOfSets
 from allmydata import hashtree, codec
 from allmydata.storage.server import si_b2a
-from pycryptopp.cipher.aes import AES
 from foolscap.api import eventually, fireEventually

 from allmydata.mutable.common import MODE_WRITE, MODE_CHECK, MODE_REPAIR, \
                                      UncoordinatedWriteError, NotEnoughServersError
 from allmydata.mutable.servermap import ServerMap
@@ -711,8 +712,8 @@ class Publish(object):

         key = hashutil.ssk_readkey_data_hash(salt, self.readkey)
         self._status.set_status("Encrypting")
-        enc = AES(key)
-        crypttext = enc.process(data)
+        encryptor = aes.create_encryptor(key)
+        crypttext = aes.encrypt_data(encryptor, data)
         assert len(crypttext) == len(data)

         now = time.time()
@@ -849,7 +850,7 @@ class Publish(object):
         started = time.time()
         self._status.set_status("Signing prefix")
         signable = self._get_some_writer().get_signable()
-        self.signature = self._privkey.sign(signable)
+        self.signature = rsa.sign_data(self._privkey, signable)

         for (shnum, writers) in self.writers.iteritems():
             for writer in writers:
@@ -864,7 +865,7 @@ class Publish(object):
         self._status.set_status("Pushing shares")
         self._started_pushing = started
         ds = []
-        verification_key = self._pubkey.serialize()
+        verification_key = rsa.der_string_from_verifying_key(self._pubkey)

         for (shnum, writers) in self.writers.copy().iteritems():
             for writer in writers:
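The RSA calls swapped in above can be exercised on their own, roughly as follows (a sketch; the 2048-bit size and the `signable` bytes are illustrative):

    from allmydata.crypto import rsa

    privkey, pubkey = rsa.create_signing_keypair(2048)

    signable = b"share prefix bytes"                               # illustrative
    signature = rsa.sign_data(privkey, signable)                   # was: privkey.sign(signable)
    verification_key = rsa.der_string_from_verifying_key(pubkey)   # was: pubkey.serialize()

    # raises an exception (rather than returning False) on a bad signature
    rsa.verify_signature(pubkey, signature, signable)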
@@ -1,5 +1,5 @@

 import time

 from itertools import count
 from zope.interface import implementer
 from twisted.internet import defer
@@ -8,6 +8,8 @@ from twisted.internet.interfaces import IPushProducer, IConsumer
 from foolscap.api import eventually, fireEventually, DeadReferenceError, \
      RemoteException
+from allmydata.crypto import aes
+from allmydata.crypto import rsa
 from allmydata.interfaces import IRetrieveStatus, NotEnoughSharesError, \
      DownloadStopped, MDMF_VERSION, SDMF_VERSION
 from allmydata.util.assertutil import _assert, precondition
@@ -15,8 +17,6 @@ from allmydata.util import hashutil, log, mathutil, deferredutil
 from allmydata.util.dictutil import DictOfSets
 from allmydata import hashtree, codec
 from allmydata.storage.server import si_b2a
-from pycryptopp.cipher.aes import AES
-from pycryptopp.publickey import rsa

 from allmydata.mutable.common import CorruptShareError, BadShareError, \
      UncoordinatedWriteError
@@ -899,8 +899,8 @@ class Retrieve(object):
         self.log("decrypting segment %d" % self._current_segment)
         started = time.time()
         key = hashutil.ssk_readkey_data_hash(salt, self._node.get_readkey())
-        decryptor = AES(key)
-        plaintext = decryptor.process(segment)
+        decryptor = aes.create_decryptor(key)
+        plaintext = aes.decrypt_data(decryptor, segment)
         self._status.accumulate_decrypt_time(time.time() - started)
         return plaintext

@@ -935,13 +935,11 @@ class Retrieve(object):
             # it's good
             self.log("got valid privkey from shnum %d on reader %s" %
                      (reader.shnum, reader))
-            privkey = rsa.create_signing_key_from_string(alleged_privkey_s)
+            privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s)
             self._node._populate_encprivkey(enc_privkey)
             self._node._populate_privkey(privkey)
             self._need_privkey = False

     def _done(self):
         """
         I am called by _download_current_segment when the download process
@@ -972,7 +970,6 @@ class Retrieve(object):
             self._consumer.unregisterProducer()
         eventually(self._done_deferred.callback, ret)

     def _raise_notenoughshareserror(self):
         """
         I am called when there are not enough active servers left to complete
@@ -8,11 +8,12 @@ from twisted.internet import defer
 from twisted.python import failure
 from foolscap.api import DeadReferenceError, RemoteException, eventually, \
      fireEventually
+from allmydata.crypto.error import BadSignature
+from allmydata.crypto import rsa
 from allmydata.util import base32, hashutil, log, deferredutil
 from allmydata.util.dictutil import DictOfSets
 from allmydata.storage.server import si_b2a
 from allmydata.interfaces import IServermapUpdaterStatus
-from pycryptopp.publickey import rsa

 from allmydata.mutable.common import MODE_CHECK, MODE_ANYTHING, MODE_WRITE, \
      MODE_READ, MODE_REPAIR, CorruptShareError
@@ -843,8 +844,9 @@ class ServermapUpdater(object):
             # This is a new version tuple, and we need to validate it
             # against the public key before keeping track of it.
             assert self._node.get_pubkey()
-            valid = self._node.get_pubkey().verify(prefix, signature[1])
-            if not valid:
+            try:
+                rsa.verify_signature(self._node.get_pubkey(), signature[1], prefix)
+            except BadSignature:
                 raise CorruptShareError(server, shnum,
                                         "signature is invalid")

@@ -913,12 +915,10 @@ class ServermapUpdater(object):
                                  verinfo,
                                  update_data)

     def _deserialize_pubkey(self, pubkey_s):
         verifier = rsa.create_verifying_key_from_string(pubkey_s)
         return verifier

     def _try_to_validate_privkey(self, enc_privkey, server, shnum, lp):
         """
         Given a writekey from a remote server, I validate it against the
@@ -937,7 +937,7 @@ class ServermapUpdater(object):
         self.log("got valid privkey from shnum %d on serverid %s" %
                  (shnum, server.get_name()),
                  parent=lp)
-        privkey = rsa.create_signing_key_from_string(alleged_privkey_s)
+        privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s)
         self._node._populate_encprivkey(enc_privkey)
         self._node._populate_privkey(privkey)
         self._need_privkey = False
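Note the control-flow change in the servermap hunk: the old API returned a boolean from `verify()`, while `rsa.verify_signature()` raises `BadSignature`. A sketch of the new pattern (keys and data are illustrative):

    from allmydata.crypto import rsa
    from allmydata.crypto.error import BadSignature

    privkey, pubkey = rsa.create_signing_keypair(2048)
    prefix = b"signed prefix"                    # illustrative
    sig = rsa.sign_data(privkey, prefix)

    try:
        # verifying against different data now surfaces as an exception
        rsa.verify_signature(pubkey, sig, prefix + b"more")
    except BadSignature:
        pass  # the caller above converts this into CorruptShareError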
@@ -14,11 +14,11 @@ Generate a public/private keypair, dumped to stdout as two lines of ASCII..
         return t

 def print_keypair(options):
-    from allmydata.util.keyutil import make_keypair
+    from allmydata.crypto import ed25519
     out = options.stdout
-    privkey_vs, pubkey_vs = make_keypair()
-    print("private:", privkey_vs, file=out)
-    print("public:", pubkey_vs, file=out)
+    private_key, public_key = ed25519.create_signing_keypair()
+    print("private:", ed25519.string_from_signing_key(private_key), file=out)
+    print("public:", ed25519.string_from_verifying_key(public_key), file=out)

 class DerivePubkeyOptions(BaseOptions):
     def parseArgs(self, privkey):
@@ -38,11 +38,11 @@ generate-keypair, derive the public key and print it to stdout.

 def derive_pubkey(options):
     out = options.stdout
-    from allmydata.util import keyutil
+    from allmydata.crypto import ed25519
     privkey_vs = options.privkey
-    sk, pubkey_vs = keyutil.parse_privkey(privkey_vs)
-    print("private:", privkey_vs, file=out)
-    print("public:", pubkey_vs, file=out)
+    private_key, public_key = ed25519.signing_keypair_from_string(privkey_vs)
+    print("private:", ed25519.string_from_signing_key(private_key), file=out)
+    print("public:", ed25519.string_from_verifying_key(public_key), file=out)
     return 0

 class AdminCommand(BaseOptions):
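The rewritten `tahoe admin generate-keypair` and `derive-pubkey` commands reduce to the following helper calls (a sketch of the equivalent Python; the "priv-v0-"/"pub-v0-" prefixes are the serialization format the CLI prints):

    from allmydata.crypto import ed25519

    private_key, public_key = ed25519.create_signing_keypair()
    priv_str = ed25519.string_from_signing_key(private_key)    # "priv-v0-..."
    pub_str = ed25519.string_from_verifying_key(public_key)    # "pub-v0-..."

    # derive-pubkey: recover both keys from the serialized private key
    private_key2, public_key2 = ed25519.signing_keypair_from_string(priv_str)
    assert ed25519.string_from_verifying_key(public_key2) == pub_str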
@@ -1,4 +1,3 @@

 import os.path
 from six.moves import cStringIO as StringIO
 import urllib, sys
@@ -11,14 +10,14 @@ from twisted.internet import task
 from twisted.python.filepath import FilePath

 import allmydata
-from allmydata.util import fileutil, hashutil, base32, keyutil
+from allmydata.crypto import ed25519
+from allmydata.util import fileutil, hashutil, base32
 from allmydata.util.namespace import Namespace
 from allmydata import uri
 from allmydata.immutable import upload
 from allmydata.dirnode import normalize
 from allmydata.scripts.common_http import socket_error
 import allmydata.scripts.common_http
-from pycryptopp.publickey import ed25519

 # Test that the scripts can be imported.
 from allmydata.scripts import create_node, debug, tahoe_start, tahoe_restart, \
@@ -35,10 +34,10 @@ from allmydata.scripts.common import DEFAULT_ALIAS, get_aliases, get_alias, \
      DefaultAliasMarker

 from allmydata.scripts import cli, debug, runner
-from ..common_util import (ReallyEqualMixin, skip_if_cannot_represent_filename,
+from allmydata.test.common_util import (ReallyEqualMixin, skip_if_cannot_represent_filename,
                            run_cli)
-from ..no_network import GridTestMixin
-from .common import CLITestMixin, parse_options
+from allmydata.test.no_network import GridTestMixin
+from allmydata.test.cli.common import CLITestMixin, parse_options
 from twisted.python import usage

 from allmydata.util.encodingutil import listdir_unicode, get_io_encoding
@@ -734,16 +733,20 @@ class Admin(unittest.TestCase):
             self.failUnlessEqual(pubkey_bits[0], vk_header, lines[1])
             self.failUnless(privkey_bits[1].startswith("priv-v0-"), lines[0])
             self.failUnless(pubkey_bits[1].startswith("pub-v0-"), lines[1])
-            sk_bytes = base32.a2b(keyutil.remove_prefix(privkey_bits[1], "priv-v0-"))
-            sk = ed25519.SigningKey(sk_bytes)
-            vk_bytes = base32.a2b(keyutil.remove_prefix(pubkey_bits[1], "pub-v0-"))
-            self.failUnlessEqual(sk.get_verifying_key_bytes(), vk_bytes)
+            sk, pk = ed25519.signing_keypair_from_string(privkey_bits[1])
+            vk_bytes = pubkey_bits[1]
+            self.assertEqual(
+                ed25519.string_from_verifying_key(pk),
+                vk_bytes,
+            )
         d.addCallback(_done)
         return d

     def test_derive_pubkey(self):
-        priv1,pub1 = keyutil.make_keypair()
-        d = run_cli("admin", "derive-pubkey", priv1)
+        priv_key, pub_key = ed25519.create_signing_keypair()
+        priv_key_str = ed25519.string_from_signing_key(priv_key)
+        pub_key_str = ed25519.string_from_verifying_key(pub_key)
+        d = run_cli("admin", "derive-pubkey", priv_key_str)
         def _done(args):
             (rc, stdout, stderr) = args
             lines = stdout.split("\n")
@@ -753,8 +756,8 @@ class Admin(unittest.TestCase):
             vk_header = "public: pub-v0-"
             self.failUnless(privkey_line.startswith(sk_header), privkey_line)
             self.failUnless(pubkey_line.startswith(vk_header), pubkey_line)
-            pub2 = pubkey_line[len(vk_header):]
-            self.failUnlessEqual("pub-v0-"+pub2, pub1)
+            pub_key_str2 = pubkey_line[len(vk_header):]
+            self.assertEqual("pub-v0-" + pub_key_str2, pub_key_str)
         d.addCallback(_done)
         return d
new file: src/allmydata/test/data/pycryptopp-rsa-2048-priv.txt (1 line)
@@ -0,0 +1 @@
MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC0JwgBbVsI+XlOopqjvBatKkQbJPXuap7Psbe5i4EoMfiYI2PC2UB7GuYeTdE79TvDtmfjFD/RVWA3Y/RTQYQz/lKyCFS4w3wa/TPkZwF1r3OjIMSsCYe2J3W9NV3cK+PVw2A8D2y5DvUIAdO+Mi6aH26p2UV8FTnPqHWvJubrcLQt6979/BQnqKCFJ+SPx4se5XsMZ3vrbs6MCqM2qS9RnNEhexlNrJd1wXezILKsmQdf/QiZiY7LXjEdD6BNG8OYQ2iSbCa8aGEoSPQfdnZZxcTFE02QwKcScZKhU9fRv0Ttqr3i8xiliw9gn4UzptEZO6MVO2BrptS30SjJDXC7AgERAoIBADpI3PFnJPtfxV00m3E1UqFvjoFAqetAnMq5fzR/9RSIo0BHr1Wgo+uXwuuvw7GEC85gqSPR2GlfYuS+dLGGIz3/dRt7KngDAoEzzQYhU0u4w4eZqQp7jcn9tSagUxKGq5f7cfVQSNJ1x77TaibyHiLN7xjVWj67krQf6dbI0j0cYvnxu+4EZbzNdvFw93ddoOZB/dFjLu0kVKVl/mWyCX9GNr2nCSHe9wYipOz5b9WkdD0J2Oy0v8Wkn4y3yOOvo/EgrNYfo4IVslsDo9Yw3Yk32Eml0ZsdwSqu+wM4c+jRbTJ+sBGqci4etPpMhcsH0Vt9+97Lnuan2Jza9xjrL2ECgYEA8wj+/bfjTCXsu22f8V7Z40vJUyM7j4WvUoA9khAQ7qAlnFdqdzq5a7ArA9vRjeN6ya16j36IXCkpT+FGe6YWCsZCKd1ZVy7sZ1Uh7X2hRqf0vxJsSJvG/OmofFUfuwCgLFLKI4SDhHaB+pWAdkAIL4MkJQADg/qVlAdrWoPsfhECgYEAvcNHhSCW010SRudwmTRX5QtndHk/LM6VAgyR0ElarvmG6K5pbpL8MD5CpJ3AhUwKp96SlMsBEG3a9BR5zv6Jvhc/KHxT7W/EjLnV9PSD90+BgHHsTonjg6TayJ9XE6RpO3MqeifVG/2S5WhhFFGGd5KSFnvZwr9ni+LYRuDVpgsCgYEAgKpo4KylgqqqgVgnf8jNtJGIs4sfiDe3K61NxcxFMwl9UsTeAuLaomxTAgr2eEtBAVvXeSTex2EV3v7K9irAYA6bf5NNamQizUswFFGRneByg0X9F2GHdtYN53hcF7UJgOCJIdy+GPNx/SH4txLXKDZebfDyzWaLbHxmAr5QBoECgYBC+aDFkwgOXRWCb81jP6aNExV0Zwc8/Z4AuSRnoWtM0In3xRYnBrNcUjWjgvinhD//A0LLGnjYnz44BzoM0k67j7vwK+Fi3CdAug9HZVvAsqYtVWJ2EoyI0MWwODzZwY6Nc/Df0dK+lbtgBrjZ/qft937awkzbUp0EMfH65fENbQKBgQCSVWXy+WLQXeHtx/+nNv9HyjQnowalp3SwWRf0YoK/xa526xg+ixViVZvT6e2KTcJGdHFQ+cbCsc1Vx6E13n3Mu9y0N3a4WRQkZHPgnsNouPLaKn0SmVY7RX/I/Rz2r0hRE+gDM6+1/99zPuwP3FW5eLoTBX021Y35kBFHbZ4r+w==

new file: src/allmydata/test/data/pycryptopp-rsa-2048-pub.txt (1 line)
@@ -0,0 +1 @@
MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEAtCcIAW1bCPl5TqKao7wWrSpEGyT17mqez7G3uYuBKDH4mCNjwtlAexrmHk3RO/U7w7Zn4xQ/0VVgN2P0U0GEM/5SsghUuMN8Gv0z5GcBda9zoyDErAmHtid1vTVd3Cvj1cNgPA9suQ71CAHTvjIumh9uqdlFfBU5z6h1rybm63C0Leve/fwUJ6ighSfkj8eLHuV7DGd7627OjAqjNqkvUZzRIXsZTayXdcF3syCyrJkHX/0ImYmOy14xHQ+gTRvDmENokmwmvGhhKEj0H3Z2WcXExRNNkMCnEnGSoVPX0b9E7aq94vMYpYsPYJ+FM6bRGTujFTtga6bUt9EoyQ1wuwIBEQ==

new file: src/allmydata/test/data/pycryptopp-rsa-2048-sig.txt (1 line)
@@ -0,0 +1 @@
ItsyW1XTOIvet6WsS68AJ/ernMG62aoeJKzyBBZ9fdeB2mVzURCBmgX5P0hTPgxHa1sEI6oIbREv4lIQnWHcPgjvz5qBkDtbOp1YHkkFAFOh533dH4s2MiRECIzHh19sBsqTGe0w/pRTHhwV+nStFqZ0IMsdxv0Qsgk5IClIY/WgBSnHQZpVbxyfL7qwvm1JK2GRuygRRsrSsxLiSnA5RWlOsDkDikVu5nhZI31K+PWa9v1i6U7ZkV4uD9triJkHW2XBIRkCyqT6wgM4KBN6V4H9nqlxZhJSQoSn1U5Rh3pL+XG6yevaZq7+pwOnRUcFkEwiJ2wT/NIK0Bjng8Szmw==
@@ -5,6 +5,7 @@ from twisted.trial import unittest
 from twisted.internet import defer
 from foolscap.logging import log
 from allmydata import uri
+from allmydata.crypto import rsa
 from allmydata.interfaces import NotEnoughSharesError, SDMF_VERSION, MDMF_VERSION
 from allmydata.util import fileutil
 from allmydata.util.hashutil import ssk_writekey_hash, ssk_pubkey_fingerprint_hash
@@ -211,8 +212,8 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
         def _got_key(keypair):
             (pubkey, privkey) = keypair
             nm.key_generator = SameKeyGenerator(pubkey, privkey)
-            pubkey_s = pubkey.serialize()
-            privkey_s = privkey.serialize()
+            pubkey_s = rsa.der_string_from_verifying_key(pubkey)
+            privkey_s = rsa.der_string_from_signing_key(privkey)
             u = uri.WriteableSSKFileURI(ssk_writekey_hash(privkey_s),
                                         ssk_pubkey_fingerprint_hash(pubkey_s))
             self._storage_index = u.get_storage_index()
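This test hunk also shows how mutable-file caps are derived from the new DER serializations; pulled out of the test, the pattern is (a sketch; the key size is illustrative):

    from allmydata import uri
    from allmydata.crypto import rsa
    from allmydata.util.hashutil import ssk_writekey_hash, ssk_pubkey_fingerprint_hash

    privkey, pubkey = rsa.create_signing_keypair(2048)
    privkey_s = rsa.der_string_from_signing_key(privkey)
    pubkey_s = rsa.der_string_from_verifying_key(pubkey)

    # the writecap is built from hashes of the serialized keys
    u = uri.WriteableSSKFileURI(ssk_writekey_hash(privkey_s),
                                ssk_pubkey_fingerprint_hash(pubkey_s))
    storage_index = u.get_storage_index()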
new file: src/allmydata/test/test_crypto.py (496 lines)
@@ -0,0 +1,496 @@
import six
import unittest

from base64 import b64decode
from binascii import a2b_hex, b2a_hex

from twisted.python.filepath import FilePath

from allmydata.crypto import (
    aes,
    ed25519,
    rsa,
)
from allmydata.crypto.util import remove_prefix
from allmydata.crypto.error import BadPrefixError


RESOURCE_DIR = FilePath(__file__).parent().child('data')


class TestRegression(unittest.TestCase):
    '''
    These tests are regression tests to ensure that the upgrade from `pycryptopp` to `cryptography`
    doesn't break anything. They check that data encrypted with old keys can be decrypted with new
    keys.
    '''

    AES_KEY = b'My\x9c\xc0f\xd3\x03\x9a1\x8f\xbd\x17W_\x1f2'
    IV = b'\x96\x1c\xa0\xbcUj\x89\xc1\x85J\x1f\xeb=\x17\x04\xca'

    with RESOURCE_DIR.child('pycryptopp-rsa-2048-priv.txt').open('r') as f:
        # Created using `pycryptopp`:
        #
        #     from base64 import b64encode
        #     from pycryptopp.publickey import rsa
        #     priv = rsa.generate(2048)
        #     priv_str = b64encode(priv.serialize())
        #     pub_str = b64encode(priv.get_verifying_key().serialize())
        RSA_2048_PRIV_KEY = six.b(b64decode(f.read().strip()))

    with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
        # Signature created using `RSA_2048_PRIV_KEY` via:
        #
        #     sig = priv.sign(b'test')
        RSA_2048_SIG = six.b(b64decode(f.read().strip()))

    with RESOURCE_DIR.child('pycryptopp-rsa-2048-pub.txt').open('r') as f:
        # The public key corresponding to `RSA_2048_PRIV_KEY`.
        RSA_2048_PUB_KEY = six.b(b64decode(f.read().strip()))

    def test_old_start_up_test(self):
        """
        This was the old startup test run at import time in `pycryptopp.cipher.aes`.
        """
        enc0 = b"dc95c078a2408989ad48a21492842087530f8afbc74536b9a963b4f1c4cb738b"
        cryptor = aes.create_decryptor(key=b"\x00" * 32)
        ct = aes.decrypt_data(cryptor, b"\x00" * 32)
        self.assertEqual(enc0, b2a_hex(ct))

        cryptor = aes.create_decryptor(key=b"\x00" * 32)
        ct1 = aes.decrypt_data(cryptor, b"\x00" * 15)
        ct2 = aes.decrypt_data(cryptor, b"\x00" * 17)
        self.assertEqual(enc0, b2a_hex(ct1+ct2))

        enc0 = b"66e94bd4ef8a2c3b884cfa59ca342b2e"
        cryptor = aes.create_decryptor(key=b"\x00" * 16)
        ct = aes.decrypt_data(cryptor, b"\x00" * 16)
        self.assertEqual(enc0, b2a_hex(ct))

        cryptor = aes.create_decryptor(key=b"\x00" * 16)
        ct1 = aes.decrypt_data(cryptor, b"\x00" * 8)
        ct2 = aes.decrypt_data(cryptor, b"\x00" * 8)
        self.assertEqual(enc0, b2a_hex(ct1+ct2))

        def _test_from_Niels_AES(keysize, result):
            def fake_ecb_using_ctr(k, p):
                encryptor = aes.create_encryptor(key=k, iv=p)
                return aes.encrypt_data(encryptor, b'\x00' * 16)

            E = fake_ecb_using_ctr
            b = 16
            k = keysize
            S = b'\x00' * (k + b)

            for i in range(1000):
                K = S[-k:]
                P = S[-k-b:-k]
                S += E(K, E(K, P))

            self.assertEqual(S[-b:], a2b_hex(result))

        _test_from_Niels_AES(16, b'bd883f01035e58f42f9d812f2dacbcd8')
        _test_from_Niels_AES(32, b'c84b0f3a2c76dd9871900b07f09bdd3e')

    def test_aes_no_iv_process_short_input(self):
        '''
        The old code used the following patterns with AES ciphers.

            import os
            from pycryptopp.cipher.aes import AES
            key = os.urandom(16)
            ciphertext = AES(key).process(plaintext)

        This test verifies that using the new AES wrapper generates the same output.
        '''
        plaintext = b'test'
        expected_ciphertext = b'\x7fEK\\'

        k = aes.create_decryptor(self.AES_KEY)
        ciphertext = aes.decrypt_data(k, plaintext)

        self.assertEqual(ciphertext, expected_ciphertext)

    def test_aes_no_iv_process_long_input(self):
        '''
        The old code used the following patterns with AES ciphers.

            import os
            from pycryptopp.cipher.aes import AES
            key = os.urandom(16)
            ciphertext = AES(key).process(plaintext)

        This test verifies that using the new AES wrapper generates the same output.
        '''
        plaintext = b'hi' * 32
        expected_ciphertext = (
            b'cIPAY%o:\xce\xfex\x8e@^.\x90\xb1\x80a\xff\xd8^\xac\x8d\xa7/\x1d\xe6\x92\xa1\x04\x92'
            b'\x1f\xa1|\xd2$E\xb5\xe7\x9d\xae\xd1\x1f)\xe4\xc7\x83\xb8\xd5|dHhU\xc8\x9a\xb1\x10\xed'
            b'\xd1\xe7|\xd1')

        k = aes.create_decryptor(self.AES_KEY)
        ciphertext = aes.decrypt_data(k, plaintext)

        self.assertEqual(ciphertext, expected_ciphertext)

    def test_aes_with_iv_process_short_input(self):
        '''
        The old code used the following patterns with AES ciphers.

            import os
            from pycryptopp.cipher.aes import AES
            key = os.urandom(16)
            ciphertext = AES(key).process(plaintext)

        This test verifies that using the new AES wrapper generates the same output.
        '''
        plaintext = b'test'
        expected_ciphertext = b'\x82\x0e\rt'

        k = aes.create_decryptor(self.AES_KEY, iv=self.IV)
        ciphertext = aes.decrypt_data(k, plaintext)

        self.assertEqual(ciphertext, expected_ciphertext)

    def test_aes_with_iv_process_long_input(self):
        '''
        The old code used the following patterns with AES ciphers.

            import os
            from pycryptopp.cipher.aes import AES
            key = os.urandom(16)
            ciphertext = AES(key).process(plaintext)

        This test verifies that using the new AES wrapper generates the same output.
        '''
        plaintext = b'hi' * 32
        expected_ciphertext = (
            b'\x9e\x02\x16i}WL\xbf\x83\xac\xb4K\xf7\xa0\xdf\xa3\xba!3\x15\xd3(L\xb7\xb3\x91\xbcb'
            b'\x97a\xdc\x100?\xf5L\x9f\xd9\xeeO\x98\xda\xf5g\x93\xa7q\xe1\xb1~\xf8\x1b\xe8[\\s'
            b'\x144$\x86\xeaC^f')

        k = aes.create_decryptor(self.AES_KEY, iv=self.IV)
        ciphertext = aes.decrypt_data(k, plaintext)

        self.assertEqual(ciphertext, expected_ciphertext)

    def test_decode_ed15519_keypair(self):
        '''
        Created using the old code:

            from allmydata.util.keyutil import make_keypair, parse_privkey, parse_pubkey
            test_data = b'test'
            priv_str, pub_str = make_keypair()
            priv, _ = parse_privkey(priv_str)
            pub = parse_pubkey(pub_str)
            sig = priv.sign(test_data)
            pub.verify(sig, test_data)

        This simply checks that keys and signatures generated using the old code are still valid
        using the new code.
        '''
        priv_str = b'priv-v0-lqcj746bqa4npkb6zpyc6esd74x3bl6mbcjgqend7cvtgmcpawhq'
        pub_str = b'pub-v0-yzpqin3of3ep363lwzxwpvgai3ps43dao46k2jds5kw5ohhpcwhq'
        test_data = b'test'
        sig = (b'\xde\x0e\xd6\xe2\xf5\x03]8\xfe\xa71\xad\xb4g\x03\x11\x81\x8b\x08\xffz\xf4K\xa0'
               b'\x86 ier!\xe8\xe5#*\x9d\x8c\x0bI\x02\xd90\x0e7\xbeW\xbf\xa3\xfe\xc1\x1c\xf5+\xe9)'
               b'\xa3\xde\xc9\xc6s\xc9\x90\xf7x\x08')

        private_key, derived_public_key = ed25519.signing_keypair_from_string(priv_str)
        public_key = ed25519.verifying_key_from_string(pub_str)

        self.assertEqual(
            ed25519.string_from_verifying_key(public_key),
            ed25519.string_from_verifying_key(derived_public_key),
        )

        new_sig = ed25519.sign_data(private_key, test_data)
        self.assertEqual(new_sig, sig)

        ed25519.verify_signature(public_key, new_sig, test_data)
        ed25519.verify_signature(derived_public_key, new_sig, test_data)
        ed25519.verify_signature(public_key, sig, test_data)
        ed25519.verify_signature(derived_public_key, sig, test_data)

    def test_decode_rsa_keypair(self):
        '''
        This simply checks that keys and signatures generated using the old code are still valid
        using the new code.
        '''
        priv_key, pub_key = rsa.create_signing_keypair_from_string(self.RSA_2048_PRIV_KEY)
        rsa.verify_signature(pub_key, self.RSA_2048_SIG, b'test')

    def test_encrypt_data_not_bytes(self):
        '''
        only bytes can be encrypted
        '''
        key = b'\x00' * 16
        encryptor = aes.create_encryptor(key)
        with self.assertRaises(ValueError) as ctx:
            aes.encrypt_data(encryptor, u"not bytes")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_key_incorrect_size(self):
        '''
        keys that aren't 16 or 32 bytes are rejected
        '''
        key = b'\x00' * 12
        with self.assertRaises(ValueError) as ctx:
            aes.create_encryptor(key)
        self.assertIn(
            "16 or 32 bytes long",
            str(ctx.exception)
        )

    def test_iv_not_bytes(self):
        '''
        iv must be bytes
        '''
        key = b'\x00' * 16
        with self.assertRaises(TypeError) as ctx:
            aes.create_encryptor(key, iv=u"1234567890abcdef")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_incorrect_iv_size(self):
        '''
        iv must be 16 bytes
        '''
        key = b'\x00' * 16
        with self.assertRaises(ValueError) as ctx:
            aes.create_encryptor(key, iv=b'\x00' * 3)
        self.assertIn(
            "16 bytes long",
            str(ctx.exception)
        )


class TestEd25519(unittest.TestCase):
    """
    Test allmydata.crypto.ed25519
    """

    def test_key_serialization(self):
        """
        a serialized+deserialized keypair is the same as the original
        """
        private_key, public_key = ed25519.create_signing_keypair()
        private_key_str = ed25519.string_from_signing_key(private_key)

        self.assertIsInstance(private_key_str, six.string_types)

        private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)

        # the deserialized signing keys are the same as the original
        self.assertEqual(
            ed25519.string_from_signing_key(private_key),
            ed25519.string_from_signing_key(private_key2),
        )
        self.assertEqual(
            ed25519.string_from_verifying_key(public_key),
            ed25519.string_from_verifying_key(public_key2),
        )

        # ditto, but for the verifying keys
        public_key_str = ed25519.string_from_verifying_key(public_key)
        self.assertIsInstance(public_key_str, six.string_types)

        public_key2 = ed25519.verifying_key_from_string(public_key_str)
        self.assertEqual(
            ed25519.string_from_verifying_key(public_key),
            ed25519.string_from_verifying_key(public_key2),
        )

    def test_deserialize_private_not_bytes(self):
        '''
        serialized key must be bytes
        '''
        with self.assertRaises(ValueError) as ctx:
            ed25519.signing_keypair_from_string(u"not bytes")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_deserialize_public_not_bytes(self):
        '''
        serialized key must be bytes
        '''
        with self.assertRaises(ValueError) as ctx:
            ed25519.verifying_key_from_string(u"not bytes")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_signed_data_not_bytes(self):
        '''
        data to sign must be bytes
        '''
        priv, pub = ed25519.create_signing_keypair()
        with self.assertRaises(ValueError) as ctx:
            ed25519.sign_data(priv, u"not bytes")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_signature_not_bytes(self):
        '''
        signature must be bytes
        '''
        priv, pub = ed25519.create_signing_keypair()
        with self.assertRaises(ValueError) as ctx:
            ed25519.verify_signature(pub, u"not bytes", b"data")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_signature_data_not_bytes(self):
        '''
        signed data must be bytes
        '''
        priv, pub = ed25519.create_signing_keypair()
        with self.assertRaises(ValueError) as ctx:
            ed25519.verify_signature(pub, b"signature", u"not bytes")
        self.assertIn(
            "must be bytes",
            str(ctx.exception)
        )

    def test_sign_invalid_pubkey(self):
        '''
        pubkey must be correct kind of object
        '''
        priv, pub = ed25519.create_signing_keypair()
        with self.assertRaises(ValueError) as ctx:
            ed25519.sign_data(object(), b"data")
        self.assertIn(
            "must be an Ed25519PrivateKey",
            str(ctx.exception)
        )

    def test_verify_invalid_pubkey(self):
        '''
        pubkey must be correct kind of object
        '''
        priv, pub = ed25519.create_signing_keypair()
        with self.assertRaises(ValueError) as ctx:
            ed25519.verify_signature(object(), b"signature", b"data")
        self.assertIn(
            "must be an Ed25519PublicKey",
            str(ctx.exception)
        )


class TestRsa(unittest.TestCase):
    """
    Tests related to allmydata.crypto.rsa module
    """

    def test_keys(self):
        """
        test that two instances of 'the same' key sign and verify data
        in the same way
        """
        priv_key, pub_key = rsa.create_signing_keypair(2048)
        priv_key_str = rsa.der_string_from_signing_key(priv_key)

        self.assertIsInstance(priv_key_str, six.string_types)

        priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)

        # instead of asking "are these two keys equal", we can instead
        # test their function: can the second key verify a signature
        # produced by the first (and FAIL a signature with different
        # data)

        data_to_sign = b"test data"
        sig0 = rsa.sign_data(priv_key, data_to_sign)
        rsa.verify_signature(pub_key2, sig0, data_to_sign)

        # ..and the other way
        sig1 = rsa.sign_data(priv_key2, data_to_sign)
        rsa.verify_signature(pub_key, sig1, data_to_sign)

        # ..and a failed way
        with self.assertRaises(rsa.BadSignature):
            rsa.verify_signature(pub_key, sig1, data_to_sign + b"more")

    def test_sign_invalid_pubkey(self):
        '''
        signing data using an invalid key-object fails
        '''
        priv, pub = rsa.create_signing_keypair(1024)
        with self.assertRaises(ValueError) as ctx:
            rsa.sign_data(object(), b"data")
        self.assertIn(
            "must be an RSAPrivateKey",
            str(ctx.exception)
        )

    def test_verify_invalid_pubkey(self):
        '''
        verifying a signature using an invalid key-object fails
        '''
        priv, pub = rsa.create_signing_keypair(1024)
        with self.assertRaises(ValueError) as ctx:
            rsa.verify_signature(object(), b"signature", b"data")
        self.assertIn(
            "must be an RSAPublicKey",
            str(ctx.exception)
        )


class TestUtil(unittest.TestCase):
    """
    tests related to allmydata.crypto utils
    """

    def test_remove_prefix_good(self):
        """
        remove a simple prefix properly
        """
        self.assertEquals(
            remove_prefix(b"foobar", b"foo"),
            b"bar"
        )

    def test_remove_prefix_bad(self):
        """
        attempt to remove a prefix that doesn't exist fails with exception
        """
        with self.assertRaises(BadPrefixError):
            remove_prefix(b"foobar", b"bar")

    def test_remove_prefix_zero(self):
        """
        removing a zero-length prefix does nothing
        """
        self.assertEquals(
            remove_prefix(b"foobar", b""),
            b"foobar",
        )

    def test_remove_prefix_entire_string(self):
        """
        removing a prefix which is the whole string is empty
        """
        self.assertEquals(
            remove_prefix(b"foobar", b"foobar"),
            b"",
        )

    def test_remove_prefix_partial(self):
        """
        removing a prefix with only partial match fails with exception
        """
        with self.assertRaises(BadPrefixError):
            remove_prefix(b"foobar", b"fooz"),
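The fixed vectors in `TestRegression` above were generated with pycryptopp's CTR-mode AES. If, as the rest of this change suggests, the new `allmydata.crypto.aes` wrapper is a thin layer over the `cryptography` package using CTR mode with an all-zero default IV (an assumption, not something stated in this diff), the first vector can be reproduced directly:

    from binascii import b2a_hex
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key = b"\x00" * 32
    iv = b"\x00" * 16   # assumed default IV of the wrapper
    decryptor = Cipher(algorithms.AES(key), modes.CTR(iv), default_backend()).decryptor()
    keystream = decryptor.update(b"\x00" * 32)
    assert b2a_hex(keystream) == b"dc95c078a2408989ad48a21492842087530f8afbc74536b9a963b4f1c4cb738b"

In CTR mode encryption and decryption are the same keystream XOR, which is why the tests above can feed plaintext through `create_decryptor` and `create_encryptor` interchangeably against the same expected bytes.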
@@ -5,12 +5,12 @@ from twisted.application import service

 from foolscap.api import Tub, fireEventually, flushEventualQueue

+from allmydata.crypto import aes
 from allmydata.storage.server import si_b2a
 from allmydata.storage_client import StorageFarmBroker
 from allmydata.immutable import offloaded, upload
 from allmydata import uri, client
 from allmydata.util import hashutil, fileutil, mathutil
-from pycryptopp.cipher.aes import AES

 MiB = 1024*1024

@@ -189,12 +189,12 @@ class AssistedUpload(unittest.TestCase):

         key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
         assert len(key) == 16
-        encryptor = AES(key)
+        encryptor = aes.create_encryptor(key)
         SI = hashutil.storage_index_hash(key)
         SI_s = si_b2a(SI)
         encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
         f = open(encfile, "wb")
-        f.write(encryptor.process(DATA))
+        f.write(aes.encrypt_data(encryptor, DATA))
         f.close()

         u = upload.Uploader(self.helper_furl)
@@ -14,6 +14,9 @@ from twisted.python.filepath import FilePath

 from foolscap.api import Tub, Referenceable, fireEventually, flushEventualQueue
 from twisted.application import service
+from allmydata.crypto import ed25519
+from allmydata.crypto.util import remove_prefix
+from allmydata.crypto.error import BadSignature
 from allmydata.interfaces import InsufficientVersionError
 from allmydata.introducer.client import IntroducerClient
 from allmydata.introducer.server import IntroducerService, FurlFileConflictError
@@ -31,12 +34,12 @@ from allmydata.client import (
     create_client,
     create_introducer_clients,
 )
-from allmydata.util import pollmixin, keyutil, idlib, fileutil, yamlutil
+from allmydata.util import pollmixin, idlib, fileutil, yamlutil
 from allmydata.util.iputil import (
     listenOnUnused,
 )
 import allmydata.test.common_util as testutil
-from .common import (
+from allmydata.test.common import (
     SyncTestCase,
     AsyncTestCase,
     AsyncBrokenTestCase,
@@ -200,21 +203,21 @@ class Client(AsyncTestCase):
         furl1a = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:7777/gydnp"
         furl2 = "pb://ttwwooyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/ttwwoo"

-        privkey_s, pubkey_vs = keyutil.make_keypair()
-        privkey, _ignored = keyutil.parse_privkey(privkey_s)
-        pubkey_s = keyutil.remove_prefix(pubkey_vs, "pub-")
+        private_key, public_key = ed25519.create_signing_keypair()
+        public_key_str = ed25519.string_from_verifying_key(public_key)
+        pubkey_s = remove_prefix(public_key_str, "pub-")

         # ann1: ic1, furl1
         # ann1a: ic1, furl1a (same SturdyRef, different connection hints)
         # ann1b: ic2, furl1
         # ann2: ic2, furl2

-        self.ann1 = make_ann_t(ic1, furl1, privkey, seqnum=10)
-        self.ann1old = make_ann_t(ic1, furl1, privkey, seqnum=9)
-        self.ann1noseqnum = make_ann_t(ic1, furl1, privkey, seqnum=None)
-        self.ann1b = make_ann_t(ic2, furl1, privkey, seqnum=11)
-        self.ann1a = make_ann_t(ic1, furl1a, privkey, seqnum=12)
-        self.ann2 = make_ann_t(ic2, furl2, privkey, seqnum=13)
+        self.ann1 = make_ann_t(ic1, furl1, private_key, seqnum=10)
+        self.ann1old = make_ann_t(ic1, furl1, private_key, seqnum=9)
+        self.ann1noseqnum = make_ann_t(ic1, furl1, private_key, seqnum=None)
+        self.ann1b = make_ann_t(ic2, furl1, private_key, seqnum=11)
+        self.ann1a = make_ann_t(ic1, furl1a, private_key, seqnum=12)
+        self.ann2 = make_ann_t(ic2, furl2, private_key, seqnum=13)

         ic1.remote_announce_v2([self.ann1]) # queues eventual-send
         d = fireEventually()
@@ -298,14 +301,13 @@ class Server(AsyncTestCase):
                                FilePath(self.mktemp()))
         furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:36106/gydnp"

-        privkey_s, _ = keyutil.make_keypair()
-        privkey, _ = keyutil.parse_privkey(privkey_s)
+        private_key, _ = ed25519.create_signing_keypair()

-        ann1 = make_ann_t(ic1, furl1, privkey, seqnum=10)
-        ann1_old = make_ann_t(ic1, furl1, privkey, seqnum=9)
-        ann1_new = make_ann_t(ic1, furl1, privkey, seqnum=11)
-        ann1_noseqnum = make_ann_t(ic1, furl1, privkey, seqnum=None)
-        ann1_badseqnum = make_ann_t(ic1, furl1, privkey, seqnum="not an int")
+        ann1 = make_ann_t(ic1, furl1, private_key, seqnum=10)
+        ann1_old = make_ann_t(ic1, furl1, private_key, seqnum=9)
+        ann1_new = make_ann_t(ic1, furl1, private_key, seqnum=11)
+        ann1_noseqnum = make_ann_t(ic1, furl1, private_key, seqnum=None)
+        ann1_badseqnum = make_ann_t(ic1, furl1, private_key, seqnum="not an int")

         i.remote_publish_v2(ann1, None)
         all = i.get_announcements()
@@ -396,22 +398,24 @@ class Queue(SystemTestMixin, AsyncTestCase):
                               u"nickname", "version", "oldest", {}, fakeseq,
                               FilePath(self.mktemp()))
         furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short")
-        sk_s, vk_s = keyutil.make_keypair()
-        sk, _ignored = keyutil.parse_privkey(sk_s)
+        private_key, _ = ed25519.create_signing_keypair()

         d = introducer.disownServiceParent()

         def _offline(ign):
             # now that the introducer server is offline, create a client and
             # publish some messages
             c.setServiceParent(self.parent) # this starts the reconnector
-            c.publish("storage", make_ann(furl1), sk)
+            c.publish("storage", make_ann(furl1), private_key)

             introducer.setServiceParent(self.parent) # restart the server
             # now wait for the messages to be delivered
             def _got_announcement():
                 return bool(introducer.get_announcements())
             return self.poll(_got_announcement)
         d.addCallback(_offline)

         def _done(ign):
             v = introducer.get_announcements()[0]
             furl = v.announcement["anonymous-storage-FURL"]
@@ -427,6 +431,7 @@ class Queue(SystemTestMixin, AsyncTestCase):
                 return False
             return True
         return self.poll(_idle)

         d.addCallback(_wait_until_idle)
         return d
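The introducer-test hunks all converge on the same plumbing: a signing key from `allmydata.crypto.ed25519` is handed directly to the announcement helpers, and the printable server id is the verifying-key string minus its "pub-" prefix. A sketch (the `make_ann`/`make_ann_t` helpers are the test utilities used above, not public API):

    from allmydata.crypto import ed25519
    from allmydata.crypto.util import remove_prefix

    private_key, public_key = ed25519.create_signing_keypair()
    public_key_str = ed25519.string_from_verifying_key(public_key)
    printable_serverid = remove_prefix(public_key_str, "pub-")

    # in the tests:
    #   ann_t = make_ann_t(ic, furl1, private_key, seqnum=10)
    #   c.publish("storage", make_ann(furl1), private_key)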
@ -482,16 +487,15 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
|||||||
expected_announcements[i] += 1 # all expect a 'storage' announcement
|
expected_announcements[i] += 1 # all expect a 'storage' announcement
|
||||||
|
|
||||||
node_furl = tub.registerReference(Referenceable())
|
node_furl = tub.registerReference(Referenceable())
|
||||||
privkey_s, pubkey_s = keyutil.make_keypair()
|
private_key, public_key = ed25519.create_signing_keypair()
|
||||||
privkey, _ignored = keyutil.parse_privkey(privkey_s)
|
public_key_str = ed25519.string_from_verifying_key(public_key)
|
||||||
privkeys[i] = privkey
|
privkeys[i] = private_key
|
||||||
pubkeys[i] = pubkey_s
|
pubkeys[i] = public_key_str
|
||||||
|
|
||||||
if i < NUM_STORAGE:
|
if i < NUM_STORAGE:
|
||||||
# sign all announcements
|
# sign all announcements
|
||||||
c.publish("storage", make_ann(node_furl), privkey)
|
c.publish("storage", make_ann(node_furl), private_key)
|
||||||
assert pubkey_s.startswith("pub-")
|
printable_serverids[i] = remove_prefix(public_key_str, b"pub-")
|
||||||
printable_serverids[i] = pubkey_s[len("pub-"):]
|
|
||||||
publishing_clients.append(c)
|
publishing_clients.append(c)
|
||||||
else:
|
else:
|
||||||
# the last one does not publish anything
|
# the last one does not publish anything
|
||||||
@ -500,13 +504,12 @@ class SystemTest(SystemTestMixin, AsyncTestCase):
|
|||||||
if i == 2:
|
if i == 2:
|
||||||
# also publish something that nobody cares about
|
# also publish something that nobody cares about
|
||||||
boring_furl = tub.registerReference(Referenceable())
|
boring_furl = tub.registerReference(Referenceable())
|
||||||
c.publish("boring", make_ann(boring_furl), privkey)
|
c.publish("boring", make_ann(boring_furl), private_key)
|
||||||
|
|
||||||
c.setServiceParent(self.parent)
|
c.setServiceParent(self.parent)
|
||||||
clients.append(c)
|
clients.append(c)
|
||||||
tubs[c] = tub
|
tubs[c] = tub
|
||||||
|
|
||||||
|
|
||||||
def _wait_for_connected(ign):
|
def _wait_for_connected(ign):
|
||||||
def _connected():
|
def _connected():
|
||||||
for c in clients:
|
for c in clients:
|
||||||
@ -746,6 +749,7 @@ class ClientInfo(AsyncTestCase):
|
|||||||
self.failUnlessEqual(s0.nickname, NICKNAME % u"v2")
|
self.failUnlessEqual(s0.nickname, NICKNAME % u"v2")
|
||||||
self.failUnlessEqual(s0.version, "my_version")
|
self.failUnlessEqual(s0.version, "my_version")
|
||||||
|
|
||||||
|
|
||||||
class Announcements(AsyncTestCase):
|
class Announcements(AsyncTestCase):
|
||||||
def test_client_v2_signed(self):
|
def test_client_v2_signed(self):
|
||||||
introducer = IntroducerService()
|
introducer = IntroducerService()
|
||||||
@ -755,16 +759,17 @@ class Announcements(AsyncTestCase):
|
|||||||
"my_version", "oldest", app_versions,
|
"my_version", "oldest", app_versions,
|
||||||
fakeseq, FilePath(self.mktemp()))
|
fakeseq, FilePath(self.mktemp()))
|
||||||
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
furl1 = "pb://62ubehyunnyhzs7r6vdonnm2hpi52w6y@127.0.0.1:0/swissnum"
|
||||||
sk_s, vk_s = keyutil.make_keypair()
|
|
||||||
sk, _ignored = keyutil.parse_privkey(sk_s)
|
private_key, public_key = ed25519.create_signing_keypair()
|
||||||
pks = keyutil.remove_prefix(vk_s, "pub-")
|
public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-")
|
||||||
ann_t0 = make_ann_t(client_v2, furl1, sk, 10)
|
|
||||||
|
ann_t0 = make_ann_t(client_v2, furl1, private_key, 10)
|
||||||
canary0 = Referenceable()
|
canary0 = Referenceable()
|
||||||
introducer.remote_publish_v2(ann_t0, canary0)
|
introducer.remote_publish_v2(ann_t0, canary0)
|
||||||
a = introducer.get_announcements()
|
a = introducer.get_announcements()
|
||||||
self.failUnlessEqual(len(a), 1)
|
self.failUnlessEqual(len(a), 1)
|
||||||
self.assertThat(a[0].canary, Is(canary0))
|
self.assertThat(a[0].canary, Is(canary0))
|
||||||
self.failUnlessEqual(a[0].index, ("storage", pks))
|
self.failUnlessEqual(a[0].index, ("storage", public_key_str))
|
||||||
self.failUnlessEqual(a[0].announcement["app-versions"], app_versions)
|
self.failUnlessEqual(a[0].announcement["app-versions"], app_versions)
|
||||||
self.failUnlessEqual(a[0].nickname, u"nick-v2")
|
self.failUnlessEqual(a[0].nickname, u"nick-v2")
|
||||||
self.failUnlessEqual(a[0].service_name, "storage")
|
self.failUnlessEqual(a[0].service_name, "storage")
|
||||||
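
The hunks above swap the old pycryptopp-backed ``keyutil`` helpers for the new ed25519 wrapper. A minimal sketch of the substitution pattern, assuming the wrapper lives at ``allmydata.crypto.ed25519`` (the import itself is not shown in these hunks) and using only the helper names that appear in the new test code:

    # hedged sketch: the import path is assumed; create_signing_keypair() and
    # string_from_verifying_key() are the names used by the new test code above
    from allmydata.crypto import ed25519

    # old style (removed):
    #   privkey_s, pubkey_s = keyutil.make_keypair()        # "priv-v0-..." / "pub-v0-..." strings
    #   privkey, _ignored = keyutil.parse_privkey(privkey_s)
    # new style (added):
    private_key, public_key = ed25519.create_signing_keypair()       # key objects, no string round trip
    public_key_str = ed25519.string_from_verifying_key(public_key)   # serialized "pub-v0-..." form
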
@@ -786,20 +791,18 @@ class Announcements(AsyncTestCase):
 # during startup (although the announcement will wait in a queue
 # until the introducer connection is established). To avoid getting
 # confused by this, disable storage.
-f = open(os.path.join(basedir, "tahoe.cfg"), "w")
+with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
 f.write("[client]\n")
 f.write("introducer.furl = nope\n")
 f.write("[storage]\n")
 f.write("enabled = false\n")
-f.close()

 c = yield create_client(basedir)
 ic = c.introducer_clients[0]
-sk_s, vk_s = keyutil.make_keypair()
+private_key, public_key = ed25519.create_signing_keypair()
-sk, _ignored = keyutil.parse_privkey(sk_s)
+public_key_str = remove_prefix(ed25519.string_from_verifying_key(public_key), "pub-")
-pub1 = keyutil.remove_prefix(vk_s, "pub-")
 furl1 = "pb://onug64tu@127.0.0.1:123/short" # base32("short")
-ann_t = make_ann_t(ic, furl1, sk, 1)
+ann_t = make_ann_t(ic, furl1, private_key, 1)

 ic.got_announcements([ann_t])
 yield flushEventualQueue()
@@ -807,7 +810,7 @@ class Announcements(AsyncTestCase):
 # check the cache for the announcement
 announcements = self._load_cache(cache_filepath)
 self.failUnlessEqual(len(announcements), 1)
-self.failUnlessEqual(announcements[0]['key_s'], pub1)
+self.failUnlessEqual(announcements[0]['key_s'], public_key_str)
 ann = announcements[0]["ann"]
 self.failUnlessEqual(ann["anonymous-storage-FURL"], furl1)
 self.failUnlessEqual(ann["seqnum"], 1)
@@ -815,29 +818,28 @@ class Announcements(AsyncTestCase):
 # a new announcement that replaces the first should replace the
 # cached entry, not duplicate it
 furl2 = furl1 + "er"
-ann_t2 = make_ann_t(ic, furl2, sk, 2)
+ann_t2 = make_ann_t(ic, furl2, private_key, 2)
 ic.got_announcements([ann_t2])
 yield flushEventualQueue()
 announcements = self._load_cache(cache_filepath)
 self.failUnlessEqual(len(announcements), 1)
-self.failUnlessEqual(announcements[0]['key_s'], pub1)
+self.failUnlessEqual(announcements[0]['key_s'], public_key_str)
 ann = announcements[0]["ann"]
 self.failUnlessEqual(ann["anonymous-storage-FURL"], furl2)
 self.failUnlessEqual(ann["seqnum"], 2)

 # but a third announcement with a different key should add to the
 # cache
-sk_s2, vk_s2 = keyutil.make_keypair()
+private_key2, public_key2 = ed25519.create_signing_keypair()
-sk2, _ignored = keyutil.parse_privkey(sk_s2)
+public_key_str2 = remove_prefix(ed25519.string_from_verifying_key(public_key2), "pub-")
-pub2 = keyutil.remove_prefix(vk_s2, "pub-")
 furl3 = "pb://onug64tu@127.0.0.1:456/short"
-ann_t3 = make_ann_t(ic, furl3, sk2, 1)
+ann_t3 = make_ann_t(ic, furl3, private_key2, 1)
 ic.got_announcements([ann_t3])
 yield flushEventualQueue()

 announcements = self._load_cache(cache_filepath)
 self.failUnlessEqual(len(announcements), 2)
-self.failUnlessEqual(set([pub1, pub2]),
+self.failUnlessEqual(set([public_key_str, public_key_str2]),
 set([a["key_s"] for a in announcements]))
 self.failUnlessEqual(set([furl2, furl3]),
 set([a["ann"]["anonymous-storage-FURL"]
@@ -855,17 +857,17 @@ class Announcements(AsyncTestCase):
 ic2._load_announcements() # normally happens when connection fails
 yield flushEventualQueue()

-self.failUnless(pub1 in announcements)
+self.failUnless(public_key_str in announcements)
-self.failUnlessEqual(announcements[pub1]["anonymous-storage-FURL"],
+self.failUnlessEqual(announcements[public_key_str]["anonymous-storage-FURL"],
 furl2)
-self.failUnlessEqual(announcements[pub2]["anonymous-storage-FURL"],
+self.failUnlessEqual(announcements[public_key_str2]["anonymous-storage-FURL"],
 furl3)

 c2 = yield create_client(basedir)
 c2.introducer_clients[0]._load_announcements()
 yield flushEventualQueue()
 self.assertEqual(c2.storage_broker.get_all_serverids(),
-frozenset([pub1, pub2]))
+frozenset([public_key_str, public_key_str2]))

 class ClientSeqnums(AsyncBrokenTestCase):

@@ -894,7 +896,7 @@ class ClientSeqnums(AsyncBrokenTestCase):
 f.close()
 return int(seqnum)

-ic.publish("sA", {"key": "value1"}, c._node_key)
+ic.publish("sA", {"key": "value1"}, c._node_private_key)
 self.failUnlessEqual(read_seqnum(), 1)
 self.failUnless("sA" in outbound)
 self.failUnlessEqual(outbound["sA"]["seqnum"], 1)
@@ -906,7 +908,7 @@ class ClientSeqnums(AsyncBrokenTestCase):

 # publishing a second service causes both services to be
 # re-published, with the next higher sequence number
-ic.publish("sB", {"key": "value2"}, c._node_key)
+ic.publish("sB", {"key": "value2"}, c._node_private_key)
 self.failUnlessEqual(read_seqnum(), 2)
 self.failUnless("sB" in outbound)
 self.failUnlessEqual(outbound["sB"]["seqnum"], 2)
@@ -978,11 +980,12 @@ class DecodeFurl(SyncTestCase):
 self.failUnlessEqual(nodeid, "\x9fM\xf2\x19\xcckU0\xbf\x03\r\x10\x99\xfb&\x9b-\xc7A\x1d")

 class Signatures(SyncTestCase):

 def test_sign(self):
 ann = {"key1": "value1"}
-sk_s,vk_s = keyutil.make_keypair()
+private_key, public_key = ed25519.create_signing_keypair()
-sk,ignored = keyutil.parse_privkey(sk_s)
+public_key_str = ed25519.string_from_verifying_key(public_key)
-ann_t = sign_to_foolscap(ann, sk)
+ann_t = sign_to_foolscap(ann, private_key)
 (msg, sig, key) = ann_t
 self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes
 self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann)
@@ -990,7 +993,7 @@ class Signatures(SyncTestCase):
 self.failUnless(key.startswith("v0-"))
 (ann2,key2) = unsign_from_foolscap(ann_t)
 self.failUnlessEqual(ann2, ann)
-self.failUnlessEqual("pub-"+key2, vk_s)
+self.failUnlessEqual("pub-" + key2, public_key_str)

 # not signed
 self.failUnlessRaises(UnknownKeyError,
@@ -1000,14 +1003,34 @@ class Signatures(SyncTestCase):
 # bad signature
 bad_ann = {"key1": "value2"}
 bad_msg = json.dumps(bad_ann).encode("utf-8")
-self.failUnlessRaises(keyutil.BadSignatureError,
+self.failUnlessRaises(BadSignature,
-unsign_from_foolscap, (bad_msg,sig,key))
+unsign_from_foolscap, (bad_msg, sig, key))

 # unrecognized signatures
 self.failUnlessRaises(UnknownKeyError,
-unsign_from_foolscap, (bad_msg,"v999-sig",key))
+unsign_from_foolscap, (bad_msg, "v999-sig", key))
 self.failUnlessRaises(UnknownKeyError,
-unsign_from_foolscap, (bad_msg,sig,"v999-key"))
+unsign_from_foolscap, (bad_msg, sig, "v999-key"))

+def test_unsigned_announcement(self):
+ed25519.verifying_key_from_string(b"pub-v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
+mock_tub = Mock()
+ic = IntroducerClient(
+mock_tub,
+u"pb://",
+u"fake_nick",
+"0.0.0",
+"1.2.3",
+{},
+(0, u"i am a nonce"),
+"invalid",
+)
+self.assertEqual(0, ic._debug_counts["inbound_announcement"])
+ic.got_announcements([
+("message", "v0-aaaaaaa", "v0-wodst6ly4f7i7akt2nxizsmmy2rlmer6apltl56zctn67wfyu5tq")
+])
+# we should have rejected this announcement due to a bad signature
+self.assertEqual(0, ic._debug_counts["inbound_announcement"])


 # add tests of StorageFarmBroker: if it receives duplicate announcements, it
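
The updated ``Signatures`` tests exercise the foolscap signing helpers against the new ed25519 keys. A hedged sketch of the round trip that ``test_sign`` above asserts; the module that exports ``sign_to_foolscap``/``unsign_from_foolscap`` is not named in this hunk, so the second import path below is an assumption:

    # hedged sketch of the round trip exercised by test_sign above
    from allmydata.crypto import ed25519
    from allmydata.introducer.common import sign_to_foolscap, unsign_from_foolscap  # assumed path

    private_key, public_key = ed25519.create_signing_keypair()
    ann = {"key1": "value1"}
    msg, sig, key = sign_to_foolscap(ann, private_key)  # JSON bytes plus "v0-..." signature and key
    ann2, key2 = unsign_from_foolscap((msg, sig, key))  # verifies; BadSignature on a tampered msg
    assert ann2 == ann
    assert "pub-" + key2 == ed25519.string_from_verifying_key(public_key)
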
@@ -1,4 +1,3 @@

 import time, os.path, platform, stat, re, json, struct, shutil

 from twisted.trial import unittest
@@ -3,16 +3,18 @@ from __future__ import print_function

 def foo(): pass # keep the line number constant

+import binascii
 import six
+import hashlib
 import os, time, sys
 import yaml

 from six.moves import StringIO
 from datetime import timedelta
 from twisted.trial import unittest
 from twisted.internet import defer, reactor
 from twisted.python.failure import Failure
 from twisted.python import log
-from pycryptopp.hash.sha256 import SHA256 as _hash

 from allmydata.util import base32, idlib, humanreadable, mathutil, hashutil
 from allmydata.util import assertutil, fileutil, deferredutil, abbreviate
@@ -20,12 +22,22 @@ from allmydata.util import limiter, time_format, pollmixin, cachedir
 from allmydata.util import statistics, dictutil, pipeline, yamlutil
 from allmydata.util import log as tahoe_log
 from allmydata.util.spans import Spans, overlap, DataSpans
+from allmydata.util.fileutil import EncryptedTemporaryFile
 from allmydata.test.common_util import ReallyEqualMixin, TimezoneMixin

 if six.PY3:
 long = int

+
+def sha256(data):
+"""
+:param bytes data: data to hash
+
+:returns: a hex-encoded SHA256 hash of the data
+"""
+return binascii.hexlify(hashlib.sha256(data).digest())
+

 class Base32(unittest.TestCase):
 def test_b2a_matches_Pythons(self):
 import base64
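
The new module-level ``sha256()`` helper above replaces the removed pycryptopp import for the span tests; it returns the hex digest as bytes. A quick check of the standard-library equivalence it relies on:

    # standard-library facts only: hexlify(sha256(x).digest()) equals
    # sha256(x).hexdigest() encoded as ASCII bytes
    import binascii
    import hashlib

    data = b"seed0"
    assert binascii.hexlify(hashlib.sha256(data).digest()) == \
        hashlib.sha256(data).hexdigest().encode("ascii")
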
@@ -777,6 +789,11 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
 self.failUnlessFalse(symlinkinfo.isfile)
 self.failUnlessFalse(symlinkinfo.isdir)

+def test_encrypted_tempfile(self):
+f = EncryptedTemporaryFile()
+f.write("foobar")
+f.close()
+

 class PollMixinTests(unittest.TestCase):
 def setUp(self):
@@ -1825,7 +1842,7 @@ class ByteSpans(unittest.TestCase):
 def _create(subseed):
 ns1 = S1(); ns2 = S2()
 for i in range(10):
-what = _hash(subseed+str(i)).hexdigest()
+what = sha256(subseed+str(i))
 start = int(what[2:4], 16)
 length = max(1,int(what[5:6], 16))
 ns1.add(start, length); ns2.add(start, length)
@@ -1833,7 +1850,7 @@ class ByteSpans(unittest.TestCase):

 #print
 for i in range(1000):
-what = _hash(seed+str(i)).hexdigest()
+what = sha256(seed+str(i))
 op = what[0]
 subop = what[1]
 start = int(what[2:4], 16)
@@ -1879,7 +1896,7 @@ class ByteSpans(unittest.TestCase):
 self.failUnlessEqual(bool(s1), bool(s2))
 self.failUnlessEqual(list(s1), list(s2))
 for j in range(10):
-what = _hash(what[12:14]+str(j)).hexdigest()
+what = sha256(what[12:14]+str(j))
 start = int(what[2:4], 16)
 length = max(1, int(what[5:6], 16))
 span = (start, length)
@@ -2148,14 +2165,14 @@ class StringSpans(unittest.TestCase):
 created = 0
 pieces = []
 while created < length:
-piece = _hash(seed + str(created)).hexdigest()
+piece = sha256(seed + str(created))
 pieces.append(piece)
 created += len(piece)
 return "".join(pieces)[:length]
 def _create(subseed):
 ns1 = S1(); ns2 = S2()
 for i in range(10):
-what = _hash(subseed+str(i)).hexdigest()
+what = sha256(subseed+str(i))
 start = int(what[2:4], 16)
 length = max(1,int(what[5:6], 16))
 ns1.add(start, _randstr(length, what[7:9]));
@@ -2164,7 +2181,7 @@ class StringSpans(unittest.TestCase):

 #print
 for i in range(1000):
-what = _hash(seed+str(i)).hexdigest()
+what = sha256(seed+str(i))
 op = what[0]
 subop = what[1]
 start = int(what[2:4], 16)
@@ -2192,7 +2209,7 @@ class StringSpans(unittest.TestCase):
 self.failUnlessEqual(s1.len(), s2.len())
 self.failUnlessEqual(list(s1._dump()), list(s2._dump()))
 for j in range(100):
-what = _hash(what[12:14]+str(j)).hexdigest()
+what = sha256(what[12:14]+str(j))
 start = int(what[2:4], 16)
 length = max(1, int(what[5:6], 16))
 d1 = s1.get(start, length); d2 = s2.get(start, length)
@@ -1,4 +1,5 @@
 # from the Python Standard Library
+import six
 import string

 from allmydata.util.assertutil import precondition
@@ -179,13 +180,13 @@ def init_s5():
 s5 = init_s5()

 def could_be_base32_encoded(s, s8=s8, tr=string.translate, identitytranstable=identitytranstable, chars=chars):
-precondition(isinstance(s, str), s)
+precondition(isinstance(s, six.binary_type), s)
 if s == '':
 return True
 return s8[len(s)%8][ord(s[-1])] and not tr(s, identitytranstable, chars)

 def could_be_base32_encoded_l(s, lengthinbits, s5=s5, tr=string.translate, identitytranstable=identitytranstable, chars=chars):
-precondition(isinstance(s, str), s)
+precondition(isinstance(s, six.binary_type), s)
 if s == '':
 return True
 assert lengthinbits%5 < len(s5), lengthinbits
@@ -201,7 +202,7 @@ def a2b(cs):
 @param cs the base-32 encoded data (a string)
 """
 precondition(could_be_base32_encoded(cs), "cs is required to be possibly base32 encoded data.", cs=cs)
-precondition(isinstance(cs, str), cs)
+precondition(isinstance(cs, six.binary_type), cs)

 return a2b_l(cs, num_octets_that_encode_to_this_many_quintets(len(cs))*8)

@@ -226,7 +227,7 @@ def a2b_l(cs, lengthinbits):
 @return the data encoded in cs
 """
 precondition(could_be_base32_encoded_l(cs, lengthinbits), "cs is required to be possibly base32 encoded data.", cs=cs, lengthinbits=lengthinbits)
-precondition(isinstance(cs, str), cs)
+precondition(isinstance(cs, six.binary_type), cs)
 if cs == '':
 return ''

@@ -16,8 +16,7 @@ if sys.platform == "win32":

 from twisted.python import log

-from pycryptopp.cipher.aes import AES
+from allmydata.crypto import aes

 from allmydata.util.assertutil import _assert


@@ -110,9 +109,10 @@ class EncryptedTemporaryFile(object):
 offset_big = offset // 16
 offset_small = offset % 16
 iv = binascii.unhexlify("%032x" % offset_big)
-cipher = AES(self.key, iv=iv)
+cipher = aes.create_encryptor(self.key, iv)
-cipher.process("\x00"*offset_small)
+# this is just to advance the counter
-return cipher.process(data)
+aes.encrypt_data(cipher, b"\x00" * offset_small)
+return aes.encrypt_data(cipher, data)

 def close(self):
 self.file.close()
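
The ``EncryptedTemporaryFile`` hunk above keeps the same random-access trick while switching ciphers: the IV encodes the 16-byte block index, and encrypting ``offset_small`` zero bytes advances the keystream to the byte offset inside that block. A minimal sketch of that logic as a standalone function, using only the ``aes`` helper names and call shapes taken from the new lines:

    import binascii
    from allmydata.crypto import aes  # import as added by the hunk above

    def encrypt_at_offset(key, offset, data):
        # split the absolute byte offset into a block index and an in-block offset
        offset_big, offset_small = divmod(offset, 16)
        # the IV is the big-endian block index, so the counter starts at that block
        iv = binascii.unhexlify("%032x" % offset_big)
        cipher = aes.create_encryptor(key, iv)
        # discard keystream up to the in-block offset (the "advance the counter"
        # comment in the diff), then encrypt the real payload
        aes.encrypt_data(cipher, b"\x00" * offset_small)
        return aes.encrypt_data(cipher, data)
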
@@ -1,4 +1,3 @@
-from pycryptopp.hash.sha256 import SHA256
 import os
 import hashlib
 from allmydata.util.netstring import netstring
@@ -12,40 +11,44 @@ from allmydata.util.netstring import netstring
 # randomly-generated secrets such as the lease secret, and symmetric encryption
 # keys. In the near future we will add DSA private keys, and salts of various
 # kinds.
-CRYPTO_VAL_SIZE=32
+CRYPTO_VAL_SIZE = 32

+
 class _SHA256d_Hasher(object):
 # use SHA-256d, as defined by Ferguson and Schneier: hash the output
 # again to prevent length-extension attacks
 def __init__(self, truncate_to=None):
-self.h = SHA256()
+self.h = hashlib.sha256()
 self.truncate_to = truncate_to
 self._digest = None

 def update(self, data):
-assert isinstance(data, str) # no unicode
+assert isinstance(data, bytes) # no unicode
 self.h.update(data)

 def digest(self):
 if self._digest is None:
 h1 = self.h.digest()
 del self.h
-h2 = SHA256(h1).digest()
+h2 = hashlib.sha256(h1).digest()
 if self.truncate_to:
 h2 = h2[:self.truncate_to]
 self._digest = h2
 return self._digest


+
 def tagged_hasher(tag, truncate_to=None):
 hasher = _SHA256d_Hasher(truncate_to)
 hasher.update(netstring(tag))
 return hasher

+
 def tagged_hash(tag, val, truncate_to=None):
 hasher = tagged_hasher(tag, truncate_to)
 hasher.update(val)
 return hasher.digest()

+
 def tagged_pair_hash(tag, val1, val2, truncate_to=None):
 s = _SHA256d_Hasher(truncate_to)
 s.update(netstring(tag))
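
``_SHA256d_Hasher`` and ``tagged_hasher`` above define the tagged SHA-256d construction the rest of the module builds on: hash the netstring-framed tag plus the value, hash the digest again, and optionally truncate. A hedged sketch of that composition using only ``hashlib``; the netstring framing shown is an assumption about ``allmydata.util.netstring.netstring`` made only to keep the sketch self-contained:

    import hashlib

    def netstring(s):
        # assumed classic netstring framing: "<length>:<bytes>,"
        return b"%d:%s," % (len(s), s)

    def tagged_hash_sketch(tag, val, truncate_to=None):
        # SHA-256d: hash once, then hash the digest again to block
        # length-extension attacks (as the comment in the hunk explains)
        inner = hashlib.sha256(netstring(tag) + val).digest()
        outer = hashlib.sha256(inner).digest()
        return outer[:truncate_to] if truncate_to else outer
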
@@ -53,7 +56,8 @@ def tagged_pair_hash(tag, val1, val2, truncate_to=None):
 s.update(netstring(val2))
 return s.digest()

-## specific hash tags that we use
+# specific hash tags that we use

+
 # immutable
 STORAGE_INDEX_TAG = "allmydata_immutable_key_to_storage_index_v1"
@@ -85,6 +89,7 @@ MUTABLE_STORAGEINDEX_TAG = "allmydata_mutable_readkey_to_storage_index_v1"
 DIRNODE_CHILD_WRITECAP_TAG = "allmydata_mutable_writekey_and_salt_to_dirnode_child_capkey_v1"
 DIRNODE_CHILD_SALT_TAG = "allmydata_dirnode_child_rwcap_to_salt_v1"

+
 def storage_index_hash(key):
 # storage index is truncated to 128 bits (16 bytes). We're only hashing a
 # 16-byte value to get it, so there's no point in using a larger value. We
@@ -93,115 +98,165 @@ def storage_index_hash(key):
 # files. Mutable files use ssk_storage_index_hash().
 return tagged_hash(STORAGE_INDEX_TAG, key, 16)

+
 def block_hash(data):
 return tagged_hash(BLOCK_TAG, data)

+
 def block_hasher():
 return tagged_hasher(BLOCK_TAG)

+
 def uri_extension_hash(data):
 return tagged_hash(UEB_TAG, data)

+
 def uri_extension_hasher():
 return tagged_hasher(UEB_TAG)

+
 def plaintext_hash(data):
 return tagged_hash(PLAINTEXT_TAG, data)

+
 def plaintext_hasher():
 return tagged_hasher(PLAINTEXT_TAG)

+
 def crypttext_hash(data):
 return tagged_hash(CIPHERTEXT_TAG, data)

+
 def crypttext_hasher():
 return tagged_hasher(CIPHERTEXT_TAG)

+
 def crypttext_segment_hash(data):
 return tagged_hash(CIPHERTEXT_SEGMENT_TAG, data)

+
 def crypttext_segment_hasher():
 return tagged_hasher(CIPHERTEXT_SEGMENT_TAG)

+
 def plaintext_segment_hash(data):
 return tagged_hash(PLAINTEXT_SEGMENT_TAG, data)

+
 def plaintext_segment_hasher():
 return tagged_hasher(PLAINTEXT_SEGMENT_TAG)

+
 KEYLEN = 16
 IVLEN = 16

+
 def convergence_hash(k, n, segsize, data, convergence):
 h = convergence_hasher(k, n, segsize, convergence)
 h.update(data)
 return h.digest()

+
 def convergence_hasher(k, n, segsize, convergence):
 assert isinstance(convergence, str)
 param_tag = netstring("%d,%d,%d" % (k, n, segsize))
 tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
 return tagged_hasher(tag, KEYLEN)

+
 def random_key():
 return os.urandom(KEYLEN)

+
 def my_renewal_secret_hash(my_secret):
 return tagged_hash(my_secret, CLIENT_RENEWAL_TAG)

+
 def my_cancel_secret_hash(my_secret):
 return tagged_hash(my_secret, CLIENT_CANCEL_TAG)

+
 def file_renewal_secret_hash(client_renewal_secret, storage_index):
 return tagged_pair_hash(FILE_RENEWAL_TAG,
 client_renewal_secret, storage_index)

+
 def file_cancel_secret_hash(client_cancel_secret, storage_index):
 return tagged_pair_hash(FILE_CANCEL_TAG,
 client_cancel_secret, storage_index)

+
 def bucket_renewal_secret_hash(file_renewal_secret, peerid):
 assert len(peerid) == 20, "%s: %r" % (len(peerid), peerid) # binary!
 return tagged_pair_hash(BUCKET_RENEWAL_TAG, file_renewal_secret, peerid)

+
 def bucket_cancel_secret_hash(file_cancel_secret, peerid):
 assert len(peerid) == 20, "%s: %r" % (len(peerid), peerid) # binary!
 return tagged_pair_hash(BUCKET_CANCEL_TAG, file_cancel_secret, peerid)

+
 def _xor(a, b):
 return "".join([chr(ord(c) ^ ord(b)) for c in a])

+
 def hmac(tag, data):
 ikey = _xor(tag, "\x36")
 okey = _xor(tag, "\x5c")
-h1 = SHA256(ikey + data).digest()
+h1 = hashlib.sha256(ikey + data).digest()
-h2 = SHA256(okey + h1).digest()
+h2 = hashlib.sha256(okey + h1).digest()
 return h2

+
 def mutable_rwcap_key_hash(iv, writekey):
 return tagged_pair_hash(DIRNODE_CHILD_WRITECAP_TAG, iv, writekey, KEYLEN)

+
 def mutable_rwcap_salt_hash(writekey):
 return tagged_hash(DIRNODE_CHILD_SALT_TAG, writekey, IVLEN)

+
 def ssk_writekey_hash(privkey):
 return tagged_hash(MUTABLE_WRITEKEY_TAG, privkey, KEYLEN)

+
 def ssk_write_enabler_master_hash(writekey):
 return tagged_hash(MUTABLE_WRITE_ENABLER_MASTER_TAG, writekey)

+
 def ssk_write_enabler_hash(writekey, peerid):
 assert len(peerid) == 20, "%s: %r" % (len(peerid), peerid) # binary!
 wem = ssk_write_enabler_master_hash(writekey)
 return tagged_pair_hash(MUTABLE_WRITE_ENABLER_TAG, wem, peerid)

+
 def ssk_pubkey_fingerprint_hash(pubkey):
 return tagged_hash(MUTABLE_PUBKEY_TAG, pubkey)

+
 def ssk_readkey_hash(writekey):
 return tagged_hash(MUTABLE_READKEY_TAG, writekey, KEYLEN)

+
 def ssk_readkey_data_hash(IV, readkey):
 return tagged_pair_hash(MUTABLE_DATAKEY_TAG, IV, readkey, KEYLEN)

+
 def ssk_storage_index_hash(readkey):
 return tagged_hash(MUTABLE_STORAGEINDEX_TAG, readkey, KEYLEN)

+
 def timing_safe_compare(a, b):
 n = os.urandom(32)
 return bool(tagged_hash(n, a) == tagged_hash(n, b))

+
 BACKUPDB_DIRHASH_TAG = "allmydata_backupdb_dirhash_v1"

+
 def backupdb_dirhash(contents):
 return tagged_hash(BACKUPDB_DIRHASH_TAG, contents)

+
 def permute_server_hash(peer_selection_index, server_permutation_seed):
 return hashlib.sha1(peer_selection_index + server_permutation_seed).digest()
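
The tail of this hunk also carries ``timing_safe_compare``, which hashes both operands under a fresh random tag before comparing, so the comparison time does not reveal the position of the first differing byte. A small usage sketch, assuming this hunk is ``allmydata/util/hashutil.py`` so the helper can be imported from ``allmydata.util.hashutil`` (the file name itself is not shown in this view):

    from allmydata.util.hashutil import timing_safe_compare

    # typical use: checking a client-supplied secret against a stored one;
    # both values here are placeholders
    assert timing_safe_compare(b"lease-secret", b"lease-secret")
    assert not timing_safe_compare(b"lease-secret", b"other-secret")
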
@@ -1,39 +0,0 @@
-import os
-from pycryptopp.publickey import ed25519
-from allmydata.util.base32 import a2b, b2a
-
-BadSignatureError = ed25519.BadSignatureError
-
-class BadPrefixError(Exception):
-pass
-
-def remove_prefix(s_bytes, prefix):
-if not s_bytes.startswith(prefix):
-raise BadPrefixError("did not see expected '%s' prefix" % (prefix,))
-return s_bytes[len(prefix):]
-
-# in base32, keys are 52 chars long (both signing and verifying keys)
-# in base62, keys is 43 chars long
-# in base64, keys is 43 chars long
-#
-# We can't use base64 because we want to reserve punctuation and preserve
-# cut-and-pasteability. The base62 encoding is shorter than the base32 form,
-# but the minor usability improvement is not worth the documentation and
-# specification confusion of using a non-standard encoding. So we stick with
-# base32.
-
-def make_keypair():
-sk_bytes = os.urandom(32)
-sk = ed25519.SigningKey(sk_bytes)
-vk_bytes = sk.get_verifying_key_bytes()
-return ("priv-v0-"+b2a(sk_bytes), "pub-v0-"+b2a(vk_bytes))
-
-def parse_privkey(privkey_vs):
-sk_bytes = a2b(remove_prefix(privkey_vs, "priv-v0-"))
-sk = ed25519.SigningKey(sk_bytes)
-vk_bytes = sk.get_verifying_key_bytes()
-return (sk, "pub-v0-"+b2a(vk_bytes))
-
-def parse_pubkey(pubkey_vs):
-vk_bytes = a2b(remove_prefix(pubkey_vs, "pub-v0-"))
-return ed25519.VerifyingKey(vk_bytes)
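
With ``keyutil`` deleted above, its public helpers map onto the ``allmydata.crypto.ed25519`` wrapper that the earlier test hunks already use; a hedged summary sketch follows (the wrapper's exact module layout beyond the names shown in this diff is assumed). The deleted "52 chars" comment also checks out: a 32-byte key is 256 bits, and ceil(256 / 5) base32 characters is 52.

    from allmydata.crypto import ed25519  # assumed import path; names taken from the new code

    # keyutil.make_keypair() -> ed25519.create_signing_keypair()
    private_key, public_key = ed25519.create_signing_keypair()

    # the serialized "pub-v0-..." form now comes from:
    public_key_str = ed25519.string_from_verifying_key(public_key)

    # keyutil.parse_pubkey() -> ed25519.verifying_key_from_string(), as used by
    # the new test_unsigned_announcement above
    verifying_key = ed25519.verifying_key_from_string(public_key_str)
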
@@ -9,11 +9,13 @@ from allmydata.web.common import (
 from allmydata.util.abbreviate import abbreviate_space
 from allmydata.util import time_format, idlib

+
 def remove_prefix(s, prefix):
 if not s.startswith(prefix):
 return None
 return s[len(prefix):]

+
 class StorageStatus(MultiFormatPage):
 docFactory = getxmlfile("storage_status.xhtml")
 # the default 'data' argument is the StorageServer instance