mirror of
https://github.com/tahoe-lafs/tahoe-lafs.git
synced 2025-04-07 10:56:49 +00:00
Merge pull request #961 from tahoe-lafs/3584.integration-tests-sftp
Integration tests and bug fixes for SFTP. Fixes ticket:3584.
This commit is contained in: commit 59f0c9e63b
integration/conftest.py
@@ -7,6 +7,7 @@ from os import mkdir, listdir, environ
 from os.path import join, exists
 from tempfile import mkdtemp, mktemp
 from functools import partial
+from json import loads

 from foolscap.furl import (
     decode_furl,
@@ -37,6 +38,10 @@ from util import (
     _tahoe_runner_optional_coverage,
     await_client_ready,
     TahoeProcess,
+    cli,
+    _run_node,
+    generate_ssh_key,
+    block_with_timeout,
 )


@@ -152,7 +157,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
         )
         print("Waiting for flogtool to complete")
         try:
-            pytest_twisted.blockon(flog_protocol.done)
+            block_with_timeout(flog_protocol.done, reactor)
         except ProcessTerminated as e:
             print("flogtool exited unexpectedly: {}".format(str(e)))
         print("Flogtool completed")
@@ -293,7 +298,7 @@ log_gatherer.furl = {log_furl}
     def cleanup():
         try:
             transport.signalProcess('TERM')
-            pytest_twisted.blockon(protocol.exited)
+            block_with_timeout(protocol.exited, reactor)
         except ProcessExitedAlready:
             pass
     request.addfinalizer(cleanup)
@@ -347,8 +352,50 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
             reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
             web_port="tcp:9980:interface=localhost",
             storage=False,
+            # We're going to kill this ourselves, so no need for finalizer to
+            # do it:
+            finalize=False,
         )
     )
     await_client_ready(process)
+
+    # 1. Create a new RW directory cap:
+    cli(process, "create-alias", "test")
+    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]
+
+    # 2. Enable SFTP on the node:
+    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
+    accounts_path = join(process.node_dir, "private", "accounts")
+    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
+        f.write("""\
+[sftpd]
+enabled = true
+port = tcp:8022:interface=127.0.0.1
+host_pubkey_file = {ssh_key_path}.pub
+host_privkey_file = {ssh_key_path}
+accounts.file = {accounts_path}
+""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
+    generate_ssh_key(host_ssh_key_path)
+
+    # 3. Add a SFTP access file with username/password and SSH key auth.
+
+    # The client SSH key path is typically going to be somewhere else (~/.ssh,
+    # typically), but for convenience sake for testing we'll put it inside node.
+    client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key")
+    generate_ssh_key(client_ssh_key_path)
+    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
+    ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1]
+    with open(accounts_path, "w") as f:
+        f.write("""\
+alice password {rwcap}
+
+alice2 ssh-rsa {ssh_public_key} {rwcap}
+""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))
+
+    # 4. Restart the node with new SFTP config.
+    process.kill()
+    pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None))
+
+    await_client_ready(process)
     return process
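For orientation, a hypothetical rendering of what the fixture above ends up writing (every path, cap, and key blob below is a made-up placeholder, not a value from this change). The appended tahoe.cfg section and the new private/accounts file would look roughly like:

# tahoe.cfg addition (hypothetical example values)
[sftpd]
enabled = true
port = tcp:8022:interface=127.0.0.1
host_pubkey_file = /tmp/pytest-of-alice/node/private/ssh_host_rsa_key.pub
host_privkey_file = /tmp/pytest-of-alice/node/private/ssh_host_rsa_key
accounts.file = /tmp/pytest-of-alice/node/private/accounts

# private/accounts (hypothetical values): <username> <password or ssh-rsa key> <rootcap>
alice password URI:DIR2:hypotheticalwritekey:hypotheticalfingerprint
alice2 ssh-rsa AAAAB3NzaC1yc2EhypotheticalKeyBlob URI:DIR2:hypotheticalwritekey:hypotheticalfingerprint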
@@ -490,7 +537,13 @@ def tor_network(reactor, temp_dir, chutney, request):
             path=join(chutney_dir),
             env=env,
         )
-        pytest_twisted.blockon(proto.done)
+        try:
+            block_with_timeout(proto.done, reactor)
+        except ProcessTerminated:
+            # If this doesn't exit cleanly, that's fine, that shouldn't fail
+            # the test suite.
+            pass

     request.addfinalizer(cleanup)

     return chut
integration/test_sftp.py (new file, 162 lines)
@@ -0,0 +1,162 @@
"""
It's possible to create/rename/delete files and directories in Tahoe-LAFS using
SFTP.

These tests use Paramiko, rather than Twisted's Conch, because:

1. It's a different implementation, so we're not testing Conch against
   itself.

2. Its API is much simpler to use.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from posixpath import join
from stat import S_ISDIR

from paramiko import SSHClient
from paramiko.client import AutoAddPolicy
from paramiko.sftp_client import SFTPClient
from paramiko.ssh_exception import AuthenticationException
from paramiko.rsakey import RSAKey

import pytest

from .util import generate_ssh_key, run_in_thread


def connect_sftp(connect_args={"username": "alice", "password": "password"}):
    """Create an SFTP client."""
    client = SSHClient()
    client.set_missing_host_key_policy(AutoAddPolicy)
    client.connect("localhost", port=8022, look_for_keys=False,
                   allow_agent=False, **connect_args)
    sftp = SFTPClient.from_transport(client.get_transport())

    def rmdir(path, delete_root=True):
        for f in sftp.listdir_attr(path=path):
            childpath = join(path, f.filename)
            if S_ISDIR(f.st_mode):
                rmdir(childpath)
            else:
                sftp.remove(childpath)
        if delete_root:
            sftp.rmdir(path)

    # Delete any files left over from previous tests :(
    rmdir("/", delete_root=False)

    return sftp


@run_in_thread
def test_bad_account_password_ssh_key(alice, tmpdir):
    """
    Can't login with unknown username, wrong password, or wrong SSH pub key.
    """
    # Wrong password, wrong username:
    for u, p in [("alice", "wrong"), ("someuser", "password")]:
        with pytest.raises(AuthenticationException):
            connect_sftp(connect_args={
                "username": u, "password": p,
            })

    another_key = join(str(tmpdir), "ssh_key")
    generate_ssh_key(another_key)
    good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
    bad_key = RSAKey(filename=another_key)

    # Wrong key:
    with pytest.raises(AuthenticationException):
        connect_sftp(connect_args={
            "username": "alice2", "pkey": bad_key,
        })

    # Wrong username:
    with pytest.raises(AuthenticationException):
        connect_sftp(connect_args={
            "username": "someoneelse", "pkey": good_key,
        })


@run_in_thread
def test_ssh_key_auth(alice):
    """It's possible to login authenticating with SSH public key."""
    key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
    sftp = connect_sftp(connect_args={
        "username": "alice2", "pkey": key
    })
    assert sftp.listdir() == []


@run_in_thread
def test_read_write_files(alice):
    """It's possible to upload and download files."""
    sftp = connect_sftp()
    with sftp.file("myfile", "wb") as f:
        f.write(b"abc")
        f.write(b"def")

    with sftp.file("myfile", "rb") as f:
        assert f.read(4) == b"abcd"
        assert f.read(2) == b"ef"
        assert f.read(1) == b""


@run_in_thread
def test_directories(alice):
    """
    It's possible to create, list directories, and create and remove files in
    them.
    """
    sftp = connect_sftp()
    assert sftp.listdir() == []

    sftp.mkdir("childdir")
    assert sftp.listdir() == ["childdir"]

    with sftp.file("myfile", "wb") as f:
        f.write(b"abc")
    assert sorted(sftp.listdir()) == ["childdir", "myfile"]

    sftp.chdir("childdir")
    assert sftp.listdir() == []

    with sftp.file("myfile2", "wb") as f:
        f.write(b"def")
    assert sftp.listdir() == ["myfile2"]

    sftp.chdir(None)  # root
    with sftp.file("childdir/myfile2", "rb") as f:
        assert f.read() == b"def"

    sftp.remove("myfile")
    assert sftp.listdir() == ["childdir"]

    sftp.rmdir("childdir")
    assert sftp.listdir() == []


@run_in_thread
def test_rename(alice):
    """Directories and files can be renamed."""
    sftp = connect_sftp()
    sftp.mkdir("dir")

    filepath = join("dir", "file")
    with sftp.file(filepath, "wb") as f:
        f.write(b"abc")

    sftp.rename(filepath, join("dir", "file2"))
    sftp.rename("dir", "dir2")

    with sftp.file(join("dir2", "file2"), "rb") as f:
        assert f.read() == b"abc"
integration/test_web.py
@@ -175,6 +175,7 @@ def test_deep_stats(alice):
         time.sleep(.5)


+@util.run_in_thread
 def test_status(alice):
     """
     confirm we get something sensible from /status and the various sub-types
integration/util.py
@@ -5,6 +5,7 @@ from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
+from subprocess import check_output

 from twisted.python.filepath import (
     FilePath,
@@ -12,9 +13,13 @@ from twisted.python.filepath import (
 from twisted.internet.defer import Deferred, succeed
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
+from twisted.internet.threads import deferToThread

 import requests

+from paramiko.rsakey import RSAKey
+from boltons.funcutils import wraps
+
 from allmydata.util.configutil import (
     get_config,
     set_config,
@@ -25,6 +30,12 @@ from allmydata import client
 import pytest_twisted


+def block_with_timeout(deferred, reactor, timeout=120):
+    """Block until Deferred has result, but timeout instead of waiting forever."""
+    deferred.addTimeout(timeout, reactor)
+    return pytest_twisted.blockon(deferred)
+
+
 class _ProcessExitedProtocol(ProcessProtocol):
     """
     Internal helper that .callback()s on self.done when the process
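As a reading aid, a minimal hypothetical sketch of how this helper is meant to be called from blocking cleanup code (the transport and Deferred names below are illustrative, not part of this diff). addTimeout() makes the Deferred fail if it has not fired within the timeout, so pytest_twisted.blockon() raises instead of hanging the whole run:

# Hypothetical sketch: wait for a child process to exit, but give up after the
# default 120 seconds rather than stalling CI forever.
from twisted.internet import reactor
from twisted.internet.error import ProcessExitedAlready

def wait_for_exit(transport, exited):
    """`exited` is assumed to be a Deferred that fires when the process exits."""
    try:
        transport.signalProcess('TERM')
        block_with_timeout(exited, reactor)
    except ProcessExitedAlready:
        pass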
@@ -123,11 +134,12 @@ def _cleanup_tahoe_process(tahoe_transport, exited):

     :return: After the process has exited.
     """
+    from twisted.internet import reactor
     try:
         print("signaling {} with TERM".format(tahoe_transport.pid))
         tahoe_transport.signalProcess('TERM')
         print("signaled, blocking on exit")
-        pytest_twisted.blockon(exited)
+        block_with_timeout(exited, reactor)
        print("exited, goodbye")
     except ProcessExitedAlready:
         pass
@@ -175,11 +187,15 @@ class TahoeProcess(object):
             u"portnum",
         )

+    def kill(self):
+        """Kill the process, block until it's done."""
+        _cleanup_tahoe_process(self.transport, self.transport.exited)
+
     def __str__(self):
         return "<TahoeProcess in '{}'>".format(self._node_dir)


-def _run_node(reactor, node_dir, request, magic_text):
+def _run_node(reactor, node_dir, request, magic_text, finalize=True):
     """
     Run a tahoe process from its node_dir.

@@ -203,7 +219,8 @@ def _run_node(reactor, node_dir, request, magic_text):
     )
     transport.exited = protocol.exited

-    request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
+    if finalize:
+        request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))

     # XXX abusing the Deferred; should use .when_magic_seen() pattern

@@ -222,7 +239,8 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
                  magic_text=None,
                  needed=2,
                  happy=3,
-                 total=4):
+                 total=4,
+                 finalize=True):
     """
     Helper to create a single node, run it and return the instance
     spawnProcess returned (ITransport)
@@ -270,7 +288,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, name,
     d = Deferred()
     d.callback(None)
     d.addCallback(lambda _: created_d)
-    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text))
+    d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize))
     return d


@@ -390,17 +408,13 @@ def await_file_vanishes(path, timeout=10):
         raise FileShouldVanishException(path, timeout)


-def cli(request, reactor, node_dir, *argv):
+def cli(node, *argv):
     """
-    Run a tahoe CLI subcommand for a given node, optionally running
-    under coverage if '--coverage' was supplied.
+    Run a tahoe CLI subcommand for a given node in a blocking manner, returning
+    the output.
     """
-    proto = _CollectOutputProtocol()
-    _tahoe_runner_optional_coverage(
-        proto, reactor, request,
-        ['--node-directory', node_dir] + list(argv),
-    )
-    return proto.done
+    arguments = ["tahoe", '--node-directory', node.node_dir]
+    return check_output(arguments + list(argv))


 def node_url(node_dir, uri_fragment):
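A short usage sketch of the new blocking cli() helper, mirroring how the alice fixture in integration/conftest.py uses it (`process` stands in for any running TahoeProcess):

# `process` is assumed to be a running TahoeProcess; cli() shells out to the
# `tahoe` executable for that node and returns the subcommand's stdout (bytes).
from json import loads

cli(process, "create-alias", "test")
rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]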
@@ -505,3 +519,36 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2):
             tahoe,
         )
     )
+
+
+def generate_ssh_key(path):
+    """Create a new SSH private/public key pair."""
+    key = RSAKey.generate(2048)
+    key.write_private_key_file(path)
+    with open(path + ".pub", "wb") as f:
+        f.write(b"%s %s" % (key.get_name(), key.get_base64()))
+
+
+def run_in_thread(f):
+    """Decorator for integration tests that runs code in a thread.
+
+    Because we're using pytest_twisted, tests that rely on the reactor are
+    expected to return a Deferred and use async APIs so the reactor can run.
+
+    In the case of the integration test suite, it launches nodes in the
+    background using Twisted APIs. The nodes stdout and stderr is read via
+    Twisted code. If the reactor doesn't run, reads don't happen, and
+    eventually the buffers fill up, and the nodes block when they try to flush
+    logs.
+
+    We can switch to Twisted APIs (treq instead of requests etc.), but
+    sometimes it's easier or expedient to just have a blocking test. So this
+    decorator allows you to run the test in a thread, and the reactor can keep
+    running in the main thread.
+
+    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug.
+    """
+    @wraps(f)
+    def test(*args, **kwargs):
+        return deferToThread(lambda: f(*args, **kwargs))
+    return test
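A minimal hypothetical example of the decorator in use (the test body is illustrative, not part of this diff): the wrapped function runs in a worker thread via deferToThread, pytest_twisted blocks on the returned Deferred, and the reactor stays free to service the nodes' output:

# Hypothetical test using a blocking `requests` call safely from a thread.
import requests

from util import run_in_thread, node_url

@run_in_thread
def test_welcome_page(alice):
    # A blocking HTTP call is fine here because we are not on the reactor thread.
    response = requests.get(node_url(alice.node_dir, ""))
    assert response.status_code == 200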
newsfragments/3584.bugfix (new file, 1 line)
@@ -0,0 +1 @@
SFTP public key auth likely works more consistently, and SFTP in general was previously broken.
setup.py
@@ -395,6 +395,8 @@ setup(name="tahoe-lafs", # also set in __init__.py
             "html5lib",
             "junitxml",
             "tenacity",
+            "paramiko",
+            "pytest-timeout",
         ] + tor_requires + i2p_requires,
         "tor": tor_requires,
         "i2p": i2p_requires,
src/allmydata/frontends/auth.py
@@ -4,8 +4,8 @@ from zope.interface import implementer
 from twisted.web.client import getPage
 from twisted.internet import defer
 from twisted.cred import error, checkers, credentials
-from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys
+from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB

 from allmydata.util import base32
 from allmydata.util.fileutil import abspath_expanduser_unicode
@@ -29,7 +29,7 @@ class AccountFileChecker(object):
     def __init__(self, client, accountfile):
         self.client = client
         self.passwords = {}
-        self.pubkeys = {}
+        pubkeys = {}
         self.rootcaps = {}
         with open(abspath_expanduser_unicode(accountfile), "r") as f:
             for line in f:
@@ -40,12 +40,14 @@ class AccountFileChecker(object):
                 if passwd.startswith("ssh-"):
                     bits = rest.split()
                     keystring = " ".join([passwd] + bits[:-1])
+                    key = keys.Key.fromString(keystring)
                     rootcap = bits[-1]
-                    self.pubkeys[name] = keystring
+                    pubkeys[name] = [key]
                 else:
                     self.passwords[name] = passwd
                     rootcap = rest
                 self.rootcaps[name] = rootcap
+        self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys))

     def _avatarId(self, username):
         return FTPAvatarID(username, self.rootcaps[username])
@@ -57,11 +59,9 @@ class AccountFileChecker(object):

     def requestAvatarId(self, creds):
         if credentials.ISSHPrivateKey.providedBy(creds):
-            # Re-using twisted.conch.checkers.SSHPublicKeyChecker here, rather
-            # than re-implementing all of the ISSHPrivateKey checking logic,
-            # would be better. That would require Twisted 14.1.0 or newer,
-            # though.
-            return self._checkKey(creds)
+            d = defer.maybeDeferred(self._pubkeychecker.requestAvatarId, creds)
+            d.addCallback(self._avatarId)
+            return d
         elif credentials.IUsernameHashedPassword.providedBy(creds):
             return self._checkPassword(creds)
         elif credentials.IUsernamePassword.providedBy(creds):
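For context, a minimal hypothetical sketch of the machinery requestAvatarId now delegates to (the generated key and username below are made up; the real checker builds its key database from the accounts file in __init__, as shown above):

# Hypothetical illustration only: build an in-memory public-key checker the same
# way AccountFileChecker now does, using a throwaway key instead of a parsed one.
from cryptography.hazmat.primitives.asymmetric import rsa
from twisted.conch.ssh import keys
from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
public_key = keys.Key(private_key).public()
checker = SSHPublicKeyChecker(InMemorySSHKeyDB({b"alice2": [public_key]}))
# checker.requestAvatarId(creds) then performs the signature verification that
# the removed hand-rolled _checkKey() used to do.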
@@ -86,28 +86,6 @@ class AccountFileChecker(object):
         d.addCallback(self._cbPasswordMatch, str(creds.username))
         return d

-    def _checkKey(self, creds):
-        """
-        Determine whether some key-based credentials correctly authenticates a
-        user.
-
-        Returns a Deferred that fires with the username if so or with an
-        UnauthorizedLogin failure otherwise.
-        """
-
-        # Is the public key indicated by the given credentials allowed to
-        # authenticate the username in those credentials?
-        if creds.blob == self.pubkeys.get(creds.username):
-            if creds.signature is None:
-                return defer.fail(conch_error.ValidPublicKey())
-
-            # Is the signature in the given credentials the correct
-            # signature for the data in those credentials?
-            key = keys.Key.fromString(creds.blob)
-            if key.verify(creds.signature, creds.sigData):
-                return defer.succeed(self._avatarId(creds.username))
-
-        return defer.fail(error.UnauthorizedLogin())
-

 @implementer(checkers.ICredentialsChecker)
 class AccountURLChecker(object):
src/allmydata/frontends/sftpd.py
@@ -2011,5 +2011,5 @@ class SFTPServer(service.MultiService):
         f = SSHFactory()
         f.portal = p

-        s = strports.service(sftp_portstr, f)
+        s = strports.service(six.ensure_str(sftp_portstr), f)
         s.setServiceParent(self)
tox.ini
@@ -77,7 +77,7 @@ setenv =
     COVERAGE_PROCESS_START=.coveragerc
 commands =
     # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
-    py.test --coverage -v {posargs:integration}
+    py.test --timeout=1800 --coverage -v {posargs:integration}
     coverage combine
     coverage report