Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git, synced 2025-01-19 03:06:33 +00:00

Merge remote-tracking branch 'origin/master' into 3600.eliotutil-vs-skips

This commit is contained in: commit ff25ee3f71
@@ -29,7 +29,7 @@ workflows:
       - "debian-9": &DOCKERHUB_CONTEXT
           context: "dockerhub-auth"
 
-      - "debian-8":
+      - "debian-10":
           <<: *DOCKERHUB_CONTEXT
           requires:
             - "debian-9"
@@ -107,7 +107,7 @@ workflows:
       - "master"
 
     jobs:
-      - "build-image-debian-8":
+      - "build-image-debian-10":
          <<: *DOCKERHUB_CONTEXT
       - "build-image-debian-9":
          <<: *DOCKERHUB_CONTEXT
@@ -277,11 +277,11 @@ jobs:
           fi
 
 
-  debian-8:
+  debian-10:
     <<: *DEBIAN
     docker:
       - <<: *DOCKERHUB_AUTH
-        image: "tahoelafsci/debian:8-py2.7"
+        image: "tahoelafsci/debian:10-py2.7"
        user: "nobody"
 
 
@@ -529,12 +529,12 @@ jobs:
            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
 
 
-  build-image-debian-8:
+  build-image-debian-10:
    <<: *BUILD_IMAGE
 
    environment:
      DISTRO: "debian"
-      TAG: "8"
+      TAG: "10"
      PYTHON_VERSION: "2.7"
 
.github/CONTRIBUTING.rst | 2 (vendored)
@@ -17,4 +17,4 @@ Examples of contributions include:
 * `Patch reviews <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/PatchReviewProcess>`_
 
 Before authoring or reviewing a patch,
-please familiarize yourself with the `coding standard <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CodingStandards>`_.
+please familiarize yourself with the `Coding Standards <https://tahoe-lafs.org/trac/tahoe-lafs/wiki/CodingStandards>`_ and the `Contributor Code of Conduct <../docs/CODE_OF_CONDUCT.md>`_.
@@ -173,7 +173,9 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run
 Install From a Source Tarball
 -----------------------------
 
-You can also install directly from the source tarball URL::
+You can also install directly from the source tarball URL. To verify
+signatures, first see verifying_signatures_ and replace the URL in the
+following instructions with the local filename.
 
  % virtualenv venv
  New python executable in ~/venv/bin/python2.7
@@ -189,6 +191,40 @@ You can also install directly from the source tarball URL::
  tahoe-lafs: 1.14.0
  ...
 
+.. _verifying_signatures:
+
+Verifying Signatures
+--------------------
+
+First download the source tarball and then any signatures. There are several
+developers who are able to produce signatures for a release. A release may
+have multiple signatures. All should be valid and you should confirm at least
+one of them (ideally, confirm all).
+
+This statement, signed by the existing Tahoe release-signing key, attests to
+those developers authorized to sign a Tahoe release:
+
+.. include:: developer-release-signatures
+    :code:
+
+Signatures are made available beside the release. So for example, a release
+like ``https://tahoe-lafs.org/downloads/tahoe-lafs-1.16.0.tar.bz2`` might
+have signatures ``tahoe-lafs-1.16.0.tar.bz2.meejah.asc`` and
+``tahoe-lafs-1.16.0.tar.bz2.warner.asc``.
+
+To verify the signatures using GnuPG::
+
+   % gpg --verify tahoe-lafs-1.16.0.tar.bz2.meejah.asc tahoe-lafs-1.16.0.tar.bz2
+   gpg: Signature made XXX
+   gpg:                using RSA key 9D5A2BD5688ECB889DEBCD3FC2602803128069A7
+   gpg: Good signature from "meejah <meejah@meejah.ca>" [full]
+   % gpg --verify tahoe-lafs-1.16.0.tar.bz2.warner.asc tahoe-lafs-1.16.0.tar.bz2
+   gpg: Signature made XXX
+   gpg:                using RSA key 967EFE06699872411A77DF36D43B4C9C73225AAF
+   gpg: Good signature from "Brian Warner <warner@lothar.com>" [full]
+
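The same check is easy to script. A minimal sketch, assuming ``gpg`` is on the
``PATH`` and the tarball and ``.asc`` files have already been downloaded (the
``verify_release`` helper is illustrative, not part of the project)::

    import subprocess

    def verify_release(tarball, signatures):
        # Run `gpg --verify` for each detached signature; gpg exits
        # non-zero on a bad signature, which check=True turns into
        # a CalledProcessError.
        for sig in signatures:
            subprocess.run(["gpg", "--verify", sig, tarball], check=True)

    verify_release(
        "tahoe-lafs-1.16.0.tar.bz2",
        ["tahoe-lafs-1.16.0.tar.bz2.meejah.asc",
         "tahoe-lafs-1.16.0.tar.bz2.warner.asc"],
    )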
 Extras
 ------
docs/developer-release-signatures | 42 (new file)
@@ -0,0 +1,42 @@
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+
+January 20, 2021
+
+Any of the following core Tahoe contributers may sign a release. Each
+release MUST be signed by at least one developer but MAY have
+additional signatures. Each developer independently produces a
+signature which is made available beside Tahoe releases after 1.15.0
+
+This statement is signed by the existing Tahoe release key. Any future
+such statements may be signed by it OR by any two developers (for
+example, to add or remove developers from the list).
+
+meejah
+0xC2602803128069A7
+9D5A 2BD5 688E CB88 9DEB CD3F C260 2803 1280 69A7
+https://meejah.ca/meejah.asc
+
+jean-paul calderone (exarkun)
+0xE27B085EDEAA4B1B
+96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B
+https://twistedmatrix.com/~exarkun/E27B085EDEAA4B1B.asc
+
+brian warner (lothar)
+0x863333C265497810
+5810 F125 7F8C F753 7753 895A 8633 33C2 6549 7810
+https://www.lothar.com/warner-gpg.html
+
+
+-----BEGIN PGP SIGNATURE-----
+
+iQEzBAEBCgAdFiEE405i0G0Oac/KQXn/veDTHWhmanoFAmAHIyIACgkQveDTHWhm
+anqhqQf/YSbMXL+gwFhAZsjX39EVlbr/Ik7WPPkJW7v1oHybTnwFpFIc52COU1x/
+sqRfk4OyYtz9IBgOPXoWgXu9R4qdK6vYKxEsekcGT9C5l0OyDz8YWXEWgbGK5mvI
+aEub9WucD8r2uOQnnW6DtznFuEpvOjtf/+2BU767+bvLsbViW88ocbuLfCqLdOgD
+WZT9j3M+Y2Dc56DAJzP/4fkrUSVIofZStYp5u9HBjburgcYIp0g/cyc4xXRoi6Mp
+lFTRFv3MIjmoamzSQseoIgP6fi8QRqPrffPrsyqAp+06mJnPhxxFqxtO/ZErmpSa
++BGrLBxdWa8IF9U1A4Fs5nuAzAKMEg==
+=E9J+
+-----END PGP SIGNATURE-----
@@ -137,6 +137,12 @@ Did anyone contribute a hack since the last release? If so, then
 https://tahoe-lafs.org/hacktahoelafs/ needs to be updated.
 
 
+Sign Git Tag
+````````````
+
+- git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
+
+
 Upload Artifacts
 ````````````````
@@ -7,6 +7,7 @@ from os import mkdir, listdir, environ
 from os.path import join, exists
 from tempfile import mkdtemp, mktemp
 from functools import partial
+from json import loads
 
 from foolscap.furl import (
     decode_furl,
@@ -37,6 +38,10 @@ from util import (
     _tahoe_runner_optional_coverage,
     await_client_ready,
     TahoeProcess,
+    cli,
+    _run_node,
+    generate_ssh_key,
+    block_with_timeout,
 )
 
 
@@ -152,7 +157,7 @@ def flog_gatherer(reactor, temp_dir, flog_binary, request):
         )
         print("Waiting for flogtool to complete")
         try:
-            pytest_twisted.blockon(flog_protocol.done)
+            block_with_timeout(flog_protocol.done, reactor)
         except ProcessTerminated as e:
             print("flogtool exited unexpectedly: {}".format(str(e)))
         print("Flogtool completed")
@@ -293,7 +298,7 @@ log_gatherer.furl = {log_furl}
     def cleanup():
         try:
             transport.signalProcess('TERM')
-            pytest_twisted.blockon(protocol.exited)
+            block_with_timeout(protocol.exited, reactor)
         except ProcessExitedAlready:
             pass
     request.addfinalizer(cleanup)
@@ -347,8 +352,50 @@ def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, requ
             reactor, request, temp_dir, introducer_furl, flog_gatherer, "alice",
             web_port="tcp:9980:interface=localhost",
             storage=False,
+            # We're going to kill this ourselves, so no need for finalizer to
+            # do it:
+            finalize=False,
         )
     )
     await_client_ready(process)
+
+    # 1. Create a new RW directory cap:
+    cli(process, "create-alias", "test")
+    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]
+
+    # 2. Enable SFTP on the node:
+    host_ssh_key_path = join(process.node_dir, "private", "ssh_host_rsa_key")
+    accounts_path = join(process.node_dir, "private", "accounts")
+    with open(join(process.node_dir, "tahoe.cfg"), "a") as f:
+        f.write("""\
+[sftpd]
+enabled = true
+port = tcp:8022:interface=127.0.0.1
+host_pubkey_file = {ssh_key_path}.pub
+host_privkey_file = {ssh_key_path}
+accounts.file = {accounts_path}
+""".format(ssh_key_path=host_ssh_key_path, accounts_path=accounts_path))
+    generate_ssh_key(host_ssh_key_path)
+
+    # 3. Add a SFTP access file with username/password and SSH key auth.
+
+    # The client SSH key path is typically going to be somewhere else (~/.ssh,
+    # typically), but for convenience sake for testing we'll put it inside node.
+    client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key")
+    generate_ssh_key(client_ssh_key_path)
+    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
+    ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1]
+    with open(accounts_path, "w") as f:
+        f.write("""\
+alice password {rwcap}
+
+alice2 ssh-rsa {ssh_public_key} {rwcap}
+""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))
+
+    # 4. Restart the node with new SFTP config.
+    process.kill()
+    pytest_twisted.blockon(_run_node(reactor, process.node_dir, request, None))
+
     await_client_ready(process)
     return process
@@ -490,7 +537,13 @@ def tor_network(reactor, temp_dir, chutney, request):
             path=join(chutney_dir),
             env=env,
         )
-        pytest_twisted.blockon(proto.done)
+        try:
+            block_with_timeout(proto.done, reactor)
+        except ProcessTerminated:
+            # If this doesn't exit cleanly, that's fine, that shouldn't fail
+            # the test suite.
+            pass
 
     request.addfinalizer(cleanup)
 
     return chut
integration/test_sftp.py | 162 (new file)
@@ -0,0 +1,162 @@
+"""
+It's possible to create/rename/delete files and directories in Tahoe-LAFS using
+SFTP.
+
+These tests use Paramiko, rather than Twisted's Conch, because:
+
+1. It's a different implementation, so we're not testing Conch against
+   itself.
+
+2. Its API is much simpler to use.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+from posixpath import join
+from stat import S_ISDIR
+
+from paramiko import SSHClient
+from paramiko.client import AutoAddPolicy
+from paramiko.sftp_client import SFTPClient
+from paramiko.ssh_exception import AuthenticationException
+from paramiko.rsakey import RSAKey
+
+import pytest
+
+from .util import generate_ssh_key, run_in_thread
+
+
+def connect_sftp(connect_args={"username": "alice", "password": "password"}):
+    """Create an SFTP client."""
+    client = SSHClient()
+    client.set_missing_host_key_policy(AutoAddPolicy)
+    client.connect("localhost", port=8022, look_for_keys=False,
+                   allow_agent=False, **connect_args)
+    sftp = SFTPClient.from_transport(client.get_transport())
+
+    def rmdir(path, delete_root=True):
+        for f in sftp.listdir_attr(path=path):
+            childpath = join(path, f.filename)
+            if S_ISDIR(f.st_mode):
+                rmdir(childpath)
+            else:
+                sftp.remove(childpath)
+        if delete_root:
+            sftp.rmdir(path)
+
+    # Delete any files left over from previous tests :(
+    rmdir("/", delete_root=False)
+
+    return sftp
+
+
+@run_in_thread
+def test_bad_account_password_ssh_key(alice, tmpdir):
+    """
+    Can't login with unknown username, wrong password, or wrong SSH pub key.
+    """
+    # Wrong password, wrong username:
+    for u, p in [("alice", "wrong"), ("someuser", "password")]:
+        with pytest.raises(AuthenticationException):
+            connect_sftp(connect_args={
+                "username": u, "password": p,
+            })
+
+    another_key = join(str(tmpdir), "ssh_key")
+    generate_ssh_key(another_key)
+    good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
+    bad_key = RSAKey(filename=another_key)
+
+    # Wrong key:
+    with pytest.raises(AuthenticationException):
+        connect_sftp(connect_args={
+            "username": "alice2", "pkey": bad_key,
+        })
+
+    # Wrong username:
+    with pytest.raises(AuthenticationException):
+        connect_sftp(connect_args={
+            "username": "someoneelse", "pkey": good_key,
+        })
+
+
+@run_in_thread
+def test_ssh_key_auth(alice):
+    """It's possible to login authenticating with SSH public key."""
+    key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
+    sftp = connect_sftp(connect_args={
+        "username": "alice2", "pkey": key
+    })
+    assert sftp.listdir() == []
+
+
+@run_in_thread
+def test_read_write_files(alice):
+    """It's possible to upload and download files."""
+    sftp = connect_sftp()
+    with sftp.file("myfile", "wb") as f:
+        f.write(b"abc")
+        f.write(b"def")
+
+    with sftp.file("myfile", "rb") as f:
+        assert f.read(4) == b"abcd"
+        assert f.read(2) == b"ef"
+        assert f.read(1) == b""
+
+
+@run_in_thread
+def test_directories(alice):
+    """
+    It's possible to create, list directories, and create and remove files in
+    them.
+    """
+    sftp = connect_sftp()
+    assert sftp.listdir() == []
+
+    sftp.mkdir("childdir")
+    assert sftp.listdir() == ["childdir"]
+
+    with sftp.file("myfile", "wb") as f:
+        f.write(b"abc")
+    assert sorted(sftp.listdir()) == ["childdir", "myfile"]
+
+    sftp.chdir("childdir")
+    assert sftp.listdir() == []
+
+    with sftp.file("myfile2", "wb") as f:
+        f.write(b"def")
+    assert sftp.listdir() == ["myfile2"]
+
+    sftp.chdir(None)  # root
+    with sftp.file("childdir/myfile2", "rb") as f:
+        assert f.read() == b"def"
+
+    sftp.remove("myfile")
+    assert sftp.listdir() == ["childdir"]
+
+    sftp.rmdir("childdir")
+    assert sftp.listdir() == []
+
+
+@run_in_thread
+def test_rename(alice):
+    """Directories and files can be renamed."""
+    sftp = connect_sftp()
+    sftp.mkdir("dir")
+
+    filepath = join("dir", "file")
+    with sftp.file(filepath, "wb") as f:
+        f.write(b"abc")
+
+    sftp.rename(filepath, join("dir", "file2"))
+    sftp.rename("dir", "dir2")
+
+    with sftp.file(join("dir2", "file2"), "rb") as f:
+        assert f.read() == b"abc"
@@ -175,6 +175,7 @@ def test_deep_stats(alice):
         time.sleep(.5)
 
 
+@util.run_in_thread
 def test_status(alice):
     """
     confirm we get something sensible from /status and the various sub-types
@@ -5,6 +5,7 @@ from os import mkdir, environ
 from os.path import exists, join
 from six.moves import StringIO
 from functools import partial
+from subprocess import check_output
 
 from twisted.python.filepath import (
     FilePath,
@@ -12,9 +13,13 @@ from twisted.python.filepath import (
 from twisted.internet.defer import Deferred, succeed
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
+from twisted.internet.threads import deferToThread
 
 import requests
 
+from paramiko.rsakey import RSAKey
+from boltons.funcutils import wraps
+
 from allmydata.util.configutil import (
     get_config,
     set_config,
@@ -25,6 +30,12 @@ from allmydata import client
 import pytest_twisted
 
 
+def block_with_timeout(deferred, reactor, timeout=120):
+    """Block until Deferred has result, but timeout instead of waiting forever."""
+    deferred.addTimeout(timeout, reactor)
+    return pytest_twisted.blockon(deferred)
+
+
 class _ProcessExitedProtocol(ProcessProtocol):
     """
     Internal helper that .callback()s on self.done when the process
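To see what the new helper buys over a bare ``pytest_twisted.blockon``, here is
a minimal sketch: a Deferred that never fires now fails with ``TimeoutError``
instead of hanging the whole suite (assuming the usual pytest-twisted reactor
is running)::

    from twisted.internet import reactor
    from twisted.internet.defer import Deferred, TimeoutError

    d = Deferred()  # would otherwise block forever
    try:
        block_with_timeout(d, reactor, timeout=5)
    except TimeoutError:
        # addTimeout cancels the Deferred after 5 seconds and errbacks
        # with TimeoutError, so the caller regains control.
        print("timed out as expected")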
@ -123,11 +134,12 @@ def _cleanup_tahoe_process(tahoe_transport, exited):
|
||||
|
||||
:return: After the process has exited.
|
||||
"""
|
||||
from twisted.internet import reactor
|
||||
try:
|
||||
print("signaling {} with TERM".format(tahoe_transport.pid))
|
||||
tahoe_transport.signalProcess('TERM')
|
||||
print("signaled, blocking on exit")
|
||||
pytest_twisted.blockon(exited)
|
||||
block_with_timeout(exited, reactor)
|
||||
print("exited, goodbye")
|
||||
except ProcessExitedAlready:
|
||||
pass
|
||||
@ -175,11 +187,15 @@ class TahoeProcess(object):
|
||||
u"portnum",
|
||||
)
|
||||
|
||||
def kill(self):
|
||||
"""Kill the process, block until it's done."""
|
||||
_cleanup_tahoe_process(self.transport, self.transport.exited)
|
||||
|
||||
def __str__(self):
|
||||
return "<TahoeProcess in '{}'>".format(self._node_dir)
|
||||
|
||||
|
||||
def _run_node(reactor, node_dir, request, magic_text):
|
||||
def _run_node(reactor, node_dir, request, magic_text, finalize=True):
|
||||
"""
|
||||
Run a tahoe process from its node_dir.
|
||||
|
||||
@ -203,7 +219,8 @@ def _run_node(reactor, node_dir, request, magic_text):
|
||||
)
|
||||
transport.exited = protocol.exited
|
||||
|
||||
request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
|
||||
if finalize:
|
||||
request.addfinalizer(partial(_cleanup_tahoe_process, transport, protocol.exited))
|
||||
|
||||
# XXX abusing the Deferred; should use .when_magic_seen() pattern
|
||||
|
||||
@ -222,7 +239,8 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
|
||||
magic_text=None,
|
||||
needed=2,
|
||||
happy=3,
|
||||
total=4):
|
||||
total=4,
|
||||
finalize=True):
|
||||
"""
|
||||
Helper to create a single node, run it and return the instance
|
||||
spawnProcess returned (ITransport)
|
||||
@ -270,7 +288,7 @@ def _create_node(reactor, request, temp_dir, introducer_furl, flog_gatherer, nam
|
||||
d = Deferred()
|
||||
d.callback(None)
|
||||
d.addCallback(lambda _: created_d)
|
||||
d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text))
|
||||
d.addCallback(lambda _: _run_node(reactor, node_dir, request, magic_text, finalize=finalize))
|
||||
return d
|
||||
|
||||
|
||||
@ -390,17 +408,13 @@ def await_file_vanishes(path, timeout=10):
|
||||
raise FileShouldVanishException(path, timeout)
|
||||
|
||||
|
||||
def cli(request, reactor, node_dir, *argv):
|
||||
def cli(node, *argv):
|
||||
"""
|
||||
Run a tahoe CLI subcommand for a given node, optionally running
|
||||
under coverage if '--coverage' was supplied.
|
||||
Run a tahoe CLI subcommand for a given node in a blocking manner, returning
|
||||
the output.
|
||||
"""
|
||||
proto = _CollectOutputProtocol()
|
||||
_tahoe_runner_optional_coverage(
|
||||
proto, reactor, request,
|
||||
['--node-directory', node_dir] + list(argv),
|
||||
)
|
||||
return proto.done
|
||||
arguments = ["tahoe", '--node-directory', node.node_dir]
|
||||
return check_output(arguments + list(argv))
|
||||
|
||||
|
||||
def node_url(node_dir, uri_fragment):
|
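Callers now pass the ``TahoeProcess`` itself and get the subcommand's output
back synchronously, as the ``alice`` fixture above does::

    from json import loads

    # cli() blocks until the subcommand finishes and returns its stdout
    # (as bytes, per subprocess.check_output).
    cli(process, "create-alias", "test")
    rwcap = loads(cli(process, "list-aliases", "--json"))["test"]["readwrite"]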
@@ -505,3 +519,36 @@ def await_client_ready(tahoe, timeout=10, liveness=60*2):
             tahoe,
         )
     )
+
+
+def generate_ssh_key(path):
+    """Create a new SSH private/public key pair."""
+    key = RSAKey.generate(2048)
+    key.write_private_key_file(path)
+    with open(path + ".pub", "wb") as f:
+        f.write(b"%s %s" % (key.get_name(), key.get_base64()))
+
+
+def run_in_thread(f):
+    """Decorator for integration tests that runs code in a thread.
+
+    Because we're using pytest_twisted, tests that rely on the reactor are
+    expected to return a Deferred and use async APIs so the reactor can run.
+
+    In the case of the integration test suite, it launches nodes in the
+    background using Twisted APIs. The nodes stdout and stderr is read via
+    Twisted code. If the reactor doesn't run, reads don't happen, and
+    eventually the buffers fill up, and the nodes block when they try to flush
+    logs.
+
+    We can switch to Twisted APIs (treq instead of requests etc.), but
+    sometimes it's easier or expedient to just have a blocking test. So this
+    decorator allows you to run the test in a thread, and the reactor can keep
+    running in the main thread.
+
+    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug.
+    """
+    @wraps(f)
+    def test(*args, **kwargs):
+        return deferToThread(lambda: f(*args, **kwargs))
+    return test
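A sketch of the decorator in use: the test body may block freely because it
runs in a worker thread via ``deferToThread`` while the reactor keeps
servicing the nodes' pipes in the main thread (the URL is illustrative,
matching ``alice``'s configured web port)::

    import requests

    @run_in_thread
    def test_something_blocking(alice):
        # Blocking I/O such as `requests` is safe here; pytest_twisted
        # sees the Deferred that deferToThread returns.
        resp = requests.get("http://localhost:9980/")
        assert resp.status_code == 200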
@@ -46,7 +46,7 @@ class ProvisioningTool(rend.Page):
         req = inevow.IRequest(ctx)
 
         def getarg(name, astype=int):
-            if req.method != "POST":
+            if req.method != b"POST":
                 return None
             if name in req.fields:
                 return astype(req.fields[name].value)
newsfragments/3326.installation | 1 (new file)
@@ -0,0 +1 @@
+Debian 8 support has been replaced with Debian 10 support.

newsfragments/3580.minor | 0 (new file)

newsfragments/3584.bugfix | 1 (new file)
@@ -0,0 +1 @@
+SFTP public key auth likely works more consistently, and SFTP in general was previously broken.

newsfragments/3588.minor | 0 (new file)
newsfragments/3592.minor | 0 (new file)
newsfragments/3593.minor | 0 (new file)
newsfragments/3596.minor | 0 (new file)
newsfragments/3599.minor | 0 (new file)
setup.py | 2
@@ -395,6 +395,8 @@ setup(name="tahoe-lafs", # also set in __init__.py
             "html5lib",
             "junitxml",
             "tenacity",
+            "paramiko",
+            "pytest-timeout",
         ] + tor_requires + i2p_requires,
         "tor": tor_requires,
         "i2p": i2p_requires,
@@ -4,8 +4,8 @@ from zope.interface import implementer
 from twisted.web.client import getPage
 from twisted.internet import defer
 from twisted.cred import error, checkers, credentials
-from twisted.conch import error as conch_error
 from twisted.conch.ssh import keys
+from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
 
 from allmydata.util import base32
 from allmydata.util.fileutil import abspath_expanduser_unicode
@@ -29,7 +29,7 @@ class AccountFileChecker(object):
     def __init__(self, client, accountfile):
         self.client = client
         self.passwords = {}
-        self.pubkeys = {}
+        pubkeys = {}
         self.rootcaps = {}
         with open(abspath_expanduser_unicode(accountfile), "r") as f:
             for line in f:
@@ -40,12 +40,14 @@ class AccountFileChecker(object):
                 if passwd.startswith("ssh-"):
                     bits = rest.split()
                     keystring = " ".join([passwd] + bits[:-1])
+                    key = keys.Key.fromString(keystring)
                     rootcap = bits[-1]
-                    self.pubkeys[name] = keystring
+                    pubkeys[name] = [key]
                 else:
                     self.passwords[name] = passwd
                     rootcap = rest
                 self.rootcaps[name] = rootcap
+        self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys))
 
     def _avatarId(self, username):
         return FTPAvatarID(username, self.rootcaps[username])
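For reference, each line of the accounts file this parser consumes pairs a
username with either a password or an SSH public key, followed by that user's
rootcap, exactly as the integration fixture above writes it. A sketch with
placeholder caps (not real rootcaps)::

    # alice authenticates with a password; alice2 with an SSH key.
    # Password lines populate self.passwords; "ssh-" lines are parsed
    # into a conch Key and handed to InMemorySSHKeyDB above.
    ACCOUNTS = """\
    alice password URI:DIR2:xxxx:yyyy
    alice2 ssh-rsa AAAAB3NzaC1yc2E...rest-of-key... URI:DIR2:xxxx:yyyy
    """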
@@ -57,11 +59,9 @@ class AccountFileChecker(object):
 
     def requestAvatarId(self, creds):
         if credentials.ISSHPrivateKey.providedBy(creds):
-            # Re-using twisted.conch.checkers.SSHPublicKeyChecker here, rather
-            # than re-implementing all of the ISSHPrivateKey checking logic,
-            # would be better. That would require Twisted 14.1.0 or newer,
-            # though.
-            return self._checkKey(creds)
+            d = defer.maybeDeferred(self._pubkeychecker.requestAvatarId, creds)
+            d.addCallback(self._avatarId)
+            return d
         elif credentials.IUsernameHashedPassword.providedBy(creds):
             return self._checkPassword(creds)
         elif credentials.IUsernamePassword.providedBy(creds):
@@ -86,28 +86,6 @@ class AccountFileChecker(object):
         d.addCallback(self._cbPasswordMatch, str(creds.username))
         return d
 
-    def _checkKey(self, creds):
-        """
-        Determine whether some key-based credentials correctly authenticates a
-        user.
-
-        Returns a Deferred that fires with the username if so or with an
-        UnauthorizedLogin failure otherwise.
-        """
-
-        # Is the public key indicated by the given credentials allowed to
-        # authenticate the username in those credentials?
-        if creds.blob == self.pubkeys.get(creds.username):
-            if creds.signature is None:
-                return defer.fail(conch_error.ValidPublicKey())
-
-            # Is the signature in the given credentials the correct
-            # signature for the data in those credentials?
-            key = keys.Key.fromString(creds.blob)
-            if key.verify(creds.signature, creds.sigData):
-                return defer.succeed(self._avatarId(creds.username))
-
-        return defer.fail(error.UnauthorizedLogin())
-
 @implementer(checkers.ICredentialsChecker)
 class AccountURLChecker(object):
@@ -1,5 +1,17 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import six
-import heapq, traceback, array, stat, struct
+import heapq, traceback, stat, struct
 from stat import S_IFREG, S_IFDIR
 from time import time, strftime, localtime
 
@@ -44,6 +56,17 @@ from allmydata.util.log import NOISY, OPERATIONAL, WEIRD, \
 if six.PY3:
     long = int
 
+
+def createSFTPError(errorCode, errorMessage):
+    """
+    SFTPError that can accept both Unicode and bytes.
+
+    Twisted expects _native_ strings for the SFTPError message, but we often do
+    Unicode by default even on Python 2.
+    """
+    return SFTPError(errorCode, six.ensure_str(errorMessage))
+
+
 def eventually_callback(d):
     return lambda res: eventually(d.callback, res)
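A quick illustration of what the wrapper does: ``six.ensure_str`` maps both
text and bytes to the platform's native ``str``, so one call site serves both
Pythons (the message here is illustrative)::

    import six

    # Python 2: u"..." is encoded to a native byte-string;
    # Python 3: the text passes through unchanged. Either way
    # SFTPError receives the native str Twisted expects.
    assert isinstance(six.ensure_str(u"no such file"), str)
    err = createSFTPError(FX_NO_SUCH_FILE, u"no such file")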
@@ -52,9 +75,9 @@ def eventually_errback(d):
 
 
 def _utf8(x):
-    if isinstance(x, unicode):
-        return x.encode('utf-8')
+    if isinstance(x, str):
+        return x.encode('utf-8')
     if isinstance(x, bytes):
         return x
     return repr(x)
@@ -63,7 +86,7 @@ def _utf8(x):
     """SFTP times are unsigned 32-bit integers representing UTC seconds
     (ignoring leap seconds) since the Unix epoch, January 1 1970 00:00 UTC.
     A Tahoe time is the corresponding float."""
-    return long(t) & long(0xFFFFFFFF)
+    return int(t) & int(0xFFFFFFFF)
 
 
 def _convert_error(res, request):
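The mask simply truncates to the low 32 bits, so values wrap at 2**32; a
worked example::

    # A Tahoe time is a float of epoch seconds; SFTP wants an unsigned
    # 32-bit integer.
    assert int(1611100000.25) & 0xFFFFFFFF == 1611100000
    assert int(2**32 + 5) & 0xFFFFFFFF == 5  # wraps after the year 2106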
@@ -72,7 +95,7 @@ def _to_sftp_time(t):
 
     if not isinstance(res, Failure):
         logged_res = res
-        if isinstance(res, str): logged_res = "<data of length %r>" % (len(res),)
+        if isinstance(res, (bytes, str)): logged_res = "<data of length %r>" % (len(res),)
         logmsg("SUCCESS %r %r" % (request, logged_res,), level=OPERATIONAL)
         return res
 
@@ -91,10 +114,10 @@ def _convert_error(res, request):
         raise err
     if err.check(NoSuchChildError):
         childname = _utf8(err.value.args[0])
-        raise SFTPError(FX_NO_SUCH_FILE, childname)
+        raise createSFTPError(FX_NO_SUCH_FILE, childname)
     if err.check(NotWriteableError) or err.check(ChildOfWrongTypeError):
         msg = _utf8(err.value.args[0])
-        raise SFTPError(FX_PERMISSION_DENIED, msg)
+        raise createSFTPError(FX_PERMISSION_DENIED, msg)
     if err.check(ExistingChildError):
         # Versions of SFTP after v3 (which is what twisted.conch implements)
         # define a specific error code for this case: FX_FILE_ALREADY_EXISTS.
@@ -103,16 +126,16 @@ def _convert_error(res, request):
         # to translate the error to the equivalent of POSIX EEXIST, which is
         # necessary for some picky programs (such as gedit).
         msg = _utf8(err.value.args[0])
-        raise SFTPError(FX_FAILURE, msg)
+        raise createSFTPError(FX_FAILURE, msg)
     if err.check(NotImplementedError):
-        raise SFTPError(FX_OP_UNSUPPORTED, _utf8(err.value))
+        raise createSFTPError(FX_OP_UNSUPPORTED, _utf8(err.value))
     if err.check(EOFError):
-        raise SFTPError(FX_EOF, "end of file reached")
+        raise createSFTPError(FX_EOF, "end of file reached")
     if err.check(defer.FirstError):
         _convert_error(err.value.subFailure, request)
 
     # We assume that the error message is not anonymity-sensitive.
-    raise SFTPError(FX_FAILURE, _utf8(err.value))
+    raise createSFTPError(FX_FAILURE, _utf8(err.value))
 
 
 def _repr_flags(flags):
@@ -145,7 +168,7 @@ def _lsLine(name, attrs):
     # Since we now depend on Twisted v10.1, consider calling Twisted's version.
 
     mode = st_mode
-    perms = array.array('c', '-'*10)
+    perms = ["-"] * 10
     ft = stat.S_IFMT(mode)
     if stat.S_ISDIR(ft): perms[0] = 'd'
     elif stat.S_ISREG(ft): perms[0] = '-'
@@ -164,7 +187,7 @@ def _lsLine(name, attrs):
     if mode&stat.S_IXOTH: perms[9] = 'x'
     # suid/sgid never set
 
-    l = perms.tostring()
+    l = "".join(perms)
     l += str(st_nlink).rjust(5) + ' '
     un = str(st_uid)
     l += un.ljust(9)
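``array.array('c', ...)`` no longer exists on Python 3, so the port builds the
mode string from a plain list of one-character strings. The same idea as a
standalone sketch::

    import stat

    def mode_string(mode):
        # Build an ls-style permission string such as "drwxr-xr-x".
        perms = ["-"] * 10
        if stat.S_ISDIR(stat.S_IFMT(mode)):
            perms[0] = "d"
        flags = [stat.S_IRUSR, stat.S_IWUSR, stat.S_IXUSR,
                 stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP,
                 stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH]
        for i, flag in enumerate(flags, start=1):
            if mode & flag:
                perms[i] = "rwx"[(i - 1) % 3]
        return "".join(perms)

    assert mode_string(stat.S_IFDIR | 0o755) == "drwxr-xr-x"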
@@ -181,6 +204,7 @@ def _lsLine(name, attrs):
         l += strftime("%b %d %Y ", localtime(st_mtime))
     else:
         l += strftime("%b %d %H:%M ", localtime(st_mtime))
+    l = l.encode("utf-8")
     l += name
     return l
@@ -222,7 +246,7 @@ def _populate_attrs(childnode, metadata, size=None):
     if childnode and size is None:
         size = childnode.get_size()
     if size is not None:
-        _assert(isinstance(size, (int, long)) and not isinstance(size, bool), size=size)
+        _assert(isinstance(size, int) and not isinstance(size, bool), size=size)
         attrs['size'] = size
     perms = S_IFREG | 0o666
 
@@ -254,7 +278,7 @@ def _attrs_to_metadata(attrs):
 
     for key in attrs:
         if key == "mtime" or key == "ctime" or key == "createtime":
-            metadata[key] = long(attrs[key])
+            metadata[key] = int(attrs[key])
         elif key.startswith("ext_"):
             metadata[key] = str(attrs[key])
 
@@ -266,7 +290,7 @@ def _attrs_to_metadata(attrs):
 
 
 def _direntry_for(filenode_or_parent, childname, filenode=None):
-    precondition(isinstance(childname, (unicode, type(None))), childname=childname)
+    precondition(isinstance(childname, (str, type(None))), childname=childname)
 
     if childname is None:
         filenode_or_parent = filenode
@@ -274,7 +298,7 @@ def _direntry_for(filenode_or_parent, childname, filenode=None):
     if filenode_or_parent:
         rw_uri = filenode_or_parent.get_write_uri()
         if rw_uri and childname:
-            return rw_uri + "/" + childname.encode('utf-8')
+            return rw_uri + b"/" + childname.encode('utf-8')
         else:
             return rw_uri
 
@@ -326,7 +350,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
         if size < self.current_size or size < self.downloaded:
             self.f.truncate(size)
         if size > self.current_size:
-            self.overwrite(self.current_size, "\x00" * (size - self.current_size))
+            self.overwrite(self.current_size, b"\x00" * (size - self.current_size))
         self.current_size = size
 
         # make the invariant self.download_size <= self.current_size be true again
@@ -334,7 +358,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             self.download_size = size
 
         if self.downloaded >= self.download_size:
-            self.download_done("size changed")
+            self.download_done(b"size changed")
 
     def registerProducer(self, p, streaming):
         if noisy: self.log(".registerProducer(%r, streaming=%r)" % (p, streaming), level=NOISY)
@@ -409,21 +433,21 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             milestone = end
 
         while len(self.milestones) > 0:
-            (next, d) = self.milestones[0]
-            if next > milestone:
+            (next_, d) = self.milestones[0]
+            if next_ > milestone:
                 return
-            if noisy: self.log("MILESTONE %r %r" % (next, d), level=NOISY)
+            if noisy: self.log("MILESTONE %r %r" % (next_, d), level=NOISY)
             heapq.heappop(self.milestones)
-            eventually_callback(d)("reached")
+            eventually_callback(d)(b"reached")
 
         if milestone >= self.download_size:
-            self.download_done("reached download size")
+            self.download_done(b"reached download size")
 
     def overwrite(self, offset, data):
         if noisy: self.log(".overwrite(%r, <data of length %r>)" % (offset, len(data)), level=NOISY)
         if self.is_closed:
             self.log("overwrite called on a closed OverwriteableFileConsumer", level=WEIRD)
-            raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
+            raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
 
         if offset > self.current_size:
             # Normally writing at an offset beyond the current end-of-file
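For context: ``self.milestones`` is a min-heap of ``(offset, Deferred)`` pairs,
and each Deferred fires once the background download has reached its offset;
the rename to ``next_`` just avoids shadowing the Python 3 builtin ``next``.
The pattern in isolation (names illustrative)::

    import heapq
    from twisted.internet.defer import Deferred

    milestones = []  # min-heap of (offset, Deferred)

    def when_reached(offset):
        d = Deferred()
        heapq.heappush(milestones, (offset, d))
        return d

    def note_progress(downloaded):
        # Fire every milestone at or below the current download progress.
        while milestones:
            (next_, d) = milestones[0]  # smallest outstanding offset
            if next_ > downloaded:
                break
            heapq.heappop(milestones)
            d.callback(b"reached")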
@@ -434,7 +458,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             # the gap between the current EOF and the offset.
 
             self.f.seek(self.current_size)
-            self.f.write("\x00" * (offset - self.current_size))
+            self.f.write(b"\x00" * (offset - self.current_size))
             start = self.current_size
         else:
             self.f.seek(offset)
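The zero-fill mirrors POSIX semantics: a write past the current end of file
implicitly extends it with NUL bytes. A standalone illustration with an
ordinary temporary file::

    import tempfile

    with tempfile.TemporaryFile() as f:
        f.write(b"abc")       # current size is 3
        f.seek(3)
        f.write(b"\x00" * 2)  # fill the gap up to offset 5
        f.write(b"XY")        # the actual write at offset 5
        f.seek(0)
        assert f.read() == b"abc\x00\x00XY"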
@@ -454,7 +478,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
         if noisy: self.log(".read(%r, %r), current_size = %r" % (offset, length, self.current_size), level=NOISY)
         if self.is_closed:
             self.log("read called on a closed OverwriteableFileConsumer", level=WEIRD)
-            raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
+            raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
 
         # Note that the overwrite method is synchronous. When a write request is processed
         # (e.g. a writeChunk request on the async queue of GeneralSFTPFile), overwrite will
@@ -508,7 +532,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
         return d
 
     def download_done(self, res):
-        _assert(isinstance(res, (str, Failure)), res=res)
+        _assert(isinstance(res, (bytes, Failure)), res=res)
         # Only the first call to download_done counts, but we log subsequent calls
         # (multiple calls are normal).
         if self.done_status is not None:
@@ -525,8 +549,8 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
         eventually_callback(self.done)(None)
 
         while len(self.milestones) > 0:
-            (next, d) = self.milestones[0]
-            if noisy: self.log("MILESTONE FINISH %r %r %r" % (next, d, res), level=NOISY)
+            (next_, d) = self.milestones[0]
+            if noisy: self.log("MILESTONE FINISH %r %r %r" % (next_, d, res), level=NOISY)
             heapq.heappop(self.milestones)
             # The callback means that the milestone has been reached if
             # it is ever going to be. Note that the file may have been
@@ -540,7 +564,7 @@ class OverwriteableFileConsumer(PrefixingLogMixin):
             self.f.close()
         except Exception as e:
             self.log("suppressed %r from close of temporary file %r" % (e, self.f), level=WEIRD)
-        self.download_done("closed")
+        self.download_done(b"closed")
         return self.done_status
 
     def unregisterProducer(self):
@@ -564,7 +588,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
         PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath)
         if noisy: self.log(".__init__(%r, %r, %r)" % (userpath, filenode, metadata), level=NOISY)
 
-        precondition(isinstance(userpath, str) and IFileNode.providedBy(filenode),
+        precondition(isinstance(userpath, bytes) and IFileNode.providedBy(filenode),
                      userpath=userpath, filenode=filenode)
         self.filenode = filenode
         self.metadata = metadata
@@ -576,7 +600,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
         self.log(request, level=OPERATIONAL)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
             return defer.execute(_closed)
 
         d = defer.Deferred()
@@ -593,7 +617,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
             # i.e. we respond with an EOF error iff offset is already at EOF.
 
             if offset >= len(data):
-                eventually_errback(d)(Failure(SFTPError(FX_EOF, "read at or past end of file")))
+                eventually_errback(d)(Failure(createSFTPError(FX_EOF, "read at or past end of file")))
             else:
                 eventually_callback(d)(data[offset:offset+length])  # truncated if offset+length > len(data)
             return data
@@ -604,7 +628,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
     def writeChunk(self, offset, data):
         self.log(".writeChunk(%r, <data of length %r>) denied" % (offset, len(data)), level=OPERATIONAL)
 
-        def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+        def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
         return defer.execute(_denied)
 
     def close(self):
@@ -618,7 +642,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
         self.log(request, level=OPERATIONAL)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
             return defer.execute(_closed)
 
         d = defer.execute(_populate_attrs, self.filenode, self.metadata)
@@ -627,7 +651,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
 
     def setAttrs(self, attrs):
         self.log(".setAttrs(%r) denied" % (attrs,), level=OPERATIONAL)
-        def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+        def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
         return defer.execute(_denied)
@@ -648,7 +672,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         if noisy: self.log(".__init__(%r, %r = %r, %r, <convergence censored>)" %
                            (userpath, flags, _repr_flags(flags), close_notify), level=NOISY)
 
-        precondition(isinstance(userpath, str), userpath=userpath)
+        precondition(isinstance(userpath, bytes), userpath=userpath)
         self.userpath = userpath
         self.flags = flags
         self.close_notify = close_notify
@@ -667,11 +691,11 @@ class GeneralSFTPFile(PrefixingLogMixin):
         # not be set before then.
         self.consumer = None
 
-    def open(self, parent=None, childname=None, filenode=None, metadata=None):
+    def open(self, parent=None, childname=None, filenode=None, metadata=None):  # noqa: F811
         self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" %
                  (parent, childname, filenode, metadata), level=OPERATIONAL)
 
-        precondition(isinstance(childname, (unicode, type(None))), childname=childname)
+        precondition(isinstance(childname, (str, type(None))), childname=childname)
         precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode)
         precondition(not self.closed, sftpfile=self)
 
@@ -688,7 +712,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         if (self.flags & FXF_TRUNC) or not filenode:
             # We're either truncating or creating the file, so we don't need the old contents.
             self.consumer = OverwriteableFileConsumer(0, tempfile_maker)
-            self.consumer.download_done("download not needed")
+            self.consumer.download_done(b"download not needed")
         else:
             self.async_.addCallback(lambda ignored: filenode.get_best_readable_version())
 
@@ -702,7 +726,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
                 d = version.read(self.consumer, 0, None)
                 def _finished(res):
                     if not isinstance(res, Failure):
-                        res = "download finished"
+                        res = b"download finished"
                     self.consumer.download_done(res)
                 d.addBoth(_finished)
                 # It is correct to drop d here.
@@ -722,7 +746,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
     def rename(self, new_userpath, new_parent, new_childname):
         self.log(".rename(%r, %r, %r)" % (new_userpath, new_parent, new_childname), level=OPERATIONAL)
 
-        precondition(isinstance(new_userpath, str) and isinstance(new_childname, unicode),
+        precondition(isinstance(new_userpath, bytes) and isinstance(new_childname, str),
                      new_userpath=new_userpath, new_childname=new_childname)
         self.userpath = new_userpath
         self.parent = new_parent
@@ -750,11 +774,11 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(request, level=OPERATIONAL)
 
         if not (self.flags & FXF_READ):
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for reading")
             return defer.execute(_denied)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot read from a closed file handle")
             return defer.execute(_closed)
 
         d = defer.Deferred()
@@ -772,11 +796,11 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(".writeChunk(%r, <data of length %r>)" % (offset, len(data)), level=OPERATIONAL)
 
         if not (self.flags & FXF_WRITE):
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
             return defer.execute(_denied)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot write to a closed file handle")
             return defer.execute(_closed)
 
         self.has_changed = True
@@ -892,7 +916,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(request, level=OPERATIONAL)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot get attributes for a closed file handle")
             return defer.execute(_closed)
 
         # Optimization for read-only handles, when we already know the metadata.
@@ -916,16 +940,16 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(request, level=OPERATIONAL)
 
         if not (self.flags & FXF_WRITE):
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "file handle was not opened for writing")
             return defer.execute(_denied)
 
         if self.closed:
-            def _closed(): raise SFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle")
+            def _closed(): raise createSFTPError(FX_BAD_MESSAGE, "cannot set attributes for a closed file handle")
             return defer.execute(_closed)
 
         size = attrs.get("size", None)
-        if size is not None and (not isinstance(size, (int, long)) or size < 0):
-            def _bad(): raise SFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer")
+        if size is not None and (not isinstance(size, int) or size < 0):
+            def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "new size is not a valid nonnegative integer")
             return defer.execute(_bad)
 
         d = defer.Deferred()
@@ -1011,7 +1035,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
     def logout(self):
         self.log(".logout()", level=OPERATIONAL)
 
-        for files in self._heisenfiles.itervalues():
+        for files in self._heisenfiles.values():
            for f in files:
                f.abandon()
 
@@ -1038,7 +1062,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         request = "._abandon_any_heisenfiles(%r, %r)" % (userpath, direntry)
         self.log(request, level=OPERATIONAL)
 
-        precondition(isinstance(userpath, str), userpath=userpath)
+        precondition(isinstance(userpath, bytes), userpath=userpath)
 
         # First we synchronously mark all heisenfiles matching the userpath or direntry
         # as abandoned, and remove them from the two heisenfile dicts. Then we .sync()
@@ -1087,8 +1111,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                    (from_userpath, from_parent, from_childname, to_userpath, to_parent, to_childname, overwrite))
         self.log(request, level=OPERATIONAL)
 
-        precondition((isinstance(from_userpath, str) and isinstance(from_childname, unicode) and
-                      isinstance(to_userpath, str) and isinstance(to_childname, unicode)),
+        precondition((isinstance(from_userpath, bytes) and isinstance(from_childname, str) and
+                      isinstance(to_userpath, bytes) and isinstance(to_childname, str)),
                     from_userpath=from_userpath, from_childname=from_childname, to_userpath=to_userpath, to_childname=to_childname)
 
        if noisy: self.log("all_heisenfiles = %r\nself._heisenfiles = %r" % (all_heisenfiles, self._heisenfiles), level=NOISY)
@@ -1117,7 +1141,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        # does not mean that they were not committed; it is used to determine whether
        # a NoSuchChildError from the rename attempt should be suppressed). If overwrite
        # is False and there were already heisenfiles at the destination userpath or
-        # direntry, we return a Deferred that fails with SFTPError(FX_PERMISSION_DENIED).
+        # direntry, we return a Deferred that fails with createSFTPError(FX_PERMISSION_DENIED).
 
        from_direntry = _direntry_for(from_parent, from_childname)
        to_direntry = _direntry_for(to_parent, to_childname)
@@ -1126,7 +1150,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                           (from_direntry, to_direntry, len(all_heisenfiles), len(self._heisenfiles), request), level=NOISY)
 
        if not overwrite and (to_userpath in self._heisenfiles or to_direntry in all_heisenfiles):
-            def _existing(): raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath)
+            def _existing(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8"))
            if noisy: self.log("existing", level=NOISY)
            return defer.execute(_existing)
 
@@ -1160,7 +1184,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        request = "._update_attrs_for_heisenfiles(%r, %r, %r)" % (userpath, direntry, attrs)
        self.log(request, level=OPERATIONAL)
 
-        _assert(isinstance(userpath, str) and isinstance(direntry, str),
+        _assert(isinstance(userpath, bytes) and isinstance(direntry, bytes),
                userpath=userpath, direntry=direntry)
 
        files = []
@@ -1193,7 +1217,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore)
        self.log(request, level=OPERATIONAL)
 
-        _assert(isinstance(userpath, str) and isinstance(direntry, (str, type(None))),
+        _assert(isinstance(userpath, bytes) and isinstance(direntry, (bytes, type(None))),
                userpath=userpath, direntry=direntry)
 
        files = []
@@ -1218,7 +1242,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
    def _remove_heisenfile(self, userpath, parent, childname, file_to_remove):
        if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY)
 
-        _assert(isinstance(userpath, str) and isinstance(childname, (unicode, type(None))),
+        _assert(isinstance(userpath, bytes) and isinstance(childname, (str, type(None))),
                userpath=userpath, childname=childname)
 
        direntry = _direntry_for(parent, childname)
@@ -1245,8 +1269,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                           (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata),
                           level=NOISY)
 
-        _assert((isinstance(userpath, str) and isinstance(childname, (unicode, type(None))) and
-                 (metadata is None or 'no-write' in metadata)),
+        _assert((isinstance(userpath, bytes) and isinstance(childname, (str, type(None))) and
+                 (metadata is None or 'no-write' in metadata)),
                userpath=userpath, childname=childname, metadata=metadata)
 
        writing = (flags & (FXF_WRITE | FXF_CREAT)) != 0
@@ -1279,17 +1303,17 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
 
        if not (flags & (FXF_READ | FXF_WRITE)):
            def _bad_readwrite():
-                raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set")
+                raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: at least one of FXF_READ and FXF_WRITE must be set")
            return defer.execute(_bad_readwrite)
 
        if (flags & FXF_EXCL) and not (flags & FXF_CREAT):
            def _bad_exclcreat():
-                raise SFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT")
+                raise createSFTPError(FX_BAD_MESSAGE, "invalid file open flags: FXF_EXCL cannot be set without FXF_CREAT")
            return defer.execute(_bad_exclcreat)
 
        path = self._path_from_string(pathstring)
        if not path:
-            def _emptypath(): raise SFTPError(FX_NO_SUCH_FILE, "path cannot be empty")
+            def _emptypath(): raise createSFTPError(FX_NO_SUCH_FILE, "path cannot be empty")
            return defer.execute(_emptypath)
 
        # The combination of flags is potentially valid.
@@ -1348,20 +1372,20 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        def _got_root(root_and_path):
            (root, path) = root_and_path
            if root.is_unknown():
-                raise SFTPError(FX_PERMISSION_DENIED,
+                raise createSFTPError(FX_PERMISSION_DENIED,
                                "cannot open an unknown cap (or child of an unknown object). "
                                "Upgrading the gateway to a later Tahoe-LAFS version may help")
            if not path:
                # case 1
                if noisy: self.log("case 1: root = %r, path[:-1] = %r" % (root, path[:-1]), level=NOISY)
                if not IFileNode.providedBy(root):
-                    raise SFTPError(FX_PERMISSION_DENIED,
+                    raise createSFTPError(FX_PERMISSION_DENIED,
                                    "cannot open a directory cap")
                if (flags & FXF_WRITE) and root.is_readonly():
-                    raise SFTPError(FX_PERMISSION_DENIED,
+                    raise createSFTPError(FX_PERMISSION_DENIED,
                                    "cannot write to a non-writeable filecap without a parent directory")
                if flags & FXF_EXCL:
-                    raise SFTPError(FX_FAILURE,
+                    raise createSFTPError(FX_FAILURE,
                                    "cannot create a file exclusively when it already exists")
 
                # The file does not need to be added to all_heisenfiles, because it is not
@@ -1388,7 +1412,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
            def _got_parent(parent):
                if noisy: self.log("_got_parent(%r)" % (parent,), level=NOISY)
                if parent.is_unknown():
-                    raise SFTPError(FX_PERMISSION_DENIED,
+                    raise createSFTPError(FX_PERMISSION_DENIED,
                                    "cannot open a child of an unknown object. "
                                    "Upgrading the gateway to a later Tahoe-LAFS version may help")
 
@@ -1403,13 +1427,13 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                    # which is consistent with what might happen on a POSIX filesystem.
 
                    if parent_readonly:
-                        raise SFTPError(FX_FAILURE,
+                        raise createSFTPError(FX_FAILURE,
                                        "cannot create a file exclusively when the parent directory is read-only")
 
                    # 'overwrite=False' ensures failure if the link already exists.
                    # FIXME: should use a single call to set_uri and return (child, metadata) (#1035)
 
-                    zero_length_lit = "URI:LIT:"
+                    zero_length_lit = b"URI:LIT:"
                    if noisy: self.log("%r.set_uri(%r, None, readcap=%r, overwrite=False)" %
                                       (parent, zero_length_lit, childname), level=NOISY)
                    d3.addCallback(lambda ign: parent.set_uri(childname, None, readcap=zero_length_lit,
@@ -1435,14 +1459,14 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                        metadata['no-write'] = _no_write(parent_readonly, filenode, current_metadata)
 
                    if filenode.is_unknown():
-                        raise SFTPError(FX_PERMISSION_DENIED,
+                        raise createSFTPError(FX_PERMISSION_DENIED,
                                        "cannot open an unknown cap. Upgrading the gateway "
                                        "to a later Tahoe-LAFS version may help")
                    if not IFileNode.providedBy(filenode):
-                        raise SFTPError(FX_PERMISSION_DENIED,
+                        raise createSFTPError(FX_PERMISSION_DENIED,
                                        "cannot open a directory as if it were a file")
                    if (flags & FXF_WRITE) and metadata['no-write']:
-                        raise SFTPError(FX_PERMISSION_DENIED,
+                        raise createSFTPError(FX_PERMISSION_DENIED,
                                        "cannot open a non-writeable file for writing")
 
                    return self._make_file(file, userpath, flags, parent=parent, childname=childname,
@@ -1452,10 +1476,10 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                    f.trap(NoSuchChildError)
 
                    if not (flags & FXF_CREAT):
-                        raise SFTPError(FX_NO_SUCH_FILE,
+                        raise createSFTPError(FX_NO_SUCH_FILE,
                                        "the file does not exist, and was not opened with the creation (CREAT) flag")
                    if parent_readonly:
-                        raise SFTPError(FX_PERMISSION_DENIED,
+                        raise createSFTPError(FX_PERMISSION_DENIED,
                                        "cannot create a file when the parent directory is read-only")
 
                    return self._make_file(file, userpath, flags, parent=parent, childname=childname)
@@ -1494,9 +1518,9 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
            (to_parent, to_childname) = to_pair
 
            if from_childname is None:
-                raise SFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI")
+                raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename a source object specified by URI")
            if to_childname is None:
-                raise SFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI")
+                raise createSFTPError(FX_NO_SUCH_FILE, "cannot rename to a destination specified by URI")
 
            # <http://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#section-6.5>
            # "It is an error if there already exists a file with the name specified
@@ -1511,7 +1535,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
            d2.addCallback(lambda ign: to_parent.get(to_childname))
            def _expect_fail(res):
                if not isinstance(res, Failure):
-                    raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath)
+                    raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8"))
 
                # It is OK if we fail for errors other than NoSuchChildError, since that probably
                # indicates some problem accessing the destination directory.
@@ -1536,7 +1560,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                if not isinstance(err, Failure) or (renamed and err.check(NoSuchChildError)):
                    return None
                if not overwrite and err.check(ExistingChildError):
-                    raise SFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + to_userpath)
+                    raise createSFTPError(FX_PERMISSION_DENIED, "cannot rename to existing path " + str(to_userpath, "utf-8"))
 
                return err
            d3.addBoth(_check)
@@ -1554,7 +1578,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        path = self._path_from_string(pathstring)
        metadata = _attrs_to_metadata(attrs)
        if 'no-write' in metadata:
-            def _denied(): raise SFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only")
+            def _denied(): raise createSFTPError(FX_PERMISSION_DENIED, "cannot create a directory that is initially read-only")
            return defer.execute(_denied)
 
        d = self._get_root(path)
@@ -1566,7 +1590,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
    def _get_or_create_directories(self, node, path, metadata):
        if not IDirectoryNode.providedBy(node):
            # TODO: provide the name of the blocking file in the error message.
-            def _blocked(): raise SFTPError(FX_FAILURE, "cannot create directory because there "
+            def _blocked(): raise createSFTPError(FX_FAILURE, "cannot create directory because there "
                                            "is a file in the way")  # close enough
            return defer.execute(_blocked)
 
@@ -1604,7 +1628,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        def _got_parent(parent_and_childname):
            (parent, childname) = parent_and_childname
            if childname is None:
-                raise SFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI")
+                raise createSFTPError(FX_NO_SUCH_FILE, "cannot remove an object specified by URI")
 
            direntry = _direntry_for(parent, childname)
            d2 = defer.succeed(False)
@@ -1635,18 +1659,18 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        d.addCallback(_got_parent_or_node)
        def _list(dirnode):
            if dirnode.is_unknown():
-                raise SFTPError(FX_PERMISSION_DENIED,
+                raise createSFTPError(FX_PERMISSION_DENIED,
                                "cannot list an unknown cap as a directory. Upgrading the gateway "
                                "to a later Tahoe-LAFS version may help")
            if not IDirectoryNode.providedBy(dirnode):
-                raise SFTPError(FX_PERMISSION_DENIED,
+                raise createSFTPError(FX_PERMISSION_DENIED,
                                "cannot list a file as if it were a directory")
 
            d2 = dirnode.list()
            def _render(children):
                parent_readonly = dirnode.is_readonly()
                results = []
-                for filename, (child, metadata) in children.iteritems():
+                for filename, (child, metadata) in list(children.items()):
                    # The file size may be cached or absent.
                    metadata['no-write'] = _no_write(parent_readonly, child, metadata)
                    attrs = _populate_attrs(child, metadata)
@@ -1726,7 +1750,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
|
||||
if "size" in attrs:
|
||||
# this would require us to download and re-upload the truncated/extended
|
||||
# file contents
            def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "setAttrs with size attribute unsupported")
            def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "setAttrs with size attribute unsupported")
            return defer.execute(_unsupported)

        path = self._path_from_string(pathstring)
@ -1743,7 +1767,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
            if childname is None:
                if updated_heisenfiles:
                    return None
                raise SFTPError(FX_NO_SUCH_FILE, userpath)
                raise createSFTPError(FX_NO_SUCH_FILE, userpath)
            else:
                desired_metadata = _attrs_to_metadata(attrs)
                if noisy: self.log("desired_metadata = %r" % (desired_metadata,), level=NOISY)
@ -1766,7 +1790,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
    def readLink(self, pathstring):
        self.log(".readLink(%r)" % (pathstring,), level=OPERATIONAL)

        def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "readLink")
        def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "readLink")
        return defer.execute(_unsupported)

    def makeLink(self, linkPathstring, targetPathstring):
@ -1775,7 +1799,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        # If this is implemented, note the reversal of arguments described in point 7 of
        # <http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL?rev=1.15>.

        def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "makeLink")
        def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "makeLink")
        return defer.execute(_unsupported)

    def extendedRequest(self, extensionName, extensionData):
@ -1784,8 +1808,8 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
        # We implement the three main OpenSSH SFTP extensions; see
        # <http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL?rev=1.15>

        if extensionName == 'posix-rename@openssh.com':
            def _bad(): raise SFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request")
        if extensionName == b'posix-rename@openssh.com':
            def _bad(): raise createSFTPError(FX_BAD_MESSAGE, "could not parse posix-rename@openssh.com request")

            if 4 > len(extensionData): return defer.execute(_bad)
            (fromPathLen,) = struct.unpack('>L', extensionData[0:4])
@ -1802,11 +1826,11 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
            # an error, or an FXP_EXTENDED_REPLY. But it happens to do the right thing
            # (respond with an FXP_STATUS message) if we return a Failure with code FX_OK.
            def _succeeded(ign):
                raise SFTPError(FX_OK, "request succeeded")
                raise createSFTPError(FX_OK, "request succeeded")
            d.addCallback(_succeeded)
            return d

        if extensionName == 'statvfs@openssh.com' or extensionName == 'fstatvfs@openssh.com':
        if extensionName == b'statvfs@openssh.com' or extensionName == b'fstatvfs@openssh.com':
            # f_bsize and f_frsize should be the same to avoid a bug in 'df'
            return defer.succeed(struct.pack('>11Q',
                1024,   # uint64 f_bsize    /* file system block size */
@ -1822,7 +1846,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                65535,  # uint64 f_namemax  /* maximum filename length */
                ))

        def _unsupported(): raise SFTPError(FX_OP_UNSUPPORTED, "unsupported %r request <data of length %r>" %
        def _unsupported(): raise createSFTPError(FX_OP_UNSUPPORTED, "unsupported %r request <data of length %r>" %
                                            (extensionName, len(extensionData)))
        return defer.execute(_unsupported)

@ -1837,29 +1861,29 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
    def _path_from_string(self, pathstring):
        if noisy: self.log("CONVERT %r" % (pathstring,), level=NOISY)

        _assert(isinstance(pathstring, str), pathstring=pathstring)
        _assert(isinstance(pathstring, bytes), pathstring=pathstring)

        # The home directory is the root directory.
        pathstring = pathstring.strip("/")
        if pathstring == "" or pathstring == ".":
        pathstring = pathstring.strip(b"/")
        if pathstring == b"" or pathstring == b".":
            path_utf8 = []
        else:
            path_utf8 = pathstring.split("/")
            path_utf8 = pathstring.split(b"/")

        # <http://tools.ietf.org/html/draft-ietf-secsh-filexfer-02#section-6.2>
        # "Servers SHOULD interpret a path name component ".." as referring to
        # the parent directory, and "." as referring to the current directory."
        path = []
        for p_utf8 in path_utf8:
            if p_utf8 == "..":
            if p_utf8 == b"..":
                # ignore excess .. components at the root
                if len(path) > 0:
                    path = path[:-1]
            elif p_utf8 != ".":
            elif p_utf8 != b".":
                try:
                    p = p_utf8.decode('utf-8', 'strict')
                except UnicodeError:
                    raise SFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8")
                    raise createSFTPError(FX_NO_SUCH_FILE, "path could not be decoded as UTF-8")
                path.append(p)

        if noisy: self.log(" PATH %r" % (path,), level=NOISY)
@ -1978,9 +2002,9 @@ class SFTPServer(service.MultiService):

    def __init__(self, client, accountfile, accounturl,
                 sftp_portstr, pubkey_file, privkey_file):
        precondition(isinstance(accountfile, (unicode, type(None))), accountfile)
        precondition(isinstance(pubkey_file, unicode), pubkey_file)
        precondition(isinstance(privkey_file, unicode), privkey_file)
        precondition(isinstance(accountfile, (str, type(None))), accountfile)
        precondition(isinstance(pubkey_file, str), pubkey_file)
        precondition(isinstance(privkey_file, str), privkey_file)
        service.MultiService.__init__(self)

        r = Dispatcher(client)
@ -2011,5 +2035,5 @@ class SFTPServer(service.MultiService):
        f = SSHFactory()
        f.portal = p

        s = strports.service(sftp_portstr, f)
        s = strports.service(six.ensure_str(sftp_portstr), f)
        s.setServiceParent(self)
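Throughout this file the diff swaps direct SFTPError(...) construction for a createSFTPError(...) helper whose definition sits in a part of the diff not shown here. A plausible minimal sketch, assuming its only job is to normalize the message to a native string for Twisted on both Python 2 and 3:

    import six
    from twisted.conch.ssh.filetransfer import SFTPError

    def createSFTPError(errorCode, errorMessage):
        # Twisted's SFTPError wants a native str message; during the
        # Python 3 port messages may arrive as bytes, so coerce first.
        return SFTPError(errorCode, six.ensure_str(errorMessage))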

197
src/allmydata/test/_win_subprocess.py
Normal file
@ -0,0 +1,197 @@
# -*- coding: utf-8 -*-

## Copyright (C) 2021 Valentin Lab
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions
## are met:
##
## 1. Redistributions of source code must retain the above copyright
##    notice, this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above
##    copyright notice, this list of conditions and the following
##    disclaimer in the documentation and/or other materials provided
##    with the distribution.
##
## 3. Neither the name of the copyright holder nor the names of its
##    contributors may be used to endorse or promote products derived
##    from this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
## FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
## COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
## OF THE POSSIBILITY OF SUCH DAMAGE.
##

## issue: https://bugs.python.org/issue19264

# See allmydata/windows/fixups.py
import sys
assert sys.platform == "win32"

import os
import ctypes
import subprocess
import _subprocess
from ctypes import byref, windll, c_char_p, c_wchar_p, c_void_p, \
    Structure, sizeof, c_wchar, WinError
from ctypes.wintypes import BYTE, WORD, LPWSTR, BOOL, DWORD, LPVOID, \
    HANDLE


##
## Types
##

CREATE_UNICODE_ENVIRONMENT = 0x00000400
LPCTSTR = c_char_p
LPTSTR = c_wchar_p
LPSECURITY_ATTRIBUTES = c_void_p
LPBYTE = ctypes.POINTER(BYTE)

class STARTUPINFOW(Structure):
    _fields_ = [
        ("cb", DWORD), ("lpReserved", LPWSTR),
        ("lpDesktop", LPWSTR), ("lpTitle", LPWSTR),
        ("dwX", DWORD), ("dwY", DWORD),
        ("dwXSize", DWORD), ("dwYSize", DWORD),
        ("dwXCountChars", DWORD), ("dwYCountChars", DWORD),
        ("dwFillAtrribute", DWORD), ("dwFlags", DWORD),
        ("wShowWindow", WORD), ("cbReserved2", WORD),
        ("lpReserved2", LPBYTE), ("hStdInput", HANDLE),
        ("hStdOutput", HANDLE), ("hStdError", HANDLE),
    ]

LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW)


class PROCESS_INFORMATION(Structure):
    _fields_ = [
        ("hProcess", HANDLE), ("hThread", HANDLE),
        ("dwProcessId", DWORD), ("dwThreadId", DWORD),
    ]

LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION)


class DUMMY_HANDLE(ctypes.c_void_p):

    def __init__(self, *a, **kw):
        super(DUMMY_HANDLE, self).__init__(*a, **kw)
        self.closed = False

    def Close(self):
        if not self.closed:
            windll.kernel32.CloseHandle(self)
            self.closed = True

    def __int__(self):
        return self.value


CreateProcessW = windll.kernel32.CreateProcessW
CreateProcessW.argtypes = [
    LPCTSTR, LPTSTR, LPSECURITY_ATTRIBUTES,
    LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCTSTR,
    LPSTARTUPINFOW, LPPROCESS_INFORMATION,
]
CreateProcessW.restype = BOOL


##
## Patched functions/classes
##

def CreateProcess(executable, args, _p_attr, _t_attr,
                  inherit_handles, creation_flags, env, cwd,
                  startup_info):
    """Create a process supporting unicode executable and args for win32

    Python implementation of CreateProcess using CreateProcessW for Win32

    """

    si = STARTUPINFOW(
        dwFlags=startup_info.dwFlags,
        wShowWindow=startup_info.wShowWindow,
        cb=sizeof(STARTUPINFOW),
        ## XXXvlab: not sure of the casting here to ints.
        hStdInput=int(startup_info.hStdInput),
        hStdOutput=int(startup_info.hStdOutput),
        hStdError=int(startup_info.hStdError),
    )

    wenv = None
    if env is not None:
        ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar
        env = (unicode("").join([
            unicode("%s=%s\0") % (k, v)
            for k, v in env.items()])) + unicode("\0")
        wenv = (c_wchar * len(env))()
        wenv.value = env

    pi = PROCESS_INFORMATION()
    creation_flags |= CREATE_UNICODE_ENVIRONMENT

    if CreateProcessW(executable, args, None, None,
                      inherit_handles, creation_flags,
                      wenv, cwd, byref(si), byref(pi)):
        return (DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread),
                pi.dwProcessId, pi.dwThreadId)
    raise WinError()

class Popen(subprocess.Popen):
    """This supersedes Popen and corrects a bug in the cPython 2.7 implementation"""

    def _execute_child(self, args, executable, preexec_fn, close_fds,
                       cwd, env, universal_newlines,
                       startupinfo, creationflags, shell, to_close,
                       p2cread, p2cwrite,
                       c2pread, c2pwrite,
                       errread, errwrite):
        """Code from part of _execute_child from Python 2.7 (9fbb65e)

        There are only two small changes, concerning the construction of
        the final string in shell mode: we preempt the creation of the
        command string when shell is True, because the original function
        would try to encode unicode args, which we want to avoid so that
        we can send them as-is to ``CreateProcess``.

        """
        if not isinstance(args, subprocess.types.StringTypes):
            args = subprocess.list2cmdline(args)

        if startupinfo is None:
            startupinfo = subprocess.STARTUPINFO()
        if shell:
            startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
            startupinfo.wShowWindow = _subprocess.SW_HIDE
            comspec = os.environ.get("COMSPEC", unicode("cmd.exe"))
            args = unicode('{} /c "{}"').format(comspec, args)
            if (_subprocess.GetVersion() >= 0x80000000 or
                    os.path.basename(comspec).lower() == "command.com"):
                w9xpopen = self._find_w9xpopen()
                args = unicode('"%s" %s') % (w9xpopen, args)
                creationflags |= _subprocess.CREATE_NEW_CONSOLE

        cp = _subprocess.CreateProcess
        _subprocess.CreateProcess = CreateProcess
        try:
            super(Popen, self)._execute_child(
                args, executable,
                preexec_fn, close_fds, cwd, env, universal_newlines,
                startupinfo, creationflags, False, to_close, p2cread,
                p2cwrite, c2pread, c2pwrite, errread, errwrite,
            )
        finally:
            _subprocess.CreateProcess = cp
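A minimal usage sketch of the patched class (hypothetical illustration; it assumes Windows and Python 2.7, since the module asserts win32 and uses the Py2-only _subprocess module):

    from subprocess import PIPE
    from allmydata.test._win_subprocess import Popen

    # Non-ASCII arguments survive because CreateProcessW is used under the hood.
    p = Popen([u"python", u"-c", u"print 'ok'"], stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()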
@ -432,7 +432,7 @@ class FakeCHKFileNode(object): # type: ignore # incomplete implementation
        return self.storage_index

    def check(self, monitor, verify=False, add_lease=False):
        s = StubServer("\x00"*20)
        s = StubServer(b"\x00"*20)
        r = CheckResults(self.my_uri, self.storage_index,
                         healthy=True, recoverable=True,
                         count_happiness=10,
@ -566,12 +566,12 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
        self.file_types[self.storage_index] = version
        initial_contents = self._get_initial_contents(contents)
        data = initial_contents.read(initial_contents.get_size())
        data = "".join(data)
        data = b"".join(data)
        self.all_contents[self.storage_index] = data
        return defer.succeed(self)
    def _get_initial_contents(self, contents):
        if contents is None:
            return MutableData("")
            return MutableData(b"")

        if IMutableUploadable.providedBy(contents):
            return contents
@ -625,7 +625,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
    def raise_error(self):
        pass
    def get_writekey(self):
        return "\x00"*16
        return b"\x00"*16
    def get_size(self):
        return len(self.all_contents[self.storage_index])
    def get_current_size(self):
@ -644,7 +644,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
        return self.file_types[self.storage_index]

    def check(self, monitor, verify=False, add_lease=False):
        s = StubServer("\x00"*20)
        s = StubServer(b"\x00"*20)
        r = CheckResults(self.my_uri, self.storage_index,
                         healthy=True, recoverable=True,
                         count_happiness=10,
@ -655,7 +655,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
                         count_recoverable_versions=1,
                         count_unrecoverable_versions=0,
                         servers_responding=[s],
                         sharemap={"seq1-abcd-sh0": [s]},
                         sharemap={b"seq1-abcd-sh0": [s]},
                         count_wrong_shares=0,
                         list_corrupt_shares=[],
                         count_corrupt_shares=0,
@ -709,7 +709,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
    def overwrite(self, new_contents):
        assert not self.is_readonly()
        new_data = new_contents.read(new_contents.get_size())
        new_data = "".join(new_data)
        new_data = b"".join(new_data)
        self.all_contents[self.storage_index] = new_data
        return defer.succeed(None)
    def modify(self, modifier):
@ -740,7 +740,7 @@ class FakeMutableFileNode(object): # type: ignore # incomplete implementation
    def update(self, data, offset):
        assert not self.is_readonly()
        def modifier(old, servermap, first_time):
            new = old[:offset] + "".join(data.read(data.get_size()))
            new = old[:offset] + b"".join(data.read(data.get_size()))
            new += old[len(new):]
            return new
        return self.modify(modifier)
@ -859,6 +859,8 @@ class WebErrorMixin(object):
        body = yield response.content()
        self.assertEquals(response.code, code)
        if response_substring is not None:
            if isinstance(response_substring, unicode):
                response_substring = response_substring.encode("utf-8")
            self.assertIn(response_substring, body)
        returnValue(body)

@ -203,6 +203,14 @@ def flip_one_bit(s, offset=0, size=None):
class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
        # Make sure unicode strings are a consistent type. Specifically there's
        # Future newstr (backported Unicode type) vs. Python 2 native unicode
        # type. They're equal, and _logically_ the same type, but have
        # different types in practice.
        if a.__class__ == future_str:
            a = unicode(a)
        if b.__class__ == future_str:
            b = unicode(b)
        self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
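To see why the normalization above is needed, a small illustration of the Python 2 semantics being worked around (not part of the diff):

    from future.builtins import str as future_str

    a = future_str(u"abc")   # future's backported unicode type
    b = u"abc"               # native Python 2 unicode
    assert a == b                          # equal values...
    assert a.__class__ is not b.__class__  # ...but distinct classes on Py2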


@ -126,6 +126,42 @@ class HashUtilTests(unittest.TestCase):
            base32.a2b(b"2ckv3dfzh6rgjis6ogfqhyxnzy"),
        )

    def test_convergence_hasher_tag(self):
        """
        ``_convergence_hasher_tag`` constructs the convergence hasher tag from a
        unique prefix, the required, total, and segment size parameters, and a
        convergence secret.
        """
        self.assertEqual(
            b"allmydata_immutable_content_to_key_with_added_secret_v1+"
            b"16:\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42\x42,"
            b"9:3,10,1024,",
            hashutil._convergence_hasher_tag(
                k=3,
                n=10,
                segsize=1024,
                convergence=b"\x42" * 16,
            ),
        )

    def test_convergence_hasher_out_of_bounds(self):
        """
        ``_convergence_hasher_tag`` raises ``ValueError`` if k or n is not between
        1 and 256 inclusive or if k is greater than n.
        """
        segsize = 1024
        secret = b"\x42" * 16
        for bad_k in (0, 2, 257):
            with self.assertRaises(ValueError):
                hashutil._convergence_hasher_tag(
                    k=bad_k, n=1, segsize=segsize, convergence=secret,
                )
        for bad_n in (0, 1, 257):
            with self.assertRaises(ValueError):
                hashutil._convergence_hasher_tag(
                    k=2, n=bad_n, segsize=segsize, convergence=secret,
                )

    def test_known_answers(self):
        """
        Verify backwards compatibility by comparing hash outputs for some
@ -14,9 +14,11 @@ from testtools.matchers import (
)

BLACKLIST = {
    "allmydata.test.check_load",
    "allmydata.windows.registry",
    "allmydata.scripts.types_",
    "allmydata.test.check_load",
    "allmydata.test._win_subprocess",
    "allmydata.windows.registry",
    "allmydata.windows.fixups",
}

File diff suppressed because it is too large
@ -888,6 +888,34 @@ def is_happy_enough(servertoshnums, h, k):
    return True


class FileHandleTests(unittest.TestCase):
    """
    Tests for ``FileHandle``.
    """
    def test_get_encryption_key_convergent(self):
        """
        When ``FileHandle`` is initialized with a convergence secret,
        ``FileHandle.get_encryption_key`` returns a deterministic result that
        is a function of that secret.
        """
        secret = b"\x42" * 16
        handle = upload.FileHandle(BytesIO(b"hello world"), secret)
        handle.set_default_encoding_parameters({
            "k": 3,
            "happy": 5,
            "n": 10,
            # Remember this is the *max* segment size. In reality, the data
            # size is much smaller so the actual segment size incorporated
            # into the encryption key is also smaller.
            "max_segment_size": 128 * 1024,
        })

        self.assertEqual(
            b64encode(self.successResultOf(handle.get_encryption_key())),
            b"oBcuR/wKdCgCV2GKKXqiNg==",
        )


class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
                         ShouldFailMixin):

@ -491,12 +491,16 @@ class JSONBytes(unittest.TestCase):
    """Tests for BytesJSONEncoder."""

    def test_encode_bytes(self):
        """BytesJSONEncoder can encode bytes."""
        """BytesJSONEncoder can encode bytes.

        Bytes are presumed to be UTF-8 encoded.
        """
        snowman = u"def\N{SNOWMAN}\uFF00"
        data = {
            b"hello": [1, b"cd"],
            b"hello": [1, b"cd", {b"abc": [123, snowman.encode("utf-8")]}],
        }
        expected = {
            u"hello": [1, u"cd"],
            u"hello": [1, u"cd", {u"abc": [123, snowman]}],
        }
        # Bytes get passed through as if they were UTF-8 Unicode:
        encoded = jsonbytes.dumps(data)

235
src/allmydata/test/test_windows.py
Normal file
@ -0,0 +1,235 @@
# -*- coding: utf-8 -*-
# Tahoe-LAFS -- secure, distributed storage grid
#
# Copyright © 2020 The Tahoe-LAFS Software Foundation
#
# This file is part of Tahoe-LAFS.
#
# See the docs/about.rst file for licensing information.

"""
|
||||
Tests for the ``allmydata.windows``.
|
||||
"""

from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from sys import (
    executable,
)
from json import (
    load,
)
from textwrap import (
    dedent,
)
from subprocess import (
    PIPE,
    Popen,
)

from twisted.python.filepath import (
    FilePath,
)
from twisted.python.runtime import (
    platform,
)

from testtools import (
    skipUnless,
)

from testtools.matchers import (
    MatchesAll,
    AllMatch,
    IsInstance,
    Equals,
)

from hypothesis import (
    HealthCheck,
    settings,
    given,
    note,
)

from hypothesis.strategies import (
    lists,
    text,
    characters,
)

from .common import (
    SyncTestCase,
)

slow_settings = settings(
    suppress_health_check=[HealthCheck.too_slow],
    deadline=None,

    # Reduce the number of examples required to consider the test a success.
    # The default is 100. Launching a process is expensive so we'll try to do
    # it as few times as we can get away with. To maintain good coverage,
    # we'll try to pass as much data to each process as we can so we're still
    # covering a good portion of the space.
    max_examples=10,
)

@skipUnless(platform.isWindows(), "get_argv is Windows-only")
class GetArgvTests(SyncTestCase):
    """
    Tests for ``get_argv``.
    """
    def test_get_argv_return_type(self):
        """
        ``get_argv`` returns a list of unicode strings
        """
        # Hide the ``allmydata.windows.fixups.get_argv`` import here so it
        # doesn't cause failures on non-Windows platforms.
        from ..windows.fixups import (
            get_argv,
        )
        argv = get_argv()

        # We don't know what this process's command line was so we just make
        # structural assertions here.
        self.assertThat(
            argv,
            MatchesAll(
                IsInstance(list),
                AllMatch(IsInstance(str)),
            ),
        )

    # This test runs a child process. This is unavoidably slow and variable.
    # Disable the two time-based Hypothesis health checks.
    @slow_settings
    @given(
        lists(
            text(
                alphabet=characters(
                    blacklist_categories=('Cs',),
                    # Windows CommandLine is a null-terminated string,
                    # analogous to POSIX exec* arguments. So exclude nul from
                    # our generated arguments.
                    blacklist_characters=('\x00',),
                ),
                min_size=10,
                max_size=20,
            ),
            min_size=10,
            max_size=20,
        ),
    )
    def test_argv_values(self, argv):
        """
        ``get_argv`` returns a list representing the result of tokenizing the
        "command line" argument string provided to Windows processes.
        """
        # Python 2.7 doesn't have good options for launching a process with
        # non-ASCII in its command line. So use this alternative that does a
        # better job. Bury the import here because it only works on Windows.
        from ._win_subprocess import (
            Popen
        )

        working_path = FilePath(self.mktemp())
        working_path.makedirs()
        save_argv_path = working_path.child("script.py")
        saved_argv_path = working_path.child("data.json")
        with open(save_argv_path.path, "wt") as f:
            # A simple program to save argv to a file. Using the file saves
            # us having to figure out how to reliably get non-ASCII back over
            # stdio which may pose an independent set of challenges. At least
            # file I/O is relatively simple and well-understood.
            f.write(dedent(
                """
                from allmydata.windows.fixups import (
                    get_argv,
                )
                import json
                with open({!r}, "wt") as f:
                    f.write(json.dumps(get_argv()))
                """.format(saved_argv_path.path)),
            )
        argv = [executable.decode("utf-8"), save_argv_path.path] + argv
        p = Popen(argv, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        p.stdin.close()
        stdout = p.stdout.read()
        stderr = p.stderr.read()
        returncode = p.wait()

        note("stdout: {!r}".format(stdout))
        note("stderr: {!r}".format(stderr))

        self.assertThat(
            returncode,
            Equals(0),
        )
        with open(saved_argv_path.path, "rt") as f:
            saved_argv = load(f)

        self.assertThat(
            saved_argv,
            Equals(argv),
        )


@skipUnless(platform.isWindows(), "intended for Windows-only codepaths")
class UnicodeOutputTests(SyncTestCase):
    """
    Tests for writing unicode to stdout and stderr.
    """
    @slow_settings
    @given(characters(), characters())
    def test_write_non_ascii(self, stdout_char, stderr_char):
        """
        Non-ASCII unicode characters can be written to stdout and stderr with
        automatic UTF-8 encoding.
        """
        working_path = FilePath(self.mktemp())
        working_path.makedirs()
        script = working_path.child("script.py")
        script.setContent(dedent(
            """
            from future.utils import PY2
            if PY2:
                from future.builtins import chr

            from allmydata.windows.fixups import initialize
            initialize()

            # XXX A shortcoming of the monkey-patch approach is that you'd
            # better not import stdout or stderr before you call initialize.
            from sys import argv, stdout, stderr

            stdout.write(chr(int(argv[1])))
            stdout.close()
            stderr.write(chr(int(argv[2])))
            stderr.close()
            """
        ))
        p = Popen([
            executable,
            script.path,
            str(ord(stdout_char)),
            str(ord(stderr_char)),
        ], stdout=PIPE, stderr=PIPE)
        stdout = p.stdout.read().decode("utf-8").replace("\r\n", "\n")
        stderr = p.stderr.read().decode("utf-8").replace("\r\n", "\n")
        returncode = p.wait()

        self.assertThat(
            (stdout, stderr, returncode),
            Equals((
                stdout_char,
                stderr_char,
                0,
            )),
        )
File diff suppressed because it is too large
@ -37,6 +37,7 @@ PORTED_MODULES = [
    "allmydata.crypto.util",
    "allmydata.deep_stats",
    "allmydata.dirnode",
    "allmydata.frontends.sftpd",
    "allmydata.hashtree",
    "allmydata.immutable.checker",
    "allmydata.immutable.downloader",
@ -170,6 +171,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_pipeline",
    "allmydata.test.test_python3",
    "allmydata.test.test_repairer",
    "allmydata.test.test_sftp",
    "allmydata.test.test_spans",
    "allmydata.test.test_statistics",
    "allmydata.test.test_stats",
@ -194,5 +196,6 @@ PORTED_TEST_MODULES = [
    "allmydata.test.web.test_root",
    "allmydata.test.web.test_status",
    "allmydata.test.web.test_util",
    "allmydata.test.web.test_web",
    "allmydata.test.web.test_webish",
]

@ -176,10 +176,44 @@ def convergence_hash(k, n, segsize, data, convergence):
    return h.digest()


def convergence_hasher(k, n, segsize, convergence):
def _convergence_hasher_tag(k, n, segsize, convergence):
    """
    Create the convergence hashing tag.

    :param int k: Required shares (in [1..256]).
    :param int n: Total shares (in [1..256]).
    :param int segsize: Maximum segment size.
    :param bytes convergence: The convergence secret.

    :return bytes: The bytestring to use as a tag in the convergence hash.
    """
    assert isinstance(convergence, bytes)
    if k > n:
        raise ValueError(
            "k > n not allowed; k = {}, n = {}".format(k, n),
        )
    if k < 1 or n < 1:
        # It doesn't make sense to have zero shares. Zero shares carry no
        # information and cannot encode any part of the application data.
        raise ValueError(
            "k, n < 1 not allowed; k = {}, n = {}".format(k, n),
        )
    if k > 256 or n > 256:
        # ZFEC supports encoding application data into a maximum of 256
        # shares. If we ignore the limitations of ZFEC, it may be fine to use
        # a configuration with more shares than that and it may be fine to
        # construct a convergence tag from such a configuration. Since ZFEC
        # is the only supported encoder, though, this is moot for now.
        raise ValueError(
            "k, n > 256 not allowed; k = {}, n = {}".format(k, n),
        )
    param_tag = netstring(b"%d,%d,%d" % (k, n, segsize))
    tag = CONVERGENT_ENCRYPTION_TAG + netstring(convergence) + param_tag
    return tag


def convergence_hasher(k, n, segsize, convergence):
    tag = _convergence_hasher_tag(k, n, segsize, convergence)
    return tagged_hasher(tag, KEYLEN)
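A worked example of the tag construction, matching the expected bytes in the unit test shown earlier (netstring(x) frames x as b"<len>:<x>,"):

    param_tag = netstring(b"%d,%d,%d" % (3, 10, 1024))
    # b"3,10,1024" is 9 bytes, so param_tag == b"9:3,10,1024,"
    # netstring(b"\x42" * 16) == b"16:" + b"\x42" * 16 + b","
    # The full tag is CONVERGENT_ENCRYPTION_TAG + netstring(secret) + param_tag.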


@ -13,20 +13,34 @@ from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401


import json


def _bytes_to_unicode(obj):
    """Convert any bytes objects to unicode, recursively."""
    if isinstance(obj, bytes):
        return obj.decode("utf-8")
    if isinstance(obj, dict):
        new_obj = {}
        for k, v in obj.items():
            if isinstance(k, bytes):
                k = k.decode("utf-8")
            v = _bytes_to_unicode(v)
            new_obj[k] = v
        return new_obj
    if isinstance(obj, (list, set, tuple)):
        return [_bytes_to_unicode(i) for i in obj]
    return obj


class BytesJSONEncoder(json.JSONEncoder):
    """
    A JSON encoder that can also encode bytes.

    The bytes are assumed to be UTF-8 encoded Unicode strings.
    """
    def default(self, o):
        if isinstance(o, bytes):
            return o.decode("utf-8")
        return json.JSONEncoder.default(self, o)
    def iterencode(self, o, **kwargs):
        return json.JSONEncoder.iterencode(self, _bytes_to_unicode(o), **kwargs)


def dumps(obj, *args, **kwargs):
@ -34,13 +48,6 @@ def dumps(obj, *args, **kwargs):

    The bytes are assumed to be UTF-8 encoded Unicode strings.
    """
    if isinstance(obj, dict):
        new_obj = {}
        for k, v in obj.items():
            if isinstance(k, bytes):
                k = k.decode("utf-8")
            new_obj[k] = v
        obj = new_obj
    return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs)
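Example usage, mirroring the test_util.JSONBytes test shown above (not part of the diff):

    import json
    from allmydata.util import jsonbytes

    encoded = jsonbytes.dumps({b"hello": [1, b"cd"]})
    assert json.loads(encoded) == {u"hello": [1, u"cd"]}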


@ -432,7 +432,7 @@ class DeepCheckResultsRenderer(MultiFormatResource):
            return CheckResultsRenderer(self._client,
                                        r.get_results_for_storage_index(si))
        except KeyError:
            raise WebError("No detailed results for SI %s" % html.escape(name),
            raise WebError("No detailed results for SI %s" % html.escape(str(name, "utf-8")),
                           http.NOT_FOUND)

    @render_exception

@ -186,7 +186,7 @@ def convert_children_json(nodemaker, children_json):
    children = {}
    if children_json:
        data = json.loads(children_json)
        for (namex, (ctype, propdict)) in data.iteritems():
        for (namex, (ctype, propdict)) in data.items():
            namex = unicode(namex)
            writecap = to_bytes(propdict.get("rw_uri"))
            readcap = to_bytes(propdict.get("ro_uri"))
@ -283,8 +283,8 @@ def render_time_attr(t):
# actual exception). The latter is growing increasingly annoying.

def should_create_intermediate_directories(req):
    t = get_arg(req, "t", "").strip()
    return bool(req.method in ("PUT", "POST") and
    t = unicode(get_arg(req, "t", "").strip(), "ascii")
    return bool(req.method in (b"PUT", b"POST") and
                t not in ("delete", "rename", "rename-form", "check"))

def humanize_exception(exc):
@ -674,7 +674,7 @@ def url_for_string(req, url_string):
    and the given URL string.
    """
    url = DecodedURL.from_text(url_string.decode("utf-8"))
    if url.host == b"":
    if not url.host:
        root = req.URLPath()
        netloc = root.netloc.split(b":", 1)
        if len(netloc) == 1:

@ -40,8 +40,12 @@ def get_arg(req, argname, default=None, multiple=False):
    results = []
    if argname in req.args:
        results.extend(req.args[argname])
    if req.fields and argname in req.fields:
        results.append(req.fields[argname].value)
    argname_unicode = unicode(argname, "utf-8")
    if req.fields and argname_unicode in req.fields:
        value = req.fields[argname_unicode].value
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        results.append(value)
    if multiple:
        return tuple(results)
    if results:
@ -79,7 +83,13 @@ class MultiFormatResource(resource.Resource, object):
        if isinstance(t, bytes):
            t = unicode(t, "ascii")
        renderer = self._get_renderer(t)
        return renderer(req)
        result = renderer(req)
        # On Python 3, json.dumps() returns Unicode for example, but
        # twisted.web expects bytes. Instead of updating every single render
        # method, just handle Unicode one time here.
        if isinstance(result, unicode):
            result = result.encode("utf-8")
        return result

    def _get_renderer(self, fmt):
        """
@ -1,3 +1,11 @@
"""
TODO: When porting to Python 3, the filename handling logic seems wrong. On
Python 3 filename will _already_ be correctly decoded. So only decode if it's
bytes.

Also there's a lot of code duplication I think.
"""

from past.builtins import unicode

from urllib.parse import quote as url_quote
@ -135,7 +143,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
        nonterminal = not terminal #len(req.postpath) > 0

        t = get_arg(req, b"t", b"").strip()
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
@ -150,10 +158,10 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        else:
            # terminal node
            terminal_requests = (
                ("POST", "mkdir"),
                ("PUT", "mkdir"),
                ("POST", "mkdir-with-children"),
                ("POST", "mkdir-immutable")
                (b"POST", "mkdir"),
                (b"PUT", "mkdir"),
                (b"POST", "mkdir-with-children"),
                (b"POST", "mkdir-immutable")
            )
            if (req.method, t) in terminal_requests:
                # final directory
@ -182,8 +190,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
                )
                return d
            leaf_requests = (
                ("PUT",""),
                ("PUT","uri"),
                (b"PUT",""),
                (b"PUT","uri"),
            )
            if (req.method, t) in leaf_requests:
                # we were trying to find the leaf filenode (to put a new
@ -224,7 +232,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            si = self.node.get_storage_index()
            if si and req.setETag('DIR:%s-%s' % (base32.b2a(si), t or "")):
            if si and req.setETag(b'DIR:%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
                return b""

        if not t:
@ -255,7 +263,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_PUT(self, req):
        t = get_arg(req, b"t", b"").strip()
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
@ -364,7 +372,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        return d

    def _POST_upload(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        charset = unicode(get_arg(req, "_charset", b"utf-8"), "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
@ -374,8 +382,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        if not name:
            # this prohibits empty, missing, and all-whitespace filenames
raise WebError("upload requires a name")
|
||||
assert isinstance(name, str)
|
||||
name = name.decode(charset)
|
||||
if isinstance(name, bytes):
|
||||
name = name.decode(charset)
|
||||
if "/" in name:
|
||||
raise WebError("name= may not contain a slash", http.BAD_REQUEST)
|
||||
assert isinstance(name, unicode)
|
||||
@ -413,7 +421,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
|
||||
name = get_arg(req, "name")
|
||||
if not name:
|
||||
raise WebError("set-uri requires a name")
|
||||
charset = get_arg(req, "_charset", "utf-8")
|
||||
charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
|
||||
name = name.decode(charset)
|
||||
replace = parse_replace_arg(get_arg(req, "replace", "true"))
|
||||
|
||||
@ -436,8 +444,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
|
||||
# a slightly confusing error message if someone does a POST
|
||||
# without a name= field. For our own HTML this isn't a big
|
||||
# deal, because we create the 'unlink' POST buttons ourselves.
|
||||
name = ''
|
||||
charset = get_arg(req, "_charset", "utf-8")
|
||||
name = b''
|
||||
charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
|
||||
name = name.decode(charset)
|
||||
d = self.node.delete(name)
|
||||
d.addCallback(lambda res: "thing unlinked")
|
||||
@ -453,7 +461,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
|
||||
return self._POST_relink(req)
|
||||
|
||||
def _POST_relink(self, req):
|
||||
charset = get_arg(req, "_charset", "utf-8")
|
||||
charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
|
||||
replace = parse_replace_arg(get_arg(req, "replace", "true"))
|
||||
|
||||
from_name = get_arg(req, "from_name")
|
||||
@ -624,14 +632,14 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
|
||||
# TODO test handling of bad JSON
|
||||
raise
|
||||
cs = {}
|
||||
for name, (file_or_dir, mddict) in children.iteritems():
|
||||
for name, (file_or_dir, mddict) in children.items():
|
||||
name = unicode(name) # json returns str *or* unicode
|
||||
writecap = mddict.get('rw_uri')
|
||||
if writecap is not None:
|
||||
writecap = str(writecap)
|
||||
writecap = writecap.encode("utf-8")
|
||||
readcap = mddict.get('ro_uri')
|
||||
if readcap is not None:
|
||||
readcap = str(readcap)
|
||||
readcap = readcap.encode("utf-8")
|
||||
cs[name] = (writecap, readcap, mddict.get('metadata'))
|
||||
d = self.node.set_children(cs, replace)
|
||||
d.addCallback(lambda res: "Okay so I did it.")
|
||||
@ -1144,8 +1152,8 @@ def _slashify_path(path):
|
||||
in it
|
||||
"""
|
||||
if not path:
|
||||
return ""
|
||||
return "/".join([p.encode("utf-8") for p in path])
|
||||
return b""
|
||||
return b"/".join([p.encode("utf-8") for p in path])
|
||||
|
||||
|
||||
def _cap_to_link(root, path, cap):
|
||||
@ -1234,10 +1242,10 @@ class ManifestResults(MultiFormatResource, ReloadMixin):
|
||||
req.setHeader("content-type", "text/plain")
|
||||
lines = []
|
||||
is_finished = self.monitor.is_finished()
|
||||
lines.append("finished: " + {True: "yes", False: "no"}[is_finished])
|
||||
lines.append(b"finished: " + {True: b"yes", False: b"no"}[is_finished])
|
||||
for path, cap in self.monitor.get_status()["manifest"]:
|
||||
lines.append(_slashify_path(path) + " " + cap)
|
||||
return "\n".join(lines) + "\n"
|
||||
lines.append(_slashify_path(path) + b" " + cap)
|
||||
return b"\n".join(lines) + b"\n"
|
||||
|
||||
def render_JSON(self, req):
|
||||
req.setHeader("content-type", "text/plain")
|
||||
@ -1290,7 +1298,7 @@ class DeepSizeResults(MultiFormatResource):
|
||||
+ stats.get("size-mutable-files", 0)
|
||||
+ stats.get("size-directories", 0))
|
||||
output += "size: %d\n" % total
|
||||
return output
|
||||
return output.encode("utf-8")
|
||||
render_TEXT = render_HTML
|
||||
|
||||
def render_JSON(self, req):
|
||||
@ -1315,7 +1323,7 @@ class DeepStatsResults(Resource, object):
|
||||
req.setHeader("content-type", "text/plain")
|
||||
s = self.monitor.get_status().copy()
|
||||
s["finished"] = self.monitor.is_finished()
|
||||
return json.dumps(s, indent=1)
|
||||
return json.dumps(s, indent=1).encode("utf-8")
|
||||
|
||||
|
||||
@implementer(IPushProducer)
|
||||
|
@ -127,7 +127,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
                           http.NOT_IMPLEMENTED)
        if not t:
            return self.replace_me_with_a_child(req, self.client, replace)
        if t == "uri":
        if t == b"uri":
            return self.replace_me_with_a_childcap(req, self.client, replace)

        raise WebError("PUT to a file: bad t=%s" % t)
@ -188,8 +188,8 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            # if the client already has the ETag then we can
            # short-circuit the whole process.
            si = self.node.get_storage_index()
            if si and req.setETag('%s-%s' % (base32.b2a(si), t or "")):
                return ""
            if si and req.setETag(b'%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
                return b""

        if not t:
            # just get the contents
@ -281,7 +281,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            return self.replace_me_with_a_child(req, self.client, replace)

        if t == "uri":
        if t == b"uri":
            if not replace:
                raise ExistingChildError()
            assert self.parentnode and self.name
@ -309,7 +309,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            d = self.replace_me_with_a_formpost(req, self.client, replace)
        else:
            raise WebError("POST to file: bad t=%s" % t)
            raise WebError("POST to file: bad t=%s" % unicode(t, "ascii"))

        return handle_when_done(req, d)

@ -439,7 +439,7 @@ class FileDownloader(Resource, object):
            # bytes we were given in the URL. See the comment in
            # FileNodeHandler.render_GET for the sad details.
            req.setHeader("content-disposition",
                          'attachment; filename="%s"' % self.filename)
                          b'attachment; filename="%s"' % self.filename)

        filesize = self.filenode.get_size()
        assert isinstance(filesize, (int,long)), filesize
@ -475,8 +475,8 @@ class FileDownloader(Resource, object):
            size = contentsize

        req.setHeader("content-length", b"%d" % contentsize)
        if req.method == "HEAD":
            return ""
        if req.method == b"HEAD":
            return b""

        d = self.filenode.read(req, first, size)


@ -1,5 +1,6 @@

import os, urllib
import os
from urllib.parse import quote as urlquote

from twisted.python.filepath import FilePath
from twisted.web.template import tags as T, Element, renderElement, XMLFile, renderer
@ -180,7 +181,7 @@ class MoreInfoElement(Element):
        else:
            return ""
        root = self.get_root(req)
        quoted_uri = urllib.quote(node.get_uri())
        quoted_uri = urlquote(node.get_uri())
        text_plain_url = "%s/file/%s/@@named=/raw.txt" % (root, quoted_uri)
        return T.li("Raw data as ", T.a("text/plain", href=text_plain_url))

@ -196,7 +197,7 @@ class MoreInfoElement(Element):
    @renderer
    def check_form(self, req, tag):
        node = self.original
        quoted_uri = urllib.quote(node.get_uri())
        quoted_uri = urlquote(node.get_uri())
        target = self.get_root(req) + "/uri/" + quoted_uri
        if IDirectoryNode.providedBy(node):
            target += "/"
@ -236,8 +237,8 @@ class MoreInfoElement(Element):
    def overwrite_form(self, req, tag):
        node = self.original
        root = self.get_root(req)
        action = "%s/uri/%s" % (root, urllib.quote(node.get_uri()))
        done_url = "%s/uri/%s?t=info" % (root, urllib.quote(node.get_uri()))
        action = "%s/uri/%s" % (root, urlquote(node.get_uri()))
        done_url = "%s/uri/%s?t=info" % (root, urlquote(node.get_uri()))
        overwrite = T.form(action=action, method="post",
                           enctype="multipart/form-data")(
            T.fieldset(
@ -1,3 +1,4 @@
from past.builtins import unicode

import time
from hyperlink import (
@ -101,12 +102,12 @@ class OphandleTable(resource.Resource, service.Service):
    def getChild(self, name, req):
        ophandle = name
        if ophandle not in self.handles:
            raise WebError("unknown/expired handle '%s'" % escape(ophandle),
            raise WebError("unknown/expired handle '%s'" % escape(unicode(ophandle, "utf-8")),
                           NOT_FOUND)
        (monitor, renderer, when_added) = self.handles[ophandle]

        t = get_arg(req, "t", "status")
        if t == "cancel" and req.method == "POST":
        if t == b"cancel" and req.method == b"POST":
            monitor.cancel()
            # return the status anyways, but release the handle
            self._release_ophandle(ophandle)
@ -151,7 +152,7 @@ class ReloadMixin(object):
    @renderer
    def refresh(self, req, tag):
        if self.monitor.is_finished():
            return ""
            return b""
        tag.attributes["http-equiv"] = "refresh"
        tag.attributes["content"] = str(self.REFRESH_TIME)
        return tag

@ -1,4 +1,5 @@
from future.utils import PY3
from past.builtins import unicode

import os
import time
@ -97,7 +98,7 @@ class URIHandler(resource.Resource, object):
        either "PUT /uri" to create an unlinked file, or
        "PUT /uri?t=mkdir" to create an unlinked directory
        """
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, "t", "").strip(), "utf-8")
        if t == "":
            file_format = get_format(req, "CHK")
            mutable_type = get_mutable_type(file_format)
@ -120,7 +121,7 @@ class URIHandler(resource.Resource, object):
        unlinked file or "POST /uri?t=mkdir" to create a
        new directory
        """
        t = get_arg(req, "t", "").strip()
        t = unicode(get_arg(req, "t", "").strip(), "ascii")
        if t in ("", "upload"):
            file_format = get_format(req)
            mutable_type = get_mutable_type(file_format)
@ -177,7 +178,7 @@ class FileHandler(resource.Resource, object):

    @exception_to_child
    def getChild(self, name, req):
        if req.method not in ("GET", "HEAD"):
        if req.method not in (b"GET", b"HEAD"):
            raise WebError("/file can only be used with GET or HEAD")
        # 'name' must be a file URI
        try:
@ -200,7 +201,7 @@ class IncidentReporter(MultiFormatResource):

    @render_exception
    def render(self, req):
        if req.method != "POST":
        if req.method != b"POST":
            raise WebError("/report_incident can only be used with POST")

        log.msg(format="User reports incident through web page: %(details)s",
@ -255,11 +256,11 @@ class Root(MultiFormatResource):
        if not path:
            # Render "/" path.
            return self
        if path == "helper_status":
        if path == b"helper_status":
            # the Helper isn't attached until after the Tub starts, so this child
            # needs to be created on each request
            return status.HelperStatus(self._client.helper)
        if path == "storage":
        if path == b"storage":
            # Storage isn't initialized until after the web hierarchy is
            # constructed so this child needs to be created later than
            # `__init__`.
@ -293,7 +294,7 @@ class Root(MultiFormatResource):
                self._describe_server(server)
                for server
                in broker.get_known_servers()
            ))
            ), key=lambda o: sorted(o.items()))


    def _describe_server(self, server):

@ -284,7 +284,7 @@ def _find_overlap(events, start_key, end_key):
    rows = []
    for ev in events:
        ev = ev.copy()
        if ev.has_key('server'):
        if 'server' in ev:
            ev["serverid"] = ev["server"].get_longname()
            del ev["server"]
        # find an empty slot in the rows
@ -362,8 +362,8 @@ def _find_overlap_requests(events):
def _color(server):
    h = hashlib.sha256(server.get_serverid()).digest()
    def m(c):
        return min(ord(c) / 2 + 0x80, 0xff)
    return "#%02x%02x%02x" % (m(h[0]), m(h[1]), m(h[2]))
        return min(ord(c) // 2 + 0x80, 0xff)
    return "#%02x%02x%02x" % (m(h[0:1]), m(h[1:2]), m(h[2:3]))
|
||||
|
||||
class _EventJson(Resource, object):
|
||||
|
||||
@ -426,7 +426,7 @@ class DownloadStatusPage(Resource, object):
|
||||
"""
|
||||
super(DownloadStatusPage, self).__init__()
|
||||
self._download_status = download_status
|
||||
self.putChild("event_json", _EventJson(self._download_status))
|
||||
self.putChild(b"event_json", _EventJson(self._download_status))
|
||||
|
||||
@render_exception
|
||||
def render_GET(self, req):
|
||||
@ -1288,14 +1288,14 @@ class Status(MultiFormatResource):
|
||||
# final URL segment will be an empty string. Resources can
|
||||
# thus know if they were requested with or without a final
|
||||
# slash."
|
||||
if not path and request.postpath != ['']:
|
||||
if not path and request.postpath != [b'']:
|
||||
return self
|
||||
|
||||
h = self.history
|
||||
try:
|
||||
stype, count_s = path.split("-")
|
||||
stype, count_s = path.split(b"-")
|
||||
except ValueError:
|
||||
raise WebError("no '-' in '{}'".format(path))
|
||||
raise WebError("no '-' in '{}'".format(unicode(path, "utf-8")))
|
||||
count = int(count_s)
|
||||
stype = unicode(stype, "ascii")
|
||||
if stype == "up":

@ -1,5 +1,6 @@

from past.builtins import unicode

import urllib
from urllib.parse import quote as urlquote

from twisted.web import http
from twisted.internet import defer

@ -65,8 +66,8 @@ def POSTUnlinkedCHK(req, client):

# if when_done= is provided, return a redirect instead of our
# usual upload-results page
def _done(upload_results, redir_to):
if "%(uri)s" in redir_to:
redir_to = redir_to.replace("%(uri)s", urllib.quote(upload_results.get_uri()))
if b"%(uri)s" in redir_to:
redir_to = redir_to.replace(b"%(uri)s", urlquote(upload_results.get_uri()).encode("utf-8"))
return url_for_string(req, redir_to)
d.addCallback(_done, when_done)
else:

@ -118,8 +119,8 @@ class UploadResultsElement(status.UploadResultsRendererMixin):

def download_link(self, req, tag):
d = self.upload_results()
d.addCallback(lambda res:
tags.a("/uri/" + res.get_uri(),
href="/uri/" + urllib.quote(res.get_uri())))
tags.a("/uri/" + unicode(res.get_uri(), "utf-8"),
href="/uri/" + urlquote(unicode(res.get_uri(), "utf-8"))))
return d
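
Both hunks above address the same mismatch: `urllib.parse.quote` takes and returns text, while `get_uri()` yields bytes. Assuming the URI is UTF-8 bytes (the value below is hypothetical), the conversions look roughly like:

    from urllib.parse import quote as urlquote

    uri = b"URI:CHK:aaaa:bbbb:3:10:1234"        # hypothetical capability URI
    text_uri = uri.decode("utf-8")              # bytes -> text before quoting
    link = "/uri/" + urlquote(text_uri)         # text, for rendering into a tag
    header = urlquote(text_uri).encode("utf-8") # back to bytes for a redirect target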

@ -158,7 +159,7 @@ def POSTUnlinkedCreateDirectory(req, client):

redirect = get_arg(req, "redirect_to_result", "false")
if boolean_of_arg(redirect):
def _then_redir(res):
new_url = "uri/" + urllib.quote(res.get_uri())
new_url = "uri/" + urlquote(res.get_uri())
req.setResponseCode(http.SEE_OTHER) # 303
req.setHeader('location', new_url)
return ''

@ -176,7 +177,7 @@ def POSTUnlinkedCreateDirectoryWithChildren(req, client):

redirect = get_arg(req, "redirect_to_result", "false")
if boolean_of_arg(redirect):
def _then_redir(res):
new_url = "uri/" + urllib.quote(res.get_uri())
new_url = "uri/" + urlquote(res.get_uri())
req.setResponseCode(http.SEE_OTHER) # 303
req.setHeader('location', new_url)
return ''

@ -194,7 +195,7 @@ def POSTUnlinkedCreateImmutableDirectory(req, client):

redirect = get_arg(req, "redirect_to_result", "false")
if boolean_of_arg(redirect):
def _then_redir(res):
new_url = "uri/" + urllib.quote(res.get_uri())
new_url = "uri/" + urlquote(res.get_uri())
req.setResponseCode(http.SEE_OTHER) # 303
req.setHeader('location', new_url)
return ''

@ -44,6 +44,43 @@ from .web.storage_plugins import (

StoragePlugins,
)

if PY2:
FileUploadFieldStorage = FieldStorage
else:
class FileUploadFieldStorage(FieldStorage):
"""
Do terrible things to ensure files are still bytes.

On Python 2, uploaded files were always bytes. On Python 3, there's a
heuristic: if the filename is set on a field, it's assumed to be a file
upload and therefore bytes. If no filename is set, it's Unicode.

Unfortunately, we always want it to be bytes, and Tahoe-LAFS also
enables setting the filename not via the MIME filename, but via a
separate field called "name".

Thus we need to do this ridiculous workaround. Mypy doesn't like it
either, thus the ``# type: ignore`` below.

Source for idea:
https://mail.python.org/pipermail/python-dev/2017-February/147402.html
"""
@property # type: ignore
def filename(self):
if self.name == "file" and not self._mime_filename:
# We use the file field to upload files, see directory.py's
# _POST_upload. Lack of _mime_filename means we need to trick
# FieldStorage into thinking there is a filename so it'll
# return bytes.
return "unknown-filename"
return self._mime_filename

@filename.setter
def filename(self, value):
self._mime_filename = value
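
The heuristic that docstring describes can be observed directly in the standard library parser; a sketch of the assumed Python 3 behavior (not part of this diff) shows why the property above has to fake a filename:

    from io import BytesIO
    from cgi import FieldStorage

    body = (b'--xyz\r\n'
            b'Content-Disposition: form-data; name="file"\r\n'
            b'\r\n'
            b'file contents\r\n'
            b'--xyz--\r\n')
    headers = {'content-type': 'multipart/form-data; boundary=xyz',
               'content-length': str(len(body))}
    fields = FieldStorage(BytesIO(body), headers,
                          environ={'REQUEST_METHOD': 'POST'})
    # The part carries no filename= parameter, so Python 3 decodes it:
    print(type(fields['file'].value))   # <class 'str'>, not bytes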

class TahoeLAFSRequest(Request, object):
"""
``TahoeLAFSRequest`` adds several features to a Twisted Web ``Request``

@ -94,7 +131,8 @@ class TahoeLAFSRequest(Request, object):

headers['content-length'] = str(self.content.tell())
self.content.seek(0)

self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'})
self.fields = FileUploadFieldStorage(
self.content, headers, environ={'REQUEST_METHOD': 'POST'})
self.content.seek(0)

self._tahoeLAFSSecurityPolicy()

@ -211,7 +249,7 @@ class WebishServer(service.MultiService):

# use to test ophandle expiration.
self._operations = OphandleTable(clock)
self._operations.setServiceParent(self)
self.root.putChild("operations", self._operations)
self.root.putChild(b"operations", self._operations)

self.root.putChild(b"storage-plugins", StoragePlugins(client))

@ -220,7 +258,7 @@ class WebishServer(service.MultiService):

self.site = TahoeLAFSSite(tempdir, self.root)
self.staticdir = staticdir # so tests can check
if staticdir:
self.root.putChild("static", static.File(staticdir))
self.root.putChild(b"static", static.File(staticdir))
if re.search(r'^\d', webport):
webport = "tcp:"+webport # twisted warns about bare "0" or "3456"
# strports must be native strings.

@ -1,29 +1,123 @@

from __future__ import print_function

done = False
# This code isn't loadable or sensible except on Windows. Importers all know
# this and are careful. Normally I would just let an import error from ctypes
# explain any mistakes but Mypy also needs some help here. This assert
# explains to it that this module is Windows-only. This prevents errors about
# ctypes.windll and such which only exist when running on Windows.
#
# Beware of the limitations of the Mypy AST analyzer. The check needs to take
# exactly this form or it may not be recognized.
#
# https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks
import sys
assert sys.platform == "win32"
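
For illustration (not from the diff): the Mypy documentation linked above also recognizes the equivalent `if`-form for platform narrowing, which is handy when a module must still import on other platforms:

    import sys
    import ctypes

    if sys.platform == "win32":             # recognized by Mypy's analyzer
        kernel32 = ctypes.windll.kernel32   # windll only type-checks on win32
    else:
        kernel32 = None                     # non-Windows fallback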

import codecs, re
from functools import partial

from ctypes import WINFUNCTYPE, windll, POINTER, c_int, WinError, byref, get_last_error
from ctypes.wintypes import BOOL, HANDLE, DWORD, LPWSTR, LPCWSTR, LPVOID

# <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx>
from win32api import (
STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE,
SetErrorMode,

# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
GetStdHandle,
)
from win32con import (
SEM_FAILCRITICALERRORS,
SEM_NOOPENFILEERRORBOX,
)

from win32file import (
INVALID_HANDLE_VALUE,
FILE_TYPE_CHAR,

# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
GetFileType,
)

from allmydata.util import (
log,
)

# Keep track of whether `initialize` has run so we don't do any of the
# initialization more than once.
_done = False

#
# pywin32 for Python 2.7 does not bind any of these *W variants so we do it
# ourselves.
#

# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms687401%28v=vs.85%29.aspx>
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
WriteConsoleW = WINFUNCTYPE(
BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID,
use_last_error=True
)(("WriteConsoleW", windll.kernel32))

# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms683156%28v=vs.85%29.aspx>
GetCommandLineW = WINFUNCTYPE(
LPWSTR,
use_last_error=True
)(("GetCommandLineW", windll.kernel32))

# <https://msdn.microsoft.com/en-us/library/windows/desktop/bb776391%28v=vs.85%29.aspx>
CommandLineToArgvW = WINFUNCTYPE(
POINTER(LPWSTR), LPCWSTR, POINTER(c_int),
use_last_error=True
)(("CommandLineToArgvW", windll.shell32))

# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetConsoleMode = WINFUNCTYPE(
BOOL, HANDLE, POINTER(DWORD),
use_last_error=True
)(("GetConsoleMode", windll.kernel32))

STDOUT_FILENO = 1
STDERR_FILENO = 2

def get_argv():
"""
:return [unicode]: The argument list this process was invoked with, as
unicode.

Python 2 does not do a good job exposing this information in
``sys.argv`` on Windows so this code re-retrieves the underlying
information using Windows API calls and massages it into the right
shape.
"""
command_line = GetCommandLineW()
argc = c_int(0)
argv_unicode = CommandLineToArgvW(command_line, byref(argc))
if argv_unicode is None:
raise WinError(get_last_error())

# Convert it to a normal Python list
return list(
argv_unicode[i]
for i
in range(argc.value)
)
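
A hedged usage sketch: whatever byte-mangled form `sys.argv` takes on Python 2, the helper hands back proper unicode strings:

    argv = get_argv()   # e.g. [u'C:\\Python27\\python.exe', u'tahoe', u'--version']
    for arg in argv:
        assert isinstance(arg, type(u""))   # every element is unicode text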

def initialize():
global done
global _done
import sys
if sys.platform != "win32" or done:
if sys.platform != "win32" or _done:
return True
done = True

import codecs, re
from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_int, get_last_error
from ctypes.wintypes import BOOL, HANDLE, DWORD, UINT, LPWSTR, LPCWSTR, LPVOID

from allmydata.util import log
from allmydata.util.encodingutil import canonical_encoding

# <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx>
SetErrorMode = WINFUNCTYPE(
UINT, UINT,
use_last_error=True
)(("SetErrorMode", windll.kernel32))

SEM_FAILCRITICALERRORS = 0x0001
SEM_NOOPENFILEERRORBOX = 0x8000
_done = True

SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)

@ -33,10 +127,12 @@ def initialize():

# which makes for frustrating debugging if stderr is directed to our wrapper.
# So be paranoid about catching errors and reporting them to original_stderr,
# so that we can at least see them.
def _complain(message):
print(isinstance(message, str) and message or repr(message), file=original_stderr)
def _complain(output_file, message):
print(isinstance(message, str) and message or repr(message), file=output_file)
log.msg(message, level=log.WEIRD)

_complain = partial(_complain, original_stderr)

# Work around <http://bugs.python.org/issue6058>.
codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)

@ -46,45 +142,6 @@ def initialize():

# and TZOmegaTZIOY
# <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
# <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
#
# <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
#
# <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);

GetStdHandle = WINFUNCTYPE(
HANDLE, DWORD,
use_last_error=True
)(("GetStdHandle", windll.kernel32))

STD_OUTPUT_HANDLE = DWORD(-11)
STD_ERROR_HANDLE = DWORD(-12)

GetFileType = WINFUNCTYPE(
DWORD, DWORD,
use_last_error=True
)(("GetFileType", windll.kernel32))

FILE_TYPE_CHAR = 0x0002
FILE_TYPE_REMOTE = 0x8000

GetConsoleMode = WINFUNCTYPE(
BOOL, HANDLE, POINTER(DWORD),
use_last_error=True
)(("GetConsoleMode", windll.kernel32))

INVALID_HANDLE_VALUE = DWORD(-1).value

def not_a_console(handle):
if handle == INVALID_HANDLE_VALUE or handle is None:
return True
return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
or GetConsoleMode(handle, byref(DWORD())) == 0)

old_stdout_fileno = None
old_stderr_fileno = None
if hasattr(sys.stdout, 'fileno'):

@ -92,126 +149,33 @@ def initialize():

if hasattr(sys.stderr, 'fileno'):
old_stderr_fileno = sys.stderr.fileno()

STDOUT_FILENO = 1
STDERR_FILENO = 2
real_stdout = (old_stdout_fileno == STDOUT_FILENO)
real_stderr = (old_stderr_fileno == STDERR_FILENO)

if real_stdout:
hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
if not_a_console(hStdout):
if not a_console(hStdout):
real_stdout = False

if real_stderr:
hStderr = GetStdHandle(STD_ERROR_HANDLE)
if not_a_console(hStderr):
if not a_console(hStderr):
real_stderr = False

if real_stdout or real_stderr:
# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms687401%28v=vs.85%29.aspx>
# BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
# LPDWORD lpCharsWritten, LPVOID lpReserved);
if real_stdout:
sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>', _complain)
else:
sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>', _complain)

WriteConsoleW = WINFUNCTYPE(
BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID,
use_last_error=True
)(("WriteConsoleW", windll.kernel32))

class UnicodeOutput(object):
def __init__(self, hConsole, stream, fileno, name):
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name
if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8':
log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" %
(name, stream, stream.encoding), level=log.CURIOUS)
self.flush()

def isatty(self):
return False
def close(self):
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
return self._fileno
def flush(self):
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
_complain("%s.flush: %r from %r" % (self.name, e, self._stream))
raise

def write(self, text):
try:
if self._hConsole is None:
if isinstance(text, unicode):
text = text.encode('utf-8')
self._stream.write(text)
else:
if not isinstance(text, unicode):
text = str(text).decode('utf-8')
remaining = len(text)
while remaining > 0:
n = DWORD(0)
# There is a shorter-than-documented limitation on the length of the string
# passed to WriteConsoleW (see #1232).
retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None)
if retval == 0:
raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),))
if n.value == 0:
raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,))
remaining -= n.value
if remaining == 0: break
text = text[n.value:]
except Exception as e:
_complain("%s.write: %r" % (self.name, e))
raise

def writelines(self, lines):
try:
for line in lines:
self.write(line)
except Exception as e:
_complain("%s.writelines: %r" % (self.name, e))
raise

if real_stdout:
sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>')
else:
sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>')

if real_stderr:
sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>')
else:
sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>')
if real_stderr:
sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>', _complain)
else:
sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>', _complain)
except Exception as e:
_complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))

# This works around <http://bugs.python.org/issue2128>.

# <https://msdn.microsoft.com/en-us/library/windows/desktop/ms683156%28v=vs.85%29.aspx>
GetCommandLineW = WINFUNCTYPE(
LPWSTR,
use_last_error=True
)(("GetCommandLineW", windll.kernel32))

# <https://msdn.microsoft.com/en-us/library/windows/desktop/bb776391%28v=vs.85%29.aspx>
CommandLineToArgvW = WINFUNCTYPE(
POINTER(LPWSTR), LPCWSTR, POINTER(c_int),
use_last_error=True
)(("CommandLineToArgvW", windll.shell32))

argc = c_int(0)
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
if argv_unicode is None:
raise WinError(get_last_error())

# Because of <http://bugs.python.org/issue8775> (and similar limitations in
# twisted), the 'bin/tahoe' script cannot invoke us with the actual Unicode arguments.
# Instead it "mangles" or escapes them using \x7F as an escape character, which we

@ -224,11 +188,12 @@ def initialize():

s,
)

argv_unicode = get_argv()
try:
argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
argv = [unmangle(argv_u).encode('utf-8') for argv_u in argv_unicode]
except Exception as e:
_complain("%s: could not unmangle Unicode arguments.\n%r"
% (sys.argv[0], [argv_unicode[i] for i in xrange(0, argc.value)]))
% (sys.argv[0], argv_unicode))
raise

# Take only the suffix with the same number of arguments as sys.argv.

@ -240,3 +205,112 @@ def initialize():

sys.argv = argv[-len(sys.argv):]
if sys.argv[0].endswith('.pyscript'):
sys.argv[0] = sys.argv[0][:-9]

def a_console(handle):
"""
:return: ``True`` if ``handle`` refers to a console, ``False`` otherwise.
"""
if handle == INVALID_HANDLE_VALUE:
return False
return (
# It's a character file (eg a printer or a console)
GetFileType(handle) == FILE_TYPE_CHAR and
# Checking the console mode doesn't fail (thus it's a console)
GetConsoleMode(handle, byref(DWORD())) != 0
)
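
A sketch of how the callers in `initialize` use this predicate: fetch a standard handle and pick the write strategy accordingly (names are from this module):

    hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
    if a_console(hStdout):
        pass   # real console: hand unicode to Windows via WriteConsoleW
    else:
        pass   # redirected to a file/pipe: encode to UTF-8, write bytes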

class UnicodeOutput(object):
"""
``UnicodeOutput`` is a file-like object that encodes unicode to UTF-8 and
writes it to another file or writes unicode natively to the Windows
console.
"""
def __init__(self, hConsole, stream, fileno, name, _complain):
"""
:param hConsole: ``None`` or a handle on the console to which to write
unicode. Mutually exclusive with ``stream``.

:param stream: ``None`` or a file-like object to which to write bytes.

:param fileno: A result to hand back from method of the same name.

:param name: A human-friendly identifier for this output object.

:param _complain: A one-argument callable which accepts bytes to be
written when there's a problem. Care should be taken to not make
this do a write on this object.
"""
self._hConsole = hConsole
self._stream = stream
self._fileno = fileno
self.closed = False
self.softspace = False
self.mode = 'w'
self.encoding = 'utf-8'
self.name = name

self._complain = _complain

from allmydata.util.encodingutil import canonical_encoding
from allmydata.util import log
if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8':
log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" %
(name, stream, stream.encoding), level=log.CURIOUS)
self.flush()

def isatty(self):
return False
def close(self):
# don't really close the handle, that would only cause problems
self.closed = True
def fileno(self):
return self._fileno
def flush(self):
if self._hConsole is None:
try:
self._stream.flush()
except Exception as e:
self._complain("%s.flush: %r from %r" % (self.name, e, self._stream))
raise

def write(self, text):
try:
if self._hConsole is None:
# There is no Windows console available. That means we are
# responsible for encoding the unicode to a byte string to
# write it to a Python file object.
if isinstance(text, unicode):
text = text.encode('utf-8')
self._stream.write(text)
else:
# There is a Windows console available. That means Windows is
# responsible for dealing with the unicode itself.
if not isinstance(text, unicode):
text = str(text).decode('utf-8')
remaining = len(text)
while remaining > 0:
n = DWORD(0)
# There is a shorter-than-documented limitation on the
# length of the string passed to WriteConsoleW (see
# #1232).
retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None)
if retval == 0:
raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),))
if n.value == 0:
raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,))
remaining -= n.value
if remaining == 0: break
text = text[n.value:]
except Exception as e:
self._complain("%s.write: %r" % (self.name, e))
raise

def writelines(self, lines):
try:
for line in lines:
self.write(line)
except Exception as e:
self._complain("%s.writelines: %r" % (self.name, e))
raise
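
A Python 2 usage sketch (hypothetical values): wrapping a redirected stream so unicode writes come out as UTF-8 bytes, with a no-op complaint handler:

    import sys

    quiet = lambda message: None   # stand-in for the module's _complain
    out = UnicodeOutput(None, sys.stdout, 1, '<example stdout>', quiet)
    out.write(u'snowman: \u2603\n')   # reaches sys.stdout UTF-8-encoded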

tox.ini

@ -77,7 +77,7 @@ setenv =

COVERAGE_PROCESS_START=.coveragerc
commands =
# NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
py.test --coverage -v {posargs:integration}
py.test --timeout=1800 --coverage -v {posargs:integration}
coverage combine
coverage report

@ -114,6 +114,7 @@ commands =

[testenv:typechecks]
basepython = python3
skip_install = True
deps =
mypy