Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-22 06:17:50 +00:00)

commit b139876a15: Merge remote-tracking branch 'origin/master' into 3536.sphinx-warnings
@@ -91,6 +91,9 @@ workflows:
       - "build-porting-depgraph":
           <<: *DOCKERHUB_CONTEXT

+      - "typechecks":
+          <<: *DOCKERHUB_CONTEXT
+
     images:
       # Build the Docker images used by the ci jobs. This makes the ci jobs
       # faster and takes various spurious failures out of the critical path.
@@ -475,6 +478,18 @@ jobs:
           . /tmp/venv/bin/activate
           ./misc/python3/depgraph.sh

+  typechecks:
+    docker:
+      - <<: *DOCKERHUB_AUTH
+        image: "tahoelafsci/ubuntu:18.04-py3"
+
+    steps:
+      - "checkout"
+      - run:
+          name: "Validate Types"
+          command: |
+            /tmp/venv/bin/tox -e typechecks
+
   build-image: &BUILD_IMAGE
   # This is a template for a job to build a Docker image that has as much of
   # the setup as we can manage already done and baked in. This cuts down on

.github/workflows/ci.yml (vendored)
@@ -30,17 +30,37 @@ jobs:
         with:
           args: install vcpython27

+      # See https://github.com/actions/checkout. A fetch-depth of 0
+      # fetches all tags and branches.
       - name: Check out Tahoe-LAFS sources
         uses: actions/checkout@v2
-
-      - name: Fetch all history for all tags and branches
-        run: git fetch --prune --unshallow
+        with:
+          fetch-depth: 0

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v1
         with:
           python-version: ${{ matrix.python-version }}

+      # To use pip caching with GitHub Actions in an OS-independent
+      # manner, we need `pip cache dir` command, which became
+      # available since pip v20.1+. At the time of writing this,
+      # GitHub Actions offers pip v20.3.3 for both ubuntu-latest and
+      # windows-latest, and pip v20.3.1 for macos-latest.
+      - name: Get pip cache directory
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+
+      # See https://github.com/actions/cache
+      - name: Use pip cache
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-
+
       - name: Install Python packages
         run: |
           pip install --upgrade codecov tox setuptools
@@ -103,15 +123,27 @@ jobs:

       - name: Check out Tahoe-LAFS sources
         uses: actions/checkout@v2
-
-      - name: Fetch all history for all tags and branches
-        run: git fetch --prune --unshallow
+        with:
+          fetch-depth: 0

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v1
         with:
           python-version: ${{ matrix.python-version }}

+      - name: Get pip cache directory
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+
+      - name: Use pip cache
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-
+
       - name: Install Python packages
         run: |
           pip install --upgrade tox
@@ -155,15 +187,27 @@ jobs:

       - name: Check out Tahoe-LAFS sources
         uses: actions/checkout@v2
-
-      - name: Fetch all history for all tags and branches
-        run: git fetch --prune --unshallow
+        with:
+          fetch-depth: 0

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v1
         with:
           python-version: ${{ matrix.python-version }}

+      - name: Get pip cache directory
+        id: pip-cache
+        run: |
+          echo "::set-output name=dir::$(pip cache dir)"
+
+      - name: Use pip cache
+        uses: actions/cache@v2
+        with:
+          path: ${{ steps.pip-cache.outputs.dir }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
+          restore-keys: |
+            ${{ runner.os }}-pip-
+
       - name: Install Python packages
         run: |
           pip install --upgrade tox

CREDITS
@@ -207,3 +207,8 @@ D: various bug-fixes and features
 N: Viktoriia Savchuk
 W: https://twitter.com/viktoriiasvchk
 D: Developer community focused improvements on the README file.
+
+N: Lukas Pirl
+E: tahoe@lukas-pirl.de
+W: http://lukas-pirl.de
+D: Buildslaves (Debian, Fedora, CentOS; 2016-2021)

@@ -67,12 +67,12 @@ Here's how it works:
 A "storage grid" is made up of a number of storage servers. A storage server
 has direct attached storage (typically one or more hard disks). A "gateway"
 communicates with storage nodes, and uses them to provide access to the
-grid over protocols such as HTTP(S), SFTP or FTP.
+grid over protocols such as HTTP(S) and SFTP.

 Note that you can find "client" used to refer to gateway nodes (which act as
 a client to storage servers), and also to processes or programs connecting to
 a gateway node and performing operations on the grid -- for example, a CLI
-command, Web browser, SFTP client, or FTP client.
+command, Web browser, or SFTP client.

 Users do not rely on storage servers to provide *confidentiality* nor
 *integrity* for their data -- instead all of the data is encrypted and
@ -81,7 +81,6 @@ Client/server nodes provide one or more of the following services:
|
|||||||
|
|
||||||
* web-API service
|
* web-API service
|
||||||
* SFTP service
|
* SFTP service
|
||||||
* FTP service
|
|
||||||
* helper service
|
* helper service
|
||||||
* storage service.
|
* storage service.
|
||||||
|
|
||||||
@ -708,12 +707,12 @@ CLI
|
|||||||
file store, uploading/downloading files, and creating/running Tahoe
|
file store, uploading/downloading files, and creating/running Tahoe
|
||||||
nodes. See :doc:`frontends/CLI` for details.
|
nodes. See :doc:`frontends/CLI` for details.
|
||||||
|
|
||||||
SFTP, FTP
|
SFTP
|
||||||
|
|
||||||
Tahoe can also run both SFTP and FTP servers, and map a username/password
|
Tahoe can also run SFTP servers, and map a username/password
|
||||||
pair to a top-level Tahoe directory. See :doc:`frontends/FTP-and-SFTP`
|
pair to a top-level Tahoe directory. See :doc:`frontends/FTP-and-SFTP`
|
||||||
for instructions on configuring these services, and the ``[sftpd]`` and
|
for instructions on configuring this service, and the ``[sftpd]``
|
||||||
``[ftpd]`` sections of ``tahoe.cfg``.
|
section of ``tahoe.cfg``.
|
||||||
|
|
||||||
|
|
||||||
Storage Server Configuration
|
Storage Server Configuration
|
||||||
|
@ -1,22 +1,21 @@
|
|||||||
.. -*- coding: utf-8-with-signature -*-
|
.. -*- coding: utf-8-with-signature -*-
|
||||||
|
|
||||||
=================================
|
========================
|
||||||
Tahoe-LAFS SFTP and FTP Frontends
|
Tahoe-LAFS SFTP Frontend
|
||||||
=================================
|
========================
|
||||||
|
|
||||||
1. `SFTP/FTP Background`_
|
1. `SFTP Background`_
|
||||||
2. `Tahoe-LAFS Support`_
|
2. `Tahoe-LAFS Support`_
|
||||||
3. `Creating an Account File`_
|
3. `Creating an Account File`_
|
||||||
4. `Running An Account Server (accounts.url)`_
|
4. `Running An Account Server (accounts.url)`_
|
||||||
5. `Configuring SFTP Access`_
|
5. `Configuring SFTP Access`_
|
||||||
6. `Configuring FTP Access`_
|
6. `Dependencies`_
|
||||||
7. `Dependencies`_
|
7. `Immutable and Mutable Files`_
|
||||||
8. `Immutable and Mutable Files`_
|
8. `Known Issues`_
|
||||||
9. `Known Issues`_
|
|
||||||
|
|
||||||
|
|
||||||
SFTP/FTP Background
|
SFTP Background
|
||||||
===================
|
===============
|
||||||
|
|
||||||
FTP is the venerable internet file-transfer protocol, first developed in
|
FTP is the venerable internet file-transfer protocol, first developed in
|
||||||
1971. The FTP server usually listens on port 21. A separate connection is
|
1971. The FTP server usually listens on port 21. A separate connection is
|
||||||
@@ -33,20 +32,18 @@ Both FTP and SFTP were developed assuming a UNIX-like server, with accounts
 and passwords, octal file modes (user/group/other, read/write/execute), and
 ctime/mtime timestamps.

-We recommend SFTP over FTP, because the protocol is better, and the server
-implementation in Tahoe-LAFS is more complete. See `Known Issues`_, below,
-for details.
+Previous versions of Tahoe-LAFS supported FTP, but now only the superior SFTP
+frontend is supported. See `Known Issues`_, below, for details on the
+limitations of SFTP.

 Tahoe-LAFS Support
 ==================

 All Tahoe-LAFS client nodes can run a frontend SFTP server, allowing regular
 SFTP clients (like ``/usr/bin/sftp``, the ``sshfs`` FUSE plugin, and many
-others) to access the file store. They can also run an FTP server, so FTP
-clients (like ``/usr/bin/ftp``, ``ncftp``, and others) can too. These
-frontends sit at the same level as the web-API interface.
+others) to access the file store.

-Since Tahoe-LAFS does not use user accounts or passwords, the SFTP/FTP
+Since Tahoe-LAFS does not use user accounts or passwords, the SFTP
 servers must be configured with a way to first authenticate a user (confirm
 that a prospective client has a legitimate claim to whatever authorities we
 might grant a particular user), and second to decide what directory cap
@ -173,39 +170,6 @@ clients and with the sshfs filesystem, see wiki:SftpFrontend_
|
|||||||
|
|
||||||
.. _wiki:SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend
|
.. _wiki:SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend
|
||||||
|
|
||||||
Configuring FTP Access
|
|
||||||
======================
|
|
||||||
|
|
||||||
To enable the FTP server with an accounts file, add the following lines to
|
|
||||||
the BASEDIR/tahoe.cfg file::
|
|
||||||
|
|
||||||
[ftpd]
|
|
||||||
enabled = true
|
|
||||||
port = tcp:8021:interface=127.0.0.1
|
|
||||||
accounts.file = private/accounts
|
|
||||||
|
|
||||||
The FTP server will listen on the given port number and on the loopback
|
|
||||||
interface only. The "accounts.file" pathname will be interpreted relative to
|
|
||||||
the node's BASEDIR.
|
|
||||||
|
|
||||||
To enable the FTP server with an account server instead, provide the URL of
|
|
||||||
that server in an "accounts.url" directive::
|
|
||||||
|
|
||||||
[ftpd]
|
|
||||||
enabled = true
|
|
||||||
port = tcp:8021:interface=127.0.0.1
|
|
||||||
accounts.url = https://example.com/login
|
|
||||||
|
|
||||||
You can provide both accounts.file and accounts.url, although it probably
|
|
||||||
isn't very useful except for testing.
|
|
||||||
|
|
||||||
FTP provides no security, and so your password or caps could be eavesdropped
|
|
||||||
if you connect to the FTP server remotely. The examples above include
|
|
||||||
":interface=127.0.0.1" in the "port" option, which causes the server to only
|
|
||||||
accept connections from localhost.
|
|
||||||
|
|
||||||
Public key authentication is not supported for FTP.
|
|
||||||
|
|
||||||
Dependencies
|
Dependencies
|
||||||
============
|
============
|
||||||
|
|
||||||
@ -216,7 +180,7 @@ separately: debian puts it in the "python-twisted-conch" package.
|
|||||||
Immutable and Mutable Files
|
Immutable and Mutable Files
|
||||||
===========================
|
===========================
|
||||||
|
|
||||||
All files created via SFTP (and FTP) are immutable files. However, files can
|
All files created via SFTP are immutable files. However, files can
|
||||||
only be created in writeable directories, which allows the directory entry to
|
only be created in writeable directories, which allows the directory entry to
|
||||||
be relinked to a different file. Normally, when the path of an immutable file
|
be relinked to a different file. Normally, when the path of an immutable file
|
||||||
is opened for writing by SFTP, the directory entry is relinked to another
|
is opened for writing by SFTP, the directory entry is relinked to another
|
||||||
@@ -256,18 +220,3 @@ See also wiki:SftpFrontend_.

 .. _ticket #1059: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1059
 .. _ticket #1089: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1089
-
-Known Issues in the FTP Frontend
---------------------------------
-
-Mutable files are not supported by the FTP frontend (`ticket #680`_).
-
-Non-ASCII filenames are not supported by FTP (`ticket #682`_).
-
-The FTP frontend sometimes fails to report errors, for example if an upload
-fails because it does meet the "servers of happiness" threshold (`ticket
-#1081`_).
-
-.. _ticket #680: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/680
-.. _ticket #682: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/682
-.. _ticket #1081: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1081
@@ -2158,7 +2158,7 @@ When modifying the file, be careful to update it atomically, otherwise a
 request may arrive while the file is only halfway written, and the partial
 file may be incorrectly parsed.

-The blacklist is applied to all access paths (including SFTP, FTP, and CLI
+The blacklist is applied to all access paths (including SFTP and CLI
 operations), not just the web-API. The blacklist also applies to directories.
 If a directory is blacklisted, the gateway will refuse access to both that
 directory and any child files/directories underneath it, when accessed via
@@ -122,7 +122,7 @@ Who should consider using a Helper?
 * clients who experience problems with TCP connection fairness: if other
   programs or machines in the same home are getting less than their fair
   share of upload bandwidth. If the connection is being shared fairly, then
-  a Tahoe upload that is happening at the same time as a single FTP upload
+  a Tahoe upload that is happening at the same time as a single SFTP upload
   should get half the bandwidth.
 * clients who have been given the helper.furl by someone who is running a
   Helper and is willing to let them use it
@@ -23,7 +23,7 @@ Known Issues in Tahoe-LAFS v1.10.3, released 30-Mar-2016
 * `Disclosure of file through embedded hyperlinks or JavaScript in that file`_
 * `Command-line arguments are leaked to other local users`_
 * `Capabilities may be leaked to web browser phishing filter / "safe browsing" servers`_
-* `Known issues in the FTP and SFTP frontends`_
+* `Known issues in the SFTP frontend`_
 * `Traffic analysis based on sizes of files/directories, storage indices, and timing`_
 * `Privacy leak via Google Chart API link in map-update timing web page`_
@@ -213,8 +213,8 @@ To disable the filter in Chrome:

 ----

-Known issues in the FTP and SFTP frontends
-------------------------------------------
+Known issues in the SFTP frontend
+---------------------------------

 These are documented in :doc:`frontends/FTP-and-SFTP` and on `the
 SftpFrontend page`_ on the wiki.
@@ -116,10 +116,10 @@ they will need to evaluate which contributors' signatures they trust.

 - when satisfied, sign the tarballs:

-  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
-  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
-  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.tar.gz
-  - gpg --pinentry=loopback --armor --sign dist/tahoe_lafs-1.15.0rc0.zip
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0-py2-none-any.whl
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.bz2
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.tar.gz
+  - gpg --pinentry=loopback --armor --detach-sign dist/tahoe_lafs-1.15.0rc0.zip


 Privileged Contributor
@@ -207,10 +207,10 @@ create a new directory and lose the capability to it, then you cannot
 access that directory ever again.


-The SFTP and FTP frontends
---------------------------
+The SFTP frontend
+-----------------

-You can access your Tahoe-LAFS grid via any SFTP_ or FTP_ client. See
+You can access your Tahoe-LAFS grid via any SFTP_ client. See
 :doc:`frontends/FTP-and-SFTP` for how to set this up. On most Unix
 platforms, you can also use SFTP to plug Tahoe-LAFS into your computer's
 local filesystem via ``sshfs``, but see the `FAQ about performance
@@ -220,7 +220,6 @@ The SftpFrontend_ page on the wiki has more information about using SFTP with
 Tahoe-LAFS.

 .. _SFTP: https://en.wikipedia.org/wiki/SSH_file_transfer_protocol
-.. _FTP: https://en.wikipedia.org/wiki/File_Transfer_Protocol
 .. _FAQ about performance problems: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/FAQ#Q23_FUSE
 .. _SftpFrontend: https://tahoe-lafs.org/trac/tahoe-lafs/wiki/SftpFrontend

mypy.ini (new file)
@@ -0,0 +1,3 @@
+[mypy]
+ignore_missing_imports = True
+plugins=mypy_zope:plugin
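
The new ``mypy.ini`` enables the ``mypy-zope`` plugin, which teaches mypy
about ``zope.interface`` declarations. A minimal sketch, with hypothetical
names not taken from this commit, of the kind of relationship the plugin
makes checkable::

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        def greet(name):
            """Return a greeting for the given name."""

    @implementer(IGreeter)
    class Greeter(object):
        # With plugins=mypy_zope:plugin, mypy understands that Greeter
        # instances provide IGreeter, so interface mismatches surface
        # during the new `tox -e typechecks` run.
        def greet(self, name):
            return "hello, %s" % (name,)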

newsfragments/2920.minor (new, empty file)

newsfragments/3399.feature (new file)
@@ -0,0 +1 @@
+Added 'typechecks' environment for tox running mypy and performing static typechecks.

newsfragments/3574.minor (new, empty file)
newsfragments/3576.minor (new, empty file)
newsfragments/3577.minor (new, empty file)
newsfragments/3582.minor (new, empty file)

newsfragments/3583.removed (new file)
@@ -0,0 +1 @@
+FTP is no longer supported by Tahoe-LAFS. Please use the SFTP support instead.

newsfragments/3587.minor (new file, a single blank line)

newsfragments/3589.minor (new, empty file)
newsfragments/3591.minor (new, empty file)
newsfragments/3594.minor (new, empty file)
newsfragments/3595.minor (new, empty file)

setup.py
@@ -63,12 +63,8 @@ install_requires = [
     # version of cryptography will *really* be installed.
     "cryptography >= 2.6",

-    # * We need Twisted 10.1.0 for the FTP frontend in order for
-    #   Twisted's FTP server to support asynchronous close.
     # * The SFTP frontend depends on Twisted 11.0.0 to fix the SSH server
     #   rekeying bug <https://twistedmatrix.com/trac/ticket/4395>
-    # * The FTP frontend depends on Twisted >= 11.1.0 for
-    #   filepath.Permissions
     # * The SFTP frontend and manhole depend on the conch extra. However, we
     #   can't explicitly declare that without an undesirable dependency on gmpy,
     #   as explained in ticket #2740.

@@ -14,7 +14,9 @@ __all__ = [

 __version__ = "unknown"
 try:
-    from allmydata._version import __version__
+    # type ignored as it fails in CI
+    # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
+    from allmydata._version import __version__  # type: ignore
 except ImportError:
     # We're running in a tree that hasn't run update_version, and didn't
     # come with a _version.py, so we don't know what our version is.
@@ -24,7 +26,9 @@ except ImportError:
     full_version = "unknown"
     branch = "unknown"
     try:
-        from allmydata._version import full_version, branch
+        # type ignored as it fails in CI
+        # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
+        from allmydata._version import full_version, branch  # type: ignore
     except ImportError:
         # We're running in a tree that hasn't run update_version, and didn't
         # come with a _version.py, so we don't know what our full version or

@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import os
@@ -86,12 +86,6 @@ _client_config = configutil.ValidConfiguration(
         "shares.total",
         "storage.plugins",
     ),
-    "ftpd": (
-        "accounts.file",
-        "accounts.url",
-        "enabled",
-        "port",
-    ),
     "storage": (
         "debug_discard",
         "enabled",
@@ -656,7 +650,6 @@ class _Client(node.Node, pollmixin.PollMixin):
             raise ValueError("config error: helper is enabled, but tub "
                              "is not listening ('tub.port=' is empty)")
         self.init_helper()
-        self.init_ftp_server()
         self.init_sftp_server()

     # If the node sees an exit_trigger file, it will poll every second to see
@@ -1032,18 +1025,6 @@ class _Client(node.Node, pollmixin.PollMixin):
             )
             ws.setServiceParent(self)

-    def init_ftp_server(self):
-        if self.config.get_config("ftpd", "enabled", False, boolean=True):
-            accountfile = self.config.get_config("ftpd", "accounts.file", None)
-            if accountfile:
-                accountfile = self.config.get_config_path(accountfile)
-            accounturl = self.config.get_config("ftpd", "accounts.url", None)
-            ftp_portstr = self.config.get_config("ftpd", "port", "8021")
-
-            from allmydata.frontends import ftpd
-            s = ftpd.FTPServer(self, accountfile, accounturl, ftp_portstr)
-            s.setServiceParent(self)
-
     def init_sftp_server(self):
         if self.config.get_config("sftpd", "enabled", False, boolean=True):
             accountfile = self.config.get_config("sftpd", "accounts.file", None)

@@ -57,6 +57,10 @@ class CRSEncoder(object):

         return defer.succeed((shares, desired_share_ids))

+    def encode_proposal(self, data, desired_share_ids=None):
+        raise NotImplementedError()
+
+
 @implementer(ICodecDecoder)
 class CRSDecoder(object):
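
Stubs like ``encode_proposal`` above (and similar ones added elsewhere in
this commit) appear to exist so that a class defines every method its
declared interface promises while still failing loudly if an unsupported
method is actually called; a small illustration of that trade-off, using
hypothetical names::

    # A loud stub fails at the call site with a clear NotImplementedError
    # instead of a confusing AttributeError from a missing method.
    class Encoder(object):
        def encode_proposal(self, data, desired_share_ids=None):
            raise NotImplementedError()

    try:
        Encoder().encode_proposal(b"data")
    except NotImplementedError:
        print("explicitly unsupported")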

@@ -1,4 +1,15 @@
-"""Implementation of the deep stats class."""
+"""Implementation of the deep stats class.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import math
@@ -13,7 +24,7 @@ from allmydata.util import mathutil
 class DeepStats(object):
     """Deep stats object.

-    Holds results of the deep-stats opetation.
+    Holds results of the deep-stats operation.
     Used for json generation in the API."""

     # Json API version.
@@ -121,7 +132,7 @@ class DeepStats(object):
             h[bucket] += 1

     def get_results(self):
-        """Returns deep-stats resutls."""
+        """Returns deep-stats results."""
         stats = self.stats.copy()
         for key in self.histograms:
             h = self.histograms[key]
@@ -568,7 +568,7 @@ class DirectoryNode(object):
         d = self.get_child_and_metadata(childnamex)
         return d

-    def set_uri(self, namex, writecap, readcap, metadata=None, overwrite=True):
+    def set_uri(self, namex, writecap, readcap=None, metadata=None, overwrite=True):
         precondition(isinstance(writecap, (bytes, type(None))), writecap)
         precondition(isinstance(readcap, (bytes, type(None))), readcap)
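
With ``readcap`` now defaulting to ``None``, which the precondition already
accepts, callers holding only a write capability can omit the argument; a
hypothetical call (``dirnode`` and ``writecap`` are stand-ins, not names
from the patch)::

    # The placeholder readcap argument is no longer required.
    d = dirnode.set_uri(u"child.txt", writecap)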

@@ -1,340 +0,0 @@
-from six import ensure_str
-
-from types import NoneType
-
-from zope.interface import implementer
-from twisted.application import service, strports
-from twisted.internet import defer
-from twisted.internet.interfaces import IConsumer
-from twisted.cred import portal
-from twisted.python import filepath
-from twisted.protocols import ftp
-
-from allmydata.interfaces import IDirectoryNode, ExistingChildError, \
-     NoSuchChildError
-from allmydata.immutable.upload import FileHandle
-from allmydata.util.fileutil import EncryptedTemporaryFile
-from allmydata.util.assertutil import precondition
-
-@implementer(ftp.IReadFile)
-class ReadFile(object):
-    def __init__(self, node):
-        self.node = node
-    def send(self, consumer):
-        d = self.node.read(consumer)
-        return d # when consumed
-
-@implementer(IConsumer)
-class FileWriter(object):
-
-    def registerProducer(self, producer, streaming):
-        if not streaming:
-            raise NotImplementedError("Non-streaming producer not supported.")
-        # we write the data to a temporary file, since Tahoe can't do
-        # streaming upload yet.
-        self.f = EncryptedTemporaryFile()
-        return None
-
-    def unregisterProducer(self):
-        # the upload actually happens in WriteFile.close()
-        pass
-
-    def write(self, data):
-        self.f.write(data)
-
-@implementer(ftp.IWriteFile)
-class WriteFile(object):
-
-    def __init__(self, parent, childname, convergence):
-        self.parent = parent
-        self.childname = childname
-        self.convergence = convergence
-
-    def receive(self):
-        self.c = FileWriter()
-        return defer.succeed(self.c)
-
-    def close(self):
-        u = FileHandle(self.c.f, self.convergence)
-        d = self.parent.add_file(self.childname, u)
-        return d
-
-
-class NoParentError(Exception):
-    pass
-
-# filepath.Permissions was added in Twisted-11.1.0, which we require. Twisted
-# <15.0.0 expected an int, and only does '&' on it. Twisted >=15.0.0 expects
-# a filepath.Permissions. This satisfies both.
-
-class IntishPermissions(filepath.Permissions):
-    def __init__(self, statModeInt):
-        self._tahoe_statModeInt = statModeInt
-        filepath.Permissions.__init__(self, statModeInt)
-    def __and__(self, other):
-        return self._tahoe_statModeInt & other
-
-@implementer(ftp.IFTPShell)
-class Handler(object):
-    def __init__(self, client, rootnode, username, convergence):
-        self.client = client
-        self.root = rootnode
-        self.username = username
-        self.convergence = convergence
-
-    def makeDirectory(self, path):
-        d = self._get_root(path)
-        d.addCallback(lambda root_and_path:
-                      self._get_or_create_directories(root_and_path[0], root_and_path[1]))
-        return d
-
-    def _get_or_create_directories(self, node, path):
-        if not IDirectoryNode.providedBy(node):
-            # unfortunately it is too late to provide the name of the
-            # blocking directory in the error message.
-            raise ftp.FileExistsError("cannot create directory because there "
-                                      "is a file in the way")
-        if not path:
-            return defer.succeed(node)
-        d = node.get(path[0])
-        def _maybe_create(f):
-            f.trap(NoSuchChildError)
-            return node.create_subdirectory(path[0])
-        d.addErrback(_maybe_create)
-        d.addCallback(self._get_or_create_directories, path[1:])
-        return d
-
-    def _get_parent(self, path):
-        # fire with (parentnode, childname)
-        path = [unicode(p) for p in path]
-        if not path:
-            raise NoParentError
-        childname = path[-1]
-        d = self._get_root(path)
-        def _got_root(root_and_path):
-            (root, path) = root_and_path
-            if not path:
-                raise NoParentError
-            return root.get_child_at_path(path[:-1])
-        d.addCallback(_got_root)
-        def _got_parent(parent):
-            return (parent, childname)
-        d.addCallback(_got_parent)
-        return d
-
-    def _remove_thing(self, path, must_be_directory=False, must_be_file=False):
-        d = defer.maybeDeferred(self._get_parent, path)
-        def _convert_error(f):
-            f.trap(NoParentError)
-            raise ftp.PermissionDeniedError("cannot delete root directory")
-        d.addErrback(_convert_error)
-        def _got_parent(parent_and_childname):
-            (parent, childname) = parent_and_childname
-            d = parent.get(childname)
-            def _got_child(child):
-                if must_be_directory and not IDirectoryNode.providedBy(child):
-                    raise ftp.IsNotADirectoryError("rmdir called on a file")
-                if must_be_file and IDirectoryNode.providedBy(child):
-                    raise ftp.IsADirectoryError("rmfile called on a directory")
-                return parent.delete(childname)
-            d.addCallback(_got_child)
-            d.addErrback(self._convert_error)
-            return d
-        d.addCallback(_got_parent)
-        return d
-
-    def removeDirectory(self, path):
-        return self._remove_thing(path, must_be_directory=True)
-
-    def removeFile(self, path):
-        return self._remove_thing(path, must_be_file=True)
-
-    def rename(self, fromPath, toPath):
-        # the target directory must already exist
-        d = self._get_parent(fromPath)
-        def _got_from_parent(fromparent_and_childname):
-            (fromparent, childname) = fromparent_and_childname
-            d = self._get_parent(toPath)
-            d.addCallback(lambda toparent_and_tochildname:
-                          fromparent.move_child_to(childname,
-                                                   toparent_and_tochildname[0], toparent_and_tochildname[1],
-                                                   overwrite=False))
-            return d
-        d.addCallback(_got_from_parent)
-        d.addErrback(self._convert_error)
-        return d
-
-    def access(self, path):
-        # we allow access to everything that exists. We are required to raise
-        # an error for paths that don't exist: FTP clients (at least ncftp)
-        # uses this to decide whether to mkdir or not.
-        d = self._get_node_and_metadata_for_path(path)
-        d.addErrback(self._convert_error)
-        d.addCallback(lambda res: None)
-        return d
-
-    def _convert_error(self, f):
-        if f.check(NoSuchChildError):
-            childname = f.value.args[0].encode("utf-8")
-            msg = "'%s' doesn't exist" % childname
-            raise ftp.FileNotFoundError(msg)
-        if f.check(ExistingChildError):
-            msg = f.value.args[0].encode("utf-8")
-            raise ftp.FileExistsError(msg)
-        return f
-
-    def _get_root(self, path):
-        # return (root, remaining_path)
-        path = [unicode(p) for p in path]
-        if path and path[0] == "uri":
-            d = defer.maybeDeferred(self.client.create_node_from_uri,
-                                    str(path[1]))
-            d.addCallback(lambda root: (root, path[2:]))
-        else:
-            d = defer.succeed((self.root,path))
-        return d
-
-    def _get_node_and_metadata_for_path(self, path):
-        d = self._get_root(path)
-        def _got_root(root_and_path):
-            (root,path) = root_and_path
-            if path:
-                return root.get_child_and_metadata_at_path(path)
-            else:
-                return (root,{})
-        d.addCallback(_got_root)
-        return d
-
-    def _populate_row(self, keys, childnode_and_metadata):
-        (childnode, metadata) = childnode_and_metadata
-        values = []
-        isdir = bool(IDirectoryNode.providedBy(childnode))
-        for key in keys:
-            if key == "size":
-                if isdir:
-                    value = 0
-                else:
-                    value = childnode.get_size() or 0
-            elif key == "directory":
-                value = isdir
-            elif key == "permissions":
-                # Twisted-14.0.2 (and earlier) expected an int, and used it
-                # in a rendering function that did (mode & NUMBER).
-                # Twisted-15.0.0 expects a
-                # twisted.python.filepath.Permissions , and calls its
-                # .shorthand() method. This provides both.
-                value = IntishPermissions(0o600)
-            elif key == "hardlinks":
-                value = 1
-            elif key == "modified":
-                # follow sftpd convention (i.e. linkmotime in preference to mtime)
-                if "linkmotime" in metadata.get("tahoe", {}):
-                    value = metadata["tahoe"]["linkmotime"]
-                else:
-                    value = metadata.get("mtime", 0)
-            elif key == "owner":
-                value = self.username
-            elif key == "group":
-                value = self.username
-            else:
-                value = "??"
-            values.append(value)
-        return values
-
-    def stat(self, path, keys=()):
-        # for files only, I think
-        d = self._get_node_and_metadata_for_path(path)
-        def _render(node_and_metadata):
-            (node, metadata) = node_and_metadata
-            assert not IDirectoryNode.providedBy(node)
-            return self._populate_row(keys, (node,metadata))
-        d.addCallback(_render)
-        d.addErrback(self._convert_error)
-        return d
-
-    def list(self, path, keys=()):
-        # the interface claims that path is a list of unicodes, but in
-        # practice it is not
-        d = self._get_node_and_metadata_for_path(path)
-        def _list(node_and_metadata):
-            (node, metadata) = node_and_metadata
-            if IDirectoryNode.providedBy(node):
-                return node.list()
-            return { path[-1]: (node, metadata) } # need last-edge metadata
-        d.addCallback(_list)
-        def _render(children):
-            results = []
-            for (name, childnode) in children.iteritems():
-                # the interface claims that the result should have a unicode
-                # object as the name, but it fails unless you give it a
-                # bytestring
-                results.append( (name.encode("utf-8"),
-                                 self._populate_row(keys, childnode) ) )
-            return results
-        d.addCallback(_render)
-        d.addErrback(self._convert_error)
-        return d
-
-    def openForReading(self, path):
-        d = self._get_node_and_metadata_for_path(path)
-        d.addCallback(lambda node_and_metadata: ReadFile(node_and_metadata[0]))
-        d.addErrback(self._convert_error)
-        return d
-
-    def openForWriting(self, path):
-        path = [unicode(p) for p in path]
-        if not path:
-            raise ftp.PermissionDeniedError("cannot STOR to root directory")
-        childname = path[-1]
-        d = self._get_root(path)
-        def _got_root(root_and_path):
-            (root, path) = root_and_path
-            if not path:
-                raise ftp.PermissionDeniedError("cannot STOR to root directory")
-            return root.get_child_at_path(path[:-1])
-        d.addCallback(_got_root)
-        def _got_parent(parent):
-            return WriteFile(parent, childname, self.convergence)
-        d.addCallback(_got_parent)
-        return d
-
-from allmydata.frontends.auth import AccountURLChecker, AccountFileChecker, NeedRootcapLookupScheme
-
-
-@implementer(portal.IRealm)
-class Dispatcher(object):
-    def __init__(self, client):
-        self.client = client
-
-    def requestAvatar(self, avatarID, mind, interface):
-        assert interface == ftp.IFTPShell
-        rootnode = self.client.create_node_from_uri(avatarID.rootcap)
-        convergence = self.client.convergence
-        s = Handler(self.client, rootnode, avatarID.username, convergence)
-        def logout(): pass
-        return (interface, s, None)
-
-
-class FTPServer(service.MultiService):
-    def __init__(self, client, accountfile, accounturl, ftp_portstr):
-        precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)
-        service.MultiService.__init__(self)
-
-        r = Dispatcher(client)
-        p = portal.Portal(r)
-
-        if accountfile:
-            c = AccountFileChecker(self, accountfile)
-            p.registerChecker(c)
-        if accounturl:
-            c = AccountURLChecker(self, accounturl)
-            p.registerChecker(c)
-        if not accountfile and not accounturl:
-            # we could leave this anonymous, with just the /uri/CAP form
-            raise NeedRootcapLookupScheme("must provide some translation")
-
-        f = ftp.FTPFactory(p)
-        # strports requires a native string.
-        ftp_portstr = ensure_str(ftp_portstr)
-        s = strports.service(ftp_portstr, f)
-        s.setServiceParent(self)

@@ -1,6 +1,5 @@
 import six
 import heapq, traceback, array, stat, struct
-from types import NoneType
 from stat import S_IFREG, S_IFDIR
 from time import time, strftime, localtime

@@ -267,7 +266,7 @@ def _attrs_to_metadata(attrs):


 def _direntry_for(filenode_or_parent, childname, filenode=None):
-    precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+    precondition(isinstance(childname, (unicode, type(None))), childname=childname)

     if childname is None:
         filenode_or_parent = filenode

@@ -672,7 +671,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(".open(parent=%r, childname=%r, filenode=%r, metadata=%r)" %
                  (parent, childname, filenode, metadata), level=OPERATIONAL)

-        precondition(isinstance(childname, (unicode, NoneType)), childname=childname)
+        precondition(isinstance(childname, (unicode, type(None))), childname=childname)
         precondition(filenode is None or IFileNode.providedBy(filenode), filenode=filenode)
         precondition(not self.closed, sftpfile=self)

@@ -1194,7 +1193,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
         request = "._sync_heisenfiles(%r, %r, ignore=%r)" % (userpath, direntry, ignore)
         self.log(request, level=OPERATIONAL)

-        _assert(isinstance(userpath, str) and isinstance(direntry, (str, NoneType)),
+        _assert(isinstance(userpath, str) and isinstance(direntry, (str, type(None))),
                 userpath=userpath, direntry=direntry)

         files = []

@@ -1219,7 +1218,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
     def _remove_heisenfile(self, userpath, parent, childname, file_to_remove):
         if noisy: self.log("._remove_heisenfile(%r, %r, %r, %r)" % (userpath, parent, childname, file_to_remove), level=NOISY)

-        _assert(isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)),
+        _assert(isinstance(userpath, str) and isinstance(childname, (unicode, type(None))),
                 userpath=userpath, childname=childname)

         direntry = _direntry_for(parent, childname)

@@ -1246,7 +1245,7 @@ class SFTPUserHandler(ConchUser, PrefixingLogMixin):
                   (existing_file, userpath, flags, _repr_flags(flags), parent, childname, filenode, metadata),
                   level=NOISY)

-        _assert((isinstance(userpath, str) and isinstance(childname, (unicode, NoneType)) and
+        _assert((isinstance(userpath, str) and isinstance(childname, (unicode, type(None))) and
                  (metadata is None or 'no-write' in metadata)),
                 userpath=userpath, childname=childname, metadata=metadata)

@@ -1979,7 +1978,7 @@ class SFTPServer(service.MultiService):

     def __init__(self, client, accountfile, accounturl,
                  sftp_portstr, pubkey_file, privkey_file):
-        precondition(isinstance(accountfile, (unicode, NoneType)), accountfile)
+        precondition(isinstance(accountfile, (unicode, type(None))), accountfile)
         precondition(isinstance(pubkey_file, unicode), pubkey_file)
         precondition(isinstance(privkey_file, unicode), privkey_file)
         service.MultiService.__init__(self)
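
Every change in this file is the same mechanical substitution:
``types.NoneType`` existed on Python 2 but was removed from Python 3's
``types`` module, while ``type(None)`` evaluates to the same class on both
lines. A quick demonstration (hypothetical helper, not from the patch)::

    # type(None) names the NoneType class on both Python 2 and 3, so it
    # is safe in isinstance() preconditions during the porting effort.
    def check_optional_bytes(value):
        assert isinstance(value, (bytes, type(None))), value
        return value

    check_optional_bytes(None)
    check_optional_bytes(b"accounts")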

@@ -19,7 +19,7 @@ from twisted.protocols import basic
 from allmydata.interfaces import IImmutableFileNode, ICheckable
 from allmydata.uri import LiteralFileURI

-@implementer(IImmutableFileNode, ICheckable)
 class _ImmutableFileNodeBase(object):

     def get_write_uri(self):

@@ -56,6 +56,7 @@ class _ImmutableFileNodeBase(object):
         return not self == other


+@implementer(IImmutableFileNode, ICheckable)
 class LiteralFileNode(_ImmutableFileNodeBase):

     def __init__(self, filecap):
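
Moving ``@implementer`` from the abstract base class onto the concrete
subclass narrows the claim: the base class never provided the full
``IImmutableFileNode`` surface, and the new static checks would presumably
flag that over-broad declaration. A hedged sketch of the pattern with
hypothetical names::

    from zope.interface import Interface, implementer

    class IReadable(Interface):
        def read(consumer):
            """Deliver the contents to the consumer."""

    class _NodeBase(object):
        # Shared helpers only: no @implementer here, because the base
        # class does not define read() and so does not provide IReadable.
        def get_write_uri(self):
            return None

    @implementer(IReadable)
    class LiteralNode(_NodeBase):
        def read(self, consumer):
            return consumer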
@@ -141,7 +141,7 @@ class CHKCheckerAndUEBFetcher(object):


 @implementer(interfaces.RICHKUploadHelper)
-class CHKUploadHelper(Referenceable, upload.CHKUploader):
+class CHKUploadHelper(Referenceable, upload.CHKUploader):  # type: ignore # warner/foolscap#78
     """I am the helper-server -side counterpart to AssistedUploader. I handle
     peer selection, encoding, and share pushing. I read ciphertext from the
     remote AssistedUploader.

@@ -499,10 +499,13 @@ class LocalCiphertextReader(AskUntilSuccessMixin):
         # ??. I'm not sure if it makes sense to forward the close message.
         return self.call("close")

+    # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3561
+    def set_upload_status(self, upload_status):
+        raise NotImplementedError


 @implementer(interfaces.RIHelper, interfaces.IStatsProducer)
-class Helper(Referenceable):
+class Helper(Referenceable):  # type: ignore # warner/foolscap#78
     """
     :ivar dict[bytes, CHKUploadHelper] _active_uploads: For any uploads which
       have been started but not finished, a mapping from storage index to the

@@ -13,19 +13,30 @@ if PY2:
     from past.builtins import long, unicode
 from six import ensure_str

+try:
+    from typing import List
+except ImportError:
+    pass
+
 import os, time, weakref, itertools

+import attr
+
 from zope.interface import implementer
 from twisted.python import failure
 from twisted.internet import defer
 from twisted.application import service
-from foolscap.api import Referenceable, Copyable, RemoteCopy, fireEventually
+from foolscap.api import Referenceable, Copyable, RemoteCopy

 from allmydata.crypto import aes
 from allmydata.util.hashutil import file_renewal_secret_hash, \
     file_cancel_secret_hash, bucket_renewal_secret_hash, \
     bucket_cancel_secret_hash, plaintext_hasher, \
     storage_index_hash, plaintext_segment_hasher, convergence_hasher
-from allmydata.util.deferredutil import timeout_call
+from allmydata.util.deferredutil import (
+    timeout_call,
+    until,
+)
 from allmydata import hashtree, uri
 from allmydata.storage.server import si_b2a
 from allmydata.immutable import encode
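
The newly imported ``until`` helper is used further down to replace a
hand-rolled Deferred recursion. The patch does not show its body; a minimal
sketch of its apparent contract (run a Deferred-returning action repeatedly
until a condition reports completion, looping iteratively so the stack
cannot overflow), offered as an assumption rather than the actual
implementation::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def until(action, condition):
        # Perform the asynchronous action, then re-check the condition;
        # an ordinary while loop avoids unbounded Deferred chaining.
        while True:
            yield action()
            if condition():
                break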

@@ -386,6 +397,9 @@ class PeerSelector(object):
             )
         return self.happiness_mappings

+    def add_peers(self, peerids=None):
+        raise NotImplementedError
+

 class _QueryStatistics(object):

@@ -897,13 +911,45 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
         raise UploadUnhappinessError(msg)


+@attr.s
+class _Accum(object):
+    """
+    Accumulate up to some known amount of ciphertext.
+
+    :ivar remaining: The number of bytes still expected.
+    :ivar ciphertext: The bytes accumulated so far.
+    """
+    remaining = attr.ib(validator=attr.validators.instance_of(int))  # type: int
+    ciphertext = attr.ib(default=attr.Factory(list))  # type: List[bytes]
+
+    def extend(self,
+               size,        # type: int
+               ciphertext,  # type: List[bytes]
+    ):
+        """
+        Accumulate some more ciphertext.
+
+        :param size: The amount of data the new ciphertext represents towards
+            the goal.  This may be more than the actual size of the given
+            ciphertext if the source has run out of data.
+
+        :param ciphertext: The new ciphertext to accumulate.
+        """
+        self.remaining -= size
+        self.ciphertext.extend(ciphertext)
+
+
 @implementer(IEncryptedUploadable)
 class EncryptAnUploadable(object):
     """This is a wrapper that takes an IUploadable and provides
     IEncryptedUploadable."""
     CHUNKSIZE = 50*1024

-    def __init__(self, original, log_parent=None, progress=None):
+    def __init__(self, original, log_parent=None, progress=None, chunk_size=None):
+        """
+        :param chunk_size: The number of bytes to read from the uploadable at a
+            time, or None for some default.
+        """
         precondition(original.default_params_set,
                      "set_default_encoding_parameters not called on %r before wrapping with EncryptAnUploadable" % (original,))
         self.original = IUploadable(original)
@@ -917,6 +963,8 @@ class EncryptAnUploadable(object):
         self._ciphertext_bytes_read = 0
         self._status = None
         self._progress = progress
+        if chunk_size is not None:
+            self.CHUNKSIZE = chunk_size

     def set_upload_status(self, upload_status):
         self._status = IUploadStatus(upload_status)
@@ -1023,47 +1071,53 @@ class EncryptAnUploadable(object):
         # and size
         d.addCallback(lambda ignored: self.get_size())
         d.addCallback(lambda ignored: self._get_encryptor())
-        # then fetch and encrypt the plaintext. The unusual structure here
-        # (passing a Deferred *into* a function) is needed to avoid
-        # overflowing the stack: Deferreds don't optimize out tail recursion.
-        # We also pass in a list, to which _read_encrypted will append
-        # ciphertext.
-        ciphertext = []
-        d2 = defer.Deferred()
-        d.addCallback(lambda ignored:
-                      self._read_encrypted(length, ciphertext, hash_only, d2))
-        d.addCallback(lambda ignored: d2)
+
+        accum = _Accum(length)
+
+        def action():
+            """
+            Read some bytes into the accumulator.
+            """
+            return self._read_encrypted(accum, hash_only)
+
+        def condition():
+            """
+            Check to see if the accumulator has all the data.
+            """
+            return accum.remaining == 0
+
+        d.addCallback(lambda ignored: until(action, condition))
+        d.addCallback(lambda ignored: accum.ciphertext)
         return d

-    def _read_encrypted(self, remaining, ciphertext, hash_only, fire_when_done):
-        if not remaining:
-            fire_when_done.callback(ciphertext)
-            return None
+    def _read_encrypted(self,
+                        ciphertext_accum,  # type: _Accum
+                        hash_only,         # type: bool
+    ):
+        # type: (...) -> defer.Deferred
+        """
+        Read the next chunk of plaintext, encrypt it, and extend the accumulator
+        with the resulting ciphertext.
+        """
         # tolerate large length= values without consuming a lot of RAM by
         # reading just a chunk (say 50kB) at a time. This only really matters
         # when hash_only==True (i.e. resuming an interrupted upload), since
         # that's the case where we will be skipping over a lot of data.
-        size = min(remaining, self.CHUNKSIZE)
-        remaining = remaining - size
+        size = min(ciphertext_accum.remaining, self.CHUNKSIZE)
         # read a chunk of plaintext..
         d = defer.maybeDeferred(self.original.read, size)
-        # N.B.: if read() is synchronous, then since everything else is
-        # actually synchronous too, we'd blow the stack unless we stall for a
-        # tick.  Once you accept a Deferred from IUploadable.read(), you must
-        # be prepared to have it fire immediately too.
-        d.addCallback(fireEventually)
         def _good(plaintext):
             # and encrypt it..
             # o/' over the fields we go, hashing all the way, sHA! sHA! sHA! o/'
             ct = self._hash_and_encrypt_plaintext(plaintext, hash_only)
-            ciphertext.extend(ct)
-            self._read_encrypted(remaining, ciphertext, hash_only,
-                                 fire_when_done)
-        def _err(why):
-            fire_when_done.errback(why)
+            # Intentionally tell the accumulator about the expected size, not
+            # the actual size.  If we run out of data we still want remaining
+            # to drop otherwise it will never reach 0 and the loop will never
+            # end.
+            ciphertext_accum.extend(size, ct)
         d.addCallback(_good)
-        d.addErrback(_err)
-        return None
+        return d

     def _hash_and_encrypt_plaintext(self, data, hash_only):
         assert isinstance(data, (tuple, list)), type(data)
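The rewritten read loop above leans on the new `deferredutil.until` helper: instead of recursing through a `Deferred` passed into `_read_encrypted` (which could overflow the stack, since Deferreds do not optimize out tail recursion), it repeats an action until a condition holds. A sketch of the semantics, not necessarily the shipped implementation:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def until(action, condition):
        # Run action() (which may return a Deferred) over and over,
        # stopping once condition() returns a true value.  inlineCallbacks
        # resumes iteratively, so already-fired Deferreds from a
        # synchronous action do not grow the call stack.
        while True:
            yield action()
            if condition():
                break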
@@ -1424,7 +1478,7 @@ class LiteralUploader(object):
         return self._status

 @implementer(RIEncryptedUploadable)
-class RemoteEncryptedUploadable(Referenceable):
+class RemoteEncryptedUploadable(Referenceable): # type: ignore # warner/foolscap#78

     def __init__(self, encrypted_uploadable, upload_status):
         self._eu = IEncryptedUploadable(encrypted_uploadable)
@@ -681,7 +681,7 @@ class IURI(Interface):
         passing into init_from_string."""


-class IVerifierURI(Interface, IURI):
+class IVerifierURI(IURI):
     def init_from_string(uri):
         """Accept a string (as created by my to_string() method) and populate
         this instance with its data. I am not normally called directly,

@@ -748,7 +748,7 @@ class IProgress(Interface):
         "Current amount of progress (in percentage)"
     )

-    def set_progress(self, value):
+    def set_progress(value):
         """
         Sets the current amount of progress.

@@ -756,7 +756,7 @@ class IProgress(Interface):
         set_progress_total.
         """

-    def set_progress_total(self, value):
+    def set_progress_total(value):
         """
         Sets the total amount of expected progress

@@ -859,12 +859,6 @@ class IPeerSelector(Interface):
         peer selection begins.
         """

-    def confirm_share_allocation(peerid, shnum):
-        """
-        Confirm that an allocated peer=>share pairing has been
-        successfully established.
-        """
-
     def add_peers(peerids=set):
         """
         Update my internal state to include the peers in peerids as

@@ -1824,11 +1818,6 @@ class IEncoder(Interface):
         willing to receive data.
         """

-    def set_size(size):
-        """Specify the number of bytes that will be encoded. This must be
-        peformed before get_serialized_params() can be called.
-        """
-
     def set_encrypted_uploadable(u):
         """Provide a source of encrypted upload data. 'u' must implement
         IEncryptedUploadable.
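The `set_progress(self, value)` to `set_progress(value)` edits above follow the zope.interface convention that interface methods are declared from the caller's point of view, without `self`; concrete implementations still take `self` as usual. A hedged sketch of that convention; the `PercentProgress` class is illustrative and not part of this commit:

    from zope.interface import Interface, implementer

    class IProgress(Interface):
        def set_progress(value):
            """Interface declarations omit ``self``."""

    @implementer(IProgress)
    class PercentProgress(object):
        def set_progress(self, value):
            # Implementations take self; the interface call signature
            # set_progress(value) describes what callers pass.
            self.value = value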
@@ -178,9 +178,9 @@ class IntroducerClient(service.Service, Referenceable):
             kwargs["facility"] = "tahoe.introducer.client"
         return log.msg(*args, **kwargs)

-    def subscribe_to(self, service_name, cb, *args, **kwargs):
+    def subscribe_to(self, service_name, callback, *args, **kwargs):
         obs = self._local_subscribers.setdefault(service_name, ObserverList())
-        obs.subscribe(lambda key_s, ann: cb(key_s, ann, *args, **kwargs))
+        obs.subscribe(lambda key_s, ann: callback(key_s, ann, *args, **kwargs))
         self._maybe_subscribe()
         for index,(ann,key_s,when) in list(self._inbound_announcements.items()):
             precondition(isinstance(key_s, bytes), key_s)

@@ -73,7 +73,7 @@ class IIntroducerClient(Interface):
     publish their services to the rest of the world, and I help them learn
     about services available on other nodes."""

-    def publish(service_name, ann, signing_key=None):
+    def publish(service_name, ann, signing_key):
         """Publish the given announcement dictionary (which must be
         JSON-serializable), plus some additional keys, to the world.

@@ -83,8 +83,7 @@ class IIntroducerClient(Interface):
         the signing_key, if present, otherwise it is derived from the
         'anonymous-storage-FURL' key.

-        If signing_key= is set to an instance of SigningKey, it will be
-        used to sign the announcement."""
+        signing_key (a SigningKey) will be used to sign the announcement."""

     def subscribe_to(service_name, callback, *args, **kwargs):
         """Call this if you will eventually want to use services with the
@@ -15,6 +15,12 @@ from past.builtins import long
 from six import ensure_text

 import time, os.path, textwrap

+try:
+    from typing import Any, Dict, Union
+except ImportError:
+    pass
+
 from zope.interface import implementer
 from twisted.application import service
 from twisted.internet import defer

@@ -147,10 +153,12 @@ class IntroducerService(service.MultiService, Referenceable):
     name = "introducer"
     # v1 is the original protocol, added in 1.0 (but only advertised starting
     # in 1.3), removed in 1.12. v2 is the new signed protocol, added in 1.10
-    VERSION = { #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
+    # TODO: reconcile bytes/str for keys
+    VERSION = {
+                #"http://allmydata.org/tahoe/protocols/introducer/v1": { },
                 b"http://allmydata.org/tahoe/protocols/introducer/v2": { },
                 b"application-version": allmydata.__full_version__.encode("utf-8"),
-                }
+                } # type: Dict[Union[bytes, str], Any]

     def __init__(self):
         service.MultiService.__init__(self)
@@ -564,7 +564,7 @@ class MutableFileNode(object):
         return d


-    def upload(self, new_contents, servermap):
+    def upload(self, new_contents, servermap, progress=None):
         """
         I overwrite the contents of the best recoverable version of this
         mutable file with new_contents, using servermap instead of

@@ -951,7 +951,7 @@ class MutableFileVersion(object):
         return self._servermap.size_of_version(self._version)


-    def download_to_data(self, fetch_privkey=False, progress=None):
+    def download_to_data(self, fetch_privkey=False, progress=None): # type: ignore # fixme
         """
         I return a Deferred that fires with the contents of this
         readable object as a byte string.

@@ -1205,3 +1205,7 @@ class MutableFileVersion(object):
                            self._servermap,
                            mode=mode)
         return u.update()
+
+    # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3562
+    def get_servermap(self):
+        raise NotImplementedError
@@ -23,6 +23,11 @@ from base64 import b32decode, b32encode
 from errno import ENOENT, EPERM
 from warnings import warn

+try:
+    from typing import Union
+except ImportError:
+    pass
+
 import attr

 # On Python 2 this will be the backported package.

@@ -273,6 +278,11 @@ def _error_about_old_config_files(basedir, generated_files):
         raise e


+def ensure_text_and_abspath_expanduser_unicode(basedir):
+    # type: (Union[bytes, str]) -> str
+    return abspath_expanduser_unicode(ensure_text(basedir))
+
+
 @attr.s
 class _Config(object):
     """

@@ -300,8 +310,8 @@ class _Config(object):
     config = attr.ib(validator=attr.validators.instance_of(configparser.ConfigParser))
     portnum_fname = attr.ib()
     _basedir = attr.ib(
-        converter=lambda basedir: abspath_expanduser_unicode(ensure_text(basedir)),
-    )
+        converter=ensure_text_and_abspath_expanduser_unicode,
+    ) # type: str
     config_path = attr.ib(
         validator=attr.validators.optional(
             attr.validators.instance_of(FilePath),

@@ -927,7 +937,6 @@ class Node(service.MultiService):
     """
     NODETYPE = "unknown NODETYPE"
     CERTFILE = "node.pem"
-    GENERATED_FILES = []

     def __init__(self, config, main_tub, control_tub, i2p_provider, tor_provider):
         """
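Replacing the lambda converter with the named `ensure_text_and_abspath_expanduser_unicode` gives the annotation somewhere to live: a `# type:` comment can document a named function, while a lambda has no line of its own to carry one. An illustrative sketch of the same pattern with a hypothetical `Config` class and `normalize` converter, not code from this commit:

    import attr

    def normalize(value):
        # type: (bytes) -> str
        # A named converter can carry a comment annotation; a lambda cannot.
        return value.decode("utf-8").strip()

    @attr.s
    class Config(object):
        name = attr.ib(converter=normalize)  # type: str

    print(Config(b" tahoe ").name)  # prints "tahoe"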
@@ -1,5 +1,10 @@
 from __future__ import print_function

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
 from twisted.python import usage
 from allmydata.scripts.common import BaseOptions

@@ -79,8 +84,8 @@ def do_admin(options):


 subCommands = [
-    ["admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"],
-    ]
+    ("admin", None, AdminCommand, "admin subcommands: use 'tahoe admin' for a list"),
+    ] # type: SubCommands

 dispatch = {
     "admin": do_admin,
@@ -1,6 +1,12 @@
 from __future__ import print_function

 import os.path, re, fnmatch

+try:
+    from allmydata.scripts.types_ import SubCommands, Parameters
+except ImportError:
+    pass
+
 from twisted.python import usage
 from allmydata.scripts.common import get_aliases, get_default_nodedir, \
      DEFAULT_ALIAS, BaseOptions

@@ -19,7 +25,7 @@ class FileStoreOptions(BaseOptions):
          "This overrides the URL found in the --node-directory ."],
         ["dir-cap", None, None,
          "Specify which dirnode URI should be used as the 'tahoe' alias."]
-        ]
+        ] # type: Parameters

     def postOptions(self):
         self["quiet"] = self.parent["quiet"]

@@ -455,25 +461,25 @@ class DeepCheckOptions(FileStoreOptions):
     Optionally repair any problems found."""

 subCommands = [
-    ["mkdir", None, MakeDirectoryOptions, "Create a new directory."],
-    ["add-alias", None, AddAliasOptions, "Add a new alias cap."],
-    ["create-alias", None, CreateAliasOptions, "Create a new alias cap."],
-    ["list-aliases", None, ListAliasesOptions, "List all alias caps."],
-    ["ls", None, ListOptions, "List a directory."],
-    ["get", None, GetOptions, "Retrieve a file from the grid."],
-    ["put", None, PutOptions, "Upload a file into the grid."],
-    ["cp", None, CpOptions, "Copy one or more files or directories."],
-    ["unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."],
-    ["mv", None, MvOptions, "Move a file within the grid."],
-    ["ln", None, LnOptions, "Make an additional link to an existing file or directory."],
-    ["backup", None, BackupOptions, "Make target dir look like local dir."],
-    ["webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."],
-    ["manifest", None, ManifestOptions, "List all files/directories in a subtree."],
-    ["stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."],
-    ["check", None, CheckOptions, "Check a single file or directory."],
-    ["deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."],
-    ["status", None, TahoeStatusCommand, "Various status information."],
-    ]
+    ("mkdir", None, MakeDirectoryOptions, "Create a new directory."),
+    ("add-alias", None, AddAliasOptions, "Add a new alias cap."),
+    ("create-alias", None, CreateAliasOptions, "Create a new alias cap."),
+    ("list-aliases", None, ListAliasesOptions, "List all alias caps."),
+    ("ls", None, ListOptions, "List a directory."),
+    ("get", None, GetOptions, "Retrieve a file from the grid."),
+    ("put", None, PutOptions, "Upload a file into the grid."),
+    ("cp", None, CpOptions, "Copy one or more files or directories."),
+    ("unlink", None, UnlinkOptions, "Unlink a file or directory on the grid."),
+    ("mv", None, MvOptions, "Move a file within the grid."),
+    ("ln", None, LnOptions, "Make an additional link to an existing file or directory."),
+    ("backup", None, BackupOptions, "Make target dir look like local dir."),
+    ("webopen", None, WebopenOptions, "Open a web browser to a grid file or directory."),
+    ("manifest", None, ManifestOptions, "List all files/directories in a subtree."),
+    ("stats", None, StatsOptions, "Print statistics about all files/directories in a subtree."),
+    ("check", None, CheckOptions, "Check a single file or directory."),
+    ("deep-check", None, DeepCheckOptions, "Check all files/directories reachable from a starting point."),
+    ("status", None, TahoeStatusCommand, "Various status information."),
+    ] # type: SubCommands

 def mkdir(options):
     from allmydata.scripts import tahoe_mkdir
@@ -4,6 +4,12 @@ import os, sys, urllib, textwrap
 import codecs
 from os.path import join

+try:
+    from typing import Optional
+    from .types_ import Parameters
+except ImportError:
+    pass
+
 from yaml import (
     safe_dump,
 )

@@ -41,8 +47,8 @@ class BaseOptions(usage.Options):
     def opt_version(self):
         raise usage.UsageError("--version not allowed on subcommands")

-    description = None
-    description_unwrapped = None
+    description = None # type: Optional[str]
+    description_unwrapped = None # type: Optional[str]

     def __str__(self):
         width = int(os.environ.get('COLUMNS', '80'))

@@ -65,7 +71,7 @@ class BasedirOptions(BaseOptions):
     optParameters = [
         ["basedir", "C", None, "Specify which Tahoe base directory should be used. [default: %s]"
         % quote_local_unicode_path(_default_nodedir)],
-    ]
+    ] # type: Parameters

     def parseArgs(self, basedir=None):
         # This finds the node-directory option correctly even if we are in a subcommand.

@@ -102,7 +108,7 @@ class NoDefaultBasedirOptions(BasedirOptions):

     optParameters = [
         ["basedir", "C", None, "Specify which Tahoe base directory should be used."],
-    ]
+    ] # type: Parameters

     # This is overridden in order to ensure we get a "Wrong number of arguments."
     # error when more than one argument is given.
@@ -3,6 +3,11 @@ from __future__ import print_function
 import os
 import json

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
 from twisted.internet import reactor, defer
 from twisted.python.usage import UsageError
 from twisted.python.filepath import (

@@ -492,10 +497,10 @@ def create_introducer(config):


 subCommands = [
-    ["create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."],
-    ["create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."],
-    ["create-introducer", None, CreateIntroducerOptions, "Create an introducer node."],
-]
+    ("create-node", None, CreateNodeOptions, "Create a node that acts as a client, server or both."),
+    ("create-client", None, CreateClientOptions, "Create a client node (with storage initially disabled)."),
+    ("create-introducer", None, CreateIntroducerOptions, "Create an introducer node."),
+] # type: SubCommands

 dispatch = {
     "create-node": create_node,
@@ -1,5 +1,10 @@
 from __future__ import print_function

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
 from future.utils import bchr

 # do not import any allmydata modules at this level. Do that from inside

@@ -1053,8 +1058,8 @@ def do_debug(options):


 subCommands = [
-    ["debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."],
-    ]
+    ("debug", None, DebugCommand, "debug subcommands: use 'tahoe debug' for a list."),
+    ] # type: SubCommands

 dispatch = {
     "debug": do_debug,
@@ -4,6 +4,11 @@ import os, sys
 from six.moves import StringIO
 import six

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
 from twisted.python import usage
 from twisted.internet import defer, task, threads

@@ -40,8 +45,8 @@ _control_node_dispatch = {
 }

 process_control_commands = [
-    ["run", None, tahoe_run.RunOptions, "run a node without daemonizing"],
-]
+    ("run", None, tahoe_run.RunOptions, "run a node without daemonizing"),
+] # type: SubCommands


 class Options(usage.Options):

@@ -98,7 +103,7 @@ class Options(usage.Options):

 create_dispatch = {}
 for module in (create_node,):
-    create_dispatch.update(module.dispatch)
+    create_dispatch.update(module.dispatch) # type: ignore

 def parse_options(argv, config=None):
     if not config:
@@ -2,6 +2,11 @@ from __future__ import print_function

 import json

+try:
+    from allmydata.scripts.types_ import SubCommands
+except ImportError:
+    pass
+
 from twisted.python import usage
 from twisted.internet import defer, reactor

@@ -103,7 +108,7 @@ def invite(options):
 subCommands = [
     ("invite", None, InviteOptions,
      "Invite a new node to this grid"),
-]
+] # type: SubCommands

 dispatch = {
     "invite": invite,
src/allmydata/scripts/types_.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+from typing import List, Tuple, Type, Sequence, Any
+from allmydata.scripts.common import BaseOptions
+
+
+# Historically, subcommands were implemented as lists, but due to a
+# [designed contraint in mypy](https://stackoverflow.com/a/52559625/70170),
+# a Tuple is required.
+SubCommand = Tuple[str, None, Type[BaseOptions], str]
+
+SubCommands = List[SubCommand]
+
+Parameters = List[Sequence[Any]]
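The `Tuple[str, None, Type[BaseOptions], str]` alias is the reason the subcommand rows elsewhere in this commit change from lists to tuples: mypy types a list by a single element type, so a heterogeneous list collapses to the common supertype and each slot loses its specific type, whereas a tuple records a type per position. A standalone sketch with illustrative values:

    from typing import List, Tuple

    row_as_list = ["admin", None, 3]   # inferred as List[object]; slot types are lost
    row_as_tuple = ("admin", None, 3)  # Tuple[str, None, int]; each slot keeps its type

    Row = Tuple[str, int]
    rows = [("a", 1), ("b", 2)]        # type: List[Row]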
@@ -1,11 +1,16 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
 from __future__ import print_function
+from __future__ import unicode_literals

-import time
-
-# Python 2 compatibility
 from future.utils import PY2
 if PY2:
-    from future.builtins import str  # noqa: F401
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import time

 from twisted.application import service
 from twisted.application.internet import TimerService

@@ -18,7 +23,7 @@ from allmydata.interfaces import IStatsProducer
 @implementer(IStatsProducer)
 class CPUUsageMonitor(service.MultiService):
     HISTORY_LENGTH = 15
-    POLL_INTERVAL = 60
+    POLL_INTERVAL = 60 # type: float

     def __init__(self):
         service.MultiService.__init__(self)
@@ -19,7 +19,7 @@ import os, time, struct
 try:
     import cPickle as pickle
 except ImportError:
-    import pickle
+    import pickle # type: ignore
 from twisted.internet import reactor
 from twisted.application import service
 from allmydata.storage.common import si_b2a
@@ -202,7 +202,7 @@ class ShareFile(object):


 @implementer(RIBucketWriter)
-class BucketWriter(Referenceable):
+class BucketWriter(Referenceable): # type: ignore # warner/foolscap#78

     def __init__(self, ss, incominghome, finalhome, max_size, lease_info, canary):
         self.ss = ss

@@ -301,7 +301,7 @@ class BucketWriter(Referenceable):


 @implementer(RIBucketReader)
-class BucketReader(Referenceable):
+class BucketReader(Referenceable): # type: ignore # warner/foolscap#78

     def __init__(self, ss, sharefname, storage_index=None, shnum=None):
         self.ss = ss

@@ -581,7 +581,7 @@ class StorageServer(service.MultiService, Referenceable):
         for share in six.viewvalues(shares):
             share.add_or_renew_lease(lease_info)

-    def slot_testv_and_readv_and_writev(
+    def slot_testv_and_readv_and_writev(  # type: ignore # warner/foolscap#78
             self,
             storage_index,
             secrets,
@@ -37,6 +37,11 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
 import os, sys, httplib, binascii
 import urllib, json, random, time, urlparse

+try:
+    from typing import Dict
+except ImportError:
+    pass
+
 # Python 2 compatibility
 from future.utils import PY2
 if PY2:

@@ -49,13 +54,13 @@ if sys.argv[1] == "--stats":
     DELAY = 10
     MAXSAMPLES = 6
     totals = []
-    last_stats = {}
+    last_stats = {} # type: Dict[str, float]
     while True:
-        stats = {}
+        stats = {} # type: Dict[str, float]
         for sf in statsfiles:
             for line in open(sf, "r").readlines():
-                name, value = line.split(":")
-                value = int(value.strip())
+                name, str_value = line.split(":")
+                value = int(str_value.strip())
                 if name not in stats:
                     stats[name] = 0
                 stats[name] += float(value)
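The `name, str_value = line.split(":")` edit above exists because mypy assigns each variable one type for its whole scope: re-binding `value` from the `str` produced by `split` to the `int` produced by `int()` is rejected, so the intermediate string gets its own name. A minimal sketch:

    line = "counter: 42"
    name, str_value = line.split(":")  # both names are str
    value = int(str_value.strip())     # a fresh name holds the int; str_value stays str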
@@ -508,13 +508,13 @@ if __name__ == '__main__':
     mode = "upload"
     if len(sys.argv) > 1:
         mode = sys.argv[1]
-    if sys.maxint == 2147483647:
+    if sys.maxsize == 2147483647:
         bits = "32"
-    elif sys.maxint == 9223372036854775807:
+    elif sys.maxsize == 9223372036854775807:
         bits = "64"
     else:
         bits = "?"
-    print("%s-bit system (sys.maxint=%d)" % (bits, sys.maxint))
+    print("%s-bit system (sys.maxsize=%d)" % (bits, sys.maxsize))
     # put the logfile and stats.out in _test_memory/ . These stick around.
     # put the nodes and other files in _test_memory/test/ . These are
     # removed each time we run.
@@ -406,7 +406,7 @@ class DummyProducer(object):
         pass

 @implementer(IImmutableFileNode)
-class FakeCHKFileNode(object):
+class FakeCHKFileNode(object):  # type: ignore # incomplete implementation
     """I provide IImmutableFileNode, but all of my data is stored in a
     class-level dictionary."""

@@ -544,7 +544,7 @@ def create_chk_filenode(contents, all_contents):


 @implementer(IMutableFileNode, ICheckable)
-class FakeMutableFileNode(object):
+class FakeMutableFileNode(object):  # type: ignore # incomplete implementation
     """I provide IMutableFileNode, but all of my data is stored in a
     class-level dictionary."""
@@ -68,7 +68,7 @@ class Marker(object):

 fireNow = partial(defer.succeed, None)

-@implementer(IRemoteReference)
+@implementer(IRemoteReference)  # type: ignore # warner/foolscap#79
 class LocalWrapper(object):
     """
     A ``LocalWrapper`` presents the remote reference interface to a local

@@ -213,9 +213,12 @@ class NoNetworkServer(object):
         return _StorageServer(lambda: self.rref)
     def get_version(self):
         return self.rref.version
+    def start_connecting(self, trigger_cb):
+        raise NotImplementedError
+

 @implementer(IStorageBroker)
-class NoNetworkStorageBroker(object):
+class NoNetworkStorageBroker(object):  # type: ignore # missing many methods
     def get_servers_for_psi(self, peer_selection_index):
         def _permuted(server):
             seed = server.get_permutation_seed()

@@ -259,7 +262,7 @@ def create_no_network_client(basedir):
     return defer.succeed(client)


-class _NoNetworkClient(_Client):
+class _NoNetworkClient(_Client):  # type: ignore # tahoe-lafs/ticket/3573
     """
     Overrides all _Client networking functionality to do nothing.
     """
|
@ -47,8 +47,9 @@ class RIDummy(RemoteInterface):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# type ignored due to missing stubs for Twisted
|
||||||
@implementer(IFoolscapStoragePlugin)
|
# https://twistedmatrix.com/trac/ticket/9717
|
||||||
|
@implementer(IFoolscapStoragePlugin) # type: ignore
|
||||||
@attr.s
|
@attr.s
|
||||||
class DummyStorage(object):
|
class DummyStorage(object):
|
||||||
name = attr.ib()
|
name = attr.ib()
|
||||||
@ -107,7 +108,7 @@ class GetCounter(Resource, object):
|
|||||||
|
|
||||||
@implementer(RIDummy)
|
@implementer(RIDummy)
|
||||||
@attr.s(frozen=True)
|
@attr.s(frozen=True)
|
||||||
class DummyStorageServer(object):
|
class DummyStorageServer(object): # type: ignore # warner/foolscap#78
|
||||||
get_anonymous_storage_server = attr.ib()
|
get_anonymous_storage_server = attr.ib()
|
||||||
|
|
||||||
def remote_just_some_method(self):
|
def remote_just_some_method(self):
|
||||||
@ -116,7 +117,7 @@ class DummyStorageServer(object):
|
|||||||
|
|
||||||
@implementer(IStorageServer)
|
@implementer(IStorageServer)
|
||||||
@attr.s
|
@attr.s
|
||||||
class DummyStorageClient(object):
|
class DummyStorageClient(object): # type: ignore # incomplete implementation
|
||||||
get_rref = attr.ib()
|
get_rref = attr.ib()
|
||||||
configuration = attr.ib()
|
configuration = attr.ib()
|
||||||
announcement = attr.ib()
|
announcement = attr.ib()
|
||||||
|
@@ -62,7 +62,7 @@ class FakeClient(object):


 @implementer(IServer)
-class FakeServer(object):
+class FakeServer(object):  # type: ignore # incomplete implementation

     def get_name(self):
         return "fake name"

@@ -75,7 +75,7 @@ class FakeServer(object):


 @implementer(ICheckResults)
-class FakeCheckResults(object):
+class FakeCheckResults(object):  # type: ignore # incomplete implementation

     def __init__(self, si=None,
                  healthy=False, recoverable=False,

@@ -106,7 +106,7 @@ class FakeCheckResults(object):


 @implementer(ICheckAndRepairResults)
-class FakeCheckAndRepairResults(object):
+class FakeCheckAndRepairResults(object):  # type: ignore # incomplete implementation

     def __init__(self, si=None,
                  repair_attempted=False,
@@ -51,7 +51,6 @@ from allmydata.nodemaker import (
     NodeMaker,
 )
 from allmydata.node import OldConfigError, UnescapedHashError, create_node_dir
-from allmydata.frontends.auth import NeedRootcapLookupScheme
 from allmydata import client
 from allmydata.storage_client import (
     StorageClientConfig,

@@ -424,88 +423,8 @@ class Basic(testutil.ReallyEqualMixin, unittest.TestCase):
         expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
         self.failUnlessReallyEqual(w.staticdir, expected)

-    # TODO: also test config options for SFTP.
-    @defer.inlineCallbacks
-    def test_ftp_create(self):
-        """
-        configuration for sftpd results in it being started
-        """
-        root = FilePath(self.mktemp())
-        root.makedirs()
-        accounts = root.child(b"sftp-accounts")
-        accounts.touch()
-
-        data = FilePath(__file__).sibling(b"data")
-        privkey = data.child(b"openssh-rsa-2048.txt")
-        pubkey = data.child(b"openssh-rsa-2048.pub.txt")
-
-        basedir = u"client.Basic.test_ftp_create"
-        create_node_dir(basedir, "testing")
-        with open(os.path.join(basedir, "tahoe.cfg"), "w") as f:
-            f.write((
-                '[sftpd]\n'
-                'enabled = true\n'
-                'accounts.file = {}\n'
-                'host_pubkey_file = {}\n'
-                'host_privkey_file = {}\n'
-            ).format(accounts.path, pubkey.path, privkey.path))
-
-        client_node = yield client.create_client(
-            basedir,
-        )
-        sftp = client_node.getServiceNamed("frontend:sftp")
-        self.assertIs(sftp.parent, client_node)
-
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_keyfile(self):
-        """
-        ftpd accounts.file is parsed properly
-        """
-        basedir = u"client.Basic.test_ftp_auth_keyfile"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"
-                        "accounts.file = private/accounts\n"))
-        os.mkdir(os.path.join(basedir, "private"))
-        fileutil.write(os.path.join(basedir, "private", "accounts"), "\n")
-        c = yield client.create_client(basedir) # just make sure it can be instantiated
-        del c
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_url(self):
-        """
-        ftpd accounts.url is parsed properly
-        """
-        basedir = u"client.Basic.test_ftp_auth_url"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"
-                        "accounts.url = http://0.0.0.0/\n"))
-        c = yield client.create_client(basedir) # just make sure it can be instantiated
-        del c
-
-    @defer.inlineCallbacks
-    def test_ftp_auth_no_accountfile_or_url(self):
-        """
-        ftpd requires some way to look up accounts
-        """
-        basedir = u"client.Basic.test_ftp_auth_no_accountfile_or_url"
-        os.mkdir(basedir)
-        fileutil.write(os.path.join(basedir, "tahoe.cfg"),
-                       (BASECONFIG +
-                        "[ftpd]\n"
-                        "enabled = true\n"
-                        "port = tcp:0:interface=127.0.0.1\n"))
-        with self.assertRaises(NeedRootcapLookupScheme):
-            yield client.create_client(basedir)
-
+    # TODO: also test config options for SFTP. See Git history for deleted FTP
+    # tests that could be used as basis for these tests.

     @defer.inlineCallbacks
     def _storage_dir_test(self, basedir, storage_path, expected_path):
@@ -74,3 +74,58 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin
         d = defer.succeed(None)
         d.addBoth(self.wait_for_delayed_calls)
         return d
+
+
+class UntilTests(unittest.TestCase):
+    """
+    Tests for ``deferredutil.until``.
+    """
+    def test_exception(self):
+        """
+        If the action raises an exception, the ``Deferred`` returned by ``until``
+        fires with a ``Failure``.
+        """
+        self.assertFailure(
+            deferredutil.until(lambda: 1/0, lambda: True),
+            ZeroDivisionError,
+        )
+
+    def test_stops_on_condition(self):
+        """
+        The action is called repeatedly until ``condition`` returns ``True``.
+        """
+        calls = []
+        def action():
+            calls.append(None)
+
+        def condition():
+            return len(calls) == 3
+
+        self.assertIs(
+            self.successResultOf(
+                deferredutil.until(action, condition),
+            ),
+            None,
+        )
+        self.assertEqual(3, len(calls))
+
+    def test_waits_for_deferred(self):
+        """
+        If the action returns a ``Deferred`` then it is called again when the
+        ``Deferred`` fires.
+        """
+        counter = [0]
+        r1 = defer.Deferred()
+        r2 = defer.Deferred()
+        results = [r1, r2]
+        def action():
+            counter[0] += 1
+            return results.pop(0)
+
+        def condition():
+            return False
+
+        deferredutil.until(action, condition)
+        self.assertEqual([1], counter)
+        r1.callback(None)
+        self.assertEqual([2], counter)
@@ -1561,7 +1561,7 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase):
                                  kids, fn.get_writekey(), deep_immutable=True)

 @implementer(IMutableFileNode)
-class FakeMutableFile(object):
+class FakeMutableFile(object):  # type: ignore # incomplete implementation
     counter = 0
     def __init__(self, initial_contents=b""):
         data = self._get_initial_contents(initial_contents)

@@ -1622,7 +1622,7 @@ class FakeNodeMaker(NodeMaker):
     def create_mutable_file(self, contents=b"", keysize=None, version=None):
         return defer.succeed(FakeMutableFile(contents))

-class FakeClient2(_Client):
+class FakeClient2(_Client):  # type: ignore # tahoe-lafs/ticket/3573
     def __init__(self):
         self.nodemaker = FakeNodeMaker(None, None, None,
                                        None, None,
@@ -1,106 +0,0 @@
-
-from twisted.trial import unittest
-
-from allmydata.frontends import ftpd
-from allmydata.immutable import upload
-from allmydata.mutable import publish
-from allmydata.test.no_network import GridTestMixin
-from allmydata.test.common_util import ReallyEqualMixin
-
-class Handler(GridTestMixin, ReallyEqualMixin, unittest.TestCase):
-    """
-    This is a no-network unit test of ftpd.Handler and the abstractions
-    it uses.
-    """
-
-    FALL_OF_BERLIN_WALL = 626644800
-    TURN_OF_MILLENIUM = 946684800
-
-    def _set_up(self, basedir, num_clients=1, num_servers=10):
-        self.basedir = "ftp/" + basedir
-        self.set_up_grid(num_clients=num_clients, num_servers=num_servers,
-                         oneshare=True)
-
-        self.client = self.g.clients[0]
-        self.username = "alice"
-        self.convergence = ""
-
-        d = self.client.create_dirnode()
-        def _created_root(node):
-            self.root = node
-            self.root_uri = node.get_uri()
-            self.handler = ftpd.Handler(self.client, self.root, self.username,
-                                        self.convergence)
-        d.addCallback(_created_root)
-        return d
-
-    def _set_metadata(self, name, metadata):
-        """Set metadata for `name', avoiding MetadataSetter's timestamp reset
-        behavior."""
-        def _modifier(old_contents, servermap, first_time):
-            children = self.root._unpack_contents(old_contents)
-            children[name] = (children[name][0], metadata)
-            return self.root._pack_contents(children)
-
-        return self.root._node.modify(_modifier)
-
-    def _set_up_tree(self):
-        # add immutable file at root
-        immutable = upload.Data("immutable file contents", None)
-        d = self.root.add_file(u"immutable", immutable)
-
-        # `mtime' and `linkmotime' both set
-        md_both = {'mtime': self.FALL_OF_BERLIN_WALL,
-                   'tahoe': {'linkmotime': self.TURN_OF_MILLENIUM}}
-        d.addCallback(lambda _: self._set_metadata(u"immutable", md_both))
-
-        # add link to root from root
-        d.addCallback(lambda _: self.root.set_node(u"loop", self.root))
-
-        # `mtime' set, but no `linkmotime'
-        md_just_mtime = {'mtime': self.FALL_OF_BERLIN_WALL, 'tahoe': {}}
-        d.addCallback(lambda _: self._set_metadata(u"loop", md_just_mtime))
-
-        # add mutable file at root
-        mutable = publish.MutableData("mutable file contents")
-        d.addCallback(lambda _: self.client.create_mutable_file(mutable))
-        d.addCallback(lambda node: self.root.set_node(u"mutable", node))
-
-        # neither `mtime' nor `linkmotime' set
-        d.addCallback(lambda _: self._set_metadata(u"mutable", {}))
-
-        return d
-
-    def _compareDirLists(self, actual, expected):
-        actual_list = sorted(actual)
-        expected_list = sorted(expected)
-
-        self.failUnlessReallyEqual(len(actual_list), len(expected_list),
-                                   "%r is wrong length, expecting %r" % (
-                                       actual_list, expected_list))
-        for (a, b) in zip(actual_list, expected_list):
-            (name, meta) = a
-            (expected_name, expected_meta) = b
-            self.failUnlessReallyEqual(name, expected_name)
-            self.failUnlessReallyEqual(meta, expected_meta)
-
-    def test_list(self):
-        keys = ("size", "directory", "permissions", "hardlinks", "modified",
-                "owner", "group", "unexpected")
-        d = self._set_up("list")
-
-        d.addCallback(lambda _: self._set_up_tree())
-        d.addCallback(lambda _: self.handler.list("", keys=keys))
-
-        expected_root = [
-            ('loop',
-             [0, True, ftpd.IntishPermissions(0o600), 1, self.FALL_OF_BERLIN_WALL, 'alice', 'alice', '??']),
-            ('immutable',
-             [23, False, ftpd.IntishPermissions(0o600), 1, self.TURN_OF_MILLENIUM, 'alice', 'alice', '??']),
-            ('mutable',
-             # timestamp should be 0 if no timestamp metadata is present
-             [0, False, ftpd.IntishPermissions(0o600), 1, 0, 'alice', 'alice', '??'])]
-
-        d.addCallback(lambda root: self._compareDirLists(root, expected_root))
-
-        return d
@@ -19,6 +19,12 @@ from functools import (
 )
 import attr

+try:
+    from typing import List
+    from allmydata.introducer.client import IntroducerClient
+except ImportError:
+    pass
+
 from twisted.internet import defer
 from twisted.trial import unittest
 from twisted.application import service

@@ -125,7 +131,7 @@ class FakeCHKCheckerAndUEBFetcher(object):
         ))

 class FakeClient(service.MultiService):
-    introducer_clients = []
+    introducer_clients = []  # type: List[IntroducerClient]
     DEFAULT_ENCODING_PARAMETERS = {"k":25,
                                    "happy": 75,
                                    "n": 100,
@@ -564,7 +564,7 @@ class TestMissingPorts(unittest.TestCase):
         config = config_from_string(self.basedir, "portnum", config_data)
         with self.assertRaises(PortAssignmentRequired):
             _tub_portlocation(config, None, None)
-    test_listen_on_zero_with_host.todo = native_str(
+    test_listen_on_zero_with_host.todo = native_str(  # type: ignore
        "https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3563"
    )

@@ -16,6 +16,7 @@ from testtools.matchers import (
 BLACKLIST = {
     "allmydata.test.check_load",
     "allmydata.windows.registry",
+    "allmydata.scripts.types_",
 }


@@ -44,7 +44,7 @@ class Python3PortingEffortTests(SynchronousTestCase):
             ),
         ),
     )
-    test_finished_porting.todo = native_str(
+    test_finished_porting.todo = native_str(  # type: ignore
         "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed",
     )

@@ -9,18 +9,15 @@ from twisted.python.failure import Failure
 from twisted.internet.error import ProcessDone, ProcessTerminated
 from allmydata.util import deferredutil

-conch_interfaces = None
-sftp = None
-sftpd = None
-
 try:
     from twisted.conch import interfaces as conch_interfaces
     from twisted.conch.ssh import filetransfer as sftp
     from allmydata.frontends import sftpd
 except ImportError as e:
+    conch_interfaces = sftp = sftpd = None  # type: ignore
     conch_unavailable_reason = e
 else:
-    conch_unavailable_reason = None
+    conch_unavailable_reason = None  # type: ignore

 from allmydata.interfaces import IDirectoryNode, ExistingChildError, NoSuchChildError
 from allmydata.mutable.common import NotWriteableError
@@ -1,3 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from twisted.application import service

@@ -105,7 +105,8 @@ from allmydata.interfaces import (

 SOME_FURL = "pb://abcde@nowhere/fake"

-class NativeStorageServerWithVersion(NativeStorageServer):
+
+class NativeStorageServerWithVersion(NativeStorageServer):  # type: ignore # tahoe-lafs/ticket/3573
     def __init__(self, version):
         # note: these instances won't work for anything other than
         # get_available_space() because we don't upcall
@@ -569,7 +570,7 @@ class SpyEndpoint(object):
         return d


-@implementer(IConnectionHintHandler)
+@implementer(IConnectionHintHandler)  # type: ignore # warner/foolscap#78
 @attr.s
 class SpyHandler(object):
     """
@@ -14,6 +14,17 @@ if PY2:

 import os, shutil
 from io import BytesIO
+from base64 import (
+    b64encode,
+)
+
+from hypothesis import (
+    given,
+)
+from hypothesis.strategies import (
+    just,
+    integers,
+)

 from twisted.trial import unittest
 from twisted.python.failure import Failure
@@ -2029,6 +2040,91 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
         f.close()
         return None

+
+class EncryptAnUploadableTests(unittest.TestCase):
+    """
+    Tests for ``EncryptAnUploadable``.
+    """
+    def test_same_length(self):
+        """
+        ``EncryptAnUploadable.read_encrypted`` returns ciphertext of the same
+        length as the underlying plaintext.
+        """
+        plaintext = b"hello world"
+        uploadable = upload.FileHandle(BytesIO(plaintext), None)
+        uploadable.set_default_encoding_parameters({
+            # These values shouldn't matter.
+            "k": 3,
+            "happy": 5,
+            "n": 10,
+            "max_segment_size": 128 * 1024,
+        })
+        encrypter = upload.EncryptAnUploadable(uploadable)
+        ciphertext = b"".join(self.successResultOf(encrypter.read_encrypted(1024, False)))
+        self.assertEqual(len(ciphertext), len(plaintext))
+
+    @given(just(b"hello world"), integers(min_value=0, max_value=len(b"hello world")))
+    def test_known_result(self, plaintext, split_at):
+        """
+        ``EncryptAnUploadable.read_encrypted`` returns a known-correct ciphertext
+        string for certain inputs.  The ciphertext is independent of the read
+        sizes.
+        """
+        convergence = b"\x42" * 16
+        uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
+        uploadable.set_default_encoding_parameters({
+            # The convergence key is a function of k, n, and max_segment_size
+            # (among other things).  The value for happy doesn't matter
+            # though.
+            "k": 3,
+            "happy": 5,
+            "n": 10,
+            "max_segment_size": 128 * 1024,
+        })
+        encrypter = upload.EncryptAnUploadable(uploadable)
+        def read(n):
+            return b"".join(self.successResultOf(encrypter.read_encrypted(n, False)))
+
+        # Read the string in one or two pieces to make sure underlying state
+        # is maintained properly.
+        first = read(split_at)
+        second = read(len(plaintext) - split_at)
+        third = read(1)
+        ciphertext = first + second + third
+
+        self.assertEqual(
+            b"Jd2LHCRXozwrEJc=",
+            b64encode(ciphertext),
+        )
+
+    def test_large_read(self):
+        """
+        ``EncryptAnUploadable.read_encrypted`` succeeds even when the requested
+        data length is much larger than the chunk size.
+        """
+        convergence = b"\x42" * 16
+        # 4kB of plaintext
+        plaintext = b"\xde\xad\xbe\xef" * 1024
+        uploadable = upload.FileHandle(BytesIO(plaintext), convergence)
+        uploadable.set_default_encoding_parameters({
+            "k": 3,
+            "happy": 5,
+            "n": 10,
+            "max_segment_size": 128 * 1024,
+        })
+        # Make the chunk size very small so we don't have to operate on a huge
+        # amount of data to exercise the relevant codepath.
+        encrypter = upload.EncryptAnUploadable(uploadable, chunk_size=1)
+        d = encrypter.read_encrypted(len(plaintext), False)
+        ciphertext = self.successResultOf(d)
+        self.assertEqual(
+            list(map(len, ciphertext)),
+            # Chunk size was specified as 1 above so we will get the whole
+            # plaintext in one byte chunks.
+            [1] * len(plaintext),
+        )
+
+
 # TODO:
 # upload with exactly 75 servers (shares_of_happiness)
 # have a download fail
@@ -1,54 +0,0 @@
-import json
-
-from twisted.trial import unittest
-from twisted.internet.defer import inlineCallbacks
-
-from eliot import log_call
-
-from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper
-
-from allmydata.web.logs import TokenAuthenticatedWebSocketServerProtocol
-
-
-class TestStreamingLogs(unittest.TestCase):
-    """
-    Test websocket streaming of logs
-    """
-
-    def setUp(self):
-        self.reactor = MemoryReactorClockResolver()
-        self.pumper = create_pumper()
-        self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
-        return self.pumper.start()
-
-    def tearDown(self):
-        return self.pumper.stop()
-
-    @inlineCallbacks
-    def test_one_log(self):
-        """
-        write a single Eliot log and see it streamed via websocket
-        """
-
-        proto = yield self.agent.open(
-            transport_config=u"ws://localhost:1234/ws",
-            options={},
-        )
-
-        messages = []
-        def got_message(msg, is_binary=False):
-            messages.append(json.loads(msg))
-        proto.on("message", got_message)
-
-        @log_call(action_type=u"test:cli:some-exciting-action")
-        def do_a_thing():
-            pass
-
-        do_a_thing()
-
-        proto.transport.loseConnection()
-        yield proto.is_closed
-
-        self.assertEqual(len(messages), 2)
-        self.assertEqual("started", messages[0]["action_status"])
-        self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import json
 from os.path import join

@@ -213,7 +225,7 @@ class IntroducerRootTests(unittest.TestCase):
         resource = IntroducerRoot(introducer_node)
         response = json.loads(
             self.successResultOf(
-                render(resource, {"t": [b"json"]}),
+                render(resource, {b"t": [b"json"]}),
             ),
         )
         self.assertEqual(
@@ -1,5 +1,7 @@
 """
 Tests for ``allmydata.web.logs``.
+
+Ported to Python 3.
 """

 from __future__ import (
@@ -9,6 +11,19 @@ from __future__ import (
     division,
 )

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+import json
+
+from twisted.trial import unittest
+from twisted.internet.defer import inlineCallbacks
+
+from eliot import log_call
+
+from autobahn.twisted.testing import create_memory_agent, MemoryReactorClockResolver, create_pumper
+
 from testtools.matchers import (
     Equals,
 )
@@ -37,6 +52,7 @@ from ..common import (

 from ...web.logs import (
     create_log_resources,
+    TokenAuthenticatedWebSocketServerProtocol,
 )

 class StreamingEliotLogsTests(SyncTestCase):
@@ -57,3 +73,47 @@ class StreamingEliotLogsTests(SyncTestCase):
             self.client.get(b"http:///v1"),
             succeeded(has_response_code(Equals(OK))),
         )
+
+
+class TestStreamingLogs(unittest.TestCase):
+    """
+    Test websocket streaming of logs
+    """
+
+    def setUp(self):
+        self.reactor = MemoryReactorClockResolver()
+        self.pumper = create_pumper()
+        self.agent = create_memory_agent(self.reactor, self.pumper, TokenAuthenticatedWebSocketServerProtocol)
+        return self.pumper.start()
+
+    def tearDown(self):
+        return self.pumper.stop()
+
+    @inlineCallbacks
+    def test_one_log(self):
+        """
+        write a single Eliot log and see it streamed via websocket
+        """
+
+        proto = yield self.agent.open(
+            transport_config=u"ws://localhost:1234/ws",
+            options={},
+        )
+
+        messages = []
+        def got_message(msg, is_binary=False):
+            messages.append(json.loads(msg))
+        proto.on("message", got_message)
+
+        @log_call(action_type=u"test:cli:some-exciting-action")
+        def do_a_thing():
+            pass
+
+        do_a_thing()
+
+        proto.transport.loseConnection()
+        yield proto.is_closed
+
+        self.assertEqual(len(messages), 2)
+        self.assertEqual("started", messages[0]["action_status"])
+        self.assertEqual("succeeded", messages[1]["action_status"])
@@ -1,5 +1,7 @@
 """
 Tests for ``allmydata.web.private``.
+
+Ported to Python 3.
 """

 from __future__ import (
@@ -9,6 +11,10 @@ from __future__ import (
     division,
 )

+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from testtools.matchers import (
     Equals,
 )
@@ -56,6 +62,7 @@ class PrivacyTests(SyncTestCase):
         return super(PrivacyTests, self).setUp()

     def _authorization(self, scheme, value):
+        value = str(value, "utf-8")
         return Headers({
             u"authorization": [u"{} {}".format(scheme, value)],
         })
@@ -90,7 +97,7 @@ class PrivacyTests(SyncTestCase):
         self.assertThat(
             self.client.head(
                 b"http:///foo/bar",
-                headers=self._authorization(SCHEME, u"foo bar"),
+                headers=self._authorization(str(SCHEME, "utf-8"), b"foo bar"),
             ),
             succeeded(has_response_code(Equals(UNAUTHORIZED))),
         )
@@ -103,7 +110,7 @@ class PrivacyTests(SyncTestCase):
         self.assertThat(
             self.client.head(
                 b"http:///foo/bar",
-                headers=self._authorization(SCHEME, self.token),
+                headers=self._authorization(str(SCHEME, "utf-8"), self.token),
             ),
             # It's a made up URL so we don't get a 200, either, but a 404.
             succeeded(has_response_code(Equals(NOT_FOUND))),
@@ -1,6 +1,18 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import time

-from urllib import (
+from urllib.parse import (
     quote,
 )

@@ -77,7 +89,7 @@ class RenderSlashUri(unittest.TestCase):
         )
         self.assertEqual(
             response_body,
-            "Invalid capability",
+            b"Invalid capability",
         )

@@ -92,7 +104,7 @@ class RenderServiceRow(unittest.TestCase):
         ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x",
                "permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3",
                }
-        srv = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
+        srv = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
         srv.get_connection_status = lambda: ConnectionStatus(False, "summary", {}, 0, 0)

         class FakeClient(_Client):
@@ -103,7 +115,7 @@ class RenderServiceRow(unittest.TestCase):
                 tub_maker=None,
                 node_config=EMPTY_CLIENT_CONFIG,
             )
-            self.storage_broker.test_add_server("test-srv", srv)
+            self.storage_broker.test_add_server(b"test-srv", srv)

         root = RootElement(FakeClient(), time.time)
         req = DummyRequest(b"")
@@ -189,7 +189,7 @@ class FakeHistory(object):
     def list_all_helper_statuses(self):
         return []

-class FakeDisplayableServer(StubServer):
+class FakeDisplayableServer(StubServer):  # type: ignore # tahoe-lafs/ticket/3573
     def __init__(self, serverid, nickname, connected,
                  last_connect_time, last_loss_time, last_rx_time):
         StubServer.__init__(self, serverid)
@@ -255,7 +255,7 @@ class FakeStorageServer(service.MultiService):
     def on_status_changed(self, cb):
         cb(self)

-class FakeClient(_Client):
+class FakeClient(_Client):  # type: ignore # tahoe-lafs/ticket/3573
     def __init__(self):
         # don't upcall to Client.__init__, since we only want to initialize a
         # minimal subset
@@ -1,6 +1,16 @@
 """
 Tests for ``allmydata.webish``.
+
+Ported to Python 3.
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 from uuid import (
     uuid4,
@@ -96,7 +106,7 @@ class TahoeLAFSRequestTests(SyncTestCase):
         ])
         self._fields_test(
             b"POST",
-            {b"content-type": b"multipart/form-data; boundary={}".format(boundary)},
+            {b"content-type": b"multipart/form-data; boundary=" + bytes(boundary, 'ascii')},
             form_data.encode("ascii"),
             AfterPreprocessing(
                 lambda fs: {
@@ -105,8 +115,8 @@ class TahoeLAFSRequestTests(SyncTestCase):
                     in fs.keys()
                 },
                 Equals({
-                    b"foo": b"bar",
-                    b"baz": b"some file contents",
+                    "foo": "bar",
+                    "baz": b"some file contents",
                 }),
             ),
         )
@@ -1,3 +1,13 @@
+"""Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from zope.interface import implementer
 from twisted.internet import defer
@@ -22,6 +22,11 @@ from past.builtins import unicode, long

 import re

+try:
+    from typing import Type
+except ImportError:
+    pass
+
 from zope.interface import implementer
 from twisted.python.components import registerAdapter

@@ -489,7 +494,7 @@ class MDMFVerifierURI(_BaseURI):
         return self


-@implementer(IURI, IDirnodeURI)
+@implementer(IDirnodeURI)
 class _DirectoryBaseURI(_BaseURI):
     def __init__(self, filenode_uri=None):
         self._filenode_uri = filenode_uri
@@ -536,7 +541,7 @@ class _DirectoryBaseURI(_BaseURI):
         return self._filenode_uri.get_storage_index()


-@implementer(IDirectoryURI)
+@implementer(IURI, IDirectoryURI)
 class DirectoryURI(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2:'
@@ -555,7 +560,7 @@ class DirectoryURI(_DirectoryBaseURI):
         return ReadonlyDirectoryURI(self._filenode_uri.get_readonly())


-@implementer(IReadonlyDirectoryURI)
+@implementer(IURI, IReadonlyDirectoryURI)
 class ReadonlyDirectoryURI(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2-RO:'
@@ -574,6 +579,7 @@ class ReadonlyDirectoryURI(_DirectoryBaseURI):
         return self


+@implementer(IURI, IDirnodeURI)
 class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):
     def __init__(self, filenode_uri=None):
         if filenode_uri:
@@ -611,7 +617,7 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
         return None


-@implementer(IDirectoryURI)
+@implementer(IURI, IDirectoryURI)
 class MDMFDirectoryURI(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2-MDMF:'
@@ -633,7 +639,7 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
         return MDMFDirectoryURIVerifier(self._filenode_uri.get_verify_cap())


-@implementer(IReadonlyDirectoryURI)
+@implementer(IURI, IReadonlyDirectoryURI)
 class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2-MDMF-RO:'
@@ -671,7 +677,7 @@ def wrap_dirnode_cap(filecap):
     raise AssertionError("cannot interpret as a directory cap: %s" % filecap.__class__)


-@implementer(IVerifierURI)
+@implementer(IURI, IVerifierURI)
 class MDMFDirectoryURIVerifier(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2-MDMF-Verifier:'
@@ -696,12 +702,12 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
         return self


-@implementer(IVerifierURI)
+@implementer(IURI, IVerifierURI)
 class DirectoryURIVerifier(_DirectoryBaseURI):

     BASE_STRING=b'URI:DIR2-Verifier:'
     BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
-    INNER_URI_CLASS=SSKVerifierURI
+    INNER_URI_CLASS=SSKVerifierURI  # type: Type[IVerifierURI]

     def __init__(self, filenode_uri=None):
         if filenode_uri:
@@ -27,6 +27,7 @@ PORTED_MODULES = [
     "allmydata.__main__",
     "allmydata._auto_deps",
     "allmydata._monkeypatch",
+    "allmydata.blacklist",
     "allmydata.codec",
     "allmydata.crypto",
     "allmydata.crypto.aes",
@@ -34,6 +35,7 @@ PORTED_MODULES = [
     "allmydata.crypto.error",
     "allmydata.crypto.rsa",
     "allmydata.crypto.util",
+    "allmydata.deep_stats",
     "allmydata.dirnode",
     "allmydata.hashtree",
     "allmydata.immutable.checker",
@@ -69,6 +71,7 @@ PORTED_MODULES = [
     "allmydata.mutable.servermap",
     "allmydata.node",
     "allmydata.nodemaker",
+    "allmydata.stats",
     "allmydata.storage_client",
     "allmydata.storage.common",
     "allmydata.storage.crawler",
@@ -80,6 +83,7 @@ PORTED_MODULES = [
     "allmydata.storage.shares",
     "allmydata.test.no_network",
     "allmydata.test.mutable.util",
+    "allmydata.unknown",
     "allmydata.uri",
     "allmydata.util._python3",
     "allmydata.util.abbreviate",
@@ -110,6 +114,8 @@ PORTED_MODULES = [
     "allmydata.util.spans",
     "allmydata.util.statistics",
     "allmydata.util.time_format",
+    "allmydata.web.logs",
+    "allmydata.webish",
 ]

 PORTED_TEST_MODULES = [
@@ -166,6 +172,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_repairer",
     "allmydata.test.test_spans",
     "allmydata.test.test_statistics",
+    "allmydata.test.test_stats",
     "allmydata.test.test_storage",
     "allmydata.test.test_storage_client",
     "allmydata.test.test_storage_web",
@@ -181,6 +188,11 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_util",
     "allmydata.test.web.test_common",
     "allmydata.test.web.test_grid",
-    "allmydata.test.web.test_util",
+    "allmydata.test.web.test_introducer",
+    "allmydata.test.web.test_logs",
+    "allmydata.test.web.test_private",
+    "allmydata.test.web.test_root",
     "allmydata.test.web.test_status",
+    "allmydata.test.web.test_util",
+    "allmydata.test.web.test_webish",
 ]
@@ -15,7 +15,18 @@ if PY2:

 import time

+try:
+    from typing import (
+        Callable,
+        Any,
+    )
+except ImportError:
+    pass
+
 from foolscap.api import eventually
+from eliot.twisted import (
+    inline_callbacks,
+)
 from twisted.internet import defer, reactor, error
 from twisted.python.failure import Failure

@@ -201,3 +212,22 @@ class WaitForDelayedCallsMixin(PollMixin):
         d.addErrback(log.err, "error while waiting for delayed calls")
         d.addBoth(lambda ign: res)
         return d
+
+@inline_callbacks
+def until(
+        action,  # type: Callable[[], defer.Deferred[Any]]
+        condition,  # type: Callable[[], bool]
+):
+    # type: (...) -> defer.Deferred[None]
+    """
+    Run a Deferred-returning function until a condition is true.
+
+    :param action: The action to run.
+    :param condition: The predicate signaling stop.
+
+    :return: A Deferred that fires after the condition signals stop.
+    """
+    while True:
+        yield action()
+        if condition():
+            break
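A minimal usage sketch for the until() helper added above (illustrative only,
not part of this diff; the state dict and bump() action are hypothetical):

    from twisted.internet import defer
    from twisted.internet.task import react

    from allmydata.util.deferredutil import until

    def main(reactor):
        state = {"count": 0}  # hypothetical progress marker

        def bump():
            # The action must return a Deferred; this one fires immediately.
            state["count"] += 1
            return defer.succeed(None)

        # Re-run bump() until the predicate reports completion.
        return until(bump, lambda: state["count"] >= 3)

    react(main)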
@@ -311,7 +311,7 @@ def precondition_abspath(path):

 _getfullpathname = None
 try:
-    from nt import _getfullpathname
+    from nt import _getfullpathname  # type: ignore
 except ImportError:
     pass

@@ -14,6 +14,12 @@ if PY2:
     from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import time

+try:
+    from typing import List
+except ImportError:
+    pass
+
 from twisted.internet import task

 class TimeoutError(Exception):
@@ -23,7 +29,7 @@ class PollComplete(Exception):
     pass

 class PollMixin(object):
-    _poll_should_ignore_these_errors = []
+    _poll_should_ignore_these_errors = []  # type: List[Exception]

     def poll(self, check_f, pollinterval=0.01, timeout=1000):
         # Return a Deferred, then call check_f periodically until it returns
@@ -6,6 +6,11 @@ Can eventually be merged back into allmydata.web.common.

 from past.builtins import unicode

+try:
+    from typing import Optional
+except ImportError:
+    pass
+
 from twisted.web import resource, http

 from allmydata.util import abbreviate
@@ -55,7 +60,7 @@ class MultiFormatResource(resource.Resource, object):
     format if nothing else is given as the ``formatDefault``.
     """
     formatArgument = "t"
-    formatDefault = None
+    formatDefault = None  # type: Optional[str]

     def render(self, req):
         """
@@ -26,10 +26,10 @@ class IntroducerRoot(MultiFormatResource):
         self.introducer_node = introducer_node
         self.introducer_service = introducer_node.getServiceNamed("introducer")
         # necessary as a root Resource
-        self.putChild("", self)
+        self.putChild(b"", self)
         static_dir = resource_filename("allmydata.web", "static")
         for filen in os.listdir(static_dir):
-            self.putChild(filen, static.File(os.path.join(static_dir, filen)))
+            self.putChild(filen.encode("utf-8"), static.File(os.path.join(static_dir, filen)))

     def _create_element(self):
         """
@@ -66,7 +66,7 @@ class IntroducerRoot(MultiFormatResource):
             announcement_summary[service_name] += 1
         res[u"announcement_summary"] = announcement_summary

-        return json.dumps(res, indent=1) + b"\n"
+        return (json.dumps(res, indent=1) + "\n").encode("utf-8")


 class IntroducerRootElement(Element):
@@ -1,3 +1,6 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import (
     print_function,
     unicode_literals,
@@ -49,7 +52,11 @@ class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
         """
         # probably want a try/except around here? what do we do if
         # transmission fails or anything else bad happens?
-        self.sendMessage(json.dumps(message))
+        encoded = json.dumps(message)
+        if isinstance(encoded, str):
+            # On Python 3 dumps() returns Unicode...
+            encoded = encoded.encode("utf-8")
+        self.sendMessage(encoded)

     def onOpen(self):
         """
@@ -61,7 +61,16 @@ class IToken(ICredentials):
     pass


-@implementer(IToken)
+# Workaround for Shoobx/mypy-zope#26, where without suitable
+# stubs for twisted classes (ICredentials), IToken does not
+# appear to be an Interface. The proper fix appears to be to
+# create stubs for twisted
+# (https://twistedmatrix.com/trac/ticket/9717). For now,
+# bypassing the inline decorator syntax works around the issue.
+_itoken_impl = implementer(IToken)
+
+
+@_itoken_impl
 @attr.s
 class Token(object):
     proposed_token = attr.ib(type=bytes)
@@ -1,6 +1,8 @@
+from future.utils import PY3
+
 import os
 import time
-import urllib
+from urllib.parse import quote as urlquote

 from hyperlink import DecodedURL, URL
 from pkg_resources import resource_filename
@@ -81,7 +83,7 @@ class URIHandler(resource.Resource, object):
         # it seems Nevow was creating absolute URLs including
         # host/port whereas req.uri is absolute (but lacks host/port)
         redir_uri = URL.from_text(req.prePathURL().decode('utf8'))
-        redir_uri = redir_uri.child(urllib.quote(uri_arg).decode('utf8'))
+        redir_uri = redir_uri.child(urlquote(uri_arg))
         # add back all the query args that AREN'T "?uri="
         for k, values in req.args.items():
             if k != b"uri":
@@ -227,26 +229,26 @@ class Root(MultiFormatResource):
         self._client = client
         self._now_fn = now_fn

-        # Children need to be bytes; for now just doing these to make specific
-        # tests pass on Python 3, but eventually will do all them when this
-        # module is ported to Python 3 (if not earlier).
         self.putChild(b"uri", URIHandler(client))
-        self.putChild("cap", URIHandler(client))
+        self.putChild(b"cap", URIHandler(client))

         # Handler for everything beneath "/private", an area of the resource
         # hierarchy which is only accessible with the private per-node API
         # auth token.
-        self.putChild("private", create_private_tree(client.get_auth_token))
+        self.putChild(b"private", create_private_tree(client.get_auth_token))

-        self.putChild("file", FileHandler(client))
-        self.putChild("named", FileHandler(client))
-        self.putChild("status", status.Status(client.get_history()))
-        self.putChild("statistics", status.Statistics(client.stats_provider))
+        self.putChild(b"file", FileHandler(client))
+        self.putChild(b"named", FileHandler(client))
+        self.putChild(b"status", status.Status(client.get_history()))
+        self.putChild(b"statistics", status.Statistics(client.stats_provider))
         static_dir = resource_filename("allmydata.web", "static")
         for filen in os.listdir(static_dir):
-            self.putChild(filen, static.File(os.path.join(static_dir, filen)))
+            child_path = filen
+            if PY3:
+                child_path = filen.encode("utf-8")
+            self.putChild(child_path, static.File(os.path.join(static_dir, filen)))

-        self.putChild("report_incident", IncidentReporter())
+        self.putChild(b"report_incident", IncidentReporter())

     @exception_to_child
     def getChild(self, path, request):
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from six import ensure_str

 import re, time, tempfile
@@ -65,18 +77,24 @@ class TahoeLAFSRequest(Request, object):
             self.path, argstring = x
             self.args = parse_qs(argstring, 1)

-        if self.method == 'POST':
+        if self.method == b'POST':
             # We use FieldStorage here because it performs better than
             # cgi.parse_multipart(self.content, pdict) which is what
             # twisted.web.http.Request uses.
-            self.fields = FieldStorage(
-                self.content,
-                {
-                    name.lower(): value[-1]
-                    for (name, value)
-                    in self.requestHeaders.getAllRawHeaders()
-                },
-                environ={'REQUEST_METHOD': 'POST'})
+
+            headers = {
+                ensure_str(name.lower()): ensure_str(value[-1])
+                for (name, value)
+                in self.requestHeaders.getAllRawHeaders()
+            }
+
+            if 'content-length' not in headers:
+                # Python 3's cgi module would really, really like us to set Content-Length.
+                self.content.seek(0, 2)
+                headers['content-length'] = str(self.content.tell())
+                self.content.seek(0)
+
+            self.fields = FieldStorage(self.content, headers, environ={'REQUEST_METHOD': 'POST'})
             self.content.seek(0)

         self._tahoeLAFSSecurityPolicy()
@@ -217,7 +217,12 @@ def initialize():
         # Instead it "mangles" or escapes them using \x7F as an escape character, which we
         # unescape here.
         def unmangle(s):
-            return re.sub(u'\\x7F[0-9a-fA-F]*\\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)
+            return re.sub(
+                u'\\x7F[0-9a-fA-F]*\\;',
+                # type ignored for 'unichr' (Python 2 only)
+                lambda m: unichr(int(m.group(0)[1:-1], 16)),  # type: ignore
+                s,
+            )

         try:
             argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
12 tox.ini
@@ -7,7 +7,7 @@
 twisted = 1

 [tox]
-envlist = codechecks,py27,py36,pypy27
+envlist = typechecks,codechecks,py27,py36,pypy27
 minversion = 2.4

 [testenv]
@@ -112,6 +112,16 @@ commands =
     # file. See pyproject.toml for legal <change type> values.
     python -m towncrier.check --pyproject towncrier.pyproject.toml


+[testenv:typechecks]
+skip_install = True
+deps =
+    mypy
+    git+https://github.com/Shoobx/mypy-zope
+    git+https://github.com/warner/foolscap
+commands = mypy src
+
+
 [testenv:draftnews]
 passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
 # see comment in [testenv] about "certifi"