Merge branch 'master' into 2916.grid-manager-proposal.5

meejah
2021-03-23 16:04:50 -06:00
162 changed files with 3120 additions and 1753 deletions


@ -29,7 +29,7 @@ workflows:
- "debian-9": &DOCKERHUB_CONTEXT
context: "dockerhub-auth"
- "debian-8":
- "debian-10":
<<: *DOCKERHUB_CONTEXT
requires:
- "debian-9"
@ -86,11 +86,6 @@ workflows:
# integration tests.
- "debian-9"
# Generate the underlying data for a visualization to aid with Python 3
# porting.
- "build-porting-depgraph":
<<: *DOCKERHUB_CONTEXT
- "typechecks":
<<: *DOCKERHUB_CONTEXT
@ -107,7 +102,7 @@ workflows:
- "master"
jobs:
- "build-image-debian-8":
- "build-image-debian-10":
<<: *DOCKERHUB_CONTEXT
- "build-image-debian-9":
<<: *DOCKERHUB_CONTEXT
@ -213,7 +208,7 @@ jobs:
# filenames and argv).
LANG: "en_US.UTF-8"
# Select a tox environment to run for this job.
TAHOE_LAFS_TOX_ENVIRONMENT: "py27-coverage"
TAHOE_LAFS_TOX_ENVIRONMENT: "py27"
# Additional arguments to pass to tox.
TAHOE_LAFS_TOX_ARGS: ""
# The path in which test artifacts will be placed.
@ -223,7 +218,7 @@ jobs:
WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
PIP_FIND_LINKS: "file:///tmp/wheelhouse"
# Upload the coverage report.
UPLOAD_COVERAGE: "yes"
UPLOAD_COVERAGE: ""
# pip cannot install packages if the working directory is not readable.
# We want to run a lot of steps as nobody instead of as root.
@ -277,11 +272,11 @@ jobs:
fi
debian-8:
debian-10:
<<: *DEBIAN
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/debian:8-py2.7"
image: "tahoelafsci/debian:10-py2.7"
user: "nobody"
@ -376,7 +371,7 @@ jobs:
# this reporter on Python 3. So drop that and just specify the
# reporter.
TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
TAHOE_LAFS_TOX_ENVIRONMENT: "py36-coverage"
TAHOE_LAFS_TOX_ENVIRONMENT: "py36"
ubuntu-20-04:
@ -451,33 +446,6 @@ jobs:
# them in parallel.
nix-build --cores 3 --max-jobs 2 nix/
# Generate up-to-date data for the dependency graph visualizer.
build-porting-depgraph:
# Get a system in which we can easily install Tahoe-LAFS and all its
# dependencies. The dependency graph analyzer works by executing the code.
# It's Python, what do you expect?
<<: *DEBIAN
steps:
- "checkout"
- add_ssh_keys:
fingerprints:
# Jean-Paul Calderone <exarkun@twistedmatrix.com> (CircleCI depgraph key)
# This lets us push to tahoe-lafs/tahoe-depgraph in the next step.
- "86:38:18:a7:c0:97:42:43:18:46:55:d6:21:b0:5f:d4"
- run:
name: "Setup Python Environment"
command: |
/tmp/venv/bin/pip install -e /tmp/project
- run:
name: "Generate dependency graph data"
command: |
. /tmp/venv/bin/activate
./misc/python3/depgraph.sh
typechecks:
docker:
- <<: *DOCKERHUB_AUTH
@ -529,12 +497,12 @@ jobs:
docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
build-image-debian-8:
build-image-debian-10:
<<: *BUILD_IMAGE
environment:
DISTRO: "debian"
TAG: "8"
TAG: "10"
PYTHON_VERSION: "2.7"


@ -1,48 +0,0 @@
# Override defaults for codecov.io checks.
#
# Documentation is at https://docs.codecov.io/docs/codecov-yaml;
# reference is at https://docs.codecov.io/docs/codecovyml-reference.
#
# To validate this file, use:
#
# curl --data-binary @.codecov.yml https://codecov.io/validate
#
# Codecov's defaults seem to leave red marks in GitHub CI checks in a
# rather arbitrary manner, probably because of non-determinism in
# coverage (see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2891)
# and maybe because computers are bad with floating point numbers.
# Allow coverage percentage a precision of zero decimals, and round to
# the nearest number (for example, 89.957 to 90%; 89.497 to 89%).
# Coverage above 90% is good, below 80% is bad.
coverage:
round: nearest
range: 80..90
precision: 0
# Aim for a target test coverage of 90% in codecov/project check (do
# not allow project coverage to drop below that), and allow
# codecov/patch a threshold of 1% (allow coverage in changes to drop
# by that much, and no more). That should be good enough for us.
status:
project:
default:
target: 90%
threshold: 1%
patch:
default:
threshold: 1%
codecov:
# This is a public repository so supposedly we don't "need" to use an upload
# token. However, using one makes sure that CI jobs running against forked
# repositories have coverage uploaded to the right place in codecov so
# their reports aren't incomplete.
token: "abf679b6-e2e6-4b33-b7b5-6cfbd41ee691"
notify:
# The reference documentation suggests that this is the default setting:
# https://docs.codecov.io/docs/codecovyml-reference#codecovnotifywait_for_ci
# However, observation suggests otherwise.
wait_for_ci: true


@ -6,6 +6,10 @@ on:
- "master"
pull_request:
env:
# Tell Hypothesis which configuration we want it to use.
TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
jobs:
coverage:
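For background, a Hypothesis profile such as ``ci`` has to be registered somewhere in the test suite and then selected from this environment variable. A minimal, hypothetical sketch of that pattern (the concrete settings here are assumptions, not what Tahoe-LAFS actually registers)::

    # Hypothetical sketch of honoring TAHOE_LAFS_HYPOTHESIS_PROFILE.
    import os

    from hypothesis import HealthCheck, settings

    # A "ci" profile trades speed for thoroughness: more examples,
    # no per-example deadline.
    settings.register_profile(
        "ci",
        max_examples=200,
        deadline=None,
        suppress_health_check=[HealthCheck.too_slow],
    )

    # Outside CI, fall back to Hypothesis's built-in "default" profile.
    settings.load_profile(
        os.environ.get("TAHOE_LAFS_HYPOTHESIS_PROFILE", "default"))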
@ -79,11 +83,110 @@ jobs:
name: eliot.log
path: eliot.log
- name: Upload coverage report
uses: codecov/codecov-action@v1
with:
token: abf679b6-e2e6-4b33-b7b5-6cfbd41ee691
file: coverage.xml
# Upload this job's coverage data to Coveralls. While there is a GitHub
# Action for this, as of Jan 2021 it does not support Python coverage
# files - only lcov files. Therefore, we use coveralls-python, the
# coveralls.io-supplied Python reporter, for this.
- name: "Report Coverage to Coveralls"
run: |
pip install coveralls
python -m coveralls
env:
# Some magic value required for some magic reason.
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
# Help coveralls identify our project.
COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
# Every source of coverage reports needs a unique "flag name".
# Construct one by smashing a few variables from the matrix together
# here.
COVERALLS_FLAG_NAME: "run-${{ matrix.os }}-${{ matrix.python-version }}"
# Mark the data as just one piece of many because we have more than
# one instance of this job (Windows, macOS) which collects and
# reports coverage. This is necessary to cause Coveralls to merge
# multiple coverage results into a single report. Note the merge
# only happens when we "finish" a particular build, as identified by
# its "build_num" (aka "service_number").
COVERALLS_PARALLEL: true
# Tell Coveralls that we're done reporting coverage data. Since we're using
# the "parallel" mode where more than one coverage data file is merged into
# a single report, we have to tell Coveralls when we've uploaded all of the
# data files. This does it. We make sure it runs last by making it depend
# on *all* of the coverage-collecting jobs.
finish-coverage-report:
# There happens to just be one coverage-collecting job at the moment. If
# the coverage reports are broken and someone added more
# coverage-collecting jobs to this workflow but didn't update this, that's
# why.
needs:
- "coverage"
runs-on: "ubuntu-latest"
steps:
- name: "Check out Tahoe-LAFS sources"
uses: "actions/checkout@v2"
- name: "Finish Coveralls Reporting"
run: |
# coveralls-python does have a `--finish` option but it doesn't seem
# to work, at least for us.
# https://github.com/coveralls-clients/coveralls-python/issues/248
#
# But all it does is this simple POST so we can just send it
# ourselves. The only hard part is guessing what the POST
# parameters mean. And I've done that for you already.
#
# Since the build is done I'm going to guess that "done" is a fine
# value for status.
#
# That leaves "build_num". The coveralls documentation gives some
# hints about it. It suggests using $CIRCLE_WORKFLOW_ID if your job
# is on CircleCI. CircleCI documentation says this about
# CIRCLE_WORKFLOW_ID:
#
# Observation of the coveralls.io web interface, logs from the
# coveralls command in action, and experimentation suggest the
# value for PRs is something more like:
#
# <GIT MERGE COMMIT HASH>-PR-<PR NUM>
#
# For branches, it's just the git branch tip hash.
# For pull requests, refs/pull/<PR NUM>/merge was just checked out,
# so HEAD will refer to the right revision. For branches, HEAD
# is also the tip of the branch.
REV=$(git rev-parse HEAD)
# We can get the PR number from the "context".
#
# https://docs.github.com/en/free-pro-team@latest/developers/webhooks-and-events/webhook-events-and-payloads#pull_request
#
# (via <https://github.community/t/github-ref-is-inconsistent/17728/3>).
#
# If this is a pull request, `github.event` is a `pull_request`
# structure which has `number` right in it.
#
# If this is a push, `github.event` is a `push` instead but we only
# need the revision to construct the build_num.
PR=${{ github.event.number }}
if [ "${PR}" = "" ]; then
BUILD_NUM=$REV
else
BUILD_NUM=$REV-PR-$PR
fi
REPO_NAME=$GITHUB_REPOSITORY
curl \
-k \
https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN \
-d \
"payload[build_num]=$BUILD_NUM&payload[status]=done&payload[repo_name]=$REPO_NAME"
env:
# Some magic value required for some magic reason.
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
# Help coveralls identify our project.
COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
integration:
runs-on: ${{ matrix.os }}


@ -6,7 +6,7 @@ Free and Open decentralized data store
`Tahoe-LAFS <https://www.tahoe-lafs.org>`__ (Tahoe Least-Authority File Store) is the first free software / open-source storage technology that distributes your data across multiple servers. Even if some servers fail or are taken over by an attacker, the entire file store continues to function correctly, preserving your privacy and security.
|Contributor Covenant| |readthedocs| |travis| |circleci| |codecov|
|Contributor Covenant| |readthedocs| |travis| |circleci| |coveralls|
Table of contents
@ -125,9 +125,9 @@ See `TGPPL.PDF <https://tahoe-lafs.org/~zooko/tgppl.pdf>`__ for why the TGPPL ex
.. |circleci| image:: https://circleci.com/gh/tahoe-lafs/tahoe-lafs.svg?style=svg
:target: https://circleci.com/gh/tahoe-lafs/tahoe-lafs
.. |codecov| image:: https://codecov.io/github/tahoe-lafs/tahoe-lafs/coverage.svg?branch=master
:alt: test coverage percentage
:target: https://codecov.io/github/tahoe-lafs/tahoe-lafs?branch=master
.. |coveralls| image:: https://coveralls.io/repos/github/tahoe-lafs/tahoe-lafs/badge.svg
:alt: code coverage
:target: https://coveralls.io/github/tahoe-lafs/tahoe-lafs
.. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg
:alt: code of conduct


@ -173,7 +173,9 @@ from PyPI with ``venv/bin/pip install tahoe-lafs``. After installation, run
Install From a Source Tarball
-----------------------------
You can also install directly from the source tarball URL::
You can also install directly from the source tarball URL. To verify
signatures, first see verifying_signatures_ and replace the URL in the
following instructions with the local filename.
% virtualenv venv
New python executable in ~/venv/bin/python2.7
@ -189,6 +191,40 @@ You can also install directly from the source tarball URL::
tahoe-lafs: 1.14.0
...
.. _verifying_signatures:
Verifying Signatures
--------------------
First download the source tarball and then any signatures. There are several
developers who are able to produce signatures for a release. A release may
have multiple signatures. All should be valid and you should confirm at least
one of them (ideally, confirm all).
This statement, signed by the existing Tahoe release-signing key, attests to
those developers authorized to sign a Tahoe release:
.. include:: developer-release-signatures
:code:
Signatures are made available beside the release. So for example, a release
like ``https://tahoe-lafs.org/downloads/tahoe-lafs-1.16.0.tar.bz2`` might
have signatures ``tahoe-lafs-1.16.0.tar.bz2.meejah.asc`` and
``tahoe-lafs-1.16.0.tar.bz2.warner.asc``.
To verify the signatures using GnuPG::
% gpg --verify tahoe-lafs-1.16.0.tar.bz2.meejah.asc tahoe-lafs-1.16.0.tar.bz2
gpg: Signature made XXX
gpg: using RSA key 9D5A2BD5688ECB889DEBCD3FC2602803128069A7
gpg: Good signature from "meejah <meejah@meejah.ca>" [full]
% gpg --verify tahoe-lafs-1.16.0.tar.bz2.warner.asc tahoe-lafs-1.16.0.tar.bz2
gpg: Signature made XXX
gpg: using RSA key 967EFE06699872411A77DF36D43B4C9C73225AAF
gpg: Good signature from "Brian Warner <warner@lothar.com>" [full]
Extras
------


@ -28,7 +28,7 @@ import os
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['recommonmark']
extensions = ['recommonmark', 'sphinx_rtd_theme']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@ -107,7 +107,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the


@ -0,0 +1,42 @@
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512
January 20, 2021
Any of the following core Tahoe contributers may sign a release. Each
release MUST be signed by at least one developer but MAY have
additional signatures. Each developer independently produces a
signature which is made available beside Tahoe releases after 1.15.0
This statement is signed by the existing Tahoe release key. Any future
such statements may be signed by it OR by any two developers (for
example, to add or remove developers from the list).
meejah
0xC2602803128069A7
9D5A 2BD5 688E CB88 9DEB CD3F C260 2803 1280 69A7
https://meejah.ca/meejah.asc
jean-paul calderone (exarkun)
0xE27B085EDEAA4B1B
96B9 C5DA B2EA 9EB6 7941 9DB7 E27B 085E DEAA 4B1B
https://twistedmatrix.com/~exarkun/E27B085EDEAA4B1B.asc
brian warner (lothar)
0x863333C265497810
5810 F125 7F8C F753 7753 895A 8633 33C2 6549 7810
https://www.lothar.com/warner-gpg.html
-----BEGIN PGP SIGNATURE-----
iQEzBAEBCgAdFiEE405i0G0Oac/KQXn/veDTHWhmanoFAmAHIyIACgkQveDTHWhm
anqhqQf/YSbMXL+gwFhAZsjX39EVlbr/Ik7WPPkJW7v1oHybTnwFpFIc52COU1x/
sqRfk4OyYtz9IBgOPXoWgXu9R4qdK6vYKxEsekcGT9C5l0OyDz8YWXEWgbGK5mvI
aEub9WucD8r2uOQnnW6DtznFuEpvOjtf/+2BU767+bvLsbViW88ocbuLfCqLdOgD
WZT9j3M+Y2Dc56DAJzP/4fkrUSVIofZStYp5u9HBjburgcYIp0g/cyc4xXRoi6Mp
lFTRFv3MIjmoamzSQseoIgP6fi8QRqPrffPrsyqAp+06mJnPhxxFqxtO/ZErmpSa
+BGrLBxdWa8IF9U1A4Fs5nuAzAKMEg==
=E9J+
-----END PGP SIGNATURE-----


@ -101,12 +101,12 @@ Alice generates a key pair and secures it properly.
Alice generates a self-signed storage node certificate with the key pair.
Alice's storage node announces (to an introducer) a fURL containing (among other information) the SPKI hash.
Imagine the SPKI hash is ``i5xb...``.
This results in a fURL of ``pb://i5xb...@example.com:443/g3m5...#v=2`` [#]_.
This results in a fURL of ``pb://i5xb...@example.com:443/g3m5...#v=1``.
Bob creates a client node pointed at the same introducer.
Bob's client node receives the announcement from Alice's storage node
(indirected through the introducer).
Bob's client node recognizes the fURL as referring to an HTTP-dialect server due to the ``v=2`` fragment.
Bob's client node recognizes the fURL as referring to an HTTP-dialect server due to the ``v=1`` fragment.
Bob's client node can now perform a TLS handshake with a server at the address in the fURL location hints
(``example.com:443`` in this example).
Following the above described validation procedures,
@ -156,7 +156,7 @@ Such an announcement will resemble this::
{
"anonymous-storage-FURL": "pb://...", # The old key
"gbs-anonymous-storage-url": "pb://...#v=2" # The new key
"gbs-anonymous-storage-url": "pb://...#v=1" # The new key
}
The transition process will proceed in three stages:
@ -252,7 +252,7 @@ For example::
"delete-mutable-shares-with-zero-length-writev": true,
"fills-holes-with-zero-bytes": true,
"prevents-read-past-end-of-share-data": true,
"gbs-anonymous-storage-url": "pb://...#v=2"
"gbs-anonymous-storage-url": "pb://...#v=1"
},
"application-version": "1.13.0"
}
@ -286,8 +286,13 @@ We considered making this ``POST /v1/immutable`` instead.
The motivation was to keep *storage index* out of the request URL.
Request URLs have an elevated chance of being logged by something.
We were concerned that having the *storage index* logged may increase some risks.
However, we decided this does not matter because the *storage index* can only be used to read the share (which is ciphertext).
TODO Verify this conclusion.
However, we decided this does not matter because:
* the *storage index* can only be used to retrieve (not decrypt) the ciphertext-bearing share.
* the *storage index* is already persistently present on the storage node in the form of directory names in the storage server's ``shares`` directory.
* the request is made via HTTPS and so only Tahoe-LAFS can see the contents,
therefore no proxy servers can perform any extra logging.
* Tahoe-LAFS itself does not currently log HTTP request URLs.
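As a concrete illustration of that reasoning, here is a rough, hypothetical sketch of an upload request in the shape this proposal describes; the host, storage index value, and payload are made up, and only the host name would be visible to a network observer::

    # Illustrative only: where the storage index appears in a GBS-style
    # upload.  Endpoint shape follows this proposal; values are made up.
    import requests

    STORAGE_INDEX = "i5xb..."      # hypothetical base32 storage index
    SHARE_NUMBER = 7
    share_bytes = b"\x00" * 1024   # stand-in for ciphertext share data

    # The storage index travels in the URL path.  Under HTTPS the path
    # is part of the encrypted stream, so intermediaries see only the
    # host name.
    response = requests.put(
        "https://storage.example/v1/immutable/{}/{}".format(
            STORAGE_INDEX, SHARE_NUMBER),
        data=share_bytes,
    )
    print(response.status_code)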
``PUT /v1/immutable/:storage_index/:share_number``
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@ -493,13 +498,6 @@ Just like the immutable version.
Note we use `base64url`_ rather than the Foolscap- and Tahoe-LAFS-preferred Base32.
.. [#]
Other schemes for differentiating between the two server types are possible.
If the tubID length remains different,
that provides an unambiguous (if obscure) signal about which protocol to use.
Or a different scheme could be adopted
(``[x-]pb+http``, ``x-tahoe+http``, ``x-gbs`` come to mind).
.. [#]
https://www.cvedetails.com/cve/CVE-2017-5638/
.. [#]


@ -137,6 +137,12 @@ Did anyone contribute a hack since the last release? If so, then
https://tahoe-lafs.org/hacktahoelafs/ needs to be updated.
Sign Git Tag
````````````
- git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
Upload Artifacts
````````````````


@ -46,7 +46,7 @@ class ProvisioningTool(rend.Page):
req = inevow.IRequest(ctx)
def getarg(name, astype=int):
if req.method != "POST":
if req.method != b"POST":
return None
if name in req.fields:
return astype(req.fields[name].value)


@ -30,7 +30,7 @@ class Root(rend.Page):
def run(portnum):
root = Root()
root.putChild("tahoe.css", static.File("tahoe.css"))
root.putChild(b"tahoe.css", static.File("tahoe.css"))
site = appserver.NevowSite(root)
s = strports.service("tcp:%d" % portnum, site)
s.startService()


@ -0,0 +1 @@
Debian 8 support has been replaced with Debian 10 support.


@ -0,0 +1 @@
The Tahoe command line now always uses UTF-8 to decode its arguments, regardless of locale.

newsfragments/3588.minor (new, empty)
newsfragments/3592.minor (new, empty)
newsfragments/3593.minor (new, empty)
newsfragments/3596.minor (new, empty)
newsfragments/3600.minor (new, empty)
newsfragments/3605.minor (new, empty)
newsfragments/3606.minor (new, empty)
newsfragments/3607.minor (new, empty)
newsfragments/3608.minor (new, empty)
newsfragments/3611.minor (new, empty)
newsfragments/3612.minor (new, empty)
newsfragments/3613.minor (new, empty)
newsfragments/3615.minor (new, empty)
newsfragments/3617.minor (new, empty)
newsfragments/3618.minor (new, empty)
newsfragments/3620.minor (new, empty)
newsfragments/3621.minor (new, empty)
newsfragments/3623.minor (new, one blank line)
newsfragments/3624.minor (new, empty)
newsfragments/3625.minor (new, empty)
newsfragments/3628.minor (new, empty)


@ -0,0 +1 @@
The NixOS-packaged Tahoe-LAFS now knows its own version.

newsfragments/3631.minor (new, empty)


@ -0,0 +1 @@
Tahoe-LAFS now uses a forked version of txi2p (named txi2p-tahoe) with Python 3 support.

newsfragments/3634.minor (new, empty)
newsfragments/3635.minor (new, empty)
newsfragments/3637.minor (new, empty)
newsfragments/3638.minor (new, empty)
newsfragments/3640.minor (new, empty)
newsfragments/3642.minor (new, empty)

newsfragments/3644.other (new):
The "Great Black Swamp" proposed specification has been changed to use ``v=1`` as the URL version identifier.

newsfragments/3646.minor (new, empty)


@ -1,5 +1,5 @@
{ fetchFromGitHub, lib
, python
, git, python
, twisted, foolscap, zfec
, setuptools, setuptoolsTrial, pyasn1, zope_interface
, service-identity, pyyaml, magic-wormhole, treq, appdirs
@ -9,7 +9,35 @@
python.pkgs.buildPythonPackage rec {
version = "1.14.0.dev";
name = "tahoe-lafs-${version}";
src = lib.cleanSource ../.;
src = lib.cleanSourceWith {
src = ../.;
filter = name: type:
let
basename = baseNameOf name;
split = lib.splitString ".";
join = builtins.concatStringsSep ".";
ext = join (builtins.tail (split basename));
# Build up a bunch of knowledge about what kind of file this is.
isTox = type == "directory" && basename == ".tox";
isTrialTemp = type == "directory" && basename == "_trial_temp";
isVersion = basename == "version.py";
isBytecode = ext == "pyc" || ext == "pyo";
isBackup = lib.hasSuffix "~" basename;
isTemporary = lib.hasPrefix "#" basename && lib.hasSuffix "#" basename;
isSymlink = type == "symlink";
in
# Exclude all these things
! (isTrialTemp
|| isTox
|| isVersion
|| isBytecode
|| isBackup
|| isTemporary
|| isSymlink
);
};
postPatch = ''
# Chroots don't have /etc/hosts and /etc/resolv.conf, so work around
@ -28,10 +56,17 @@ python.pkgs.buildPythonPackage rec {
rm src/allmydata/test/test_i2p_provider.py
rm src/allmydata/test/test_connections.py
rm src/allmydata/test/cli/test_create.py
rm src/allmydata/test/test_client.py
# Since we're deleting files, this complains they're missing. For now Nix
# is Python 2-only, anyway, so these tests don't add anything yet.
rm src/allmydata/test/test_python3.py
'';
nativeBuildInputs = [
git
];
propagatedBuildInputs = with python.pkgs; [
twisted foolscap zfec appdirs
setuptoolsTrial pyasn1 zope_interface


@ -151,10 +151,13 @@ tor_requires = [
]
i2p_requires = [
# txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
# URL lookups are in PEP-508 (via https://stackoverflow.com/a/54794506).
# Also see the comment in tor_requires.
"txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p",
# txi2p has Python 3 support in master branch, but it has not been
# released -- see https://github.com/str4d/txi2p/issues/10. We
# could use a fork for Python 3 until txi2p's maintainers are back
# in action. For Python 2, we could continue using the txi2p
# version about which no one has complained to us so far.
"txi2p; python_version < '3.0'",
"txi2p-tahoe >= 0.3.5; python_version > '3.0'",
]
if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
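The conditional requirements above rely on PEP 508 environment markers, which installers evaluate against the running interpreter. A small sketch of that evaluation, assuming the ``packaging`` library is available::

    # Sketch: how pip-style tools decide which txi2p requirement applies.
    from packaging.markers import Marker
    from packaging.requirements import Requirement

    req = Requirement("txi2p-tahoe >= 0.3.5; python_version > '3.0'")
    # True on a Python 3 interpreter (dependency installed), False on
    # Python 2 (dependency skipped).
    print(req.marker.evaluate())
    print(Marker("python_version < '3.0'").evaluate())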


@ -63,7 +63,7 @@ class Blacklist(object):
reason = self.entries.get(si, None)
if reason is not None:
# log this to logs/twistd.log, since web logs go there too
twisted_log.msg("blacklist prohibited access to SI %s: %s" %
twisted_log.msg("blacklist prohibited access to SI %r: %r" %
(base32.b2a(si), reason))
return reason


@ -1,3 +1,14 @@
"""Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import unicode
from zope.interface import implementer


@ -1,4 +1,16 @@
from past.builtins import unicode
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
# Don't use future str to prevent leaking future's newbytes into foolscap, which they break.
from past.builtins import unicode as str
import os
import stat
@ -371,8 +383,8 @@ class _StoragePlugins(object):
"""
return set(
config.get_config(
"storage", "plugins", b""
).decode("ascii").split(u",")
"storage", "plugins", ""
).split(u",")
) - {u""}
@classmethod
@ -467,7 +479,7 @@ def create_introducer_clients(config, main_tub, _introducer_factory=None):
introducers = config.get_introducer_configuration()
for petname, (furl, cache_path) in introducers.items():
for petname, (furl, cache_path) in list(introducers.items()):
ic = _introducer_factory(
main_tub,
furl.encode("ascii"),
@ -687,7 +699,7 @@ class _Client(node.Node, pollmixin.PollMixin):
def init_secrets(self):
# configs are always unicode
def _unicode_make_secret():
return unicode(_make_secret(), "ascii")
return str(_make_secret(), "ascii")
lease_s = self.config.get_or_create_private_config(
"secret", _unicode_make_secret).encode("utf-8")
lease_secret = base32.a2b(lease_s)
@ -702,7 +714,7 @@ class _Client(node.Node, pollmixin.PollMixin):
def _make_key():
private_key, _ = ed25519.create_signing_keypair()
# Config values are always unicode:
return unicode(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
return str(ed25519.string_from_signing_key(private_key) + b"\n", "utf-8")
private_key_str = self.config.get_or_create_private_config(
"node.privkey", _make_key).encode("utf-8")
@ -890,7 +902,7 @@ class _Client(node.Node, pollmixin.PollMixin):
"""
Register a storage server.
"""
config_key = b"storage-plugin.{}.furl".format(
config_key = "storage-plugin.{}.furl".format(
# Oops, why don't I have a better handle on this value?
announceable_storage_server.announcement[u"name"],
)


@ -1,3 +1,13 @@
"""Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, time, tempfile
from zope.interface import implementer
@ -13,17 +23,17 @@ from twisted.python import log
def get_memory_usage():
# this is obviously linux-specific
stat_names = ("VmPeak",
"VmSize",
#"VmHWM",
"VmData")
stat_names = (b"VmPeak",
b"VmSize",
#b"VmHWM",
b"VmData")
stats = {}
try:
with open("/proc/self/status", "r") as f:
with open("/proc/self/status", "rb") as f:
for line in f:
name, right = line.split(":",2)
name, right = line.split(b":",2)
if name in stat_names:
assert right.endswith(" kB\n")
assert right.endswith(b" kB\n")
right = right[:-4]
stats[name] = int(right) * 1024
except:
@ -34,8 +44,8 @@ def get_memory_usage():
def log_memory_usage(where=""):
stats = get_memory_usage()
log.msg("VmSize: %9d VmPeak: %9d %s" % (stats["VmSize"],
stats["VmPeak"],
log.msg("VmSize: %9d VmPeak: %9d %s" % (stats[b"VmSize"],
stats[b"VmPeak"],
where))
@implementer(IConsumer)
@ -65,7 +75,7 @@ class ControlServer(Referenceable, service.Service):
tempdir = tempfile.mkdtemp()
filename = os.path.join(tempdir, "data")
f = open(filename, "wb")
block = "a" * 8192
block = b"a" * 8192
while size > 0:
l = min(size, 8192)
f.write(block[:l])
@ -126,7 +136,7 @@ class ControlServer(Referenceable, service.Service):
server_name = server.get_longname()
storage_server = server.get_storage_server()
start = time.time()
d = storage_server.get_buckets("\x00" * 16)
d = storage_server.get_buckets(b"\x00" * 16)
def _done(ignored):
stop = time.time()
elapsed = stop - start
@ -138,7 +148,7 @@ class ControlServer(Referenceable, service.Service):
d.addCallback(self._do_one_ping, everyone_left, results)
def _average(res):
averaged = {}
for server_name,times in results.iteritems():
for server_name,times in results.items():
averaged[server_name] = sum(times) / len(times)
return averaged
d.addCallback(_average)
@ -168,19 +178,19 @@ class SpeedTest(object):
fn = os.path.join(self.basedir, str(i))
if os.path.exists(fn):
os.unlink(fn)
f = open(fn, "w")
f = open(fn, "wb")
f.write(os.urandom(8))
s -= 8
while s > 0:
chunk = min(s, 4096)
f.write("\x00" * chunk)
f.write(b"\x00" * chunk)
s -= chunk
f.close()
def do_upload(self):
d = defer.succeed(None)
def _create_slot(res):
d1 = self.parent.create_mutable_file("")
d1 = self.parent.create_mutable_file(b"")
def _created(n):
self._n = n
d1.addCallback(_created)


@ -30,5 +30,5 @@ def remove_prefix(s_bytes, prefix):
if s_bytes.startswith(prefix):
return s_bytes[len(prefix):]
raise BadPrefixError(
"did not see expected '{}' prefix".format(prefix)
"did not see expected '{!r}' prefix".format(prefix)
)


@ -74,6 +74,13 @@ ADD_FILE = ActionType(
u"Add a new file as a child of a directory.",
)
class _OnlyFiles(object):
"""Marker for replacement option of only replacing files."""
ONLY_FILES = _OnlyFiles()
def update_metadata(metadata, new_metadata, now):
"""Updates 'metadata' in-place with the information in 'new_metadata'.
@ -175,11 +182,16 @@ class MetadataSetter(object):
class Adder(object):
def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
"""
:param overwrite: Either True (allow overwriting anything existing),
False (don't allow overwriting), or ONLY_FILES (only files can be
overwritten).
"""
self.node = node
if entries is None:
entries = {}
precondition(isinstance(entries, dict), entries)
precondition(overwrite in (True, False, "only-files"), overwrite)
precondition(overwrite in (True, False, ONLY_FILES), overwrite)
# keys of 'entries' may not be normalized.
self.entries = entries
self.overwrite = overwrite
@ -205,7 +217,7 @@ class Adder(object):
if not self.overwrite:
raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))
if self.overwrite == "only-files" and IDirectoryNode.providedBy(children[name][0]):
if self.overwrite == ONLY_FILES and IDirectoryNode.providedBy(children[name][0]):
raise ExistingChildError("child %s already exists as a directory" % quote_output(name, encoding='utf-8'))
metadata = children[name][1].copy()
@ -316,7 +328,7 @@ class DirectoryNode(object):
return "<%s %s-%s %s>" % (self.__class__.__name__,
self.is_readonly() and "RO" or "RW",
self.is_mutable() and "MUT" or "IMM",
hasattr(self, '_uri') and self._uri.abbrev())
hasattr(self, '_uri') and str(self._uri.abbrev(), "utf-8"))
def get_size(self):
"""Return the size of our backing mutable file, in bytes, if we've
@ -701,7 +713,7 @@ class DirectoryNode(object):
'new_child_namex' and 'current_child_namex' need not be normalized.
The overwrite parameter may be True (overwrite any existing child),
False (error if the new child link already exists), or "only-files"
False (error if the new child link already exists), or ONLY_FILES
(error if the new child link exists and points to a directory).
"""
if self.is_readonly() or new_parent.is_readonly():
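To make the sentinel change above concrete, here is a self-contained sketch of the pattern, simplified from the ``Adder`` logic (not the real implementation). A unique module-level object compared by identity cannot collide with user-supplied data, unlike the old magic string ``"only-files"``::

    class _OnlyFiles(object):
        """Marker: only files may be overwritten."""

    ONLY_FILES = _OnlyFiles()

    def may_overwrite(overwrite, existing_is_directory):
        # Mirrors the precondition and branch in Adder above.
        assert overwrite in (True, False, ONLY_FILES)
        if overwrite is False:
            return False
        if overwrite is ONLY_FILES and existing_is_directory:
            return False
        return True

    assert may_overwrite(True, True)
    assert may_overwrite(ONLY_FILES, False)
    assert not may_overwrite(ONLY_FILES, True)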


@ -7,6 +7,7 @@ from twisted.cred import error, checkers, credentials
from twisted.conch.ssh import keys
from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
from allmydata.util.dictutil import BytesKeyDict
from allmydata.util import base32
from allmydata.util.fileutil import abspath_expanduser_unicode
@ -28,18 +29,18 @@ class AccountFileChecker(object):
credentials.ISSHPrivateKey)
def __init__(self, client, accountfile):
self.client = client
self.passwords = {}
pubkeys = {}
self.rootcaps = {}
with open(abspath_expanduser_unicode(accountfile), "r") as f:
self.passwords = BytesKeyDict()
pubkeys = BytesKeyDict()
self.rootcaps = BytesKeyDict()
with open(abspath_expanduser_unicode(accountfile), "rb") as f:
for line in f:
line = line.strip()
if line.startswith("#") or not line:
if line.startswith(b"#") or not line:
continue
name, passwd, rest = line.split(None, 2)
if passwd.startswith("ssh-"):
if passwd.startswith(b"ssh-"):
bits = rest.split()
keystring = " ".join([passwd] + bits[:-1])
keystring = b" ".join([passwd] + bits[:-1])
key = keys.Key.fromString(keystring)
rootcap = bits[-1]
pubkeys[name] = [key]
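``BytesKeyDict`` (imported above) exists to catch accidental text/bytes key mixing early during the Python 3 port. A minimal sketch of such a type; the real ``allmydata.util.dictutil`` class may differ in detail::

    class BytesKeyDict(dict):
        """Sketch of a dict rejecting non-bytes keys (not the real class)."""

        def __setitem__(self, key, value):
            if not isinstance(key, bytes):
                raise TypeError("key must be bytes, got %r" % (type(key),))
            dict.__setitem__(self, key, value)

    d = BytesKeyDict()
    d[b"alice"] = "rootcap-URI"      # bytes key: accepted
    try:
        d[u"alice"] = "rootcap-URI"  # text key: rejected loudly
    except TypeError:
        pass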


@ -164,8 +164,10 @@ class CompleteBinaryTreeMixin(object):
def dump(self):
lines = []
for i,depth in self.depth_first():
lines.append("%s%3d: %s" % (" "*depth, i,
base32.b2a_or_none(self[i])))
value = base32.b2a_or_none(self[i])
if value is not None:
value = str(value, "utf-8")
lines.append("%s%3d: %s" % (" "*depth, i, value))
return "\n".join(lines) + "\n"
def get_leaf_index(self, leafnum):
@ -430,8 +432,8 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
for i,h in new_hashes.items():
if self[i]:
if self[i] != h:
raise BadHashError("new hash %s does not match "
"existing hash %s at %s"
raise BadHashError("new hash %r does not match "
"existing hash %r at %r"
% (base32.b2a(h),
base32.b2a(self[i]),
self._name_hash(i)))


@ -1,3 +1,14 @@
"""Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import weakref


@ -67,12 +67,12 @@ class ValidatedExtendedURIProxy(object):
self.crypttext_hash = None
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self._verifycap.to_string())
return "<%s %r>" % (self.__class__.__name__, self._verifycap.to_string())
def _check_integrity(self, data):
h = uri_extension_hash(data)
if h != self._verifycap.uri_extension_hash:
msg = ("The copy of uri_extension we received from %s was bad: wanted %s, got %s" %
msg = ("The copy of uri_extension we received from %s was bad: wanted %r, got %r" %
(self._readbucketproxy,
base32.b2a(self._verifycap.uri_extension_hash),
base32.b2a(h)))
@ -234,7 +234,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
UEB"""
precondition(share_hash_tree[0] is not None, share_hash_tree)
prefix = "%d-%s-%s" % (sharenum, bucket,
base32.b2a(share_hash_tree[0][:8])[:12])
str(base32.b2a(share_hash_tree[0][:8])[:12], "ascii"))
log.PrefixingLogMixin.__init__(self,
facility="tahoe.immutable.download",
prefix=prefix)
@ -427,7 +427,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
received from the remote peer were bad.""")
self.log(" have candidate_share_hash: %s" % bool(candidate_share_hash))
self.log(" block length: %d" % len(blockdata))
self.log(" block hash: %s" % base32.b2a_or_none(blockhash))
self.log(" block hash: %r" % base32.b2a_or_none(blockhash))
if len(blockdata) < 100:
self.log(" block data: %r" % (blockdata,))
else:
@ -477,7 +477,7 @@ class Checker(log.PrefixingLogMixin):
monitor):
assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
prefix = "%s" % base32.b2a(verifycap.get_storage_index()[:8])[:12]
prefix = str(base32.b2a(verifycap.get_storage_index()[:8])[:12], "utf-8")
log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)
self._verifycap = verifycap


@ -63,7 +63,7 @@ class SegmentFetcher(object):
self._running = True
def stop(self):
log.msg("SegmentFetcher(%s).stop" % self._node._si_prefix,
log.msg("SegmentFetcher(%r).stop" % self._node._si_prefix,
level=log.NOISY, parent=self._lp, umid="LWyqpg")
self._cancel_all_requests()
self._running = False
@ -127,7 +127,7 @@ class SegmentFetcher(object):
# we could have sent something if we'd been allowed to pull
# more shares per server. Increase the limit and try again.
self._max_shares_per_server += 1
log.msg("SegmentFetcher(%s) increasing diversity limit to %d"
log.msg("SegmentFetcher(%r) increasing diversity limit to %d"
% (self._node._si_prefix, self._max_shares_per_server),
level=log.NOISY, umid="xY2pBA")
# Also ask for more shares, in the hopes of achieving better
@ -241,7 +241,7 @@ class SegmentFetcher(object):
# called by Shares, in response to our s.send_request() calls.
if not self._running:
return
log.msg("SegmentFetcher(%s)._block_request_activity: %s -> %s" %
log.msg("SegmentFetcher(%r)._block_request_activity: %s -> %r" %
(self._node._si_prefix, repr(share), state),
level=log.NOISY, parent=self._lp, umid="vilNWA")
# COMPLETE, CORRUPT, DEAD, BADSEGNUM are terminal. Remove the share


@ -125,7 +125,7 @@ class DownloadNode(object):
self.ciphertext_hash_tree_leaves = self.guessed_num_segments
def __repr__(self):
return "ImmutableDownloadNode(%s)" % (self._si_prefix,)
return "ImmutableDownloadNode(%r)" % (self._si_prefix,)
def stop(self):
# called by the Terminator at shutdown, mostly for tests
@ -500,7 +500,7 @@ class DownloadNode(object):
return (offset, segment, decodetime)
except (BadHashError, NotEnoughHashesError):
format = ("hash failure in ciphertext_hash_tree:"
" segnum=%(segnum)d, SI=%(si)s")
" segnum=%(segnum)d, SI=%(si)r")
log.msg(format=format, segnum=segnum, si=self._si_prefix,
failure=Failure(),
level=log.WEIRD, parent=self._lp, umid="MTwNnw")


@ -120,7 +120,7 @@ class Segmentation(object):
# we didn't get the first byte, so we can't use this segment
log.msg("Segmentation handed wrong data:"
" want [%d-%d), given [%d-%d), for segnum=%d,"
" for si=%s"
" for si=%r"
% (self._offset, self._offset+self._size,
segment_start, segment_start+len(segment),
wanted_segnum, self._node._si_prefix),


@ -108,7 +108,7 @@ class Share(object):
self.had_corruption = False # for unit tests
def __repr__(self):
return "Share(sh%d-on-%s)" % (self._shnum, self._server.get_name())
return "Share(sh%d-on-%s)" % (self._shnum, str(self._server.get_name(), "utf-8"))
def is_alive(self):
# XXX: reconsider. If the share sees a single error, should it remain


@ -106,7 +106,7 @@ class Encoder(object):
def __repr__(self):
if hasattr(self, "_storage_index"):
return "<Encoder for %s>" % si_b2a(self._storage_index)[:5]
return "<Encoder for %r>" % si_b2a(self._storage_index)[:5]
return "<Encoder for unknown storage index>"
def log(self, *args, **kwargs):


@ -175,7 +175,7 @@ class WriteBucketProxy(object):
self._offset_data = offset_data
def __repr__(self):
return "<WriteBucketProxy for node %s>" % self._server.get_name()
return "<WriteBucketProxy for node %r>" % self._server.get_name()
def put_header(self):
return self._write(0, self._offset_data)
@ -317,7 +317,7 @@ class ReadBucketProxy(object):
return self._server.get_serverid()
def __repr__(self):
return "<ReadBucketProxy %s to peer [%s] SI %s>" % \
return "<ReadBucketProxy %r to peer [%r] SI %r>" % \
(id(self), self._server.get_name(), si_b2a(self._storage_index))
def _start_if_needed(self):


@ -81,7 +81,7 @@ class CHKCheckerAndUEBFetcher(object):
def _got_response(self, buckets, server):
# buckets is a dict: maps shum to an rref of the server who holds it
shnums_s = ",".join([str(shnum) for shnum in buckets])
self.log("got_response: [%s] has %d shares (%s)" %
self.log("got_response: [%r] has %d shares (%s)" %
(server.get_name(), len(buckets), shnums_s),
level=log.NOISY)
self._found_shares.update(buckets.keys())
@ -167,7 +167,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warn
self._upload_status.set_storage_index(storage_index)
self._upload_status.set_status("fetching ciphertext")
self._upload_status.set_progress(0, 1.0)
self._helper.log("CHKUploadHelper starting for SI %s" % self._upload_id,
self._helper.log("CHKUploadHelper starting for SI %r" % self._upload_id,
parent=log_number)
self._storage_broker = storage_broker


@ -278,7 +278,7 @@ class ServerTracker(object):
self.cancel_secret = bucket_cancel_secret
def __repr__(self):
return ("<ServerTracker for server %s and SI %s>"
return ("<ServerTracker for server %r and SI %r>"
% (self._server.get_name(), si_b2a(self.storage_index)[:5]))
def get_server(self):
@ -338,7 +338,7 @@ class ServerTracker(object):
def str_shareloc(shnum, bucketwriter):
return "%s: %s" % (shnum, bucketwriter.get_servername(),)
return "%s: %s" % (shnum, ensure_str(bucketwriter.get_servername()),)
@implementer(IPeerSelector)
@ -437,7 +437,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
self._reactor = reactor
def __repr__(self):
return "<Tahoe2ServerSelector for upload %s>" % self.upload_id
return "<Tahoe2ServerSelector for upload %r>" % self.upload_id
def _create_trackers(self, candidate_servers, allocated_size,
file_renewal_secret, file_cancel_secret, create_server_tracker):
@ -590,7 +590,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
d = timeout_call(self._reactor, tracker.ask_about_existing_shares(), 15)
d.addBoth(self._handle_existing_response, tracker)
ds.append(d)
self.log("asking server %s for any existing shares" %
self.log("asking server %r for any existing shares" %
(tracker.get_name(),), level=log.NOISY)
for tracker in write_trackers:
@ -605,7 +605,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
d.addErrback(timed_out, tracker)
d.addBoth(self._handle_existing_write_response, tracker, set())
ds.append(d)
self.log("asking server %s for any existing shares" %
self.log("asking server %r for any existing shares" %
(tracker.get_name(),), level=log.NOISY)
trackers = set(write_trackers) | set(readonly_trackers)
@ -749,7 +749,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
buckets = res
if buckets:
self.serverids_with_shares.add(serverid)
self.log("response to get_buckets() from server %s: alreadygot=%s"
self.log("response to get_buckets() from server %r: alreadygot=%s"
% (tracker.get_name(), tuple(sorted(buckets))),
level=log.NOISY)
for bucket in buckets:
@ -818,7 +818,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
self.homeless_shares.remove(shnum)
if self._status:
self._status.set_status("Contacting Servers [%s] (first query),"
self._status.set_status("Contacting Servers [%r] (first query),"
" %d shares left.."
% (tracker.get_name(),
len(self.homeless_shares)))
@ -845,7 +845,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
else:
(alreadygot, allocated) = res
self.log("response to allocate_buckets() from server %s: alreadygot=%s, allocated=%s"
self.log("response to allocate_buckets() from server %r: alreadygot=%s, allocated=%s"
% (tracker.get_name(),
tuple(sorted(alreadygot)), tuple(sorted(allocated))),
level=log.NOISY)
@ -1314,7 +1314,7 @@ class CHKUploader(object):
storage_index = encoder.get_param("storage_index")
self._storage_index = storage_index
upload_id = si_b2a(storage_index)[:5]
self.log("using storage index %s" % upload_id)
self.log("using storage index %r" % upload_id)
server_selector = Tahoe2ServerSelector(
upload_id,
self._log_number,


@ -2862,7 +2862,7 @@ class RIControlClient(RemoteInterface):
@return: a dictionary mapping peerid to a float (RTT time in seconds)
"""
return DictOf(str, float)
return DictOf(bytes, float)
UploadResults = Any() #DictOf(bytes, bytes)


@ -300,7 +300,7 @@ class IntroducerService(service.MultiService, Referenceable):
level=log.UNUSUAL, umid="jfGMXQ")
def remote_subscribe_v2(self, subscriber, service_name, subscriber_info):
self.log("introducer: subscription[%s] request at %s"
self.log("introducer: subscription[%r] request at %r"
% (service_name, subscriber), umid="U3uzLg")
service_name = ensure_text(service_name)
subscriber_info = dictutil.UnicodeKeyDict({


@ -9,6 +9,7 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_str
from allmydata.uri import from_string
from allmydata.util import base32, log, dictutil
@ -202,7 +203,7 @@ class MutableChecker(object):
serverid = server.get_serverid()
locator = (server, self._storage_index, shnum)
corrupt_share_locators.append(locator)
s = "%s-sh%d" % (server.get_name(), shnum)
s = "%s-sh%d" % (ensure_str(server.get_name()), shnum)
if f.check(CorruptShareError):
ft = f.value.reason
else:


@ -63,7 +63,7 @@ class CorruptShareError(BadShareError):
self.shnum = shnum
self.reason = reason
def __str__(self):
return "<CorruptShareError server=%s shnum[%d]: %s" % \
return "<CorruptShareError server=%r shnum[%d]: %s" % \
(self.server.get_name(), self.shnum, self.reason)
class UnknownVersionError(BadShareError):


@ -98,7 +98,7 @@ class MutableFileNode(object):
def __repr__(self):
if hasattr(self, '_uri'):
return "<%s %x %s %s>" % (self.__class__.__name__, id(self), self.is_readonly() and 'RO' or 'RW', self._uri.abbrev())
return "<%s %x %s %r>" % (self.__class__.__name__, id(self), self.is_readonly() and 'RO' or 'RW', self._uri.abbrev())
else:
return "<%s %x %s %s>" % (self.__class__.__name__, id(self), None, None)


@ -132,7 +132,7 @@ class Publish(object):
self._servermap = servermap
self._storage_index = self._node.get_storage_index()
self._log_prefix = prefix = si_b2a(self._storage_index)[:5]
num = self.log("Publish(%s): starting" % prefix, parent=None)
num = self.log("Publish(%r): starting" % prefix, parent=None)
self._log_number = num
self._running = True
self._first_write_error = None
@ -920,7 +920,7 @@ class Publish(object):
def log_goal(self, goal, message=""):
logmsg = [message]
for (shnum, server) in sorted([(s,p) for (p,s) in goal], key=lambda t: (id(t[0]), id(t[1]))):
logmsg.append("sh%d to [%s]" % (shnum, server.get_name()))
logmsg.append("sh%d to [%r]" % (shnum, server.get_name()))
self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY)
self.log("we are planning to push new seqnum=#%d" % self._new_seqnum,
level=log.NOISY)
@ -1023,7 +1023,7 @@ class Publish(object):
return
server = writer.server
lp = self.log("_got_write_answer from %s, share %d" %
lp = self.log("_got_write_answer from %r, share %d" %
(server.get_name(), writer.shnum))
now = time.time()
@ -1159,14 +1159,14 @@ class Publish(object):
(seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
offsets_tuple) = expected_version
msg = ("somebody modified the share on us:"
" shnum=%d: I thought they had #%d:R=%s," %
" shnum=%d: I thought they had #%d:R=%r," %
(shnum,
seqnum, base32.b2a(root_hash)[:4]))
if unknown_format:
msg += (" but I don't know how to read share"
" format %d" % version)
else:
msg += " but testv reported #%d:R=%s" % \
msg += " but testv reported #%d:R=%r" % \
(other_seqnum, base32.b2a(other_roothash)[:4])
self.log(msg, parent=lp, level=log.NOISY)
# if expected_version==None, then we didn't expect to see a


@ -122,7 +122,7 @@ class Retrieve(object):
_assert(self._node.get_readkey())
self._last_failure = None
prefix = si_b2a(self._storage_index)[:5]
self._log_number = log.msg("Retrieve(%s): starting" % prefix)
self._log_number = log.msg("Retrieve(%r): starting" % prefix)
self._running = True
self._decoding = False
self._bad_shares = set()
@ -574,7 +574,7 @@ class Retrieve(object):
remote server (with no guarantee of success) that its share is
corrupt.
"""
self.log("marking share %d on server %s as bad" % \
self.log("marking share %d on server %r as bad" % \
(shnum, server.get_name()))
prefix = self.verinfo[-2]
self.servermap.mark_bad_share(server, shnum, prefix)


@ -11,6 +11,7 @@ if PY2:
# Doesn't import str to prevent API leakage on Python 2
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
from past.builtins import unicode
from six import ensure_str
import sys, time, copy
from zope.interface import implementer
@ -202,8 +203,8 @@ class ServerMap(object):
(seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
offsets_tuple) = verinfo
print("[%s]: sh#%d seq%d-%s %d-of-%d len%d" %
(server.get_name(), shnum,
seqnum, base32.b2a(root_hash)[:4], k, N,
(unicode(server.get_name(), "utf-8"), shnum,
seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"), k, N,
datalength), file=out)
if self._problems:
print("%d PROBLEMS" % len(self._problems), file=out)
@ -275,7 +276,7 @@ class ServerMap(object):
"""Take a versionid, return a string that describes it."""
(seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
offsets_tuple) = verinfo
return "seq%d-%s" % (seqnum, base32.b2a(root_hash)[:4])
return "seq%d-%s" % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"))
def summarize_versions(self):
"""Return a string describing which versions we know about."""
@ -868,8 +869,8 @@ class ServermapUpdater(object):
# ok, it's a valid verinfo. Add it to the list of validated
# versions.
self.log(" found valid version %d-%s from %s-sh%d: %d-%d/%d/%d"
% (seqnum, base32.b2a(root_hash)[:4],
server.get_name(), shnum,
% (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"),
ensure_str(server.get_name()), shnum,
k, n, segsize, datalen),
parent=lp)
self._valid_versions.add(verinfo)
@ -943,13 +944,13 @@ class ServermapUpdater(object):
alleged_privkey_s = self._node._decrypt_privkey(enc_privkey)
alleged_writekey = hashutil.ssk_writekey_hash(alleged_privkey_s)
if alleged_writekey != self._node.get_writekey():
self.log("invalid privkey from %s shnum %d" %
self.log("invalid privkey from %r shnum %d" %
(server.get_name(), shnum),
parent=lp, level=log.WEIRD, umid="aJVccw")
return
# it's good
self.log("got valid privkey from shnum %d on serverid %s" %
self.log("got valid privkey from shnum %d on serverid %r" %
(shnum, server.get_name()),
parent=lp)
privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s)
@ -1213,7 +1214,7 @@ class ServermapUpdater(object):
self.log(format="sending %(more)d more queries: %(who)s",
more=len(more_queries),
who=" ".join(["[%s]" % s.get_name() for s in more_queries]),
who=" ".join(["[%r]" % s.get_name() for s in more_queries]),
level=log.NOISY)
for server in more_queries:


@ -970,7 +970,7 @@ def create_main_tub(config, tub_options,
tubport,
location,
)
log.msg("Tub location set to %s" % (location,))
log.msg("Tub location set to %r" % (location,))
return tub


@ -303,8 +303,8 @@ class BackupDB_v2(object):
for name in contents:
entries.append( [name.encode("utf-8"), contents[name]] )
entries.sort()
data = "".join([netstring(name_utf8)+netstring(cap)
for (name_utf8,cap) in entries])
data = b"".join([netstring(name_utf8)+netstring(cap)
for (name_utf8,cap) in entries])
dirhash = backupdb_dirhash(data)
dirhash_s = base32.b2a(dirhash)
c = self.cursor
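For context, ``netstring`` above frames each UTF-8 name and cap as ``<length>:<payload>,`` so the directory hash is computed over an unambiguous byte encoding. A minimal sketch of the framing (the real helper lives in ``allmydata.util.netstring``)::

    def netstring(s):
        # Netstring framing: b"<decimal length>:<payload>,"
        assert isinstance(s, bytes)
        return b"%d:%s," % (len(s), s)

    assert netstring(b"abc") == b"3:abc,"
    assert netstring(b"") == b"0:,"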


@ -501,7 +501,7 @@ def list_aliases(options):
rc = tahoe_add_alias.list_aliases(options)
return rc
def list(options):
def list_(options):
from allmydata.scripts import tahoe_ls
rc = tahoe_ls.list(options)
return rc
@ -587,7 +587,7 @@ dispatch = {
"add-alias": add_alias,
"create-alias": create_alias,
"list-aliases": list_aliases,
"ls": list,
"ls": list_,
"get": get,
"put": put,
"cp": cp,


@ -1,8 +1,12 @@
from __future__ import print_function
# coding: utf-8
import os, sys, urllib, textwrap
from __future__ import print_function
from six import ensure_str
import os, sys, textwrap
import codecs
from os.path import join
import urllib.parse
try:
from typing import Optional
@ -225,19 +229,19 @@ def get_alias(aliases, path_unicode, default):
precondition(isinstance(path_unicode, str), path_unicode)
from allmydata import uri
path = path_unicode.encode('utf-8').strip(" ")
path = path_unicode.encode('utf-8').strip(b" ")
if uri.has_uri_prefix(path):
# We used to require "URI:blah:./foo" in order to get a subpath,
# stripping out the ":./" sequence. We still allow that for compatibility,
# but now also allow just "URI:blah/foo".
sep = path.find(":./")
sep = path.find(b":./")
if sep != -1:
return path[:sep], path[sep+3:]
sep = path.find("/")
sep = path.find(b"/")
if sep != -1:
return path[:sep], path[sep+1:]
return path, ""
colon = path.find(":")
return path, b""
colon = path.find(b":")
if colon == -1:
# no alias
if default == None:
@ -270,6 +274,18 @@ def get_alias(aliases, path_unicode, default):
return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
def escape_path(path):
# this always returns bytes, specifically US-ASCII, valid URL characters
# type: (str) -> str
u"""
Return path quoted to US-ASCII, valid URL characters.
>>> path = u'/føö/bar/☃'
>>> escaped = escape_path(path)
>>> str(escaped)
'/f%C3%B8%C3%B6/bar/%E2%98%83'
>>> escaped.encode('ascii').decode('ascii') == escaped
True
"""
segments = path.split("/")
return "/".join([urllib.quote(unicode_to_url(s)) for s in segments])
result = "/".join([urllib.parse.quote(unicode_to_url(s)) for s in segments])
result = ensure_str(result, "ascii")
return result


@ -1,7 +1,7 @@
from __future__ import print_function
import os
from six.moves import cStringIO as StringIO
from io import BytesIO
from six.moves import urllib, http_client
import six
import allmydata # for __full_version__
@ -38,9 +38,9 @@ class BadResponse(object):
return ""
def do_http(method, url, body=""):
if isinstance(body, str):
body = StringIO(body)
def do_http(method, url, body=b""):
if isinstance(body, bytes):
body = BytesIO(body)
elif isinstance(body, six.text_type):
raise TypeError("do_http body must be a bytestring, not unicode")
else:


@ -1,10 +1,24 @@
from __future__ import print_function
# Ported to Python 3
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import io
import os
import json
try:
from allmydata.scripts.types_ import SubCommands
from allmydata.scripts.types_ import (
SubCommands,
Parameters,
Flags,
)
except ImportError:
pass
@ -47,29 +61,29 @@ WHERE_OPTS = [
"Hostname to automatically set --location/--port when --listen=tcp"),
("listen", None, "tcp",
"Comma-separated list of listener types (tcp,tor,i2p,none)."),
]
] # type: Parameters
TOR_OPTS = [
("tor-control-port", None, None,
"Tor's control port endpoint descriptor string (e.g. tcp:127.0.0.1:9051 or unix:/var/run/tor/control)"),
("tor-executable", None, None,
"The 'tor' executable to run (default is to search $PATH)."),
]
] # type: Parameters
TOR_FLAGS = [
("tor-launch", None, "Launch a tor instead of connecting to a tor control port."),
]
] # type: Flags
I2P_OPTS = [
("i2p-sam-port", None, None,
"I2P's SAM API port endpoint descriptor string (e.g. tcp:127.0.0.1:7656)"),
("i2p-executable", None, None,
"(future) The 'i2prouter' executable to run (default is to search $PATH)."),
]
] # type: Parameters
I2P_FLAGS = [
("i2p-launch", None, "(future) Launch an I2P router instead of connecting to a SAM API port."),
]
] # type: Flags
def validate_where_options(o):
if o['listen'] == "none":
@ -175,7 +189,7 @@ class CreateClientOptions(_CreateBaseOptions):
("shares-happy", None, 7, "How many servers new files must be placed on."),
("shares-total", None, 10, "Total shares required for uploaded files."),
("join", None, None, "Join a grid with the given Invite Code."),
]
] # type: Parameters
# This is overridden in order to ensure we get a "Wrong number of
# arguments." error when more than one argument is given.
@ -225,7 +239,7 @@ class CreateIntroducerOptions(NoDefaultBasedirOptions):
@defer.inlineCallbacks
def write_node_config(c, config):
# this is shared between clients and introducers
c.write("# -*- mode: conf; coding: utf-8 -*-\n")
c.write("# -*- mode: conf; coding: {c.encoding} -*-\n".format(c=c))
c.write("\n")
c.write("# This file controls the configuration of the Tahoe node that\n")
c.write("# lives in this directory. It is only read at node startup.\n")
@ -244,7 +258,7 @@ def write_node_config(c, config):
c.write("[node]\n")
nickname = argv_to_unicode(config.get("nickname") or "")
c.write("nickname = %s\n" % (nickname.encode('utf-8'),))
c.write("nickname = %s\n" % (nickname,))
if config["hide-ip"]:
c.write("reveal-IP-address = false\n")
else:
@ -254,7 +268,7 @@ def write_node_config(c, config):
webport = argv_to_unicode(config.get("webport") or "none")
if webport.lower() == "none":
webport = ""
c.write("web.port = %s\n" % (webport.encode('utf-8'),))
c.write("web.port = %s\n" % (webport,))
c.write("web.static = public_html\n")
listeners = config['listen'].split(",")
@ -279,15 +293,14 @@ def write_node_config(c, config):
tub_locations.append(i2p_location)
if "tcp" in listeners:
if config["port"]: # --port/--location are a pair
tub_ports.append(config["port"].encode('utf-8'))
tub_locations.append(config["location"].encode('utf-8'))
tub_ports.append(config["port"])
tub_locations.append(config["location"])
else:
assert "hostname" in config
hostname = config["hostname"]
new_port = iputil.allocate_tcp_port()
tub_ports.append("tcp:%s" % new_port)
tub_locations.append("tcp:%s:%s" % (hostname.encode('utf-8'),
new_port))
tub_locations.append("tcp:%s:%s" % (hostname, new_port))
c.write("tub.port = %s\n" % ",".join(tub_ports))
c.write("tub.location = %s\n" % ",".join(tub_locations))
c.write("\n")
@ -301,13 +314,13 @@ def write_node_config(c, config):
if tor_config:
c.write("[tor]\n")
for key, value in tor_config.items():
for key, value in list(tor_config.items()):
c.write("%s = %s\n" % (key, value))
c.write("\n")
if i2p_config:
c.write("[i2p]\n")
for key, value in i2p_config.items():
for key, value in list(i2p_config.items()):
c.write("%s = %s\n" % (key, value))
c.write("\n")
@ -370,7 +383,7 @@ def _get_config_via_wormhole(config):
relay_url=relay_url,
reactor=reactor,
)
code = unicode(config['join'])
code = str(config['join'])
wh.set_code(code)
yield wh.get_welcome()
print("Connected to wormhole server", file=out)
@ -402,7 +415,7 @@ def create_node(config):
err = config.stderr
basedir = config['basedir']
# This should always be called with an absolute Unicode basedir.
precondition(isinstance(basedir, unicode), basedir)
precondition(isinstance(basedir, str), basedir)
if os.path.exists(basedir):
if listdir_unicode(basedir):
@ -437,7 +450,7 @@ def create_node(config):
v = remote_config.get(k, None)
if v is not None:
# we're faking usually argv-supplied options :/
if isinstance(v, unicode):
if isinstance(v, str):
v = v.encode(get_io_encoding())
config[k] = v
if k not in sensitive_keys:
@ -447,7 +460,8 @@ def create_node(config):
print(" {}: [sensitive data; see tahoe.cfg]".format(k), file=out)
fileutil.make_dirs(os.path.join(basedir, "private"), 0o700)
with open(os.path.join(basedir, "tahoe.cfg"), "w") as c:
cfg_name = os.path.join(basedir, "tahoe.cfg")
with io.open(cfg_name, "w", encoding='utf-8') as c:
yield write_node_config(c, config)
write_client_config(c, config)
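# Aside (sketch, not part of the diff): io.open() hands back a TextIOWrapper
# whose .encoding attribute is exactly what write_node_config() now
# interpolates into the "coding:" header, so the header always matches the
# encoding the file was actually written with. The path here is a throwaway.
import io, os, tempfile
_cfg = os.path.join(tempfile.mkdtemp(), "tahoe.cfg")
with io.open(_cfg, "w", encoding="utf-8") as _c:
    assert _c.encoding == "utf-8"
    _c.write(u"# -*- mode: conf; coding: {c.encoding} -*-\n".format(c=_c))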
@ -475,7 +489,7 @@ def create_introducer(config):
err = config.stderr
basedir = config['basedir']
# This should always be called with an absolute Unicode basedir.
precondition(isinstance(basedir, unicode), basedir)
precondition(isinstance(basedir, str), basedir)
if os.path.exists(basedir):
if listdir_unicode(basedir):
@ -489,7 +503,8 @@ def create_introducer(config):
write_tac(basedir, "introducer")
fileutil.make_dirs(os.path.join(basedir, "private"), 0o700)
with open(os.path.join(basedir, "tahoe.cfg"), "w") as c:
cfg_name = os.path.join(basedir, "tahoe.cfg")
with io.open(cfg_name, "w", encoding='utf-8') as c:
yield write_node_config(c, config)
print("Introducer created in %s" % quote_local_unicode_path(basedir), file=out)

View File

@ -6,6 +6,7 @@ except ImportError:
pass
from future.utils import bchr
from past.builtins import unicode
# do not import any allmydata modules at this level. Do that from inside
# individual functions instead.
@ -90,27 +91,34 @@ def dump_immutable_chk_share(f, out, options):
"crypttext_hash", "crypttext_root_hash",
"share_root_hash", "UEB_hash")
display_keys = {"size": "file_size"}
def to_string(v):
if isinstance(v, bytes):
return unicode(v, "utf-8")
else:
return str(v)
for k in keys1:
if k in unpacked:
dk = display_keys.get(k, k)
print("%20s: %s" % (dk, unpacked[k]), file=out)
print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
print(file=out)
for k in keys2:
if k in unpacked:
dk = display_keys.get(k, k)
print("%20s: %s" % (dk, unpacked[k]), file=out)
print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
print(file=out)
for k in keys3:
if k in unpacked:
dk = display_keys.get(k, k)
print("%20s: %s" % (dk, unpacked[k]), file=out)
print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
if leftover:
print(file=out)
print("LEFTOVER:", file=out)
for k in sorted(leftover):
print("%20s: %s" % (k, unpacked[k]), file=out)
print("%20s: %s" % (k, to_string(unpacked[k])), file=out)
# the storage index isn't stored in the share itself, so we depend upon
# knowing the parent directory name to get it
@ -197,7 +205,7 @@ def dump_mutable_share(options):
print(file=out)
print("Mutable slot found:", file=out)
print(" share_type: %s" % share_type, file=out)
print(" write_enabler: %s" % base32.b2a(WE), file=out)
print(" write_enabler: %s" % unicode(base32.b2a(WE), "utf-8"), file=out)
print(" WE for nodeid: %s" % idlib.nodeid_b2a(nodeid), file=out)
print(" num_extra_leases: %d" % num_extra_leases, file=out)
print(" container_size: %d" % container_size, file=out)
@ -209,8 +217,8 @@ def dump_mutable_share(options):
print(" ownerid: %d" % lease.owner_num, file=out)
when = format_expiration_time(lease.expiration_time)
print(" expires in %s" % when, file=out)
print(" renew_secret: %s" % base32.b2a(lease.renew_secret), file=out)
print(" cancel_secret: %s" % base32.b2a(lease.cancel_secret), file=out)
print(" renew_secret: %s" % unicode(base32.b2a(lease.renew_secret), "utf-8"), file=out)
print(" cancel_secret: %s" % unicode(base32.b2a(lease.cancel_secret), "utf-8"), file=out)
print(" secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out)
else:
print("No leases.", file=out)
@ -258,8 +266,8 @@ def dump_SDMF_share(m, length, options):
print(" SDMF contents:", file=out)
print(" seqnum: %d" % seqnum, file=out)
print(" root_hash: %s" % base32.b2a(root_hash), file=out)
print(" IV: %s" % base32.b2a(IV), file=out)
print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
print(" IV: %s" % unicode(base32.b2a(IV), "utf-8"), file=out)
print(" required_shares: %d" % k, file=out)
print(" total_shares: %d" % N, file=out)
print(" segsize: %d" % segsize, file=out)
@ -352,7 +360,7 @@ def dump_MDMF_share(m, length, options):
print(" MDMF contents:", file=out)
print(" seqnum: %d" % seqnum, file=out)
print(" root_hash: %s" % base32.b2a(root_hash), file=out)
print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
#print(" IV: %s" % base32.b2a(IV), file=out)
print(" required_shares: %d" % k, file=out)
print(" total_shares: %d" % N, file=out)
@ -645,7 +653,7 @@ def find_shares(options):
from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
out = options.stdout
sharedir = storage_index_to_dir(si_a2b(options.si_s))
sharedir = storage_index_to_dir(si_a2b(options.si_s.encode("utf-8")))
for d in options.nodedirs:
d = os.path.join(d, "storage", "shares", sharedir)
if os.path.exists(d):
@ -745,7 +753,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
print("SDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, base32.b2a(root_hash),
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
expiration, quote_output(abs_sharefile)), file=out)
elif share_type == "MDMF":
from allmydata.mutable.layout import MDMFSlotReadProxy
@ -774,7 +782,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
offsets) = verinfo
print("MDMF %s %d/%d %d #%d:%s %d %s" % \
(si_s, k, N, datalen,
seqnum, base32.b2a(root_hash),
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
expiration, quote_output(abs_sharefile)), file=out)
else:
print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out)
@ -808,8 +816,8 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
ueb_hash = unpacked["UEB_hash"]
print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
ueb_hash, expiration,
quote_output(abs_sharefile)), file=out)
unicode(ueb_hash, "utf-8"), expiration,
quote_output(abs_sharefile)), file=out)
else:
print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile), file=out)

View File

@ -1,5 +1,6 @@
from __future__ import print_function
import warnings
import os, sys
from six.moves import StringIO
import six
@ -15,7 +16,7 @@ from twisted.internet import defer, task, threads
from allmydata.scripts.common import get_default_nodedir
from allmydata.scripts import debug, create_node, cli, \
admin, tahoe_run, tahoe_invite
from allmydata.util.encodingutil import quote_output, quote_local_unicode_path, get_io_encoding
from allmydata.util.encodingutil import quote_local_unicode_path
from allmydata.util.eliotutil import (
opt_eliot_destination,
opt_help_eliot_destinations,
@ -123,11 +124,7 @@ def parse_or_exit_with_explanation(argv, stdout=sys.stdout, stderr=sys.stderr, s
while hasattr(c, 'subOptions'):
c = c.subOptions
print(str(c), file=stdout)
try:
msg = e.args[0].decode(get_io_encoding())
except Exception:
msg = repr(e)
print("%s: %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False)), file=stdout)
print("%s: %s\n" % (sys.argv[0], e), file=stdout)
sys.exit(1)
return config
@ -181,9 +178,9 @@ def _maybe_enable_eliot_logging(options, reactor):
return options
def run():
# TODO(3035): Remove tox-check when error becomes a warning
if 'TOX_ENV_NAME' not in os.environ:
assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."
if six.PY3:
warnings.warn("Support for Python 3 is an incomplete work-in-progress."
" Use at your own risk.")
if sys.platform == "win32":
from allmydata.windows.fixups import initialize

View File

@ -1,9 +1,10 @@
from __future__ import print_function
from __future__ import unicode_literals
from past.builtins import unicode
import os.path
import codecs
import json
from allmydata.util.assertutil import precondition
@ -12,6 +13,7 @@ from allmydata.scripts.common_http import do_http, check_http_error
from allmydata.scripts.common import get_aliases
from allmydata.util.fileutil import move_into_place
from allmydata.util.encodingutil import quote_output, quote_output_u
from allmydata.util import jsonbytes as json
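# (Why a jsonbytes wrapper -- an inference from its use in this diff, not a
# description of its implementation: stdlib json.dumps() raises TypeError on
# bytes values, and Tahoe capability strings are often bytes, so a wrapper
# that decodes bytes to UTF-8 text first keeps py2/py3 output identical.)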
def add_line_to_aliasfile(aliasfile, alias, cap):
@ -52,7 +54,7 @@ def add_alias(options):
show_output(stderr, "Alias {alias} already exists!", alias=alias)
return 1
aliasfile = os.path.join(nodedir, "private", "aliases")
cap = uri.from_string_dirnode(cap).to_string()
cap = unicode(uri.from_string_dirnode(cap).to_string(), 'utf-8')
add_line_to_aliasfile(aliasfile, alias, cap)
show_output(stdout, "Alias {alias} added", alias=alias)
@ -92,7 +94,7 @@ def create_alias(options):
# probably check for others..
add_line_to_aliasfile(aliasfile, alias, new_uri)
add_line_to_aliasfile(aliasfile, alias, unicode(new_uri, "utf-8"))
show_output(stdout, "Alias {alias} created", alias=alias)
return 0
@ -167,7 +169,10 @@ def list_aliases(options):
data = _get_alias_details(options['node-directory'])
if options['json']:
output = _escape_format(json.dumps(data, indent=4).decode("ascii"))
dumped = json.dumps(data, indent=4)
if isinstance(dumped, bytes):
dumped = dumped.decode("utf-8")
output = _escape_format(dumped)
else:
def dircap(details):
return (

View File

@ -122,7 +122,7 @@ class FakeTransport(object):
disconnecting = False
class DeepCheckOutput(LineOnlyReceiver, object):
delimiter = "\n"
delimiter = b"\n"
def __init__(self, streamer, options):
self.streamer = streamer
self.transport = FakeTransport()
@ -181,7 +181,7 @@ class DeepCheckOutput(LineOnlyReceiver, object):
% (self.num_objects, self.files_healthy, self.files_unhealthy), file=stdout)
class DeepCheckAndRepairOutput(LineOnlyReceiver, object):
delimiter = "\n"
delimiter = b"\n"
def __init__(self, streamer, options):
self.streamer = streamer
self.transport = FakeTransport()

View File

@ -13,7 +13,7 @@ class FakeTransport(object):
disconnecting = False
class ManifestStreamer(LineOnlyReceiver, object):
delimiter = "\n"
delimiter = b"\n"
def __init__(self):
self.transport = FakeTransport()
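# Context for the b"\n" changes above (sketch, relying only on documented
# Twisted behaviour): LineOnlyReceiver splits a byte stream, so on Python 3
# its delimiter must be bytes as well. The minimal pattern looks like this;
# the class name is hypothetical.
from twisted.protocols.basic import LineOnlyReceiver
class _EchoLines(LineOnlyReceiver, object):
    delimiter = b"\n"
    def lineReceived(self, line):
        # `line` arrives as bytes; decode before treating it as text.
        print(line.decode("utf-8"))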

View File

@ -1,7 +1,9 @@
from __future__ import print_function
from future.builtins import chr
import os
import urllib
from urllib.parse import urlencode, quote as url_quote
import json
@ -25,12 +27,12 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
if method == 'POST':
if post_args is None:
raise ValueError("Must pass post_args= for POST method")
body = urllib.urlencode(post_args)
body = urlencode(post_args)
else:
body = ''
if post_args is not None:
raise ValueError("post_args= only valid for POST method")
resp = do_http(method, url, body=body)
resp = do_http(method, url, body=body.encode("utf-8"))
if isinstance(resp, BadResponse):
# specifically NOT using format_http_error() here because the
# URL is pretty sensitive (we're doing /uri/<key>).
@ -48,7 +50,7 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
def _get_json_for_cap(options, cap):
return _get_json_for_fragment(
options,
'uri/%s?t=json' % urllib.quote(cap),
'uri/%s?t=json' % url_quote(cap),
)
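# Quick reference for the urllib -> urllib.parse move (Python 3 spellings):
from urllib.parse import urlencode, quote as url_quote
assert urlencode({"t": "json"}) == "t=json"
assert url_quote("URI:DIR2:abc/def") == "URI%3ADIR2%3Aabc/def"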
def pretty_progress(percent, size=10, ascii=False):
@ -74,8 +76,8 @@ def pretty_progress(percent, size=10, ascii=False):
# unicode 0x2581 -> 2589 are vertical bar chunks, like rainbarf uses
# and following are narrow -> wider bars
part = unichr(0x258f - part) # for smooth bar
# part = unichr(0x2581 + part) # for neater-looking thing
part = chr(0x258f - part) # for smooth bar
# part = chr(0x2581 + part) # for neater-looking thing
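# Worked example of the arithmetic above (illustration only): U+2588..U+258F
# are left-aligned block elements whose width grows as the codepoint shrinks,
# so for part = 0..7:
#   [chr(0x258f - p) for p in range(8)] == ['▏','▎','▍','▌','▋','▊','▉','█']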
# hack for 100+ full so we don't print extra really-narrow/high bar
if percent >= 100.0:

View File

@ -1,12 +1,20 @@
"""
Type definitions used by modules in this package.
"""
# Python 3 only
from typing import List, Tuple, Type, Sequence, Any
from allmydata.scripts.common import BaseOptions
from twisted.python.usage import Options
# Historically, subcommands were implemented as lists, but due to a
# [design constraint in mypy](https://stackoverflow.com/a/52559625/70170),
# a Tuple is required.
SubCommand = Tuple[str, None, Type[BaseOptions], str]
SubCommand = Tuple[str, None, Type[Options], str]
SubCommands = List[SubCommand]
Parameters = List[Sequence[Any]]
Flags = List[Tuple[str, None, str]]
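# Example annotation using these aliases (names are hypothetical; the
# "# type:" comment form matches how this diff annotates py2-compatible code):
_EXAMPLE_OPTS = [
    ("port", "p", 3456, "Which TCP port to listen on."),
]  # type: Parameters
_EXAMPLE_FLAGS = [
    ("quiet", "q", "Suppress normal output."),
]  # type: Flags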

View File

@ -17,7 +17,7 @@ from twisted.application.internet import TimerService
from zope.interface import implementer
from foolscap.api import eventually
from allmydata.util import log
from allmydata.util import log, dictutil
from allmydata.interfaces import IStatsProducer
@implementer(IStatsProducer)
@ -79,15 +79,13 @@ class StatsProvider(service.MultiService):
service.MultiService.__init__(self)
self.node = node
self.counters = {}
self.counters = dictutil.UnicodeKeyDict()
self.stats_producers = []
self.cpu_monitor = CPUUsageMonitor()
self.cpu_monitor.setServiceParent(self)
self.register_producer(self.cpu_monitor)
def count(self, name, delta=1):
if isinstance(name, str):
name = name.encode("utf-8")
val = self.counters.setdefault(name, 0)
self.counters[name] = val + delta
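# Sketch of the behaviour relied on above (an assumption drawn from usage in
# this diff, not dictutil's actual code): a dict that insists its keys are
# text, so b"get" and u"get" can never become two distinct counters.
class _UnicodeKeyDictSketch(dict):
    def _check(self, key):
        if not isinstance(key, str):
            raise TypeError("counter names must be text, got %r" % (key,))
    def __setitem__(self, key, value):
        self._check(key)
        dict.__setitem__(self, key, value)
    def setdefault(self, key, default=None):
        # plain dict.setdefault bypasses __setitem__, so check here too
        self._check(key)
        return dict.setdefault(self, key, default)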

View File

@ -271,7 +271,7 @@ class StorageServer(service.MultiService, Referenceable):
si_dir = storage_index_to_dir(storage_index)
si_s = si_b2a(storage_index)
log.msg("storage: allocate_buckets %s" % si_s)
log.msg("storage: allocate_buckets %r" % si_s)
# in this implementation, the lease information (including secrets)
# goes into the share files themselves. It could also be put into a
@ -397,7 +397,7 @@ class StorageServer(service.MultiService, Referenceable):
start = time.time()
self.count("get")
si_s = si_b2a(storage_index)
log.msg("storage: get_buckets %s" % si_s)
log.msg("storage: get_buckets %r" % si_s)
bucketreaders = {} # k: sharenum, v: BucketReader
for shnum, filename in self._get_bucket_shares(storage_index):
bucketreaders[shnum] = BucketReader(self, filename,
@ -602,7 +602,7 @@ class StorageServer(service.MultiService, Referenceable):
start = time.time()
self.count("writev")
si_s = si_b2a(storage_index)
log.msg("storage: slot_writev %s" % si_s)
log.msg("storage: slot_writev %r" % si_s)
si_dir = storage_index_to_dir(storage_index)
(write_enabler, renew_secret, cancel_secret) = secrets
bucketdir = os.path.join(self.sharedir, si_dir)
@ -669,7 +669,7 @@ class StorageServer(service.MultiService, Referenceable):
start = time.time()
self.count("readv")
si_s = si_b2a(storage_index)
lp = log.msg("storage: slot_readv %s %s" % (si_s, shares),
lp = log.msg("storage: slot_readv %r %r" % (si_s, shares),
facility="tahoe.storage", level=log.OPERATIONAL)
si_dir = storage_index_to_dir(storage_index)
# shares exist if there is a file for them
@ -703,7 +703,7 @@ class StorageServer(service.MultiService, Referenceable):
si_s = si_b2a(storage_index)
# windows can't handle colons in the filename
fn = os.path.join(self.corruption_advisory_dir,
"%s--%s-%d" % (now, si_s, shnum)).replace(":","")
"%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","")
with open(fn, "w") as f:
f.write("report: Share Corruption\n")
f.write("type: %s\n" % bytes_to_native_str(share_type))

View File

@ -37,7 +37,7 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_text
import re
import time
@ -221,6 +221,7 @@ class StorageFarmBroker(service.MultiService):
# doesn't really matter but it makes the logging behavior more
# predictable and easier to test (and at least one test does depend on
# this sorted order).
servers = {ensure_text(key): value for (key, value) in servers.items()}
for (server_id, server) in sorted(servers.items()):
try:
storage_server = self._make_storage_server(
@ -259,11 +260,11 @@ class StorageFarmBroker(service.MultiService):
for plugin
in getPlugins(IFoolscapStoragePlugin)
}
return {
return UnicodeKeyDict({
name: plugins[name].get_client_resource(node_config)
for (name, config)
in self.storage_client_config.storage_plugins.items()
}
})
@log_call(
action_type=u"storage-client:broker:make-storage-server",
@ -886,7 +887,7 @@ class NativeStorageServer(service.MultiService):
return self
def __repr__(self):
return "<NativeStorageServer for %s>" % self.get_name()
return "<NativeStorageServer for %r>" % self.get_name()
def get_serverid(self):
return self._server_id
def get_version(self):
@ -910,10 +911,10 @@ class NativeStorageServer(service.MultiService):
version = self.get_version()
if version is None:
return None
protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', UnicodeKeyDict())
available_space = protocol_v1_version.get('available-space')
protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', BytesKeyDict())
available_space = protocol_v1_version.get(b'available-space')
if available_space is None:
available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
available_space = protocol_v1_version.get(b'maximum-immutable-share-size', None)
return available_space
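# Illustration of the byte-keyed lookup above: on Python 3 the version dict
# arrives with bytes keys, e.g. (values invented for the example)
#   {b'http://allmydata.org/tahoe/protocols/storage/v1':
#        {b'available-space': 12345}}
# so both the protocol URI and its sub-keys must be bytes literals to match.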
def start_connecting(self, trigger_cb):

View File

@ -14,13 +14,23 @@ Rather than defining interesting APIs for other code to use, this just causes
some side-effects which make things better when the test suite runs.
"""
from future.utils import PY3
import warnings
from traceback import extract_stack, format_list
from foolscap.pb import Listener
from twisted.python.log import err
from twisted.application import service
from foolscap.logging.incident import IncidentQualifier
if PY3:
# Error on BytesWarnings, to catch things like str(b""), but only for
# allmydata code.
warnings.filterwarnings("error", category=BytesWarning, module="allmydata.*")
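# Example of what this filter catches (illustration): with it active, code in
# allmydata.* that does str(b"some-bytes") or "%s" % (b"some-bytes",) raises
# instead of silently producing "b'some-bytes'". Note that BytesWarning is
# only emitted at all when Python is run with -b (or -bb).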
class NonQualifier(IncidentQualifier, object):
def check_event(self, ev):
return False

View File

@ -0,0 +1,197 @@
# -*- coding: utf-8 -*-
## Copyright (C) 2021 Valentin Lab
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions
## are met:
##
## 1. Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above
## copyright notice, this list of conditions and the following
## disclaimer in the documentation and/or other materials provided
## with the distribution.
##
## 3. Neither the name of the copyright holder nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
## FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
## COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
## STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
## OF THE POSSIBILITY OF SUCH DAMAGE.
##
## issue: https://bugs.python.org/issue19264
# See allmydata/windows/fixups.py
import sys
assert sys.platform == "win32"
import os
import ctypes
import subprocess
import _subprocess
from ctypes import byref, windll, c_char_p, c_wchar_p, c_void_p, \
Structure, sizeof, c_wchar, WinError
from ctypes.wintypes import BYTE, WORD, LPWSTR, BOOL, DWORD, LPVOID, \
HANDLE
##
## Types
##
CREATE_UNICODE_ENVIRONMENT = 0x00000400
LPCTSTR = c_char_p
LPTSTR = c_wchar_p
LPSECURITY_ATTRIBUTES = c_void_p
LPBYTE = ctypes.POINTER(BYTE)
class STARTUPINFOW(Structure):
_fields_ = [
("cb", DWORD), ("lpReserved", LPWSTR),
("lpDesktop", LPWSTR), ("lpTitle", LPWSTR),
("dwX", DWORD), ("dwY", DWORD),
("dwXSize", DWORD), ("dwYSize", DWORD),
("dwXCountChars", DWORD), ("dwYCountChars", DWORD),
("dwFillAtrribute", DWORD), ("dwFlags", DWORD),
("wShowWindow", WORD), ("cbReserved2", WORD),
("lpReserved2", LPBYTE), ("hStdInput", HANDLE),
("hStdOutput", HANDLE), ("hStdError", HANDLE),
]
LPSTARTUPINFOW = ctypes.POINTER(STARTUPINFOW)
class PROCESS_INFORMATION(Structure):
_fields_ = [
("hProcess", HANDLE), ("hThread", HANDLE),
("dwProcessId", DWORD), ("dwThreadId", DWORD),
]
LPPROCESS_INFORMATION = ctypes.POINTER(PROCESS_INFORMATION)
class DUMMY_HANDLE(ctypes.c_void_p):
def __init__(self, *a, **kw):
super(DUMMY_HANDLE, self).__init__(*a, **kw)
self.closed = False
def Close(self):
if not self.closed:
windll.kernel32.CloseHandle(self)
self.closed = True
def __int__(self):
return self.value
CreateProcessW = windll.kernel32.CreateProcessW
CreateProcessW.argtypes = [
LPCTSTR, LPTSTR, LPSECURITY_ATTRIBUTES,
LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPCTSTR,
LPSTARTUPINFOW, LPPROCESS_INFORMATION,
]
CreateProcessW.restype = BOOL
##
## Patched functions/classes
##
def CreateProcess(executable, args, _p_attr, _t_attr,
inherit_handles, creation_flags, env, cwd,
startup_info):
"""Create a process supporting unicode executable and args for win32
Python implementation of CreateProcess using CreateProcessW for Win32
"""
si = STARTUPINFOW(
dwFlags=startup_info.dwFlags,
wShowWindow=startup_info.wShowWindow,
cb=sizeof(STARTUPINFOW),
## XXXvlab: not sure of the casting here to ints.
hStdInput=int(startup_info.hStdInput),
hStdOutput=int(startup_info.hStdOutput),
hStdError=int(startup_info.hStdError),
)
wenv = None
if env is not None:
## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar
env = (unicode("").join([
unicode("%s=%s\0") % (k, v)
for k, v in env.items()])) + unicode("\0")
wenv = (c_wchar * len(env))()
wenv.value = env
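## Worked example of the block built above (illustration): for
## env == {"A": "1", "B": "2"} the resulting wide string is
## u"A=1\0B=2\0\0" -- each entry NUL-terminated, with one extra NUL
## closing the whole block, which is the layout CreateProcessW expects.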
pi = PROCESS_INFORMATION()
creation_flags |= CREATE_UNICODE_ENVIRONMENT
if CreateProcessW(executable, args, None, None,
inherit_handles, creation_flags,
wenv, cwd, byref(si), byref(pi)):
return (DUMMY_HANDLE(pi.hProcess), DUMMY_HANDLE(pi.hThread),
pi.dwProcessId, pi.dwThreadId)
raise WinError()
class Popen(subprocess.Popen):
"""This superseeds Popen and corrects a bug in cPython 2.7 implem"""
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell, to_close,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Code from part of _execute_child from Python 2.7 (9fbb65e)
There are only two small changes, both concerning the construction of
the final string in shell mode: we preempt the creation of the
command string when shell is True, because the original function
would try to encode unicode args, which we want to avoid so that we
can send them as-is to ``CreateProcess``.
"""
if not isinstance(args, subprocess.types.StringTypes):
args = subprocess.list2cmdline(args)
if startupinfo is None:
startupinfo = subprocess.STARTUPINFO()
if shell:
startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = _subprocess.SW_HIDE
comspec = os.environ.get("COMSPEC", unicode("cmd.exe"))
args = unicode('{} /c "{}"').format(comspec, args)
if (_subprocess.GetVersion() >= 0x80000000 or
os.path.basename(comspec).lower() == "command.com"):
w9xpopen = self._find_w9xpopen()
args = unicode('"%s" %s') % (w9xpopen, args)
creationflags |= _subprocess.CREATE_NEW_CONSOLE
cp = _subprocess.CreateProcess
_subprocess.CreateProcess = CreateProcess
try:
super(Popen, self)._execute_child(
args, executable,
preexec_fn, close_fds, cwd, env, universal_newlines,
startupinfo, creationflags, False, to_close, p2cread,
p2cwrite, c2pread, c2pwrite, errread, errwrite,
)
finally:
_subprocess.CreateProcess = cp

View File

@ -1,4 +1,5 @@
from ...util.encodingutil import unicode_to_argv
from six import ensure_str
from ...scripts import runner
from ..common_util import ReallyEqualMixin, run_cli, run_cli_unicode
@ -45,6 +46,14 @@ class CLITestMixin(ReallyEqualMixin):
# client_num is used to execute client CLI commands on a specific
# client.
client_num = kwargs.pop("client_num", 0)
client_dir = unicode_to_argv(self.get_clientdir(i=client_num))
nodeargs = [ b"--node-directory", client_dir ]
# If we were really going to launch a child process then
# `unicode_to_argv` would be the right thing to do here. However,
# we're just going to call some Python functions directly and those
# Python functions want native strings. So ignore the requirements
# for passing arguments to another process and make sure this argument
# is a native string.
verb = ensure_str(verb)
args = [ensure_str(arg) for arg in args]
client_dir = ensure_str(self.get_clientdir(i=client_num))
nodeargs = [ "--node-directory", client_dir ]
return run_cli(verb, *args, nodeargs=nodeargs, **kwargs)
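# What six.ensure_str guarantees here (illustration): it returns the
# platform's native str -- encoding text on Python 2, decoding bytes on
# Python 3 -- so the same test code runs unchanged on both.
from six import ensure_str
assert ensure_str(u"tahoe") == "tahoe"
assert ensure_str(b"tahoe") == "tahoe"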

View File

@ -1,3 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import json
from twisted.trial import unittest
@ -59,7 +71,7 @@ class ListAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
# the node filesystem state.
aliases = get_aliases(self.get_clientdir())
self.assertIn(alias, aliases)
self.assertTrue(aliases[alias].startswith(u"URI:DIR2:"))
self.assertTrue(aliases[alias].startswith(b"URI:DIR2:"))
# And inspect the state via the user interface list-aliases command
# too.
@ -99,22 +111,6 @@ class ListAlias(GridTestMixin, CLITestMixin, unittest.TestCase):
)
def test_list_latin_1(self):
"""
An alias composed of all Latin-1-encodeable code points can be created
when the active encoding is Latin-1.
This is very similar to ``test_list_utf_8`` but the assumption of
UTF-8 is nearly ubiquitous and explicitly exercising the codepaths
with a UTF-8-incompatible encoding helps flush out unintentional UTF-8
assumptions.
"""
return self._check_create_alias(
u"taho\N{LATIN SMALL LETTER E WITH ACUTE}",
encoding="latin-1",
)
def test_list_utf_8(self):
"""
An alias composed of all UTF-8-encodeable code points can be created when

View File

@ -1,4 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# Don't import future bytes so we don't break a couple of tests
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
import sys
import os.path, time
@ -8,7 +19,7 @@ from twisted.trial import unittest
from allmydata.util import fileutil
from allmydata.util.encodingutil import listdir_unicode
from allmydata.scripts import backupdb
from .common_util import skip_if_cannot_represent_filename
from ..common_util import skip_if_cannot_represent_filename
class BackupDB(unittest.TestCase):
def create(self, dbfile):
@ -70,7 +81,7 @@ class BackupDB(unittest.TestCase):
def writeto(self, filename, data):
fn = os.path.join(self.basedir, unicode(filename))
fn = os.path.join(self.basedir, filename)
parentdir = os.path.dirname(fn)
fileutil.make_dirs(parentdir)
fileutil.write(fn, data)
@ -87,15 +98,15 @@ class BackupDB(unittest.TestCase):
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("foo-cap")
r.did_upload(b"foo-cap")
r = bdb.check_file(blah_fn)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("blah-cap")
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), "foo-cap")
self.failUnlessEqual(type(r.was_uploaded()), str)
self.failUnlessEqual(r.was_uploaded(), b"foo-cap")
self.failUnlessEqual(type(r.was_uploaded()), bytes)
self.failUnlessEqual(r.should_check(), False)
time.sleep(1.0) # make sure the timestamp changes
@ -103,28 +114,28 @@ class BackupDB(unittest.TestCase):
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("new-cap")
r.did_upload(b"new-cap")
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), "new-cap")
self.failUnlessEqual(r.was_uploaded(), b"new-cap")
self.failUnlessEqual(r.should_check(), False)
# if we spontaneously decide to upload it anyways, nothing should
# break
r.did_upload("new-cap")
r.did_upload(b"new-cap")
r = bdb.check_file(foo_fn, use_timestamps=False)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("new-cap")
r.did_upload(b"new-cap")
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), "new-cap")
self.failUnlessEqual(r.was_uploaded(), b"new-cap")
self.failUnlessEqual(r.should_check(), False)
bdb.NO_CHECK_BEFORE = 0
bdb.ALWAYS_CHECK_AFTER = 0.1
r = bdb.check_file(blah_fn)
self.failUnlessEqual(r.was_uploaded(), "blah-cap")
self.failUnlessEqual(r.was_uploaded(), b"blah-cap")
self.failUnlessEqual(r.should_check(), True)
r.did_check_healthy("results") # we know they're ignored for now
@ -132,7 +143,7 @@ class BackupDB(unittest.TestCase):
bdb.ALWAYS_CHECK_AFTER = 400
r = bdb.check_file(blah_fn)
self.failUnlessEqual(r.was_uploaded(), "blah-cap")
self.failUnlessEqual(r.was_uploaded(), b"blah-cap")
self.failUnlessEqual(r.should_check(), False)
os.unlink(os.path.join(basedir, "foo.txt"))
@ -165,13 +176,13 @@ class BackupDB(unittest.TestCase):
dbfile = os.path.join(basedir, "dbfile")
bdb = self.create(dbfile)
contents = {u"file1": "URI:CHK:blah1",
u"file2": "URI:CHK:blah2",
u"dir1": "URI:DIR2-CHK:baz2"}
contents = {u"file1": b"URI:CHK:blah1",
u"file2": b"URI:CHK:blah2",
u"dir1": b"URI:DIR2-CHK:baz2"}
r = bdb.check_directory(contents)
self.failUnless(isinstance(r, backupdb.DirectoryResult))
self.failIf(r.was_created())
dircap = "URI:DIR2-CHK:foo1"
dircap = b"URI:DIR2-CHK:foo1"
r.did_create(dircap)
r = bdb.check_directory(contents)
@ -185,7 +196,7 @@ class BackupDB(unittest.TestCase):
r = bdb.check_directory(contents)
self.failUnless(r.was_created())
self.failUnlessEqual(r.was_created(), dircap)
self.failUnlessEqual(type(r.was_created()), str)
self.failUnlessEqual(type(r.was_created()), bytes)
self.failUnlessEqual(r.should_check(), False)
bdb.NO_CHECK_BEFORE = 0
@ -207,14 +218,14 @@ class BackupDB(unittest.TestCase):
self.failUnlessEqual(r.should_check(), False)
contents2 = {u"file1": "URI:CHK:blah1",
u"dir1": "URI:DIR2-CHK:baz2"}
contents2 = {u"file1": b"URI:CHK:blah1",
u"dir1": b"URI:DIR2-CHK:baz2"}
r = bdb.check_directory(contents2)
self.failIf(r.was_created())
contents3 = {u"file1": "URI:CHK:blah1",
u"file2": "URI:CHK:blah3",
u"dir1": "URI:DIR2-CHK:baz2"}
contents3 = {u"file1": b"URI:CHK:blah1",
u"file2": b"URI:CHK:blah3",
u"dir1": b"URI:DIR2-CHK:baz2"}
r = bdb.check_directory(contents3)
self.failIf(r.was_created())
@ -228,17 +239,17 @@ class BackupDB(unittest.TestCase):
bdb = self.create(dbfile)
self.writeto(u"f\u00f6\u00f6.txt", "foo.txt")
files = [fn for fn in listdir_unicode(unicode(basedir)) if fn.endswith(".txt")]
files = [fn for fn in listdir_unicode(str(basedir)) if fn.endswith(".txt")]
self.failUnlessEqual(len(files), 1)
foo_fn = os.path.join(basedir, files[0])
#print(foo_fn, type(foo_fn))
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("foo-cap")
r.did_upload(b"foo-cap")
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), "foo-cap")
self.failUnlessEqual(r.was_uploaded(), b"foo-cap")
self.failUnlessEqual(r.should_check(), False)
bar_fn = self.writeto(u"b\u00e5r.txt", "bar.txt")
@ -246,9 +257,9 @@ class BackupDB(unittest.TestCase):
r = bdb.check_file(bar_fn)
self.failUnlessEqual(r.was_uploaded(), False)
r.did_upload("bar-cap")
r.did_upload(b"bar-cap")
r = bdb.check_file(bar_fn)
self.failUnlessEqual(r.was_uploaded(), "bar-cap")
self.failUnlessEqual(r.was_uploaded(), b"bar-cap")
self.failUnlessEqual(r.should_check(), False)

View File

@ -1,3 +1,15 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os
import mock
from twisted.trial import unittest

View File

@ -7,7 +7,7 @@ from allmydata.scripts.common import get_aliases
from allmydata.scripts import cli
from ..no_network import GridTestMixin
from ..common_util import skip_if_cannot_represent_filename
from allmydata.util.encodingutil import get_io_encoding, unicode_to_argv
from allmydata.util.encodingutil import get_io_encoding
from allmydata.util.fileutil import abspath_expanduser_unicode
from .common import CLITestMixin
@ -46,21 +46,21 @@ class Put(GridTestMixin, CLITestMixin, unittest.TestCase):
self.basedir = "cli/Put/unlinked_immutable_from_file"
self.set_up_grid(oneshare=True)
rel_fn = os.path.join(self.basedir, "DATAFILE")
abs_fn = unicode_to_argv(abspath_expanduser_unicode(unicode(rel_fn)))
rel_fn = unicode(os.path.join(self.basedir, "DATAFILE"))
abs_fn = abspath_expanduser_unicode(rel_fn)
# we make the file small enough to fit in a LIT file, for speed
fileutil.write(rel_fn, "short file")
d = self.do_cli("put", rel_fn)
d = self.do_cli_unicode(u"put", [rel_fn])
def _uploaded(args):
(rc, out, err) = args
readcap = out
self.failUnless(readcap.startswith("URI:LIT:"), readcap)
self.readcap = readcap
d.addCallback(_uploaded)
d.addCallback(lambda res: self.do_cli("put", "./" + rel_fn))
d.addCallback(lambda res: self.do_cli_unicode(u"put", [u"./" + rel_fn]))
d.addCallback(lambda rc_stdout_stderr:
self.failUnlessReallyEqual(rc_stdout_stderr[1], self.readcap))
d.addCallback(lambda res: self.do_cli("put", abs_fn))
d.addCallback(lambda res: self.do_cli_unicode(u"put", [abs_fn]))
d.addCallback(lambda rc_stdout_stderr:
self.failUnlessReallyEqual(rc_stdout_stderr[1], self.readcap))
# we just have to assume that ~ is handled properly

View File

@ -1,10 +1,21 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_text
import os
import mock
import json
import tempfile
from six.moves import StringIO
from io import BytesIO, StringIO
from os.path import join
from UserDict import UserDict
from twisted.trial import unittest
from twisted.internet import defer
@ -22,6 +33,7 @@ from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.mutable.publish import PublishStatus
from allmydata.mutable.retrieve import RetrieveStatus
from allmydata.mutable.servermap import UpdateStatus
from allmydata.util import jsonbytes as json
from ..no_network import GridTestMixin
from ..common_web import do_http
@ -60,9 +72,8 @@ class ProgressBar(unittest.TestCase):
)
class _FakeOptions(UserDict, object):
class _FakeOptions(dict):
def __init__(self):
super(_FakeOptions, self).__init__()
self._tmp = tempfile.mkdtemp()
os.mkdir(join(self._tmp, 'private'), 0o777)
with open(join(self._tmp, 'private', 'api_auth_token'), 'w') as f:
@ -86,7 +97,7 @@ class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):
# upload something
c0 = self.g.clients[0]
data = MutableData("data" * 100)
data = MutableData(b"data" * 100)
filenode = yield c0.create_mutable_file(data)
self.uri = filenode.get_uri()
@ -97,8 +108,8 @@ class Integration(GridTestMixin, CLITestMixin, unittest.TestCase):
d = self.do_cli('status')# '--verbose')
def _check(ign):
code, stdout, stdin = ign
self.assertEqual(code, 0)
code, stdout, stderr = ign
self.assertEqual(code, 0, stderr)
self.assertTrue('Skipped 1' in stdout)
d.addCallback(_check)
return d
@ -124,18 +135,18 @@ class CommandStatus(unittest.TestCase):
@mock.patch('sys.stdout', StringIO())
def test_no_operations(self, http):
values = [
StringIO(json.dumps({
StringIO(ensure_text(json.dumps({
"active": [],
"recent": [],
})),
StringIO(json.dumps({
}))),
StringIO(ensure_text(json.dumps({
"counters": {
"bytes_downloaded": 0,
},
"stats": {
"node.uptime": 0,
}
})),
}))),
]
http.side_effect = lambda *args, **kw: values.pop(0)
do_status(self.options)
@ -145,14 +156,14 @@ class CommandStatus(unittest.TestCase):
def test_simple(self, http):
recent_items = active_items = [
UploadStatus(),
DownloadStatus("abcd", 12345),
DownloadStatus(b"abcd", 12345),
PublishStatus(),
RetrieveStatus(),
UpdateStatus(),
FakeStatus(),
]
values = [
StringIO(json.dumps({
BytesIO(json.dumps({
"active": list(
marshal_json(item)
for item
@ -163,15 +174,15 @@ class CommandStatus(unittest.TestCase):
for item
in recent_items
),
})),
StringIO(json.dumps({
}).encode("utf-8")),
BytesIO(json.dumps({
"counters": {
"bytes_downloaded": 0,
},
"stats": {
"node.uptime": 0,
}
})),
}).encode("utf-8")),
]
http.side_effect = lambda *args, **kw: values.pop(0)
do_status(self.options)

Some files were not shown because too many files have changed in this diff.