mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Merge remote-tracking branch 'origin/master' into 3374.codec-monitor-python-3-take-2
commit 6726c6ec5b
@@ -1,5 +1,6 @@
ARG TAG
FROM centos:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
    git \
    sudo \
    make automake gcc gcc-c++ \
    python2 \
    python2-devel \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-devel \
    libffi-devel \
    openssl-devel \
    libyaml \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -1,5 +1,6 @@
ARG TAG
FROM debian:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -8,22 +9,22 @@ ENV BUILD_SRC_ROOT /tmp/project

RUN apt-get --quiet update && \
    apt-get --quiet --yes install \
    git \
    lsb-release \
    git \
    lsb-release \
    sudo \
    build-essential \
    python2.7 \
    python2.7-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
    virtualenv
    build-essential \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
    virtualenv

# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

# Only the integration tests currently need this but it doesn't hurt to always
# have it present and it's simpler than building a whole extra image just for
@@ -1,5 +1,6 @@
ARG TAG
FROM fedora:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
    git \
    sudo \
    make automake gcc gcc-c++ \
    python \
    python-devel \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-devel \
    libffi-devel \
    openssl-devel \
    libyaml-devel \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -1,49 +0,0 @@
ARG TAG
FROM vbatts/slackware:${TAG}

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
# This will get updated by the CircleCI checkout step.
ENV BUILD_SRC_ROOT /tmp/project

# Be careful with slackpkg. If the package name given doesn't match anything,
# slackpkg still claims to succeed but you're totally screwed. Slackware
# updates versions of packaged software so including too much version prefix
# is a good way to have your install commands suddenly begin not installing
# anything.
RUN slackpkg update && \
    slackpkg install \
    openssh-7 git-2 \
    ca-certificates \
    sudo-1 \
    make-4 \
    automake-1 \
    kernel-headers \
    glibc-2 \
    binutils-2 \
    gcc-5 \
    gcc-g++-5 \
    python-2 \
    libffi-3 \
    libyaml-0 \
    sqlite-3 \
    icu4c-56 \
    libmpc-1 </dev/null && \
    slackpkg upgrade \
    openssl-1 </dev/null

# neither virtualenv nor pip is packaged.
# do it the hard way.
# and it is extra hard since it is slackware.
RUN slackpkg install \
    cyrus-sasl-2 \
    curl-7 </dev/null && \
    curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python get-pip.py && \
    pip install virtualenv

# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
@@ -1,5 +1,6 @@
ARG TAG
FROM ubuntu:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -13,8 +14,8 @@ RUN apt-get --quiet update && \
    apt-get --quiet --yes install \
    sudo \
    build-essential \
    python2.7 \
    python2.7-dev \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
@@ -26,4 +27,4 @@ RUN apt-get --quiet update && \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -11,10 +11,13 @@ workflows:
          requires:
            - "debian-9"

      - "ubuntu-18.04"
      - "ubuntu-20.04"
      - "ubuntu-18.04":
          requires:
            - "ubuntu-20.04"
      - "ubuntu-16.04":
          requires:
            - "ubuntu-18.04"
            - "ubuntu-20.04"

      - "fedora-29"
      - "fedora-28":
@@ -23,13 +26,14 @@ workflows:

      - "centos-8"

      - "slackware-14.2"

      - "nixos-19.09"

      # Test against PyPy 2.7
      - "pypy2.7-buster"

      # Just one Python 3.6 configuration while the port is in-progress.
      - "python3.6"

      # Other assorted tasks and configurations
      - "lint"
      - "pyinstaller"
@@ -65,11 +69,12 @@ workflows:
      - "build-image-debian-9"
      - "build-image-ubuntu-16.04"
      - "build-image-ubuntu-18.04"
      - "build-image-ubuntu-20.04"
      - "build-image-fedora-28"
      - "build-image-fedora-29"
      - "build-image-centos-8"
      - "build-image-slackware-14.2"
      - "build-image-pypy-2.7-buster"
      - "build-image-python36-ubuntu"


jobs:
@@ -117,7 +122,7 @@ jobs:

  debian-9: &DEBIAN
    docker:
      - image: "tahoelafsci/debian:9"
      - image: "tahoelafsci/debian:9-py2.7"
        user: "nobody"

    environment: &UTF_8_ENVIRONMENT
@@ -194,14 +199,14 @@ jobs:
  debian-8:
    <<: *DEBIAN
    docker:
      - image: "tahoelafsci/debian:8"
      - image: "tahoelafsci/debian:8-py2.7"
        user: "nobody"


  pypy2.7-buster:
    <<: *DEBIAN
    docker:
      - image: "tahoelafsci/pypy:2.7-buster"
      - image: "tahoelafsci/pypy:buster-py2"
        user: "nobody"

    environment:
@@ -257,20 +262,42 @@ jobs:
  ubuntu-16.04:
    <<: *DEBIAN
    docker:
      - image: "tahoelafsci/ubuntu:16.04"
      - image: "tahoelafsci/ubuntu:16.04-py2.7"
        user: "nobody"


  ubuntu-18.04:
  ubuntu-18.04: &UBUNTU_18_04
    <<: *DEBIAN
    docker:
      - image: "tahoelafsci/ubuntu:18.04"
      - image: "tahoelafsci/ubuntu:18.04-py2.7"
        user: "nobody"


  python3.6:
    <<: *UBUNTU_18_04
    docker:
      - image: "tahoelafsci/ubuntu:18.04-py3"
        user: "nobody"

    environment:
      <<: *UTF_8_ENVIRONMENT
      # The default trial args include --rterrors which is incompatible with
      # this reporter on Python 3. So drop that and just specify the
      # reporter.
      TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
      TAHOE_LAFS_TOX_ENVIRONMENT: "py36"


  ubuntu-20.04:
    <<: *DEBIAN
    docker:
      - image: "tahoelafsci/ubuntu:20.04"
        user: "nobody"


  centos-8: &RHEL_DERIV
    docker:
      - image: "tahoelafsci/centos:8"
      - image: "tahoelafsci/centos:8-py2"
        user: "nobody"

    environment: *UTF_8_ENVIRONMENT
@@ -292,37 +319,17 @@ jobs:
  fedora-28:
    <<: *RHEL_DERIV
    docker:
      - image: "tahoelafsci/fedora:28"
      - image: "tahoelafsci/fedora:28-py"
        user: "nobody"


  fedora-29:
    <<: *RHEL_DERIV
    docker:
      - image: "tahoelafsci/fedora:29"
      - image: "tahoelafsci/fedora:29-py"
        user: "nobody"


  slackware-14.2:
    docker:
      - image: "tahoelafsci/slackware:14.2"
        user: "nobody"

    environment: *UTF_8_ENVIRONMENT

    # pip cannot install packages if the working directory is not readable.
    # We want to run a lot of steps as nobody instead of as root.
    working_directory: "/tmp/project"

    steps:
      - "checkout"
      - run: *SETUP_VIRTUALENV
      - run: *RUN_TESTS
      - store_test_results: *STORE_TEST_RESULTS
      - store_artifacts: *STORE_TEST_LOG
      - store_artifacts: *STORE_OTHER_ARTIFACTS
      - run: *SUBMIT_COVERAGE

  nixos-19.09:
    docker:
      # Run in a highly Nix-capable environment.
@@ -386,8 +393,9 @@ jobs:
      - image: "docker:17.05.0-ce-git"

    environment:
      DISTRO: "tahoelafsci/<DISTRO>:foo"
      TAG: "tahoelafsci/distro:<TAG>"
      DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
      TAG: "tahoelafsci/distro:<TAG>-py2"
      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION}"

    steps:
      - "checkout"
@@ -439,13 +447,14 @@ jobs:
            docker \
                build \
                --build-arg TAG=${TAG} \
                -t tahoelafsci/${DISTRO}:${TAG} \
                --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
                -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
                -f ~/project/.circleci/Dockerfile.${DISTRO} \
                ~/project/
      - run:
          name: "Push image"
          command: |
            docker push tahoelafsci/${DISTRO}:${TAG}
            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}


  build-image-debian-8:
@@ -454,6 +463,7 @@ jobs:
    environment:
      DISTRO: "debian"
      TAG: "8"
      PYTHON_VERSION: "2.7"


  build-image-debian-9:
@@ -462,6 +472,7 @@ jobs:
    environment:
      DISTRO: "debian"
      TAG: "9"
      PYTHON_VERSION: "2.7"


  build-image-ubuntu-16.04:
@@ -470,6 +481,7 @@ jobs:
    environment:
      DISTRO: "ubuntu"
      TAG: "16.04"
      PYTHON_VERSION: "2.7"


  build-image-ubuntu-18.04:
@@ -478,6 +490,25 @@ jobs:
    environment:
      DISTRO: "ubuntu"
      TAG: "18.04"
      PYTHON_VERSION: "2.7"


  build-image-python36-ubuntu:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "18.04"
      PYTHON_VERSION: "3"


  build-image-ubuntu-20.04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "20.04"
      PYTHON_VERSION: "2.7"


  build-image-centos-8:
@@ -486,6 +517,7 @@ jobs:
    environment:
      DISTRO: "centos"
      TAG: "8"
      PYTHON_VERSION: "2"


  build-image-fedora-28:
@@ -494,6 +526,8 @@ jobs:
    environment:
      DISTRO: "fedora"
      TAG: "28"
      # The default on Fedora (this version anyway) is still Python 2.
      PYTHON_VERSION: ""


  build-image-fedora-29:
@@ -504,17 +538,13 @@ jobs:
      TAG: "29"


  build-image-slackware-14.2:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "slackware"
      TAG: "14.2"


  build-image-pypy-2.7-buster:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "pypy"
      TAG: "2.7-buster"
      TAG: "buster"
      # We only have Python 2 for PyPy right now so there's no support for
      # setting up PyPy 3 in the image building toolchain. This value is just
      # for constructing the right Docker image tag.
      PYTHON_VERSION: "2"
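As an aside, the `<<: *DEBIAN` entries in this config are standard YAML merge keys: a job starts from the anchored mapping and overrides selected fields. A minimal round-trip sketch in Python (assumes PyYAML is installed; the `base`/`derived` names are illustrative only, not part of this config):

    import yaml

    doc = """
    base: &DEBIAN
      user: nobody
      image: debian:9
    derived:
      <<: *DEBIAN
      image: debian:8
    """

    # safe_load resolves the merge key: "derived" inherits user from the
    # anchor and overrides image, just like the jobs above reuse *DEBIAN.
    data = yaml.safe_load(doc)
    assert data["derived"] == {"user": "nobody", "image": "debian:8"}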
@@ -36,8 +36,9 @@ PIP="${BOOTSTRAP_VENV}/bin/pip"
# Tell pip where it can find any existing wheels.
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"

# Populate the wheelhouse, if necessary.
"${PIP}" \
# Populate the wheelhouse, if necessary. zfec 1.5.3 can only be built with a
# UTF-8 environment so make sure we have one, at least for this invocation.
LANG="en_US.UTF-8" "${PIP}" \
    wheel \
    --wheel-dir "${WHEELHOUSE_PATH}" \
    "${PROJECT_ROOT}"[test] \
@@ -65,7 +65,7 @@ TIMEOUT="timeout --kill-after 1m 15m"
# Send the output directly to a file because transporting the binary subunit2
# via tox and then scraping it out is hideous and failure prone.
export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
export TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file --rterrors"
export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
export PIP_NO_INDEX="1"

if [ "${ALLOWED_FAILURE}" = "yes" ]; then
@@ -81,7 +81,12 @@ ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
    ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"

if [ -n "${ARTIFACTS}" ]; then
    if [ ! -e "${SUBUNIT2}" ]; then
        echo "subunitv2 output file does not exist: ${SUBUNIT2}"
        exit 1
    fi

    # Create a junitxml results area.
    mkdir -p "$(dirname "${JUNITXML}")"
    ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
fi
40 .travis.yml
@@ -1,40 +0,0 @@
sudo: false
language: python
cache: pip
dist: xenial
before_cache:
  - rm -f $HOME/.cache/pip/log/debug.log
git:
  depth: 1000

env:
  global:
    - TAHOE_LAFS_HYPOTHESIS_PROFILE=ci

install:
  - pip install --upgrade tox setuptools virtualenv
  - echo $PATH; which python; which pip; which tox
  - python misc/build_helpers/show-tool-versions.py

script:
  - |
    set -eo pipefail
    tox -e ${T}

notifications:
  email: false
  irc:
    channels: "chat.freenode.net#tahoe-lafs"
    on_success: always # for testing
    on_failure: always
    template:
      - "%{repository}#%{build_number} [%{branch}: %{commit} by %{author}] %{message}"
      - "Changes: %{compare_url} | Details: %{build_url}"

matrix:
  include:
    - os: linux
      python: '3.6'
      env: T=py36

  fast_finish: true
@@ -1,148 +0,0 @@
allmydata.test.mutable.test_exceptions.Exceptions.test_repr
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_1s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_25s
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_day
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_future_5_minutes
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_hours
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_month
allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_year
allmydata.test.test_abbreviate.Abbreviate.test_parse_space
allmydata.test.test_abbreviate.Abbreviate.test_space
allmydata.test.test_abbreviate.Abbreviate.test_time
allmydata.test.test_base32.Base32.test_a2b
allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
allmydata.test.test_base32.Base32.test_b2a
allmydata.test.test_base32.Base32.test_b2a_or_none
allmydata.test.test_base62.Base62.test_ende_0x00
allmydata.test.test_base62.Base62.test_ende_0x000000
allmydata.test.test_base62.Base62.test_ende_0x01
allmydata.test.test_base62.Base62.test_ende_0x0100
allmydata.test.test_base62.Base62.test_ende_0x010000
allmydata.test.test_base62.Base62.test_ende_longrandstr
allmydata.test.test_base62.Base62.test_ende_randstr
allmydata.test.test_base62.Base62.test_known_values
allmydata.test.test_base62.Base62.test_num_octets_that_encode_to_this_many_chars
allmydata.test.test_base62.Base62.test_odd_sizes
allmydata.test.test_base62.Base62.test_roundtrip
allmydata.test.test_codec.T.test_encode
allmydata.test.test_codec.T.test_encode1
allmydata.test.test_codec.T.test_encode2
allmydata.test.test_crypto.TestEd25519.test_deserialize_private_not_bytes
allmydata.test.test_crypto.TestEd25519.test_deserialize_public_not_bytes
allmydata.test.test_crypto.TestEd25519.test_key_serialization
allmydata.test.test_crypto.TestEd25519.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestEd25519.test_signature_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signature_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signed_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_decode_ed15519_keypair
allmydata.test.test_crypto.TestRegression.test_decode_rsa_keypair
allmydata.test.test_crypto.TestRegression.test_encrypt_data_not_bytes
allmydata.test.test_crypto.TestRegression.test_incorrect_iv_size
allmydata.test.test_crypto.TestRegression.test_iv_not_bytes
allmydata.test.test_crypto.TestRegression.test_key_incorrect_size
allmydata.test.test_crypto.TestRegression.test_old_start_up_test
allmydata.test.test_crypto.TestRsa.test_keys
allmydata.test.test_crypto.TestRsa.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestRsa.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestUtil.test_remove_prefix_bad
allmydata.test.test_crypto.TestUtil.test_remove_prefix_entire_string
allmydata.test.test_crypto.TestUtil.test_remove_prefix_good
allmydata.test.test_crypto.TestUtil.test_remove_prefix_partial
allmydata.test.test_crypto.TestUtil.test_remove_prefix_zero
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_dictutil.DictUtil.test_auxdict
allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
allmydata.test.test_hashtree.Complete.test_create
allmydata.test.test_hashtree.Complete.test_dump
allmydata.test.test_hashtree.Complete.test_needed_hashes
allmydata.test.test_hashtree.Incomplete.test_check
allmydata.test.test_hashtree.Incomplete.test_create
allmydata.test.test_hashtree.Incomplete.test_depth_of
allmydata.test.test_hashtree.Incomplete.test_large
allmydata.test.test_hashtree.Incomplete.test_needed_hashes
allmydata.test.test_hashutil.HashUtilTests.test_chk
allmydata.test.test_hashutil.HashUtilTests.test_hashers
allmydata.test.test_hashutil.HashUtilTests.test_known_answers
allmydata.test.test_hashutil.HashUtilTests.test_random_key
allmydata.test.test_hashutil.HashUtilTests.test_sha256d
allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_iputil.GcUtil.test_gc_after_allocations
allmydata.test.test_iputil.GcUtil.test_release_delays_gc
allmydata.test.test_iputil.ListAddresses.test_get_local_ip_for
allmydata.test.test_iputil.ListAddresses.test_list_async
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_cygwin
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ifconfig
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ip_addr
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_route
allmydata.test.test_iputil.ListenOnUsed.test_random_port
allmydata.test.test_iputil.ListenOnUsed.test_specific_port
allmydata.test.test_log.Log.test_default_facility
allmydata.test.test_log.Log.test_err
allmydata.test.test_log.Log.test_grandparent_id
allmydata.test.test_log.Log.test_no_prefix
allmydata.test.test_log.Log.test_numming
allmydata.test.test_log.Log.test_parent_id
allmydata.test.test_log.Log.test_with_bytes_prefix
allmydata.test.test_log.Log.test_with_prefix
allmydata.test.test_monitor.MonitorTests.test_cancellation
allmydata.test.test_monitor.MonitorTests.test_finish
allmydata.test.test_monitor.MonitorTests.test_status
allmydata.test.test_netstring.Netstring.test_encode
allmydata.test.test_netstring.Netstring.test_extra
allmydata.test.test_netstring.Netstring.test_nested
allmydata.test.test_netstring.Netstring.test_split
allmydata.test.test_observer.Observer.test_lazy_oneshot
allmydata.test.test_observer.Observer.test_observerlist
allmydata.test.test_observer.Observer.test_oneshot
allmydata.test.test_observer.Observer.test_oneshot_fireagain
allmydata.test.test_pipeline.Pipeline.test_basic
allmydata.test.test_pipeline.Pipeline.test_errors
allmydata.test.test_pipeline.Pipeline.test_errors2
allmydata.test.test_python3.Python3PortingEffortTests.test_finished_porting
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_distinct
allmydata.test.test_python3.Python3PortingEffortTests.test_ported_modules_exist
allmydata.test.test_spans.ByteSpans.test_basic
allmydata.test.test_spans.ByteSpans.test_large
allmydata.test.test_spans.ByteSpans.test_math
allmydata.test.test_spans.ByteSpans.test_overlap
allmydata.test.test_spans.ByteSpans.test_random
allmydata.test.test_spans.StringSpans.test_basic
allmydata.test.test_spans.StringSpans.test_random
allmydata.test.test_spans.StringSpans.test_test
allmydata.test.test_statistics.Statistics.test_binomial_coeff
allmydata.test.test_statistics.Statistics.test_binomial_distribution_pmf
allmydata.test.test_statistics.Statistics.test_convolve
allmydata.test.test_statistics.Statistics.test_find_k
allmydata.test.test_statistics.Statistics.test_pr_backup_file_loss
allmydata.test.test_statistics.Statistics.test_pr_file_loss
allmydata.test.test_statistics.Statistics.test_repair_cost
allmydata.test.test_statistics.Statistics.test_repair_count_pmf
allmydata.test.test_statistics.Statistics.test_survival_pmf
allmydata.test.test_time_format.TimeFormat.test_epoch
allmydata.test.test_time_format.TimeFormat.test_epoch_in_London
allmydata.test.test_time_format.TimeFormat.test_format_delta
allmydata.test.test_time_format.TimeFormat.test_format_time
allmydata.test.test_time_format.TimeFormat.test_format_time_y2038
allmydata.test.test_time_format.TimeFormat.test_iso_utc
allmydata.test.test_time_format.TimeFormat.test_parse_date
allmydata.test.test_time_format.TimeFormat.test_parse_duration
allmydata.test.test_version.CheckRequirement.test_cross_check
allmydata.test.test_version.CheckRequirement.test_cross_check_unparseable_versions
allmydata.test.test_version.CheckRequirement.test_extract_openssl_version
allmydata.test.test_version.CheckRequirement.test_packages_from_pkg_resources
allmydata.test.test_version.T.test_report_import_error
allmydata.test.test_version.VersionTestCase.test_basic_versions
allmydata.test.test_version.VersionTestCase.test_comparison
allmydata.test.test_version.VersionTestCase.test_from_parts
allmydata.test.test_version.VersionTestCase.test_irrational_versions
allmydata.test.test_version.VersionTestCase.test_suggest_normalized_version
@@ -1,409 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Ratchet up passing tests, or ratchet down failing tests.

Usage:

  ratchet.py <"up" or "down"> <junitxml file path> <tracking file path>

This script helps when you expect a large test suite to fail spectacularly in
some environment, and you want to gradually improve the situation with minimal
impact to forward development of the same codebase for other environments. The
initial and primary usecase is porting from Python 2 to Python 3.

The idea is to emit JUnit XML from your test runner, and then invoke ratchet.py
to consume this XML output and operate on a so-called "tracking" file. When
ratcheting up passing tests, the tracking file will contain a list of tests,
one per line, that passed. When ratcheting down, the tracking file contains a
list of failing tests. On each subsequent run, ratchet.py will compare the
prior results in the tracking file with the new results in the XML, and will
report on both welcome and unwelcome changes. It will modify the tracking file
in the case of welcome changes, and therein lies the ratcheting.

The exit codes are:

  0 - no changes observed
  1 - changes observed, whether welcome or unwelcome
  2 - invocation error

If <junitxml file path> does not exist, you'll get a FileNotFoundError:

>>> _test('up', None, None) # doctest: +ELLIPSIS
Traceback (most recent call last):
  ...
FileNotFoundError: ...

If <tracking file path> does not exist, that's fine:

>>> _test('up', '1', None)
Some tests not required to pass did:
  c0.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 0 test(s) were required to pass, but instead 1 did. 🐭

Same if you're ratcheting down:

>>> _test('down', '1', None)
All and only tests expected to fail did. 💃

If the test run has the same output as last time, it's all good:

>>> _test('up', '01001110', '01001110')
All and only tests required to pass did. 💃

>>> _test('down', '01001110', '10110001')
All and only tests expected to fail did. 💃

If there's a welcome change, that's noted:

>>> _test('up', '0101', '0100')
Some tests not required to pass did:
  c3.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Eep! 1 test(s) were required to pass, but instead 2 did. 🐭

>>> _test('down', '0011', '1110')
Some tests expected to fail didn't:
  c2.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 3 test(s) were expected to fail, but instead 2 did. 🐭

And if there is an unwelcome change, that is noted as well:

>>> _test('up', '1101', '1111')
Some tests required to pass didn't:
  c2.t
Eep! 4 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0000', '1101')
Some tests not expected to fail did:
  c2.t
Eep! 3 test(s) were expected to fail, but instead 4 did. 🐭

And if there are both welcome and unwelcome changes, they are both noted:

>>> _test('up', '1101', '1011')
Some tests not required to pass did:
  c1.t
Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
Some tests required to pass didn't:
  c2.t
Eep! 3 test(s) were required to pass, but instead 3 did. 🐭

>>> _test('down', '0100', '1100')
Some tests not expected to fail did:
  c2.t
  c3.t
Some tests expected to fail didn't:
  c1.t
Conveniently, they have been removed from `<tracking_path>` for you. Perhaps commit that?
Eep! 2 test(s) were expected to fail, but instead 3 did. 🐭


To test ratchet.py itself:

  python3 -m doctest ratchet.py

'''
from __future__ import absolute_import, division, print_function, unicode_literals

import io
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as Etree


class JUnitXMLFile(object):
    '''Represent a file containing test results in JUnit XML format.

    >>> eg = _mktemp_junitxml('0100111')
    >>> results = JUnitXMLFile(eg.name).parse()
    >>> results.failed
    ['c0.t', 'c2.t', 'c3.t']
    >>> results.passed
    ['c1.t', 'c4.t', 'c5.t', 'c6.t']

    '''

    def __init__(self, filepath):
        self.filepath = filepath
        self.failed = []
        self.failed_aggregates = {}
        self.stderr_output = []
        self.passed = []
        self._tree = None

    def parse(self):
        if self._tree:
            raise RuntimeError('already parsed')
        self._tree = Etree.parse(self.filepath)
        for testcase in self._tree.findall('testcase'):
            self.process_testcase(testcase)
        return self

    def process_testcase(self, case):
        key = self.case_key(case)

        # look at children but throw away stderr output
        nonpassing = [c for c in case if not c.tag == 'system-err']
        n = len(nonpassing)
        if n > 1:
            raise RuntimeError(f'multiple results for {key}: {nonpassing}')
        elif n == 1:
            result = nonpassing.pop()
            self.failed.append(key)
            message = result.get('message')
            self.failed_aggregates.setdefault(message, []).append(key)
        else:
            self.passed.append(key)

    @staticmethod
    def case_key(case):
        return f'{case.get("classname")}.{case.get("name")}'

    def report(self, details=False):
        for k, v in sorted(
                self.failed_aggregates.items(),
                key=lambda i: len(i[1]),
                reverse=True):
            print(f'# {k}')
            for t in v:
                print(f'  - {t}')


def load_previous_results(txt):
    try:
        previous_results = open(txt).read()
    except FileNotFoundError:
        previous_results = ''
    parsed = set()
    for line in previous_results.splitlines():
        if not line or line.startswith('#'):
            continue
        parsed.add(line)
    return parsed


def print_tests(tests):
    for test in sorted(tests):
        print(' ', test)


def ratchet_up_passing(tracking_path, tests):
    try:
        old = set(open(tracking_path, 'r'))
    except FileNotFoundError:
        old = set()
    new = set(t + '\n' for t in tests)
    merged = sorted(old | new)
    open(tracking_path, 'w+').writelines(merged)


def ratchet_down_failing(tracking_path, tests):
    new = set(t + '\n' for t in tests)
    open(tracking_path, 'w+').writelines(sorted(new))


def main(direction, junitxml_path, tracking_path):
    '''Takes a string indicating which direction to ratchet, "up" or "down,"
    and two paths, one to test-runner output in JUnit XML format, the other to
    a file tracking test results (one test case dotted name per line). Walk the
    former looking for the latter, and react appropriately.

    >>> inp = _mktemp_junitxml('0100111')
    >>> out = _mktemp_tracking('0000000')
    >>> _test_main('up', inp.name, out.name)
    Some tests not required to pass did:
      c1.t
      c4.t
      c5.t
      c6.t
    Conveniently, they have been added to `<tracking_path>` for you. Perhaps commit that?
    Eep! 0 test(s) were required to pass, but instead 4 did. 🐭

    '''
    results = JUnitXMLFile(junitxml_path).parse()

    if tracking_path == '...':
        # Shortcut to aid in debugging XML parsing issues.
        results.report()
        return

    previous = load_previous_results(tracking_path)
    current = set(results.passed if direction == 'up' else results.failed)

    subjunctive = {'up': 'required to pass', 'down': 'expected to fail'}[direction]
    ratchet = None

    too_many = current - previous
    if too_many:
        print(f'Some tests not {subjunctive} did:')
        print_tests(too_many)
        if direction == 'up':
            # Too many passing tests is good -- let's do more of those!
            ratchet_up_passing(tracking_path, current)
            print(f'Conveniently, they have been added to `{tracking_path}` for you. Perhaps commit that?')

    not_enough = previous - current
    if not_enough:
        print(f'Some tests {subjunctive} didn\'t:')
        print_tests(not_enough)
        if direction == 'down':
            # Not enough failing tests is good -- let's do more of those!
            ratchet_down_failing(tracking_path, current)
            print(f'Conveniently, they have been removed from `{tracking_path}` for you. Perhaps commit that?')

    if too_many or not_enough:
        print(f'Eep! {len(previous)} test(s) were {subjunctive}, but instead {len(current)} did. 🐭')
        return 1

    print(f'All and only tests {subjunctive} did. 💃')
    return 0


# When called as an executable ...

if __name__ == '__main__':
    try:
        direction, junitxml_path, tracking_path = sys.argv[1:4]
        if direction not in ('up', 'down'):
            raise ValueError
    except ValueError:
        doc = '\n'.join(__doc__.splitlines()[:6])
        doc = re.sub(' ratchet.py', f' {sys.argv[0]}', doc)
        print(doc, file=sys.stderr)
        exit_code = 2
    else:
        exit_code = main(direction, junitxml_path, tracking_path)
    sys.exit(exit_code)


# Helpers for when called under doctest ...

def _test(*a):
    return _test_main(*_mk(*a))


def _test_main(direction, junitxml, tracking):
    '''Takes a string 'up' or 'down' and paths to (or open file objects for)
    the JUnit XML and tracking files to use for this test run. Captures and
    emits stdout (slightly modified) for inspection via doctest.'''
    junitxml_path = junitxml.name if hasattr(junitxml, 'name') else junitxml
    tracking_path = tracking.name if hasattr(tracking, 'name') else tracking

    old_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main(direction, junitxml_path, tracking_path)
    finally:
        sys.stdout.seek(0)
        out = sys.stdout.read()
        out = re.sub('`.*?`', '`<tracking_path>`', out).strip()
        sys.stdout = old_stdout
    print(out)


class _PotentialFile(object):
    '''Represent a file that we are able to create but which doesn't exist yet,
    and which, if we create it, will be automatically torn down when the test
    run is over.'''

    def __init__(self, filename):
        self.d = tempfile.TemporaryDirectory()
        self.name = os.path.join(self.d.name, filename)


def _mk(direction, spec_junitxml, spec_tracking):
    '''Takes a string 'up' or 'down' and two bit strings specifying the state
    of the JUnit XML results file and the tracking file to set up for this test
    case. Returns the direction (unharmed) and two file-ish objects.

    If a spec string is None the corresponding return value will be a
    _PotentialFile object, which has a .name attribute (like a true file
    object) that points to a file that does not exist, but could.

    The reason not to simply return the path in all cases is that the file
    objects are actually temporary file objects that destroy the underlying
    file when they go out of scope, and we want to keep the underlying file
    around until the end of the test run.'''
    if None not in (spec_junitxml, spec_tracking):
        if len(spec_junitxml) != len(spec_tracking):
            raise ValueError(f'if both given, must be the same length: `{spec_junitxml}` and `{spec_tracking}`')
    if spec_junitxml is None:
        junitxml_fp = _PotentialFile('results.xml')
    else:
        junitxml_fp = _mktemp_junitxml(spec_junitxml)
    if spec_tracking is None:
        tracking_fp = _PotentialFile('tracking')
    else:
        tracking_fp = _mktemp_tracking(spec_tracking)
    return direction, junitxml_fp, tracking_fp


def _mktemp_junitxml(spec):
    '''Test helper to generate a raw JUnit XML file.

    >>> fp = _mktemp_junitxml('00101')
    >>> open(fp.name).read()[:11]
    '<testsuite>'

    '''
    fp = tempfile.NamedTemporaryFile()
    fp.write(b'<testsuite>')

    passed = '''\
<testcase classname="c{i}" name="t"></testcase>
'''
    failed = '''\
<testcase classname="c{i}" name="t">
<failure>Traceback (most recent call last):
  File "/foo/bar/baz/buz.py", line 1, in <module>
NameError: name 'heck' is not defined
</failure>
</testcase>
'''

    i = 0
    for c in spec:
        if c == '0':
            out = failed
        elif c == '1':
            out = passed
        else:
            raise ValueError(f'bad c: `{c}`')
        fp.write(out.format(i=i).encode('utf8'))
        i += 1

    fp.write(b'</testsuite>')
    fp.flush()
    return fp


def _mktemp_tracking(spec):
    '''Test helper to prefabricate a tracking file.

    >>> fp = _mktemp_tracking('01101')
    >>> print(open(fp.name).read()[:-1])
    c1.t
    c2.t
    c4.t

    '''
    fp = tempfile.NamedTemporaryFile()

    i = 0
    for c in spec:
        if c == '0':
            pass
        elif c == '1':
            fp.write(f'c{i}.t\n'.encode('utf8'))
        else:
            raise ValueError(f'bad c: `{c}`')
        i += 1

    fp.flush()
    return fp
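For orientation, the deleted tool above can also be driven programmatically rather than from the shell; a minimal sketch (assumes `ratchet.py` is importable and that a `results.xml`/`ratchet-passing` pair exists in the working directory):

    import ratchet

    # 'up' mode records newly passing tests in the tracking file; main()
    # returns 0 when the tracking file and the XML results agree, and 1
    # when anything changed, whether welcome or unwelcome.
    exit_code = ratchet.main('up', 'results.xml', 'ratchet-passing')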
@@ -1,37 +0,0 @@
#!/usr/bin/env bash
set -euxo pipefail
tracking_filename="ratchet-passing"

# Start somewhere predictable.
cd "$(dirname "$0")"
base=$(pwd)

# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
set -e

# Okay, now we're clear.
cd "$base"

# Make sure ratchet.py itself is clean.
python3 -m doctest ratchet.py

# Now see about Tahoe-LAFS (also expected to fail) ...
set +e
python3 ratchet.py up results.xml "$tracking_filename"
code=$?
set -e

# Emit a diff of the tracking file, to aid in the situation where changes are
# not discovered until CI (where TERM might be `dumb`).
if [ "${TERM}" = 'dumb' ]; then
    export TERM=ansi
fi
git diff "$tracking_filename"

exit $code
1 newsfragments/3316.minor Normal file
@@ -0,0 +1 @@
Port checker result pages' rendering from nevow to twisted web templates.
1 newsfragments/3323.removed Normal file
@@ -0,0 +1 @@
Slackware 14.2 is no longer a Tahoe-LAFS supported platform.
1 newsfragments/3328.installation Normal file
@@ -0,0 +1 @@
Tahoe-LAFS now supports Ubuntu 20.04.
0 newsfragments/3336.minor Normal file
0 newsfragments/3358.minor Normal file
0 newsfragments/3367.minor Normal file
0 newsfragments/3370.minor Normal file
@@ -0,0 +1 @@

0 newsfragments/3373.minor Normal file
0 newsfragments/3375.minor Normal file
0 newsfragments/3376.minor Normal file
0 newsfragments/3378.minor Normal file
0 newsfragments/3380.minor Normal file
0 newsfragments/3383.minor Normal file
0 newsfragments/3386.minor Normal file
0 newsfragments/3388.minor Normal file
0 newsfragments/3389.minor Normal file
3 setup.py
@@ -117,7 +117,8 @@ install_requires = [
    "eliot ~= 1.7",

    # A great way to define types of values.
    "attrs >= 18.2.0",
    # XXX: drop the upper bound: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3390
    "attrs >= 18.2.0, < 20",

    # WebSocket library for twisted and asyncio
    "autobahn >= 19.5.2",
@@ -741,7 +741,7 @@ class _Client(node.Node, pollmixin.PollMixin):
        private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
        private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
        public_key_str = ed25519.string_from_verifying_key(public_key)
        self.config.write_config_file("node.pubkey", public_key_str + "\n")
        self.config.write_config_file("node.pubkey", public_key_str + "\n", "w")
        self._node_private_key = private_key
        self._node_public_key = public_key
@@ -1,5 +1,5 @@
"""Directory Node implementation."""
import time, unicodedata
import time

from zope.interface import implementer
from twisted.internet import defer
@@ -18,7 +18,7 @@ from allmydata.check_results import DeepCheckResults, \
    DeepCheckAndRepairResults
from allmydata.monitor import Monitor
from allmydata.util import hashutil, base32, log
from allmydata.util.encodingutil import quote_output
from allmydata.util.encodingutil import quote_output, normalize
from allmydata.util.assertutil import precondition
from allmydata.util.netstring import netstring, split_netstring
from allmydata.util.consumer import download_to_data
@@ -101,12 +101,6 @@ def update_metadata(metadata, new_metadata, now):
    return metadata


# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.

def normalize(namex):
    return unicodedata.normalize('NFC', namex)

# TODO: {Deleter,MetadataSetter,Adder}.modify all start by unpacking the
# contents and end by repacking them. It might be better to apply them to
# the unpacked contents.
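For context on the `normalize` helper this hunk relocates into `allmydata.util.encodingutil`: NFC folds distinct code-point sequences for the same character into one canonical form, which is why entry names are normalized before comparison. A standard-library illustration:

    import unicodedata

    # "e" plus a combining acute accent and the precomposed "é" are
    # different code-point sequences for the same character; NFC maps
    # both to a single canonical form, so the names compare equal.
    decomposed = "e\u0301"
    precomposed = "\u00e9"
    assert decomposed != precomposed
    assert unicodedata.normalize("NFC", decomposed) == precomposed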
@@ -1,5 +1,20 @@
"""
Algorithms for figuring out happiness, the number of unique nodes the data is
on.

from Queue import PriorityQueue
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # We omit dict, just in case newdict breaks things for external Python 2 code.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401

from queue import PriorityQueue


def augmenting_path_for(graph):
@@ -35,9 +50,9 @@ def bfs(graph, s):
    GRAY = 1
    # BLACK vertices are those we have seen and explored
    BLACK = 2
    color = [WHITE for i in xrange(len(graph))]
    predecessor = [None for i in xrange(len(graph))]
    distance = [-1 for i in xrange(len(graph))]
    color = [WHITE for i in range(len(graph))]
    predecessor = [None for i in range(len(graph))]
    distance = [-1 for i in range(len(graph))]
    queue = [s] # vertices that we haven't explored yet.
    color[s] = GRAY
    distance[s] = 0
@@ -58,9 +73,9 @@ def residual_network(graph, f):
    flow network represented by my graph and f arguments. graph is a
    flow network in adjacency-list form, and f is a flow in graph.
    """
    new_graph = [[] for i in xrange(len(graph))]
    cf = [[0 for s in xrange(len(graph))] for sh in xrange(len(graph))]
    for i in xrange(len(graph)):
    new_graph = [[] for i in range(len(graph))]
    cf = [[0 for s in range(len(graph))] for sh in range(len(graph))]
    for i in range(len(graph)):
        for v in graph[i]:
            if f[i][v] == 1:
                # We add an edge (v, i) with cf[v,i] = 1. This means
@@ -135,7 +150,7 @@ def _compute_maximum_graph(graph, shareIndices):
        return {}

    dim = len(graph)
    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
    residual_graph, residual_function = residual_network(graph, flow_function)

    while augmenting_path_for(residual_graph):
@@ -260,9 +275,9 @@ def _servermap_flow_graph(peers, shares, servermap):
    #print "share_to_index %s" % share_to_index
    #print "servermap %s" % servermap
    for peer in peers:
        if servermap.has_key(peer):
        if peer in servermap:
            for s in servermap[peer]:
                if share_to_index.has_key(s):
                if s in share_to_index:
                    indexedShares.append(share_to_index[s])
        graph.insert(peer_to_index[peer], indexedShares)
    for share in shares:
@@ -373,7 +388,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
    new_mappings = _calculate_mappings(new_peers, new_shares)
    #print "new_peers %s" % new_peers
    #print "new_mappings %s" % new_mappings
    mappings = dict(readonly_mappings.items() + existing_mappings.items() + new_mappings.items())
    mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
    homeless_shares = set()
    for share in mappings:
        if mappings[share] is None:
@@ -384,7 +399,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
        mappings, homeless_shares,
        {
            k: v
            for k, v in peers_to_shares.items()
            for k, v in list(peers_to_shares.items())
            if k not in readonly_peers
        }
    )
@@ -401,5 +416,5 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):

    return {
        k: v.pop() if v else next(peer_iter)
        for k, v in mappings.items()
        for k, v in list(mappings.items())
    }
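The `list(...)` wrappers in the hunks above are the stock fix for a Python 2 to 3 difference: `dict.items()` now returns a view, and views cannot be concatenated with `+`. A quick demonstration:

    a = {1: 'x'}
    b = {2: 'y'}
    # a.items() + b.items() works on Python 2 (lists) but raises
    # TypeError on Python 3 (views), hence the explicit list() calls.
    merged = dict(list(a.items()) + list(b.items()))
    assert merged == {1: 'x', 2: 'y'}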
@@ -16,6 +16,8 @@ if PY2:

from past.builtins import long

from past.builtins import long

from zope.interface import Interface, Attribute
from twisted.plugin import (
    IPlugin,
@@ -6,7 +6,7 @@ from allmydata.util.netstring import netstring
from allmydata.util.hashutil import backupdb_dirhash
from allmydata.util import base32
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.encodingutil import to_str
from allmydata.util.encodingutil import to_bytes
from allmydata.util.dbutil import get_db, DBError


@@ -218,7 +218,7 @@ class BackupDB_v2(object):
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return FileResult(self, to_str(filecap), should_check,
        return FileResult(self, to_bytes(filecap), should_check,
                          path, mtime, ctime, size)

    def get_or_allocate_fileid_for_cap(self, filecap):
@@ -321,7 +321,7 @@ class BackupDB_v2(object):
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return DirectoryResult(self, dirhash_s, to_str(dircap), should_check)
        return DirectoryResult(self, dirhash_s, to_bytes(dircap), should_check)

    def did_create_directory(self, dircap, dirhash):
        now = time.time()
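The recurring `to_str` to `to_bytes` rename in this and the following hunks reflects that capability strings are byte strings; on Python 3, text parsed from JSON or HTTP responses must be encoded explicitly. A hypothetical stand-in for the helper (the real implementation lives in `allmydata.util.encodingutil` and may differ):

    def to_bytes(s, encoding='utf-8'):
        # Hypothetical sketch: pass missing caps (None) and existing
        # bytes through untouched; encode text explicitly otherwise.
        if s is None or isinstance(s, bytes):
            return s
        return s.encode(encoding)

    assert to_bytes(None) is None
    assert to_bytes(u'URI:DIR2:abc') == b'URI:DIR2:abc'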
@@ -63,7 +63,7 @@ def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str
    from allmydata.util.encodingutil import quote_output, to_bytes

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
@@ -109,7 +109,7 @@ def dump_immutable_chk_share(f, out, options):
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
@@ -221,7 +221,7 @@ def dump_SDMF_share(m, length, options):
    from allmydata.mutable.common import NeedMoreDataError
    from allmydata.util import base32, hashutil
    from allmydata.uri import SSKVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str
    from allmydata.util.encodingutil import quote_output, to_bytes

    offset = m.DATA_OFFSET

@@ -269,7 +269,7 @@ def dump_SDMF_share(m, length, options):
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
@@ -307,7 +307,7 @@ def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str
    from allmydata.util.encodingutil import quote_output, to_bytes

    offset = m.DATA_OFFSET
    out = options.stdout
@@ -363,7 +363,7 @@ def dump_MDMF_share(m, length, options):
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
@@ -11,7 +11,7 @@ from allmydata.scripts.common_http import do_http, HTTPError, format_http_error
from allmydata.util import time_format
from allmydata.scripts import backupdb
from allmydata.util.encodingutil import listdir_unicode, quote_output, \
    quote_local_unicode_path, to_str, FilenameEncodingError, unicode_to_url
    quote_local_unicode_path, to_bytes, FilenameEncodingError, unicode_to_url
from allmydata.util.assertutil import precondition
from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abspath

@@ -47,7 +47,7 @@ def mkdir(contents, options):
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)

    dircap = to_str(resp.read().strip())
    dircap = to_bytes(resp.read().strip())
    return dircap

def put_child(dirurl, childname, childcap):
@@ -13,7 +13,7 @@ from allmydata import uri
from allmydata.util import fileutil
from allmydata.util.fileutil import abspath_expanduser_unicode, precondition_abspath
from allmydata.util.encodingutil import unicode_to_url, listdir_unicode, quote_output, \
    quote_local_unicode_path, to_str
    quote_local_unicode_path, to_bytes
from allmydata.util.assertutil import precondition, _assert


@@ -254,8 +254,8 @@ class TahoeDirectorySource(object):

    def init_from_parsed(self, parsed):
        nodetype, d = parsed
        self.writecap = to_str(d.get("rw_uri"))
        self.readcap = to_str(d.get("ro_uri"))
        self.writecap = to_bytes(d.get("rw_uri"))
        self.readcap = to_bytes(d.get("ro_uri"))
        self.mutable = d.get("mutable", False) # older nodes don't provide it
        self.children_d = dict( [(unicode(name),value)
                                 for (name,value)
@@ -270,13 +270,13 @@ class TahoeDirectorySource(object):
                self.progressfunc("examining %d of %d" % (i+1, len(self.children_d)))
            if data[0] == "filenode":
                mutable = data[1].get("mutable", False)
                writecap = to_str(data[1].get("rw_uri"))
                readcap = to_str(data[1].get("ro_uri"))
                writecap = to_bytes(data[1].get("rw_uri"))
                readcap = to_bytes(data[1].get("ro_uri"))
                self.children[name] = TahoeFileSource(self.nodeurl, mutable,
                                                      writecap, readcap, name)
            elif data[0] == "dirnode":
                writecap = to_str(data[1].get("rw_uri"))
                readcap = to_str(data[1].get("ro_uri"))
                writecap = to_bytes(data[1].get("rw_uri"))
                readcap = to_bytes(data[1].get("ro_uri"))
                if writecap and writecap in self.cache:
                    child = self.cache[writecap]
                elif readcap and readcap in self.cache:
@@ -324,8 +324,8 @@ class TahoeDirectoryTarget(object):

    def init_from_parsed(self, parsed):
        nodetype, d = parsed
        self.writecap = to_str(d.get("rw_uri"))
        self.readcap = to_str(d.get("ro_uri"))
        self.writecap = to_bytes(d.get("rw_uri"))
        self.readcap = to_bytes(d.get("ro_uri"))
        self.mutable = d.get("mutable", False) # older nodes don't provide it
        self.children_d = dict( [(unicode(name),value)
                                 for (name,value)
@@ -365,8 +365,8 @@ class TahoeDirectoryTarget(object):
                self.progressfunc("examining %d of %d" % (i+1, len(self.children_d)))
            if data[0] == "filenode":
                mutable = data[1].get("mutable", False)
                writecap = to_str(data[1].get("rw_uri"))
                readcap = to_str(data[1].get("ro_uri"))
                writecap = to_bytes(data[1].get("rw_uri"))
                readcap = to_bytes(data[1].get("ro_uri"))
                url = None
                if self.writecap:
                    url = self.nodeurl + "/".join(["uri",
@@ -375,8 +375,8 @@ class TahoeDirectoryTarget(object):
                self.children[name] = TahoeFileTarget(self.nodeurl, mutable,
                                                      writecap, readcap, url)
            elif data[0] == "dirnode":
                writecap = to_str(data[1].get("rw_uri"))
                readcap = to_str(data[1].get("ro_uri"))
                writecap = to_bytes(data[1].get("rw_uri"))
                readcap = to_bytes(data[1].get("ro_uri"))
                if writecap and writecap in self.cache:
                    child = self.cache[writecap]
                elif readcap and readcap in self.cache:
@@ -619,8 +619,8 @@ class Copier(object):
                                     self.progress)
            t.init_from_parsed(parsed)
        else:
            writecap = to_str(d.get("rw_uri"))
            readcap = to_str(d.get("ro_uri"))
            writecap = to_bytes(d.get("rw_uri"))
            readcap = to_bytes(d.get("ro_uri"))
            mutable = d.get("mutable", False)
            t = TahoeFileTarget(self.nodeurl, mutable,
                                writecap, readcap, url)
@@ -682,8 +682,8 @@ class Copier(object):
        else:
            if had_trailing_slash:
                raise FilenameWithTrailingSlashError(source_spec)
            writecap = to_str(d.get("rw_uri"))
            readcap = to_str(d.get("ro_uri"))
            writecap = to_bytes(d.get("rw_uri"))
            readcap = to_bytes(d.get("ro_uri"))
            mutable = d.get("mutable", False) # older nodes don't provide it
            t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
            return t
@ -5,7 +5,7 @@ import json
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                     UnknownAliasError
from allmydata.scripts.common_http import do_http, format_http_error
from allmydata.util.encodingutil import unicode_to_output, quote_output, is_printable_ascii, to_str
from allmydata.util.encodingutil import unicode_to_output, quote_output, is_printable_ascii, to_bytes

def list(options):
    nodeurl = options['node-url']

@ -94,8 +94,8 @@ def list(options):
            mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
            if not mtime:
                mtime = child[1]["metadata"].get("mtime")
            rw_uri = to_str(child[1].get("rw_uri"))
            ro_uri = to_str(child[1].get("ro_uri"))
            rw_uri = to_bytes(child[1].get("rw_uri"))
            ro_uri = to_bytes(child[1].get("ro_uri"))
            if ctime:
                # match for formatting that GNU 'ls' does
                if (now - ctime) > 6*30*24*60*60:
@ -6,7 +6,7 @@ import json
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
                                     UnknownAliasError
from allmydata.scripts.common_http import do_http, format_http_error
from allmydata.util.encodingutil import to_str
from allmydata.util.encodingutil import to_bytes

# this script is used for both 'mv' and 'ln'

@ -35,7 +35,7 @@ def mv(options, mode="move"):
        return 1
    data = resp.read()
    nodetype, attrs = json.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])
    cap = to_bytes(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
@ -1,3 +1,4 @@
from future.utils import PY3

import os.path
from allmydata.util import base32

@ -17,5 +18,12 @@ def si_a2b(ascii_storageindex):
    return base32.a2b(ascii_storageindex)

def storage_index_to_dir(storageindex):
    """Convert storage index to directory path.

    Returns native string.
    """
    sia = si_b2a(storageindex)
    if PY3:
        # On Python 3 we expect paths to be unicode.
        sia = sia.decode("ascii")
    return os.path.join(sia[:2], sia)
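To illustrate the layout `storage_index_to_dir` produces: shares are sharded under a two-character base32 prefix directory, and on Python 3 the result is decoded so callers get a native string. A small sketch (the all-zero storage index is fabricated for the example):

import os.path
from allmydata.storage.common import si_b2a, storage_index_to_dir

si = b"\x00" * 16                      # fabricated 16-byte storage index
assert si_b2a(si) == b"a" * 26         # lowercase base32 of all-zero bits
# The first two base32 characters become the shard directory:
assert storage_index_to_dir(si) == os.path.join("aa", "a" * 26)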
@ -1,6 +1,25 @@
"""
Crawl the storage server shares.

Ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2, PY3
if PY2:
    # We don't import bytes, object, dict, and list just in case they're used,
    # so as not to create brittle pickles with random magic objects.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min  # noqa: F401

import os, time, struct
import cPickle as pickle
try:
    import cPickle as pickle
except ImportError:
    import pickle
from twisted.internet import reactor
from twisted.application import service
from allmydata.storage.common import si_b2a

@ -74,6 +93,9 @@ class ShareCrawler(service.MultiService):
        self.statefile = statefile
        self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
                         for i in range(2**10)]
        if PY3:
            # On Python 3 we expect the paths to be unicode, not bytes.
            self.prefixes = [p.decode("ascii") for p in self.prefixes]
        self.prefixes.sort()
        self.timer = None
        self.bucket_cache = (None, [])

@ -353,7 +375,8 @@ class ShareCrawler(service.MultiService):
        """

        for bucket in buckets:
            if bucket <= self.state["last-complete-bucket"]:
            last_complete = self.state["last-complete-bucket"]
            if last_complete is not None and bucket <= last_complete:
                continue
            self.process_bucket(cycle, prefix, prefixdir, bucket)
            self.state["last-complete-bucket"] = bucket
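The prefix table above is worth unpacking: packing `i << 6` into a big-endian short puts the 10 significant bits at the top of 16 bits, and the first two base32 characters encode exactly those 10 bits, so the comprehension enumerates all 1024 two-character prefixes a storage index can start with. A standalone sketch of the same computation (editorial illustration):

import struct
from allmydata.util import base32

prefixes = sorted(set(base32.b2a(struct.pack(">H", i << (16 - 10)))[:2]
                      for i in range(2 ** 10)))
assert len(prefixes) == 1024           # every 2-char base32 prefix, exactly once
assert prefixes[0] == b"aa"            # bytes on Python 3, hence the .decode() above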
@ -8,7 +8,7 @@ class LeaseInfo(object):
        self.cancel_secret = cancel_secret
        self.expiration_time = expiration_time
        if nodeid is not None:
            assert isinstance(nodeid, str)
            assert isinstance(nodeid, bytes)
            assert len(nodeid) == 20
        self.nodeid = nodeid
@ -49,7 +49,7 @@ class StorageServer(service.MultiService, Referenceable):
                 expiration_cutoff_date=None,
                 expiration_sharetypes=("mutable", "immutable")):
        service.MultiService.__init__(self)
        assert isinstance(nodeid, str)
        assert isinstance(nodeid, bytes)
        assert len(nodeid) == 20
        self.my_nodeid = nodeid
        self.storedir = storedir
@ -5,7 +5,7 @@ from six.moves import cStringIO as StringIO

from allmydata import uri
from allmydata.util import base32
from allmydata.util.encodingutil import quote_output, to_str
from allmydata.util.encodingutil import quote_output, to_bytes
from allmydata.mutable.publish import MutableData
from allmydata.immutable import upload
from allmydata.scripts import debug

@ -41,7 +41,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessReallyEqual(err, "")
            self.failUnlessReallyEqual(rc, 0)
            data = json.loads(out)
            self.failUnlessReallyEqual(to_str(data["summary"]), "Healthy")
            self.failUnlessReallyEqual(to_bytes(data["summary"]), "Healthy")
            self.failUnlessReallyEqual(data["results"]["healthy"], True)
        d.addCallback(_check2)
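The pattern behind this change (and the matching ones in the `cp` tests below): `json.loads` always yields text strings, while these tests compare capabilities and summaries as bytes, so each value read out of the JSON has to be encoded explicitly. A minimal illustration with a fabricated cap:

import json
from allmydata.util.encodingutil import to_bytes

data = json.loads('{"ro_uri": "URI:CHK:fake", "summary": "Healthy"}')
assert isinstance(data["summary"], type(u""))      # text, even on Python 2
assert to_bytes(data["summary"]) == b"Healthy"     # bytes for byte-wise comparisons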
@ -8,7 +8,7 @@ from twisted.internet import defer
from allmydata.scripts import cli
from allmydata.util import fileutil
from allmydata.util.encodingutil import (quote_output, get_io_encoding,
                                         unicode_to_output, to_str)
                                         unicode_to_output, to_bytes)
from allmydata.util.assertutil import _assert
from ..no_network import GridTestMixin
from .common import CLITestMixin

@ -272,9 +272,9 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_str(data["rw_uri"])
            self.rw_uri = to_bytes(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_str(data["ro_uri"])
            self.ro_uri = to_bytes(data["ro_uri"])
        d.addCallback(_get_test_txt_uris)

        # Now make a new file to copy in place of test.txt.

@ -306,9 +306,9 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
            self.failUnlessEqual(to_bytes(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
            self.failUnlessEqual(to_bytes(data["rw_uri"]), self.rw_uri)
        d.addCallback(_check_json)

        # and, finally, doing a GET directly on one of the old uris

@ -381,7 +381,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
                self.failIf(childdata['mutable'])
                self.failUnlessIn("ro_uri", childdata)
                uri_key = "ro_uri"
            self.childuris[k] = to_str(childdata[uri_key])
            self.childuris[k] = to_bytes(childdata[uri_key])
        d.addCallback(_process_directory_json)
        # Now build a local directory to copy into place, like the following:
        # test2/

@ -410,11 +410,11 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            if "mutable" in fn:
                self.failUnless(data['mutable'])
                self.failUnlessIn("rw_uri", data)
                self.failUnlessEqual(to_str(data["rw_uri"]), self.childuris[fn])
                self.failUnlessEqual(to_bytes(data["rw_uri"]), self.childuris[fn])
            else:
                self.failIf(data['mutable'])
                self.failUnlessIn("ro_uri", data)
                self.failIfEqual(to_str(data["ro_uri"]), self.childuris[fn])
                self.failIfEqual(to_bytes(data["ro_uri"]), self.childuris[fn])

        for fn in ("mutable1", "mutable2"):
            d.addCallback(lambda ignored, fn=fn:

@ -456,7 +456,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessEqual(filetype, "filenode")
            self.failIf(data['mutable'])
            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.childuris["imm2"])
            self.failUnlessEqual(to_bytes(data["ro_uri"]), self.childuris["imm2"])
        d.addCallback(_process_imm2_json)
        return d

@ -497,7 +497,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("ro_uri", data)
            self._test_read_uri = to_str(data["ro_uri"])
            self._test_read_uri = to_bytes(data["ro_uri"])
        d.addCallback(_process_test_json)
        # Now we'll link the readonly URI into the tahoe: alias.
        d.addCallback(lambda ignored:

@ -521,7 +521,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessEqual(testtype, "filenode")
            self.failUnless(testdata['mutable'])
            self.failUnlessIn("ro_uri", testdata)
            self.failUnlessEqual(to_str(testdata["ro_uri"]), self._test_read_uri)
            self.failUnlessEqual(to_bytes(testdata["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", testdata)
        d.addCallback(_process_tahoe_json)
        # Okay, now we're going to try uploading another mutable file in

@ -589,7 +589,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
            self.failUnlessEqual(file2type, "filenode")
            self.failUnless(file2data['mutable'])
            self.failUnlessIn("ro_uri", file2data)
            self.failUnlessEqual(to_str(file2data["ro_uri"]), self._test_read_uri)
            self.failUnlessEqual(to_bytes(file2data["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", file2data)
        d.addCallback(_got_testdir_json)
        return d
@ -12,12 +12,18 @@ from __future__ import print_function
from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
from past.builtins import unicode

import os
import time
import signal

from twisted.internet import reactor
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.trial import unittest

from ..util.assertutil import precondition
from ..util.encodingutil import unicode_platform, get_filesystem_encoding


class TimezoneMixin(object):

@ -65,3 +71,67 @@ class SignalMixin(object):
        if self.sigchldHandler:
            signal.signal(signal.SIGCHLD, self.sigchldHandler)
        return super(SignalMixin, self).tearDown()


class ShouldFailMixin(object):

    def shouldFail(self, expected_failure, which, substring,
                   callable, *args, **kwargs):
        assert substring is None or isinstance(substring, (bytes, unicode))
        d = defer.maybeDeferred(callable, *args, **kwargs)
        def done(res):
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                if substring:
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                # return the Failure for further analysis, but in a form that
                # doesn't make the Deferred chain think that we failed.
                return [res]
            else:
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        d.addBoth(done)
        return d


class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
        self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


def skip_if_cannot_represent_filename(u):
    precondition(isinstance(u, unicode))

    enc = get_filesystem_encoding()
    if not unicode_platform():
        try:
            u.encode(enc)
        except UnicodeEncodeError:
            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")


class Marker(object):
    pass

class FakeCanary(object):
    """For use in storage tests.

    Can be moved back to test_storage.py once enough Python 3 porting has been
    done.
    """
    def __init__(self, ignore_disconnectors=False):
        self.ignore = ignore_disconnectors
        self.disconnectors = {}
    def notifyOnDisconnect(self, f, *args, **kwargs):
        if self.ignore:
            return
        m = Marker()
        self.disconnectors[m] = (f, args, kwargs)
        return m
    def dontNotifyOnDisconnect(self, marker):
        if self.ignore:
            return
        del self.disconnectors[marker]
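For readers unfamiliar with the `shouldFail` idiom consolidated here: it wraps a Deferred-returning call and turns "this must fail with that exception and message" into a single assertion. A hedged usage sketch (the failing function is invented for the example; `ShouldFailMixin` is the class defined just above):

from twisted.internet import defer
from twisted.trial import unittest

class ExampleTest(ShouldFailMixin, unittest.TestCase):
    def test_rejects_bad_input(self):
        def broken():
            # hypothetical call that errbacks with ValueError
            return defer.fail(ValueError("bad input"))
        # Passes only if broken() fails with a ValueError whose text
        # contains "bad input"; any other outcome fails the test.
        return self.shouldFail(ValueError, "test_rejects_bad_input",
                               "bad input", broken)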
@ -5,26 +5,20 @@ from random import randrange
from six.moves import StringIO

from twisted.internet import reactor, defer
from twisted.python import failure
from twisted.trial import unittest

from ..util.assertutil import precondition
from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
                                         get_io_encoding)
from ..scripts import runner
from .common_py3 import SignalMixin
from allmydata.util.encodingutil import get_io_encoding
from future.utils import PY2
if PY2:  # XXX this is a hack that makes some tests pass on Python3, remove
         # in the future
    from ..scripts import runner
# Imported for backwards compatibility:
from .common_py3 import (
    SignalMixin, skip_if_cannot_represent_filename, ReallyEqualMixin, ShouldFailMixin
)


def skip_if_cannot_represent_filename(u):
    precondition(isinstance(u, unicode))

    enc = get_filesystem_encoding()
    if not unicode_platform():
        try:
            u.encode(enc)
        except UnicodeEncodeError:
            raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")

def skip_if_cannot_represent_argv(u):
    precondition(isinstance(u, unicode))
    try:

@ -84,40 +78,12 @@ def flip_one_bit(s, offset=0, size=None):
    return result


class ReallyEqualMixin(object):
    def failUnlessReallyEqual(self, a, b, msg=None):
        self.assertEqual(a, b, msg)
        self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))


class StallMixin(object):
    def stall(self, res=None, delay=1):
        d = defer.Deferred()
        reactor.callLater(delay, d.callback, res)
        return d

class ShouldFailMixin(object):

    def shouldFail(self, expected_failure, which, substring,
                   callable, *args, **kwargs):
        assert substring is None or isinstance(substring, str)
        d = defer.maybeDeferred(callable, *args, **kwargs)
        def done(res):
            if isinstance(res, failure.Failure):
                res.trap(expected_failure)
                if substring:
                    self.failUnless(substring in str(res),
                                    "%s: substring '%s' not in '%s'"
                                    % (which, substring, str(res)))
                # return the Failure for further analysis, but in a form that
                # doesn't make the Deferred chain think that we failed.
                return [res]
            else:
                self.fail("%s was supposed to raise %s, not get '%s'" %
                          (which, expected_failure, res))
        d.addBoth(done)
        return d


class TestMixin(SignalMixin):
    def setUp(self):

@ -183,3 +149,11 @@ except ImportError:
        os.chmod(path, stat.S_IWRITE | stat.S_IEXEC | stat.S_IREAD)
    make_readonly = _make_readonly
    make_accessible = _make_accessible


__all__ = [
    "make_readonly", "make_accessible", "TestMixin", "ShouldFailMixin",
    "StallMixin", "skip_if_cannot_represent_argv", "run_cli", "parse_cli",
    "DevNullDictionary", "insecurerandstr", "flip_bit", "flip_one_bit",
    "SignalMixin", "skip_if_cannot_represent_filename", "ReallyEqualMixin"
]
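The effect of the backwards-compatibility re-export plus `__all__`: code written against the old location keeps importing the very same objects that now live in `common_py3`. For instance (sketch):

from allmydata.test.common_util import ReallyEqualMixin as via_old_home
from allmydata.test.common_py3 import ReallyEqualMixin as via_new_home
assert via_old_home is via_new_home   # one class, two import paths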
37
src/allmydata/test/python3_tests.py
Normal file
@ -0,0 +1,37 @@
"""
This module defines the subset of the full test suite which is expected to
pass on Python 3 in a way which makes that suite discoverable by trial.

This module has been ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

from twisted.python.reflect import (
    namedModule,
)
from twisted.trial.runner import (
    TestLoader,
)
from twisted.trial.unittest import (
    TestSuite,
)

from allmydata.util._python3 import (
    PORTED_TEST_MODULES,
)

def testSuite():
    loader = TestLoader()
    return TestSuite(list(
        loader.loadModule(namedModule(module))
        for module
        in PORTED_TEST_MODULES
    ))
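Since trial looks for a module-level `testSuite()` callable, the ported subset can be run as an ordinary test target; a sketch of what that exposes (the module count depends on `PORTED_TEST_MODULES` at any given revision):

# Equivalent to running `trial allmydata.test.python3_tests` from a built checkout:
from allmydata.test.python3_tests import testSuite

suite = testSuite()                 # one loaded module per PORTED_TEST_MODULES entry
print(suite.countTestCases())       # size of the suite expected to pass on Python 3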
@ -1,10 +1,25 @@

import json
import os.path, shutil

from bs4 import BeautifulSoup

from twisted.trial import unittest
from twisted.internet import defer

from nevow.inevow import IRequest
from zope.interface import implementer
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
from twisted.web.template import flattenString

from allmydata import check_results, uri
from allmydata import uri as tahoe_uri
from allmydata.interfaces import (
    IServer,
    ICheckResults,
    ICheckAndRepairResults,
)
from allmydata.util import base32
from allmydata.web import check_results as web_check_results
from allmydata.storage_client import StorageFarmBroker, NativeStorageServer

@ -12,18 +27,115 @@ from allmydata.storage.server import storage_index_to_dir
from allmydata.monitor import Monitor
from allmydata.test.no_network import GridTestMixin
from allmydata.immutable.upload import Data
from allmydata.test.common_web import WebRenderingMixin
from allmydata.mutable.publish import MutableData

from .common import (
    EMPTY_CLIENT_CONFIG,
)

from .web.common import (
    assert_soup_has_favicon,
    assert_soup_has_tag_with_content,
)

class FakeClient(object):
    def get_storage_broker(self):
        return self.storage_broker

class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
@implementer(IRequest)
class TestRequest(Request, object):
    """
    A minimal Request class to use in tests.

    XXX: We have to have this class because `common.get_arg()` expects
    a `nevow.inevow.IRequest`, which `twisted.web.server.Request`
    isn't. The request needs to have `args`, `fields`, `prepath`, and
    `postpath` properties so that `allmydata.web.common.get_arg()`
    won't complain.
    """
    def __init__(self, args=None, fields=None):
        super(TestRequest, self).__init__(DummyChannel())
        self.args = args or {}
        self.fields = fields or {}
        self.prepath = [b""]
        self.postpath = [b""]


@implementer(IServer)
class FakeServer(object):

    def get_name(self):
        return "fake name"

    def get_longname(self):
        return "fake longname"

    def get_nickname(self):
        return "fake nickname"


@implementer(ICheckResults)
class FakeCheckResults(object):

    def __init__(self, si=None,
                 healthy=False, recoverable=False,
                 summary="fake summary"):
        self._storage_index = si
        self._is_healthy = healthy
        self._is_recoverable = recoverable
        self._summary = summary

    def get_storage_index(self):
        return self._storage_index

    def get_storage_index_string(self):
        return base32.b2a_or_none(self._storage_index)

    def is_healthy(self):
        return self._is_healthy

    def is_recoverable(self):
        return self._is_recoverable

    def get_summary(self):
        return self._summary

    def get_corrupt_shares(self):
        # returns a list of (IServer, storage_index, sharenum)
        return [(FakeServer(), "<fake-si>", 0)]


@implementer(ICheckAndRepairResults)
class FakeCheckAndRepairResults(object):

    def __init__(self, si=None,
                 repair_attempted=False,
                 repair_success=False):
        self._storage_index = si
        self._repair_attempted = repair_attempted
        self._repair_success = repair_success

    def get_storage_index(self):
        return self._storage_index

    def get_pre_repair_results(self):
        return FakeCheckResults()

    def get_post_repair_results(self):
        return FakeCheckResults()

    def get_repair_attempted(self):
        return self._repair_attempted

    def get_repair_successful(self):
        return self._repair_success


class WebResultsRendering(unittest.TestCase):

    @staticmethod
    def remove_tags(html):
        return BeautifulSoup(html).get_text(separator=" ")

    def create_fake_client(self):
        sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG)

@ -51,34 +163,31 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
        c.storage_broker = sb
        return c

    def render_json(self, page):
        d = self.render1(page, args={"output": ["json"]})
        return d
    def render_json(self, resource):
        return resource.render(TestRequest(args={"output": ["json"]}))

    def render_element(self, element, args=None):
        d = flattenString(TestRequest(args), element)
        return unittest.TestCase().successResultOf(d)

    def test_literal(self):
        lcr = web_check_results.LiteralCheckResultsRendererElement()

        html = self.render_element(lcr)
        self.failUnlessIn("Literal files are always healthy", html)

        html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
        self.failUnlessIn("Literal files are always healthy", html)
        self.failUnlessIn('<a href="FOOURL">Return to file.</a>', html)

        c = self.create_fake_client()
        lcr = web_check_results.LiteralCheckResultsRenderer(c)

        d = self.render1(lcr)
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
        d.addCallback(_check)
        d.addCallback(lambda ignored:
                      self.render1(lcr, args={"return_to": ["FOOURL"]}))
        def _check_return_to(html):
            s = self.remove_tags(html)
            self.failUnlessIn("Literal files are always healthy", s)
            self.failUnlessIn('<a href="FOOURL">Return to file.</a>',
                              html)
        d.addCallback(_check_return_to)
        d.addCallback(lambda ignored: self.render_json(lcr))
        def _check_json(js):
            j = json.loads(js)
            self.failUnlessEqual(j["storage-index"], "")
            self.failUnlessEqual(j["results"]["healthy"], True)
        d.addCallback(_check_json)
        return d
        js = self.render_json(lcr)
        j = json.loads(js)
        self.failUnlessEqual(j["storage-index"], "")
        self.failUnlessEqual(j["results"]["healthy"], True)


    def test_check(self):
        c = self.create_fake_client()

@ -108,8 +217,8 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                                        healthy=True, recoverable=True,
                                        summary="groovy",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Healthy : groovy", s)

@ -120,14 +229,14 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
        self.failUnlessIn("Wrong Shares: 0", s)
        self.failUnlessIn("Recoverable Versions: 1", s)
        self.failUnlessIn("Unrecoverable Versions: 0", s)
        self.failUnlessIn("Good Shares (sorted in share order): Share ID Nickname Node ID shareid1 peer-0 00000000 peer-f ffffffff", s)
        self.failUnlessIn("Good Shares (sorted in share order): Share ID Nickname Node ID shareid1 peer-0 00000000 peer-f ffffffff", s)

        cr = check_results.CheckResults(u, u.get_storage_index(),
                                        healthy=False, recoverable=True,
                                        summary="ungroovy",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Healthy! : ungroovy", s)

@ -138,22 +247,23 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                                        healthy=False, recoverable=False,
                                        summary="rather dead",
                                        **data)
        w = web_check_results.CheckResultsRenderer(c, cr)
        html = self.render2(w)
        w = web_check_results.CheckResultsRendererElement(c, cr)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)
        self.failUnlessIn("Corrupt shares: Share ID Nickname Node ID sh#2 peer-0 00000000", s)
        self.failUnlessIn("Corrupt shares: Share ID Nickname Node ID sh#2 peer-0 00000000", s)

        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)
        self.failUnlessIn("File Check Results for SI=2k6avp", s) # abbreviated
        self.failUnlessIn("Not Recoverable! : rather dead", s)

        html = self.render2(w, args={"return_to": ["FOOURL"]})
        html = self.render_element(w, args={"return_to": ["FOOURL"]})
        self.failUnlessIn('<a href="FOOURL">Return to file/directory.</a>',
                          html)

        w = web_check_results.CheckResultsRenderer(c, cr)
        d = self.render_json(w)
        def _check_json(jdata):
            j = json.loads(jdata)

@ -178,15 +288,15 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                'recoverable': False,
            }
            self.failUnlessEqual(j["results"], expected)
        d.addCallback(_check_json)
        d.addCallback(lambda ignored: self.render1(w))
        _check_json(d)

        w = web_check_results.CheckResultsRendererElement(c, cr)
        d = self.render_element(w)
        def _check(html):
            s = self.remove_tags(html)
            self.failUnlessIn("File Check Results for SI=2k6avp", s)
            self.failUnlessIn("Not Recoverable! : rather dead", s)
        d.addCallback(_check)
        return d

        _check(html)

    def test_check_and_repair(self):
        c = self.create_fake_client()

@ -244,8 +354,8 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
        crr.post_repair_results = post_cr
        crr.repair_attempted = False

        w = web_check_results.CheckAndRepairResultsRenderer(c, crr)
        html = self.render2(w)
        w = web_check_results.CheckAndRepairResultsRendererElement(c, crr)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)

@ -256,7 +366,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):

        crr.repair_attempted = True
        crr.repair_successful = True
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)

@ -271,7 +381,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                                             summary="better",
                                             **data)
        crr.post_repair_results = post_cr
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)

@ -286,7 +396,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                                             summary="worse",
                                             **data)
        crr.post_repair_results = post_cr
        html = self.render2(w)
        html = self.render_element(w)
        s = self.remove_tags(html)

        self.failUnlessIn("File Check-And-Repair Results for SI=2k6avp", s)

@ -294,24 +404,218 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
        self.failUnlessIn("Repair unsuccessful", s)
        self.failUnlessIn("Post-Repair Checker Results:", s)

        d = self.render_json(w)
        def _got_json(data):
            j = json.loads(data)
            self.failUnlessEqual(j["repair-attempted"], True)
            self.failUnlessEqual(j["storage-index"],
                                 "2k6avpjga3dho3zsjo6nnkt7n4")
            self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
            self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")
        d.addCallback(_got_json)
        w = web_check_results.CheckAndRepairResultsRenderer(c, crr)
        j = json.loads(self.render_json(w))
        self.failUnlessEqual(j["repair-attempted"], True)
        self.failUnlessEqual(j["storage-index"],
                             "2k6avpjga3dho3zsjo6nnkt7n4")
        self.failUnlessEqual(j["pre-repair-results"]["summary"], "illing")
        self.failUnlessEqual(j["post-repair-results"]["summary"], "worse")

        w = web_check_results.CheckAndRepairResultsRenderer(c, None)
        j = json.loads(self.render_json(w))
        self.failUnlessEqual(j["repair-attempted"], False)
        self.failUnlessEqual(j["storage-index"], "")


    def test_deep_check_renderer(self):
        status = check_results.DeepCheckResults("fake-root-si")
        status.add_check(
            FakeCheckResults("<unhealthy/unrecoverable>", False, False),
            (u"fake", u"unhealthy", u"unrecoverable")
        )
        status.add_check(
            FakeCheckResults("<healthy/recoverable>", True, True),
            (u"fake", u"healthy", u"recoverable")
        )
        status.add_check(
            FakeCheckResults("<healthy/unrecoverable>", True, False),
            (u"fake", u"healthy", u"unrecoverable")
        )
        status.add_check(
            FakeCheckResults("<unhealthy/unrecoverable>", False, True),
            (u"fake", u"unhealthy", u"recoverable")
        )

        monitor = Monitor()
        monitor.set_status(status)

        elem = web_check_results.DeepCheckResultsRendererElement(monitor)
        doc = self.render_element(elem)
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Deep Check Results"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h1",
            "Deep-Check Results for root SI="
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Checked: 4"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unrecoverable: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares: 4"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Had Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"fake/unhealthy/recoverable: fake summary"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"fake/unhealthy/unrecoverable: fake summary"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Servers on which corrupt shares were found"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Corrupt Shares"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"All Results"
        )

    def test_deep_check_and_repair_renderer(self):
        status = check_results.DeepCheckAndRepairResults("")

        status.add_check_and_repair(
            FakeCheckAndRepairResults("attempted/success", True, True),
            (u"attempted", u"success")
        )
        status.add_check_and_repair(
            FakeCheckAndRepairResults("attempted/failure", True, False),
            (u"attempted", u"failure")
        )
        status.add_check_and_repair(
            FakeCheckAndRepairResults("unattempted/failure", False, False),
            (u"unattempted", u"failure")
        )

        monitor = Monitor()
        monitor.set_status(status)

        elem = web_check_results.DeepCheckAndRepairResultsRendererElement(monitor)
        doc = self.render_element(elem)
        soup = BeautifulSoup(doc, 'html5lib')

        assert_soup_has_favicon(self, soup)

        assert_soup_has_tag_with_content(
            self, soup, u"title",
            u"Tahoe-LAFS - Deep Check Results"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h1",
            u"Deep-Check-And-Repair Results for root SI="
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Checked: 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy (before repair): 0"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy (before repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares (before repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Repairs Attempted: 2"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Repairs Successful: 1"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            "Repairs Unsuccessful: 1"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Healthy (after repair): 0"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Objects Unhealthy (after repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"li",
            u"Corrupt Shares (after repair): 3"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Had Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Files/Directories That Still Have Problems:"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Servers on which corrupt shares were found"
        )

        assert_soup_has_tag_with_content(
            self, soup, u"h2",
            u"Remaining Corrupt Shares"
        )

        w2 = web_check_results.CheckAndRepairResultsRenderer(c, None)
        d.addCallback(lambda ignored: self.render_json(w2))
        def _got_lit_results(data):
            j = json.loads(data)
            self.failUnlessEqual(j["repair-attempted"], False)
            self.failUnlessEqual(j["storage-index"], "")
        d.addCallback(_got_lit_results)
        return d

class BalancingAct(GridTestMixin, unittest.TestCase):
    # test for #1115 regarding the 'count-good-share-hosts' metric
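The `TestRequest`/`render_element` helpers introduced above replace the old nevow-based `WebRenderingMixin` round trip with a synchronous render: `flattenString` returns a Deferred that has already fired for these in-memory elements, so `successResultOf` can unwrap it immediately. A hedged sketch of the same flow outside the test class (it relies on the `TestRequest` class and imports defined in this test module):

from twisted.trial import unittest
from twisted.web.template import flattenString
from allmydata.web import check_results as web_check_results

elem = web_check_results.LiteralCheckResultsRendererElement()
d = flattenString(TestRequest(args={"return_to": ["FOOURL"]}), elem)
html = unittest.TestCase().successResultOf(d)   # bytes of the rendered page
assert b"Literal files are always healthy" in html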
@ -1,4 +1,20 @@
"""
Tests for allmydata.storage.crawler.

Ported to Python 3.
"""

from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals

from future.utils import PY2, PY3
if PY2:
    # Don't use future bytes, since it breaks tests. No further work is
    # needed; once we're only on Python 3 we'll be deleting these future
    # imports anyway, and tests pass just fine on Python 3.
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min  # noqa: F401

import time
import os.path

@ -11,7 +27,7 @@ from allmydata.util import fileutil, hashutil, pollmixin
from allmydata.storage.server import StorageServer, si_b2a
from allmydata.storage.crawler import ShareCrawler, TimeSliceExceeded

from allmydata.test.test_storage import FakeCanary
from allmydata.test.common_py3 import FakeCanary
from allmydata.test.common_util import StallMixin

class BucketEnumeratingCrawler(ShareCrawler):

@ -22,6 +38,10 @@ class BucketEnumeratingCrawler(ShareCrawler):
        self.all_buckets = []
        self.finished_d = defer.Deferred()
    def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
        if PY3:
            # Bucket _inputs_ are bytes, and that's what we will compare this
            # to:
            storage_index_b32 = storage_index_b32.encode("ascii")
        self.all_buckets.append(storage_index_b32)
    def finished_cycle(self, cycle):
        eventually(self.finished_d.callback, None)

@ -36,6 +56,10 @@ class PacedCrawler(ShareCrawler):
        self.finished_d = defer.Deferred()
        self.yield_cb = None
    def process_bucket(self, cycle, prefix, prefixdir, storage_index_b32):
        if PY3:
            # Bucket _inputs_ are bytes, and that's what we will compare this
            # to:
            storage_index_b32 = storage_index_b32.encode("ascii")
        self.all_buckets.append(storage_index_b32)
        self.countdown -= 1
        if self.countdown == 0:

@ -92,27 +116,27 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
        return self.s.stopService()

    def si(self, i):
        return hashutil.storage_index_hash(str(i))
        return hashutil.storage_index_hash(b"%d" % (i,))
    def rs(self, i, serverid):
        return hashutil.bucket_renewal_secret_hash(str(i), serverid)
        return hashutil.bucket_renewal_secret_hash(b"%d" % (i,), serverid)
    def cs(self, i, serverid):
        return hashutil.bucket_cancel_secret_hash(str(i), serverid)
        return hashutil.bucket_cancel_secret_hash(b"%d" % (i,), serverid)

    def write(self, i, ss, serverid, tail=0):
        si = self.si(i)
        si = si[:-1] + chr(tail)
        si = si[:-1] + bytes(bytearray((tail,)))
        had,made = ss.remote_allocate_buckets(si,
                                              self.rs(i, serverid),
                                              self.cs(i, serverid),
                                              set([0]), 99, FakeCanary())
        made[0].remote_write(0, "data")
        made[0].remote_write(0, b"data")
        made[0].remote_close()
        return si_b2a(si)

    def test_immediate(self):
        self.basedir = "crawler/Basic/immediate"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -141,7 +165,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
    def test_service(self):
        self.basedir = "crawler/Basic/service"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -169,7 +193,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
    def test_paced(self):
        self.basedir = "crawler/Basic/paced"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -271,7 +295,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
    def test_paced_service(self):
        self.basedir = "crawler/Basic/paced_service"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -338,7 +362,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):

        self.basedir = "crawler/Basic/cpu_usage"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -383,7 +407,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
    def test_empty_subclass(self):
        self.basedir = "crawler/Basic/empty_subclass"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)

@ -411,7 +435,7 @@ class Basic(unittest.TestCase, StallMixin, pollmixin.PollMixin):
    def test_oneshot(self):
        self.basedir = "crawler/Basic/oneshot"
        fileutil.make_dirs(self.basedir)
        serverid = "\x00" * 20
        serverid = b"\x00" * 20
        ss = StorageServer(self.basedir, serverid)
        ss.setServiceParent(self.s)
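One detail of the `si`/`rs`/`cs` changes above deserves a note: `b"%d" % (i,)` is bytes interpolation, which works on Python 2 (where it is plain `str` formatting) and again on Python 3.5+ (PEP 461), so it is a convenient way to build the bytes inputs the hash functions now require; `bytes(bytearray((tail,)))` likewise builds a one-byte string on both versions. A standalone illustration:

# Bytes formatting and single-byte construction portable across 2.7 and 3.5+:
i, tail = 3, 7
assert b"%d" % (i,) == b"3"                    # PEP 461 bytes interpolation
assert bytes(bytearray((tail,))) == b"\x07"    # one byte, same on both Pythons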
@ -1,4 +1,14 @@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2, PY3
if PY2:
    # We don't import str because omg way too ambiguous in this context.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401

from past.builtins import unicode

lumiere_nfc = u"lumi\u00E8re"
Artonwall_nfc = u"\u00C4rtonwall.mp3"

@ -43,8 +53,10 @@ if __name__ == "__main__":
        for fname in TEST_FILENAMES:
            open(os.path.join(tmpdir, fname), 'w').close()

        # Use Unicode API under Windows or MacOS X
        if sys.platform in ('win32', 'darwin'):
        # On Python 2, listing directories returns unicode under Windows or
        # MacOS X if the input is unicode. On Python 3, it always returns
        # Unicode.
        if PY2 and sys.platform in ('win32', 'darwin'):
            dirlist = os.listdir(unicode(tmpdir))
        else:
            dirlist = os.listdir(tmpdir)

@ -59,20 +71,22 @@ if __name__ == "__main__":


import os, sys, locale
from unittest import skipIf

from twisted.trial import unittest

from twisted.python.filepath import FilePath

from allmydata.test.common_util import ReallyEqualMixin
from allmydata.test.common_py3 import (
    ReallyEqualMixin, skip_if_cannot_represent_filename,
)
from allmydata.util import encodingutil, fileutil
from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
    unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
    quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \
    get_io_encoding, get_filesystem_encoding, to_str, from_utf8_or_none, _reload, \
    to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from
from allmydata.dirnode import normalize
from .common_util import skip_if_cannot_represent_filename
    get_io_encoding, get_filesystem_encoding, to_bytes, from_utf8_or_none, _reload, \
    to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
    unicode_to_argv
from twisted.python import usage


@ -90,7 +104,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):

            mock_stdout.encoding = 'cp65001'
            _reload()
            self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
            self.assertEqual(get_io_encoding(), 'utf-8')

            mock_stdout.encoding = 'koi8-r'
            expected = sys.platform == "win32" and 'utf-8' or 'koi8-r'

@ -122,7 +136,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):

            preferredencoding = None
            _reload()
            self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
            self.assertEqual(get_io_encoding(), 'utf-8')

    def test_argv_to_unicode(self):
        encodingutil.io_encoding = 'utf-8'

@ -130,6 +144,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):
                              argv_to_unicode,
                              lumiere_nfc.encode('latin1'))

    @skipIf(PY3, "Python 2 only.")
    def test_unicode_to_output(self):
        encodingutil.io_encoding = 'koi8-r'
        self.failUnlessRaises(UnicodeEncodeError, unicode_to_output, lumiere_nfc)

@ -150,6 +165,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):
# The following tests apply only to platforms that don't store filenames as
# Unicode entities on the filesystem.
class EncodingUtilNonUnicodePlatform(unittest.TestCase):
    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
    def setUp(self):
        # Mock sys.platform because unicode_platform() uses it
        self.original_platform = sys.platform

@ -211,9 +227,10 @@ class EncodingUtil(ReallyEqualMixin):
        self.failUnlessReallyEqual(argv_to_unicode(argv), argu)

    def test_unicode_to_url(self):
        self.failUnless(unicode_to_url(lumiere_nfc), "lumi\xc3\xa8re")
        self.failUnless(unicode_to_url(lumiere_nfc), b"lumi\xc3\xa8re")

    def test_unicode_to_output(self):
    @skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
    def test_unicode_to_output_py2(self):
        if 'argv' not in dir(self):
            return

@ -224,7 +241,22 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), self.argv)

    def test_unicode_platform(self):
    @skipIf(PY2, "Python 3 only.")
    def test_unicode_to_output_py3(self):
        self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), lumiere_nfc)

    @skipIf(PY3, "Python 2 only.")
    def test_unicode_to_argv_py2(self):
        """unicode_to_argv() converts to bytes on Python 2."""
        self.assertEqual(unicode_to_argv("abc"), u"abc".encode(self.io_encoding))

    @skipIf(PY2, "Python 3 only.")
    def test_unicode_to_argv_py3(self):
        """unicode_to_argv() is a no-op on Python 3."""
        self.assertEqual(unicode_to_argv("abc"), "abc")

    @skipIf(PY3, "Python 2 only.")
    def test_unicode_platform_py2(self):
        matrix = {
            'linux2': False,
            'linux3': False,

@ -236,6 +268,11 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        self.failUnlessReallyEqual(unicode_platform(), matrix[self.platform])

    @skipIf(PY2, "Python 3 isn't Python 2.")
    def test_unicode_platform_py3(self):
        _reload()
        self.failUnlessReallyEqual(unicode_platform(), True)

    def test_listdir_unicode(self):
        if 'dirlist' not in dir(self):
            return

@ -248,7 +285,14 @@ class EncodingUtil(ReallyEqualMixin):
                                     % (self.filesystem_encoding,))

        def call_os_listdir(path):
            return self.dirlist
            if PY2:
                return self.dirlist
            else:
                # Python 3 always lists unicode filenames:
                return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
                        else d
                        for d in self.dirlist]

        self.patch(os, 'listdir', call_os_listdir)

        def call_sys_getfilesystemencoding():

@ -258,7 +302,7 @@ class EncodingUtil(ReallyEqualMixin):
        _reload()
        filenames = listdir_unicode(u'/dummy')

        self.failUnlessEqual(set([normalize(fname) for fname in filenames]),
        self.failUnlessEqual(set([encodingutil.normalize(fname) for fname in filenames]),
                             set(TEST_FILENAMES))


@ -278,12 +322,16 @@ class StdlibUnicode(unittest.TestCase):
        fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
        open(fn, 'wb').close()
        self.failUnless(os.path.exists(fn))
        self.failUnless(os.path.exists(os.path.join(os.getcwdu(), fn)))
        if PY2:
            getcwdu = os.getcwdu
        else:
            getcwdu = os.getcwd
        self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
        filenames = listdir_unicode(lumiere_nfc)

        # We only require that the listing includes a filename that is canonically equivalent
        # to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
        self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))
        self.failUnlessIn(lumiere_nfc + u".txt", set([encodingutil.normalize(fname) for fname in filenames]))

        expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
        self.failIfIn(u"~", expanded)

@ -307,6 +355,8 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
        _reload()

    def _check(self, inp, out, enc, optional_quotes, quote_newlines):
        if PY3 and isinstance(out, bytes):
            out = out.decode(enc or encodingutil.io_encoding)
        out2 = out
        if optional_quotes:
            out2 = out2[1:-1]

@ -314,59 +364,73 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
        self.failUnlessReallyEqual(quote_output(inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        if out[0:2] == 'b"':
            pass
        elif isinstance(inp, str):
            self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        elif isinstance(inp, bytes):
            try:
                unicode_inp = inp.decode("utf-8")
            except UnicodeDecodeError:
                # Some things decode on Python 2, but not Python 3...
                return
            self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
        else:
            self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
            try:
                bytes_inp = inp.encode('utf-8')
            except UnicodeEncodeError:
                # Some things encode on Python 2, but not Python 3, e.g.
                # surrogates like u"\uDC00\uD800"...
                return
            self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quote_newlines=quote_newlines), out)
            self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)

    def _test_quote_output_all(self, enc):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
            if PY3:
                # Result is always Unicode on Python 3
                out = out.decode("ascii")
            self._check(inp, out, enc, optional_quotes, quote_newlines)

        # optional single quotes
        check("foo", "'foo'", True)
        check("\\", "'\\'", True)
        check("$\"`", "'$\"`'", True)
        check("\n", "'\n'", True, quote_newlines=False)
        check(b"foo", b"'foo'", True)
        check(b"\\", b"'\\'", True)
        check(b"$\"`", b"'$\"`'", True)
        check(b"\n", b"'\n'", True, quote_newlines=False)

        # mandatory single quotes
        check("\"", "'\"'")
        check(b"\"", b"'\"'")

        # double quotes
        check("'", "\"'\"")
        check("\n", "\"\\x0a\"", quote_newlines=True)
        check("\x00", "\"\\x00\"")
        check(b"'", b"\"'\"")
        check(b"\n", b"\"\\x0a\"", quote_newlines=True)
        check(b"\x00", b"\"\\x00\"")

        # invalid Unicode and astral planes
        check(u"\uFDD0\uFDEF", "\"\\ufdd0\\ufdef\"")
        check(u"\uDC00\uD800", "\"\\udc00\\ud800\"")
        check(u"\uDC00\uD800\uDC00", "\"\\udc00\\U00010000\"")
        check(u"\uD800\uDC00", "\"\\U00010000\"")
        check(u"\uD800\uDC01", "\"\\U00010001\"")
        check(u"\uD801\uDC00", "\"\\U00010400\"")
        check(u"\uDBFF\uDFFF", "\"\\U0010ffff\"")
        check(u"'\uDBFF\uDFFF", "\"'\\U0010ffff\"")
        check(u"\"\uDBFF\uDFFF", "\"\\\"\\U0010ffff\"")
        check(u"\uFDD0\uFDEF", b"\"\\ufdd0\\ufdef\"")
        check(u"\uDC00\uD800", b"\"\\udc00\\ud800\"")
        check(u"\uDC00\uD800\uDC00", b"\"\\udc00\\U00010000\"")
        check(u"\uD800\uDC00", b"\"\\U00010000\"")
        check(u"\uD800\uDC01", b"\"\\U00010001\"")
        check(u"\uD801\uDC00", b"\"\\U00010400\"")
        check(u"\uDBFF\uDFFF", b"\"\\U0010ffff\"")
        check(u"'\uDBFF\uDFFF", b"\"'\\U0010ffff\"")
        check(u"\"\uDBFF\uDFFF", b"\"\\\"\\U0010ffff\"")

        # invalid UTF-8
        check("\xFF", "b\"\\xff\"")
        check("\x00\"$\\`\x80\xFF", "b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")
        check(b"\xFF", b"b\"\\xff\"")
        check(b"\x00\"$\\`\x80\xFF", b"b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")

    def test_quote_output_ascii(self, enc='ascii'):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
            self._check(inp, out, enc, optional_quotes, quote_newlines)

        self._test_quote_output_all(enc)
        check(u"\u00D7", "\"\\xd7\"")
        check(u"'\u00D7", "\"'\\xd7\"")
        check(u"\"\u00D7", "\"\\\"\\xd7\"")
        check(u"\u2621", "\"\\u2621\"")
        check(u"'\u2621", "\"'\\u2621\"")
        check(u"\"\u2621", "\"\\\"\\u2621\"")
        check(u"\n", "'\n'", True, quote_newlines=False)
        check(u"\n", "\"\\x0a\"", quote_newlines=True)
        check(u"\u00D7", b"\"\\xd7\"")
        check(u"'\u00D7", b"\"'\\xd7\"")
        check(u"\"\u00D7", b"\"\\\"\\xd7\"")
        check(u"\u2621", b"\"\\u2621\"")
        check(u"'\u2621", b"\"'\\u2621\"")
        check(u"\"\u2621", b"\"\\\"\\u2621\"")
        check(u"\n", b"'\n'", True, quote_newlines=False)
        check(u"\n", b"\"\\x0a\"", quote_newlines=True)

    def test_quote_output_latin1(self, enc='latin1'):
        def check(inp, out, optional_quotes=False, quote_newlines=None):

@ -385,7 +449,10 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):

    def test_quote_output_utf8(self, enc='utf-8'):
        def check(inp, out, optional_quotes=False, quote_newlines=None):
            self._check(inp, out.encode('utf-8'), enc, optional_quotes, quote_newlines)
            if PY2:
                # On Python 3 output is always Unicode:
                out = out.encode('utf-8')
            self._check(inp, out, enc, optional_quotes, quote_newlines)

        self._test_quote_output_all(enc)
        check(u"\u2621", u"'\u2621'", True)

@ -410,44 +477,51 @@ def win32_other(win32, other):
    return win32 if sys.platform == "win32" else other

class QuotePaths(ReallyEqualMixin, unittest.TestCase):
    def test_quote_path(self):
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), "'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), "'foo/bar'")
        self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=False), "foo/bar")
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), '"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), '"foo/\\x0abar"')
        self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), '"foo/\\x0abar"')

        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
                                   win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=True),
                                   win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=False),
                                   win32_other("C:\\foo", "\\\\?\\C:\\foo"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
                                   win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=True),
                                   win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
        self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=False),
                                   win32_other("\\\\foo\\bar", "\\\\?\\UNC\\foo\\bar"))
    def assertPathsEqual(self, actual, expected):
        if PY3:
            # On Python 3, results should be unicode:
            expected = expected.decode("ascii")
        self.failUnlessReallyEqual(actual, expected)

    def test_quote_path(self):
        self.assertPathsEqual(quote_path([u'foo', u'bar']), b"'foo/bar'")
        self.assertPathsEqual(quote_path([u'foo', u'bar'], quotemarks=True), b"'foo/bar'")
        self.assertPathsEqual(quote_path([u'foo', u'bar'], quotemarks=False), b"foo/bar")
        self.assertPathsEqual(quote_path([u'foo', u'\nbar']), b'"foo/\\x0abar"')
        self.assertPathsEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), b'"foo/\\x0abar"')
        self.assertPathsEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), b'"foo/\\x0abar"')

        self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
                              win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
        self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=True),
                              win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
        self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=False),
                              win32_other(b"C:\\foo", b"\\\\?\\C:\\foo"))
        self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
                              win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
        self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=True),
                              win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
|
||||
self.assertPathsEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=False),
|
||||
win32_other(b"\\\\foo\\bar", b"\\\\?\\UNC\\foo\\bar"))

def test_quote_filepath(self):
foo_bar_fp = FilePath(win32_other(u'C:\\foo\\bar', u'/foo/bar'))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp),
win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=True),
win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=False),
win32_other("C:\\foo\\bar", "/foo/bar"))
self.assertPathsEqual(quote_filepath(foo_bar_fp),
win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
self.assertPathsEqual(quote_filepath(foo_bar_fp, quotemarks=True),
win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
self.assertPathsEqual(quote_filepath(foo_bar_fp, quotemarks=False),
win32_other(b"C:\\foo\\bar", b"/foo/bar"))

if sys.platform == "win32":
foo_longfp = FilePath(u'\\\\?\\C:\\foo')
self.failUnlessReallyEqual(quote_filepath(foo_longfp),
"'C:\\foo'")
self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=True),
"'C:\\foo'")
self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=False),
"C:\\foo")
self.assertPathsEqual(quote_filepath(foo_longfp),
b"'C:\\foo'")
self.assertPathsEqual(quote_filepath(foo_longfp, quotemarks=True),
b"'C:\\foo'")
self.assertPathsEqual(quote_filepath(foo_longfp, quotemarks=False),
b"C:\\foo")


class FilePaths(ReallyEqualMixin, unittest.TestCase):
@ -501,23 +575,23 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase):

class UbuntuKarmicUTF8(EncodingUtil, unittest.TestCase):
uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
argv = 'lumi\xc3\xa8re'
argv = b'lumi\xc3\xa8re'
platform = 'linux2'
filesystem_encoding = 'UTF-8'
io_encoding = 'UTF-8'
dirlist = ['test_file', '\xc3\x84rtonwall.mp3', 'Blah blah.txt']
dirlist = [b'test_file', b'\xc3\x84rtonwall.mp3', b'Blah blah.txt']

class UbuntuKarmicLatin1(EncodingUtil, unittest.TestCase):
uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
argv = 'lumi\xe8re'
argv = b'lumi\xe8re'
platform = 'linux2'
filesystem_encoding = 'ISO-8859-1'
io_encoding = 'ISO-8859-1'
dirlist = ['test_file', 'Blah blah.txt', '\xc4rtonwall.mp3']
dirlist = [b'test_file', b'Blah blah.txt', b'\xc4rtonwall.mp3']

class Windows(EncodingUtil, unittest.TestCase):
uname = 'Windows XP 5.1.2600 x86 x86 Family 15 Model 75 Stepping 2, AuthenticAMD'
argv = 'lumi\xc3\xa8re'
argv = b'lumi\xc3\xa8re'
platform = 'win32'
filesystem_encoding = 'mbcs'
io_encoding = 'utf-8'
@ -525,7 +599,7 @@ class Windows(EncodingUtil, unittest.TestCase):

class MacOSXLeopard(EncodingUtil, unittest.TestCase):
uname = 'Darwin g5.local 9.8.0 Darwin Kernel Version 9.8.0: Wed Jul 15 16:57:01 PDT 2009; root:xnu-1228.15.4~1/RELEASE_PPC Power Macintosh powerpc'
output = 'lumi\xc3\xa8re'
output = b'lumi\xc3\xa8re'
platform = 'darwin'
filesystem_encoding = 'utf-8'
io_encoding = 'UTF-8'
@ -547,15 +621,15 @@ class OpenBSD(EncodingUtil, unittest.TestCase):


class TestToFromStr(ReallyEqualMixin, unittest.TestCase):
def test_to_str(self):
self.failUnlessReallyEqual(to_str("foo"), "foo")
self.failUnlessReallyEqual(to_str("lumi\xc3\xa8re"), "lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str("\xFF"), "\xFF") # passes through invalid UTF-8 -- is this what we want?
self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), "lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str(None), None)
def test_to_bytes(self):
self.failUnlessReallyEqual(to_bytes(b"foo"), b"foo")
self.failUnlessReallyEqual(to_bytes(b"lumi\xc3\xa8re"), b"lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_bytes(b"\xFF"), b"\xFF") # passes through invalid UTF-8 -- is this what we want?
self.failUnlessReallyEqual(to_bytes(u"lumi\u00E8re"), b"lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_bytes(None), None)

def test_from_utf8_or_none(self):
self.failUnlessRaises(AssertionError, from_utf8_or_none, u"foo")
self.failUnlessReallyEqual(from_utf8_or_none("lumi\xc3\xa8re"), u"lumi\u00E8re")
self.failUnlessReallyEqual(from_utf8_or_none(b"lumi\xc3\xa8re"), u"lumi\u00E8re")
self.failUnlessReallyEqual(from_utf8_or_none(None), None)
self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, "\xFF")
self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, b"\xFF")
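A behavioural sketch of to_bytes as these assertions describe it (hypothetical code, not the real encodingutil implementation): bytes pass through untouched, even invalid UTF-8, None passes through, and text is encoded as UTF-8.

def to_bytes_sketch(s):
    # None and bytes (including invalid UTF-8) pass through unchanged.
    if s is None or isinstance(s, bytes):
        return s
    # Text is encoded to UTF-8.
    return s.encode("utf-8")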

@ -1,14 +1,35 @@
# -*- coding: utf-8 -*-
"""
Tests for allmydata.immutable.happiness_upload and
allmydata.util.happinessutil.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
# We omit dict, just in case newdict breaks things.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401

from twisted.trial import unittest
from hypothesis import given
from hypothesis.strategies import text, sets

from allmydata.immutable import happiness_upload
from allmydata.util.happinessutil import servers_of_happiness, \
shares_by_server, merge_servers
from allmydata.test.common_py3 import ShouldFailMixin


class HappinessUtils(unittest.TestCase):
class HappinessUploadUtils(unittest.TestCase):
"""
test-cases for utility functions augmenting_path_for and residual_network
test-cases for happiness_upload utility functions augmenting_path_for and
residual_network.
"""

def test_residual_0(self):
@ -269,3 +290,192 @@ class PlacementTests(unittest.TestCase):
# peers; if we have fewer shares than peers happiness is capped at
# # of peers.
assert happiness == min(len(peers), len(shares))


class FakeServerTracker(object):
def __init__(self, serverid, buckets):
self._serverid = serverid
self.buckets = buckets
def get_serverid(self):
return self._serverid


class HappinessUtilTests(unittest.TestCase, ShouldFailMixin):
"""Tests for happinessutil.py."""

def test_merge_servers(self):
# merge_servers merges a list of upload_servers and a dict of
# shareid -> serverid mappings.
shares = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4", "server5"]),
5 : set(["server1", "server2"]),
}
# if not provided with an upload_servers argument, it should just
# return the first argument unchanged.
self.failUnlessEqual(shares, merge_servers(shares, set([])))
trackers = []
for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
t = FakeServerTracker(server, [i])
trackers.append(t)
expected = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4", "server5"]),
5 : set(["server1", "server2", "server5"]),
6 : set(["server6"]),
7 : set(["server7"]),
8 : set(["server8"]),
}
self.failUnlessEqual(expected, merge_servers(shares, set(trackers)))
shares2 = {}
expected = {
5 : set(["server5"]),
6 : set(["server6"]),
7 : set(["server7"]),
8 : set(["server8"]),
}
self.failUnlessEqual(expected, merge_servers(shares2, set(trackers)))
shares3 = {}
trackers = []
expected = {}
for (i, server) in [(i, "server%d" % i) for i in range(10)]:
shares3[i] = set([server])
t = FakeServerTracker(server, [i])
trackers.append(t)
expected[i] = set([server])
self.failUnlessEqual(expected, merge_servers(shares3, set(trackers)))
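A minimal sketch of the merging behaviour this test pins down, assuming only what the assertions show (a shnum -> set(serverid) dict plus trackers exposing .buckets and get_serverid()); the real happinessutil.merge_servers may differ in detail:

def merge_servers_sketch(servermap, upload_trackers=None):
    # Copy so the caller's mapping is not mutated.
    merged = dict((shnum, set(serverids)) for (shnum, serverids) in servermap.items())
    for tracker in (upload_trackers or set()):
        for shnum in tracker.buckets:
            merged.setdefault(shnum, set()).add(tracker.get_serverid())
    return merged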


def test_servers_of_happiness_utility_function(self):
# These tests are concerned with the servers_of_happiness()
# utility function, and its underlying matching algorithm. Other
# aspects of the servers_of_happiness behavior are tested
# elsewhere. These tests exist to ensure that
# servers_of_happiness doesn't under or overcount the happiness
# value for given inputs.

# servers_of_happiness expects a dict of
# shnum => set(serverids) as a preexisting shares argument.
test1 = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4"])
}
happy = servers_of_happiness(test1)
self.failUnlessEqual(4, happy)
test1[4] = set(["server1"])
# We've added a duplicate server, so now servers_of_happiness
# should be 3 instead of 4.
happy = servers_of_happiness(test1)
self.failUnlessEqual(3, happy)
# The second argument of merge_servers should be a set of objects with
# serverid and buckets as attributes. In actual use, these will be
# ServerTracker instances, but for testing it is fine to make a
# FakeServerTracker whose job is to hold those instance variables to
# test that part.
trackers = []
for (i, server) in [(i, "server%d" % i) for i in range(5, 9)]:
t = FakeServerTracker(server, [i])
trackers.append(t)
# Recall that test1 is a server layout with servers_of_happiness
# = 3. Since there isn't any overlap between the shnum ->
# set([serverid]) correspondences in test1 and those in trackers,
# the result here should be 7.
test2 = merge_servers(test1, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(7, happy)
# Now add an overlapping server to trackers. This is redundant,
# so it should not cause the previously reported happiness value
# to change.
t = FakeServerTracker("server1", [1])
trackers.append(t)
test2 = merge_servers(test1, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(7, happy)
test = {}
happy = servers_of_happiness(test)
self.failUnlessEqual(0, happy)
# Test a more substantial overlap between the trackers and the
# existing assignments.
test = {
1 : set(['server1']),
2 : set(['server2']),
3 : set(['server3']),
4 : set(['server4']),
}
trackers = []
t = FakeServerTracker('server5', [4])
trackers.append(t)
t = FakeServerTracker('server6', [3, 5])
trackers.append(t)
# The value returned by servers_of_happiness is the size
# of a maximum matching in the bipartite graph that
# servers_of_happiness() makes between serverids and share
# numbers. It should find something like this:
# (server 1, share 1)
# (server 2, share 2)
# (server 3, share 3)
# (server 5, share 4)
# (server 6, share 5)
#
# and, since there are 5 edges in this matching, it should
# return 5.
test2 = merge_servers(test, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(5, happy)
# Zooko's first puzzle:
# (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:156)
#
# server 1: shares 0, 1
# server 2: shares 1, 2
# server 3: share 2
#
# This should yield happiness of 3.
test = {
0 : set(['server1']),
1 : set(['server1', 'server2']),
2 : set(['server2', 'server3']),
}
self.failUnlessEqual(3, servers_of_happiness(test))
# Zooko's second puzzle:
# (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:158)
#
# server 1: shares 0, 1
# server 2: share 1
#
# This should yield happiness of 2.
test = {
0 : set(['server1']),
1 : set(['server1', 'server2']),
}
self.failUnlessEqual(2, servers_of_happiness(test))
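Since the comments above describe happiness as the size of a maximum matching in a share/server bipartite graph, here is a hedged illustration using the textbook augmenting-path algorithm; the names are invented for the example and this is not the code under test:

def happiness_sketch(sharemap):
    # sharemap: shnum -> set(serverid). match maps a serverid to the
    # shnum currently assigned to it.
    match = {}

    def try_assign(shnum, seen):
        for server in sharemap[shnum]:
            if server in seen:
                continue
            seen.add(server)
            # Take a free server, or evict the current occupant if it
            # can be re-seated along an augmenting path.
            if server not in match or try_assign(match[server], seen):
                match[server] = shnum
                return True
        return False

    return sum(1 for shnum in sharemap if try_assign(shnum, set()))

# Zooko's first puzzle from above evaluates to 3, as the test requires.
assert happiness_sketch({0: {'s1'}, 1: {'s1', 's2'}, 2: {'s2', 's3'}}) == 3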


def test_shares_by_server(self):
test = dict([(i, set(["server%d" % i])) for i in range(1, 5)])
sbs = shares_by_server(test)
self.failUnlessEqual(set([1]), sbs["server1"])
self.failUnlessEqual(set([2]), sbs["server2"])
self.failUnlessEqual(set([3]), sbs["server3"])
self.failUnlessEqual(set([4]), sbs["server4"])
test1 = {
1 : set(["server1"]),
2 : set(["server1"]),
3 : set(["server1"]),
4 : set(["server2"]),
5 : set(["server2"])
}
sbs = shares_by_server(test1)
self.failUnlessEqual(set([1, 2, 3]), sbs["server1"])
self.failUnlessEqual(set([4, 5]), sbs["server2"])
# This should fail unless the serverid part of the mapping is a set
test2 = {1: "server1"}
self.shouldFail(AssertionError,
"test_shares_by_server",
"",
shares_by_server, test2)
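The inversion being tested can be sketched in a few lines (hypothetical helper inferred from the assertions; the assert mirrors the AssertionError the shouldFail case expects):

def shares_by_server_sketch(servermap):
    # Invert shnum -> set(serverid) into serverid -> set(shnum).
    inverted = {}
    for shnum, serverids in servermap.items():
        assert isinstance(serverids, set)
        for serverid in serverids:
            inverted.setdefault(serverid, set()).add(shnum)
    return inverted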

src/allmydata/test/test_storage_web.py: new file, 1309 lines; diff suppressed because it is too large.
@ -15,17 +15,15 @@ from allmydata.util import log, base32
from allmydata.util.assertutil import precondition
from allmydata.util.deferredutil import DeferredListShouldSucceed
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_util import ShouldFailMixin
from allmydata.util.happinessutil import servers_of_happiness, \
shares_by_server, merge_servers
from allmydata.test.common_py3 import ShouldFailMixin
from allmydata.storage_client import StorageFarmBroker
from allmydata.storage.server import storage_index_to_dir
from allmydata.client import _Client

from .common import (
EMPTY_CLIENT_CONFIG,
)


MiB = 1024*1024

def extract_uri(results):
@ -864,12 +862,6 @@ def is_happy_enough(servertoshnums, h, k):
return False
return True

class FakeServerTracker(object):
def __init__(self, serverid, buckets):
self._serverid = serverid
self.buckets = buckets
def get_serverid(self):
return self._serverid

class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
ShouldFailMixin):
@ -1499,185 +1491,6 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self._do_upload_with_broken_servers, 2))
return d


def test_merge_servers(self):
# merge_servers merges a list of upload_servers and a dict of
# shareid -> serverid mappings.
shares = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4", "server5"]),
5 : set(["server1", "server2"]),
}
# if not provided with an upload_servers argument, it should just
# return the first argument unchanged.
self.failUnlessEqual(shares, merge_servers(shares, set([])))
trackers = []
for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
t = FakeServerTracker(server, [i])
trackers.append(t)
expected = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4", "server5"]),
5 : set(["server1", "server2", "server5"]),
6 : set(["server6"]),
7 : set(["server7"]),
8 : set(["server8"]),
}
self.failUnlessEqual(expected, merge_servers(shares, set(trackers)))
shares2 = {}
expected = {
5 : set(["server5"]),
6 : set(["server6"]),
7 : set(["server7"]),
8 : set(["server8"]),
}
self.failUnlessEqual(expected, merge_servers(shares2, set(trackers)))
shares3 = {}
trackers = []
expected = {}
for (i, server) in [(i, "server%d" % i) for i in xrange(10)]:
shares3[i] = set([server])
t = FakeServerTracker(server, [i])
trackers.append(t)
expected[i] = set([server])
self.failUnlessEqual(expected, merge_servers(shares3, set(trackers)))


def test_servers_of_happiness_utility_function(self):
# These tests are concerned with the servers_of_happiness()
# utility function, and its underlying matching algorithm. Other
# aspects of the servers_of_happiness behavior are tested
# elsewhere. These tests exist to ensure that
# servers_of_happiness doesn't under or overcount the happiness
# value for given inputs.

# servers_of_happiness expects a dict of
# shnum => set(serverids) as a preexisting shares argument.
test1 = {
1 : set(["server1"]),
2 : set(["server2"]),
3 : set(["server3"]),
4 : set(["server4"])
}
happy = servers_of_happiness(test1)
self.failUnlessEqual(4, happy)
test1[4] = set(["server1"])
# We've added a duplicate server, so now servers_of_happiness
# should be 3 instead of 4.
happy = servers_of_happiness(test1)
self.failUnlessEqual(3, happy)
# The second argument of merge_servers should be a set of objects with
# serverid and buckets as attributes. In actual use, these will be
# ServerTracker instances, but for testing it is fine to make a
# FakeServerTracker whose job is to hold those instance variables to
# test that part.
trackers = []
for (i, server) in [(i, "server%d" % i) for i in xrange(5, 9)]:
t = FakeServerTracker(server, [i])
trackers.append(t)
# Recall that test1 is a server layout with servers_of_happiness
# = 3. Since there isn't any overlap between the shnum ->
# set([serverid]) correspondences in test1 and those in trackers,
# the result here should be 7.
test2 = merge_servers(test1, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(7, happy)
# Now add an overlapping server to trackers. This is redundant,
# so it should not cause the previously reported happiness value
# to change.
t = FakeServerTracker("server1", [1])
trackers.append(t)
test2 = merge_servers(test1, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(7, happy)
test = {}
happy = servers_of_happiness(test)
self.failUnlessEqual(0, happy)
# Test a more substantial overlap between the trackers and the
# existing assignments.
test = {
1 : set(['server1']),
2 : set(['server2']),
3 : set(['server3']),
4 : set(['server4']),
}
trackers = []
t = FakeServerTracker('server5', [4])
trackers.append(t)
t = FakeServerTracker('server6', [3, 5])
trackers.append(t)
# The value returned by servers_of_happiness is the size
# of a maximum matching in the bipartite graph that
# servers_of_happiness() makes between serverids and share
# numbers. It should find something like this:
# (server 1, share 1)
# (server 2, share 2)
# (server 3, share 3)
# (server 5, share 4)
# (server 6, share 5)
#
# and, since there are 5 edges in this matching, it should
# return 5.
test2 = merge_servers(test, set(trackers))
happy = servers_of_happiness(test2)
self.failUnlessEqual(5, happy)
# Zooko's first puzzle:
# (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:156)
#
# server 1: shares 0, 1
# server 2: shares 1, 2
# server 3: share 2
#
# This should yield happiness of 3.
test = {
0 : set(['server1']),
1 : set(['server1', 'server2']),
2 : set(['server2', 'server3']),
}
self.failUnlessEqual(3, servers_of_happiness(test))
# Zooko's second puzzle:
# (from http://allmydata.org/trac/tahoe-lafs/ticket/778#comment:158)
#
# server 1: shares 0, 1
# server 2: share 1
#
# This should yield happiness of 2.
test = {
0 : set(['server1']),
1 : set(['server1', 'server2']),
}
self.failUnlessEqual(2, servers_of_happiness(test))


def test_shares_by_server(self):
test = dict([(i, set(["server%d" % i])) for i in xrange(1, 5)])
sbs = shares_by_server(test)
self.failUnlessEqual(set([1]), sbs["server1"])
self.failUnlessEqual(set([2]), sbs["server2"])
self.failUnlessEqual(set([3]), sbs["server3"])
self.failUnlessEqual(set([4]), sbs["server4"])
test1 = {
1 : set(["server1"]),
2 : set(["server1"]),
3 : set(["server1"]),
4 : set(["server2"]),
5 : set(["server2"])
}
sbs = shares_by_server(test1)
self.failUnlessEqual(set([1, 2, 3]), sbs["server1"])
self.failUnlessEqual(set([4, 5]), sbs["server2"])
# This should fail unless the serverid part of the mapping is a set
test2 = {1: "server1"}
self.shouldFail(AssertionError,
"test_shares_by_server",
"",
shares_by_server, test2)


def test_existing_share_detection(self):
self.basedir = self.mktemp()
d = self._setup_and_upload()

@ -1,3 +1,17 @@
"""
Tests for allmydata.uri.

Ported to Python 3.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min  # noqa: F401

import os
from twisted.trial import unittest
@ -40,24 +54,24 @@ class Literal(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.get_verify_cap(), None)

def test_empty(self):
data = "" # This data is some *very* small data!
data = b"" # This data is some *very* small data!
return self._help_test(data)

def test_pack(self):
data = "This is some small data"
data = b"This is some small data"
return self._help_test(data)

def test_nonascii(self):
data = "This contains \x00 and URI:LIT: and \n, oh my."
data = b"This contains \x00 and URI:LIT: and \n, oh my."
return self._help_test(data)

class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
def test_compare(self):
lit1 = uri.LiteralFileURI("some data")
fileURI = 'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834'
lit1 = uri.LiteralFileURI(b"some data")
fileURI = b'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834'
chk1 = uri.CHKFileURI.init_from_string(fileURI)
chk2 = uri.CHKFileURI.init_from_string(fileURI)
unk = uri.UnknownURI("lafs://from_the_future")
unk = uri.UnknownURI(b"lafs://from_the_future")
self.failIfEqual(lit1, chk1)
self.failUnlessReallyEqual(chk1, chk2)
self.failIfEqual(chk1, "not actually a URI")
@ -66,21 +80,24 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(len(s), 3) # since chk1==chk2

def test_is_uri(self):
lit1 = uri.LiteralFileURI("some data").to_string()
lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_uri(lit1))
self.failIf(uri.is_uri(None))

def test_is_literal_file_uri(self):
lit1 = uri.LiteralFileURI("some data").to_string()
lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_literal_file_uri(lit1))
self.failIf(uri.is_literal_file_uri(None))
self.failIf(uri.is_literal_file_uri("foo"))
self.failIf(uri.is_literal_file_uri("ro.foo"))
self.failIf(uri.is_literal_file_uri("URI:LITfoo"))
self.failIf(uri.is_literal_file_uri(b"URI:LITfoo"))
self.failUnless(uri.is_literal_file_uri("ro.URI:LIT:foo"))
self.failUnless(uri.is_literal_file_uri("imm.URI:LIT:foo"))

def test_has_uri_prefix(self):
self.failUnless(uri.has_uri_prefix(b"URI:foo"))
self.failUnless(uri.has_uri_prefix(b"ro.URI:foo"))
self.failUnless(uri.has_uri_prefix(b"imm.URI:foo"))
self.failUnless(uri.has_uri_prefix("URI:foo"))
self.failUnless(uri.has_uri_prefix("ro.URI:foo"))
self.failUnless(uri.has_uri_prefix("imm.URI:foo"))
@ -89,9 +106,9 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):

class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key)
uri_extension_hash = hashutil.uri_extension_hash("stuff")
uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25
total_shares = 100
size = 1234
@ -138,26 +155,26 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())

v = u.get_verify_cap()
self.failUnless(isinstance(v.to_string(), str))
self.failUnless(isinstance(v.to_string(), bytes))
self.failUnless(v.is_readonly())
self.failIf(v.is_mutable())

v2 = uri.from_string(v.to_string())
self.failUnlessReallyEqual(v, v2)

v3 = uri.CHKFileVerifierURI(storage_index="\x00"*16,
uri_extension_hash="\x00"*32,
v3 = uri.CHKFileVerifierURI(storage_index=b"\x00"*16,
uri_extension_hash=b"\x00"*32,
needed_shares=3,
total_shares=10,
size=1234)
self.failUnless(isinstance(v3.to_string(), str))
self.failUnless(isinstance(v3.to_string(), bytes))
self.failUnless(v3.is_readonly())
self.failIf(v3.is_mutable())

def test_pack_badly(self):
key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key)
uri_extension_hash = hashutil.uri_extension_hash("stuff")
uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25
total_shares = 100
size = 1234
@ -186,35 +203,37 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):

class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
data = {"stuff": "value",
"size": 12,
"needed_shares": 3,
"big_hash": hashutil.tagged_hash("foo", "bar"),
data = {b"stuff": b"value",
b"size": 12,
b"needed_shares": 3,
b"big_hash": hashutil.tagged_hash(b"foo", b"bar"),
}
ext = uri.pack_extension(data)
d = uri.unpack_extension(ext)
self.failUnlessReallyEqual(d["stuff"], "value")
self.failUnlessReallyEqual(d["size"], 12)
self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash("foo", "bar"))
self.failUnlessReallyEqual(d[b"stuff"], b"value")
self.failUnlessReallyEqual(d[b"size"], 12)
self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))

readable = uri.unpack_extension_readable(ext)
self.failUnlessReallyEqual(readable["needed_shares"], 3)
self.failUnlessReallyEqual(readable["stuff"], "value")
self.failUnlessReallyEqual(readable["size"], 12)
self.failUnlessReallyEqual(readable["big_hash"],
base32.b2a(hashutil.tagged_hash("foo", "bar")))
self.failUnlessReallyEqual(readable["UEB_hash"],
self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
self.failUnlessReallyEqual(readable[b"stuff"], b"value")
self.failUnlessReallyEqual(readable[b"size"], 12)
self.failUnlessReallyEqual(readable[b"big_hash"],
base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
self.failUnlessReallyEqual(readable[b"UEB_hash"],
base32.b2a(hashutil.uri_extension_hash(ext)))
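A round-trip usage sketch of the API this test exercises, restricted to calls that appear above (the exact wire format of the packed extension is not assumed):

from allmydata import uri

ext = uri.pack_extension({b"size": 12, b"stuff": b"value"})
unpacked = uri.unpack_extension(ext)
assert unpacked[b"size"] == 12 and unpacked[b"stuff"] == b"value"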

class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
def test_from_future(self):
# any URI type that we don't recognize should be treated as unknown
future_uri = "I am a URI from the future. Whatever you do, don't "
future_uri = b"I am a URI from the future. Whatever you do, don't "
u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri)
self.failUnless(u.get_readonly() is None)
self.failUnless(u.get_error() is None)
future_uri_unicode = future_uri.decode("utf-8")
self.assertEqual(future_uri, uri.from_string(future_uri_unicode).to_string())

u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
self.failUnlessReallyEqual(u.to_string(), future_uri)
@ -222,7 +241,7 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(isinstance(u2.get_error(), CapConstraintError))

# Future caps might have non-ASCII chars in them. (Or maybe not, who can tell about the future?)
future_uri = u"I am a cap from the \u263A future. Whatever you ".encode('utf-8')
future_uri = u"I am a cap from the \u263A future. Whatever you ".encode("utf-8")
u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri)
@ -236,15 +255,15 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):

class Constraint(testutil.ReallyEqualMixin, unittest.TestCase):
def test_constraint(self):
bad = "http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/"
bad = b"http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/"
self.failUnlessRaises(uri.BadURIError, uri.DirectoryURI.init_from_string, bad)
fileURI = 'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834'
fileURI = b'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834'
uri.CHKFileURI.init_from_string(fileURI)

class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
def setUp(self):
self.writekey = "\x01" * 16
self.fingerprint = "\x02" * 32
self.writekey = b"\x01" * 16
self.fingerprint = b"\x02" * 32
self.readkey = hashutil.ssk_readkey_hash(self.writekey)
self.storage_index = hashutil.ssk_storage_index_hash(self.readkey)

@ -410,28 +429,29 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint)
cap = u1.to_string()

cap2 = cap+":I COME FROM THE FUTURE"
cap2 = cap+b":I COME FROM THE FUTURE"
u2 = uri.WriteableMDMFFileURI.init_from_string(cap2)
self.failUnlessReallyEqual(self.writekey, u2.writekey)
self.failUnlessReallyEqual(self.fingerprint, u2.fingerprint)
self.failIf(u2.is_readonly())
self.failUnless(u2.is_mutable())

cap3 = cap+":"+os.urandom(40) # parse *that*!

cap3 = cap+b":" + os.urandom(40)
u3 = uri.WriteableMDMFFileURI.init_from_string(cap3)
self.failUnlessReallyEqual(self.writekey, u3.writekey)
self.failUnlessReallyEqual(self.fingerprint, u3.fingerprint)
self.failIf(u3.is_readonly())
self.failUnless(u3.is_mutable())

cap4 = u1.get_readonly().to_string()+":ooh scary future stuff"
cap4 = u1.get_readonly().to_string()+b":ooh scary future stuff"
u4 = uri.from_string_mutable_filenode(cap4)
self.failUnlessReallyEqual(self.readkey, u4.readkey)
self.failUnlessReallyEqual(self.fingerprint, u4.fingerprint)
self.failUnless(u4.is_readonly())
self.failUnless(u4.is_mutable())

cap5 = u1.get_verify_cap().to_string()+":spoilers!"
cap5 = u1.get_verify_cap().to_string()+b":spoilers!"
u5 = uri.from_string(cap5)
self.failUnlessReallyEqual(self.storage_index, u5.storage_index)
self.failUnlessReallyEqual(self.fingerprint, u5.fingerprint)
@ -468,8 +488,8 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):

class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
writekey = "\x01" * 16
fingerprint = "\x02" * 32
writekey = b"\x01" * 16
fingerprint = b"\x02" * 32

n = uri.WriteableSSKFileURI(writekey, fingerprint)
u1 = uri.DirectoryURI(n)
@ -536,8 +556,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
u1.get_verify_cap()._filenode_uri)

def test_immutable(self):
readkey = "\x01" * 16
uri_extension_hash = hashutil.uri_extension_hash("stuff")
readkey = b"\x01" * 16
uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 3
total_shares = 10
size = 1234
@ -548,7 +568,7 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
total_shares=total_shares,
size=size)
fncap = fnuri.to_string()
self.failUnlessReallyEqual(fncap, "URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
self.failUnlessReallyEqual(fncap, b"URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
u1 = uri.ImmutableDirectoryURI(fnuri)
self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable())
@ -587,20 +607,20 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IVerifierURI.providedBy(u2_verifier))
u2vs = u2_verifier.to_string()
# URI:DIR2-CHK-Verifier:$key:$ueb:$k:$n:$size
self.failUnless(u2vs.startswith("URI:DIR2-CHK-Verifier:"), u2vs)
self.failUnless(u2vs.startswith(b"URI:DIR2-CHK-Verifier:"), u2vs)
u2_verifier_fileuri = u2_verifier.get_filenode_cap()
self.failUnless(IVerifierURI.providedBy(u2_verifier_fileuri))
u2vfs = u2_verifier_fileuri.to_string()
# URI:CHK-Verifier:$key:$ueb:$k:$n:$size
self.failUnlessReallyEqual(u2vfs, fnuri.get_verify_cap().to_string())
self.failUnlessReallyEqual(u2vs[len("URI:DIR2-"):], u2vfs[len("URI:"):])
self.failUnlessReallyEqual(u2vs[len(b"URI:DIR2-"):], u2vfs[len(b"URI:"):])
self.failUnless(str(u2_verifier))

def test_literal(self):
u0 = uri.LiteralFileURI("data")
u0 = uri.LiteralFileURI(b"data")
u1 = uri.LiteralDirectoryURI(u0)
self.failUnless(str(u1))
self.failUnlessReallyEqual(u1.to_string(), "URI:DIR2-LIT:mrqxiyi")
self.failUnlessReallyEqual(u1.to_string(), b"URI:DIR2-LIT:mrqxiyi")
self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable())
self.failUnless(IURI.providedBy(u1))
@ -608,11 +628,11 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IDirnodeURI.providedBy(u1))
self.failUnlessReallyEqual(u1.get_verify_cap(), None)
self.failUnlessReallyEqual(u1.get_storage_index(), None)
self.failUnlessReallyEqual(u1.abbrev_si(), "<LIT>")
self.failUnlessReallyEqual(u1.abbrev_si(), b"<LIT>")

def test_mdmf(self):
writekey = "\x01" * 16
fingerprint = "\x02" * 32
writekey = b"\x01" * 16
fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
self.failIf(d1.is_readonly())
@ -635,8 +655,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessIsInstance(d3, uri.UnknownURI)

def test_mdmf_attenuation(self):
writekey = "\x01" * 16
fingerprint = "\x02" * 32
writekey = b"\x01" * 16
fingerprint = b"\x02" * 32

uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
@ -676,8 +696,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):

def test_mdmf_verifier(self):
# I'm not sure what I want to write here yet.
writekey = "\x01" * 16
fingerprint = "\x02" * 32
writekey = b"\x01" * 16
fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
v1 = d1.get_verify_cap()

@ -1,5 +1,15 @@
from __future__ import print_function
"""
Ported to Python3.
"""

from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
import six
import os, time, sys
import yaml
@ -19,7 +29,7 @@ if six.PY3:

class IDLib(unittest.TestCase):
def test_nodeid_b2a(self):
self.failUnlessEqual(idlib.nodeid_b2a("\x00"*20), "a"*32)
self.failUnlessEqual(idlib.nodeid_b2a(b"\x00"*20), "a"*32)


class MyList(list):
@ -85,10 +95,10 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
basedir = "util/FileUtil/test_write_atomically"
fileutil.make_dirs(basedir)
fn = os.path.join(basedir, "here")
fileutil.write_atomically(fn, "one")
self.failUnlessEqual(fileutil.read(fn), "one")
fileutil.write_atomically(fn, "two", mode="") # non-binary
self.failUnlessEqual(fileutil.read(fn), "two")
fileutil.write_atomically(fn, b"one", "b")
self.failUnlessEqual(fileutil.read(fn), b"one")
fileutil.write_atomically(fn, u"two", mode="") # non-binary
self.failUnlessEqual(fileutil.read(fn), b"two")

def test_rename(self):
basedir = "util/FileUtil/test_rename"
@ -111,20 +121,20 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)

# when only dest exists
fileutil.write(dest_path, "dest")
fileutil.write(dest_path, b"dest")
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)
self.failUnlessEqual(fileutil.read(dest_path), "dest")
self.failUnlessEqual(fileutil.read(dest_path), b"dest")

# when both exist
fileutil.write(source_path, "source")
fileutil.write(source_path, b"source")
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)
self.failUnlessEqual(fileutil.read(source_path), "source")
self.failUnlessEqual(fileutil.read(dest_path), "dest")
self.failUnlessEqual(fileutil.read(source_path), b"source")
self.failUnlessEqual(fileutil.read(dest_path), b"dest")

# when only source exists
os.remove(dest_path)
fileutil.rename_no_overwrite(source_path, dest_path)
self.failUnlessEqual(fileutil.read(dest_path), "source")
self.failUnlessEqual(fileutil.read(dest_path), b"source")
self.failIf(os.path.exists(source_path))
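A behavioural sketch of the contract the three cases above pin down (hypothetical code; the real fileutil.rename_no_overwrite also deals with platform quirks):

import os

def rename_no_overwrite_sketch(source_path, dest_path):
    # Refuse to clobber an existing destination, and fail loudly when
    # there is nothing to rename; otherwise perform the rename.
    if os.path.exists(dest_path) or not os.path.exists(source_path):
        raise OSError("refusing to rename %r to %r" % (source_path, dest_path))
    os.rename(source_path, dest_path)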

def test_replace_file(self):
@ -138,21 +148,21 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)

# when only replaced exists
fileutil.write(replaced_path, "foo")
fileutil.write(replaced_path, b"foo")
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "foo")
self.failUnlessEqual(fileutil.read(replaced_path), b"foo")

# when both replaced and replacement exist
fileutil.write(replacement_path, "bar")
fileutil.write(replacement_path, b"bar")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failUnlessEqual(fileutil.read(replaced_path), b"bar")
self.failIf(os.path.exists(replacement_path))

# when only replacement exists
os.remove(replaced_path)
fileutil.write(replacement_path, "bar")
fileutil.write(replacement_path, b"bar")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failUnlessEqual(fileutil.read(replaced_path), b"bar")
self.failIf(os.path.exists(replacement_path))
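And the matching sketch for replace_file's contract (hypothetical; note the asymmetry with rename_no_overwrite: here the replacement must exist, while the replaced file may or may not):

import os
from allmydata.util import fileutil

def replace_file_sketch(replaced_path, replacement_path):
    # The replacement must exist; it then atomically takes the replaced
    # file's place, whether or not replaced_path already existed.
    if not os.path.exists(replacement_path):
        raise fileutil.ConflictError("no replacement file at %r" % (replacement_path,))
    os.replace(replacement_path, replaced_path)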
|
||||
|
||||
def test_du(self):
|
||||
@ -170,13 +180,15 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
self.failUnlessEqual(10+11+12+13, used)
|
||||
|
||||
def test_abspath_expanduser_unicode(self):
|
||||
self.failUnlessRaises(AssertionError, fileutil.abspath_expanduser_unicode, "bytestring")
|
||||
self.failUnlessRaises(AssertionError, fileutil.abspath_expanduser_unicode, b"bytestring")
|
||||
|
||||
saved_cwd = os.path.normpath(os.getcwdu())
|
||||
saved_cwd = os.path.normpath(os.getcwd())
|
||||
if PY2:
|
||||
saved_cwd = saved_cwd.decode("utf8")
|
||||
abspath_cwd = fileutil.abspath_expanduser_unicode(u".")
|
||||
abspath_cwd_notlong = fileutil.abspath_expanduser_unicode(u".", long_path=False)
|
||||
self.failUnless(isinstance(saved_cwd, unicode), saved_cwd)
|
||||
self.failUnless(isinstance(abspath_cwd, unicode), abspath_cwd)
|
||||
self.failUnless(isinstance(saved_cwd, str), saved_cwd)
|
||||
self.failUnless(isinstance(abspath_cwd, str), abspath_cwd)
|
||||
if sys.platform == "win32":
|
||||
self.failUnlessReallyEqual(abspath_cwd, fileutil.to_windows_long_path(saved_cwd))
|
||||
else:
|
||||
@ -237,10 +249,10 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
os.chdir(cwd)
|
||||
for upath in (u'', u'fuu', u'f\xf9\xf9', u'/fuu', u'U:\\', u'~'):
|
||||
uabspath = fileutil.abspath_expanduser_unicode(upath)
|
||||
self.failUnless(isinstance(uabspath, unicode), uabspath)
|
||||
self.failUnless(isinstance(uabspath, str), uabspath)
|
||||
|
||||
uabspath_notlong = fileutil.abspath_expanduser_unicode(upath, long_path=False)
|
||||
self.failUnless(isinstance(uabspath_notlong, unicode), uabspath_notlong)
|
||||
self.failUnless(isinstance(uabspath_notlong, str), uabspath_notlong)
|
||||
finally:
|
||||
os.chdir(saved_cwd)
|
||||
|
||||
@ -293,9 +305,9 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
fileutil.remove(long_path)
|
||||
self.addCleanup(_cleanup)
|
||||
|
||||
fileutil.write(long_path, "test")
|
||||
fileutil.write(long_path, b"test")
|
||||
self.failUnless(os.path.exists(long_path))
|
||||
self.failUnlessEqual(fileutil.read(long_path), "test")
|
||||
self.failUnlessEqual(fileutil.read(long_path), b"test")
|
||||
_cleanup()
|
||||
self.failIf(os.path.exists(long_path))
|
||||
|
||||
@ -353,7 +365,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
|
||||
# create a file
|
||||
f = os.path.join(basedir, "1.txt")
|
||||
fileutil.write(f, "a"*10)
|
||||
fileutil.write(f, b"a"*10)
|
||||
fileinfo = fileutil.get_pathinfo(f)
|
||||
self.failUnlessTrue(fileinfo.isfile)
|
||||
self.failUnlessTrue(fileinfo.exists)
|
||||
@ -381,7 +393,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
fileutil.make_dirs(basedir)
|
||||
|
||||
f = os.path.join(basedir, "1.txt")
|
||||
fileutil.write(f, "a"*10)
|
||||
fileutil.write(f, b"a"*10)
|
||||
|
||||
# create a symlink pointing to 1.txt
|
||||
slname = os.path.join(basedir, "linkto1.txt")
|
||||
@ -394,7 +406,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
|
||||
|
||||
def test_encrypted_tempfile(self):
|
||||
f = EncryptedTemporaryFile()
|
||||
f.write("foobar")
|
||||
f.write(b"foobar")
|
||||
f.close()
|
||||
|
||||
|
||||
@ -409,7 +421,7 @@ class PollMixinTests(unittest.TestCase):
|
||||
|
||||
def test_PollMixin_False_then_True(self):
|
||||
i = iter([False, True])
|
||||
d = self.pm.poll(check_f=i.next,
|
||||
d = self.pm.poll(check_f=lambda: next(i),
|
||||
pollinterval=0.1)
|
||||
return d
|
||||
|
||||
@ -454,6 +466,6 @@ class YAML(unittest.TestCase):
|
||||
def test_convert(self):
|
||||
data = yaml.safe_dump(["str", u"unicode", u"\u1234nicode"])
|
||||
back = yamlutil.safe_load(data)
|
||||
self.failUnlessEqual(type(back[0]), unicode)
|
||||
self.failUnlessEqual(type(back[1]), unicode)
|
||||
self.failUnlessEqual(type(back[2]), unicode)
|
||||
self.assertIsInstance(back[0], str)
|
||||
self.assertIsInstance(back[1], str)
|
||||
self.assertIsInstance(back[2], str)
|
||||
|
@ -5,8 +5,6 @@ unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
|
||||
unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
|
||||
unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8')
|
||||
|
||||
FAVICON_MARKUP = '<link href="/icon.png" rel="shortcut icon" />'
|
||||
|
||||
|
||||
def assert_soup_has_favicon(testcase, soup):
|
||||
"""
|
||||
|
@ -10,7 +10,7 @@ from twisted.web import resource
|
||||
from twisted.trial import unittest
|
||||
from allmydata import uri, dirnode
|
||||
from allmydata.util import base32
|
||||
from allmydata.util.encodingutil import to_str
|
||||
from allmydata.util.encodingutil import to_bytes
|
||||
from allmydata.util.consumer import download_to_data
|
||||
from allmydata.util.netstring import split_netstring
|
||||
from allmydata.unknown import UnknownNode
|
||||
@ -21,7 +21,12 @@ from allmydata.mutable import publish
|
||||
from .. import common_util as testutil
|
||||
from ..common import WebErrorMixin, ShouldFailMixin
|
||||
from ..no_network import GridTestMixin
|
||||
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP
|
||||
from .common import (
|
||||
assert_soup_has_favicon,
|
||||
unknown_immcap,
|
||||
unknown_rocap,
|
||||
unknown_rwcap,
|
||||
)
|
||||
|
||||
DIR_HTML_TAG = '<html lang="en">'
|
||||
|
||||
@ -92,7 +97,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
def _got_html_good(res):
|
||||
self.failUnlessIn("Healthy", res)
|
||||
self.failIfIn("Not Healthy", res)
|
||||
self.failUnlessIn(FAVICON_MARKUP, res)
|
||||
soup = BeautifulSoup(res, 'html5lib')
|
||||
assert_soup_has_favicon(self, soup)
|
||||
|
||||
d.addCallback(_got_html_good)
|
||||
d.addCallback(self.CHECK, "good", "t=check&return_to=somewhere")
|
||||
def _got_html_good_return_to(res):
|
||||
@ -235,7 +242,9 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
self.failUnlessIn("Healthy", res)
|
||||
self.failIfIn("Not Healthy", res)
|
||||
self.failUnlessIn("No repair necessary", res)
|
||||
self.failUnlessIn(FAVICON_MARKUP, res)
|
||||
soup = BeautifulSoup(res, 'html5lib')
|
||||
assert_soup_has_favicon(self, soup)
|
||||
|
||||
d.addCallback(_got_html_good)
|
||||
|
||||
d.addCallback(self.CHECK, "sick", "t=check&repair=true")
|
||||
@ -358,13 +367,13 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
|
||||
f = data[1]["children"][name]
|
||||
self.failUnlessEqual(f[0], "unknown")
|
||||
if expect_rw_uri:
|
||||
self.failUnlessReallyEqual(to_str(f[1]["rw_uri"]), unknown_rwcap, data)
|
||||
self.failUnlessReallyEqual(to_bytes(f[1]["rw_uri"]), unknown_rwcap, data)
|
||||
else:
|
||||
self.failIfIn("rw_uri", f[1])
|
||||
if immutable:
|
||||
self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_immcap, data)
|
||||
self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_immcap, data)
|
||||
else:
|
||||
self.failUnlessReallyEqual(to_str(f[1]["ro_uri"]), unknown_rocap, data)
|
||||
self.failUnlessReallyEqual(to_bytes(f[1]["ro_uri"]), unknown_rocap, data)
|
||||
self.failUnlessIn("metadata", f[1])
|
||||
d.addCallback(_check_directory_json, expect_rw_uri=not immutable)
|
||||
|
||||
@ -397,18 +406,18 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            data = json.loads(res)
            self.failUnlessEqual(data[0], "unknown")
            if expect_rw_uri:
                self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
                self.failUnlessReallyEqual(to_bytes(data[1]["rw_uri"]), unknown_rwcap, data)
            else:
                self.failIfIn("rw_uri", data[1])

            if immutable:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_immcap, data)
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_immcap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], False)
            elif expect_rw_uri:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data)
                self.failUnlessReallyEqual(data[1]["mutable"], True)
            else:
                self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), unknown_rocap, data)
                self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), unknown_rocap, data)
                self.failIfIn("mutable", data[1])

            # TODO: check metadata contents
@ -572,7 +581,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            ll_type, ll_data = listed_children[u"lonely"]
            self.failUnlessEqual(ll_type, "filenode")
            self.failIfIn("rw_uri", ll_data)
            self.failUnlessReallyEqual(to_str(ll_data["ro_uri"]), lonely_uri)
            self.failUnlessReallyEqual(to_bytes(ll_data["ro_uri"]), lonely_uri)
        d.addCallback(_check_json)
        return d

@ -634,14 +643,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
            self.failUnlessReallyEqual(to_bytes(u0["cap"]), self.rootnode.get_uri())
            u0cr = u0["check-results"]
            self.failUnlessReallyEqual(u0cr["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(u0cr["results"]["count-shares-good"], 10)

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessReallyEqual(to_str(ugood["cap"]), self.uris["good"])
            self.failUnlessReallyEqual(to_bytes(ugood["cap"]), self.uris["good"])
            ugoodcr = ugood["check-results"]
            self.failUnlessReallyEqual(ugoodcr["results"]["count-happiness"], 10)
            self.failUnlessReallyEqual(ugoodcr["results"]["count-shares-good"], 10)
@ -663,7 +672,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            self.failUnlessEqual(units[-1]["type"], "stats")
            first = units[0]
            self.failUnlessEqual(first["path"], [])
            self.failUnlessEqual(to_str(first["cap"]), self.rootnode.get_uri())
            self.failUnlessEqual(to_bytes(first["cap"]), self.rootnode.get_uri())
            self.failUnlessEqual(first["type"], "directory")
            stats = units[-1]["stats"]
            self.failUnlessReallyEqual(stats["count-immutable-files"], 2)
@ -817,7 +826,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
            u0 = units[0]
            self.failUnlessEqual(u0["path"], [])
            self.failUnlessEqual(u0["type"], "directory")
            self.failUnlessReallyEqual(to_str(u0["cap"]), self.rootnode.get_uri())
            self.failUnlessReallyEqual(to_bytes(u0["cap"]), self.rootnode.get_uri())
            u0crr = u0["check-and-repair-results"]
            self.failUnlessReallyEqual(u0crr["repair-attempted"], False)
            self.failUnlessReallyEqual(u0crr["pre-repair-results"]["results"]["count-happiness"], 10)
@ -825,7 +834,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

            ugood = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"good"]][0]
            self.failUnlessEqual(to_str(ugood["cap"]), self.uris["good"])
            self.failUnlessEqual(to_bytes(ugood["cap"]), self.uris["good"])
            ugoodcrr = ugood["check-and-repair-results"]
            self.failUnlessReallyEqual(ugoodcrr["repair-attempted"], False)
            self.failUnlessReallyEqual(ugoodcrr["pre-repair-results"]["results"]["count-happiness"], 10)
@ -833,7 +842,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

            usick = [u for u in units
                     if u["type"] == "file" and u["path"] == [u"sick"]][0]
            self.failUnlessReallyEqual(to_str(usick["cap"]), self.uris["sick"])
            self.failUnlessReallyEqual(to_bytes(usick["cap"]), self.uris["sick"])
            usickcrr = usick["check-and-repair-results"]
            self.failUnlessReallyEqual(usickcrr["repair-attempted"], True)
            self.failUnlessReallyEqual(usickcrr["repair-successful"], True)

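These assertions all follow from the shape of the node JSON that the webapi returns. As a rough illustration only (the cap value below is a made-up placeholder, not a valid Tahoe-LAFS cap), the ported tests parse JSON text and then encode the cap fields before comparing them against bytes, which is the point of the to_str to to_bytes change::

    import json

    res = '["filenode", {"mutable": false, "ro_uri": "URI:CHK:fake"}]'
    data = json.loads(res)
    assert data[0] == "filenode"
    # json.loads() always yields text strings, so comparing against the
    # bytes caps used elsewhere requires an explicit UTF-8 encode.
    assert data[1]["ro_uri"].encode("utf-8") == b"URI:CHK:fake"
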
@ -36,7 +36,7 @@ from allmydata.nodemaker import NodeMaker
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
from allmydata.util.encodingutil import to_str
from allmydata.util.encodingutil import to_bytes
from ...util.connection_status import ConnectionStatus
from ..common import (
    EMPTY_CLIENT_CONFIG,
@ -54,6 +54,9 @@ from .common import (
    assert_soup_has_tag_with_attributes,
    assert_soup_has_tag_with_content,
    assert_soup_has_tag_with_attributes_and_content,
    unknown_rwcap,
    unknown_rocap,
    unknown_immcap,
)

from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
@ -65,7 +68,6 @@ from ..common_web import (
    Error,
)
from allmydata.client import _Client, SecretHolder
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them
@ -455,8 +457,8 @@ class WebMixin(TimezoneMixin):
        self.failUnless(isinstance(data[1], dict))
        self.failIf(data[1]["mutable"])
        self.failIfIn("rw_uri", data[1]) # immutable
        self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
        self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_bytes(data[1]["verify_uri"]), self._bar_txt_verifycap)
        self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))

    def failUnlessIsQuuxJSON(self, res, readonly=False):
@ -485,9 +487,9 @@ class WebMixin(TimezoneMixin):
        self.failUnless(isinstance(data[1], dict))
        self.failUnless(data[1]["mutable"])
        self.failUnlessIn("rw_uri", data[1]) # mutable
        self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
        self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
        self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
        self.failUnlessReallyEqual(to_bytes(data[1]["rw_uri"]), self._foo_uri)
        self.failUnlessReallyEqual(to_bytes(data[1]["ro_uri"]), self._foo_readonly_uri)
        self.failUnlessReallyEqual(to_bytes(data[1]["verify_uri"]), self._foo_verifycap)

        kidnames = sorted([unicode(n) for n in data[1]["children"]])
        self.failUnlessEqual(kidnames,
@ -504,19 +506,19 @@ class WebMixin(TimezoneMixin):
        self.failUnlessIn("linkmotime", tahoe_md)
        self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
        self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
        self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
        self.failUnlessReallyEqual(to_bytes(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
        self.failUnlessReallyEqual(to_bytes(kids[u"bar.txt"][1]["verify_uri"]),
                                   self._bar_txt_verifycap)
        self.failUnlessIn("metadata", kids[u"bar.txt"][1])
        self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
        self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
                                   self._bar_txt_metadata["tahoe"]["linkcrtime"])
        self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
        self.failUnlessReallyEqual(to_bytes(kids[u"n\u00fc.txt"][1]["ro_uri"]),
                                   self._bar_txt_uri)
        self.failUnlessIn("quux.txt", kids)
        self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["rw_uri"]),
        self.failUnlessReallyEqual(to_bytes(kids[u"quux.txt"][1]["rw_uri"]),
                                   self._quux_txt_uri)
        self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["ro_uri"]),
        self.failUnlessReallyEqual(to_bytes(kids[u"quux.txt"][1]["ro_uri"]),
                                   self._quux_txt_readonly_uri)

    @inlineCallbacks
@ -2179,7 +2181,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            got = {}
            for (path_list, cap) in data:
                got[tuple(path_list)] = cap
            self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
            self.failUnlessReallyEqual(to_bytes(got[(u"sub",)]), self._sub_uri)
            self.failUnlessIn((u"sub", u"baz.txt"), got)
            self.failUnlessIn("finished", res)
            self.failUnlessIn("origin", res)
@ -2264,9 +2266,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            self.failUnlessEqual(units[-1]["type"], "stats")
            first = units[0]
            self.failUnlessEqual(first["path"], [])
            self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
            self.failUnlessReallyEqual(to_bytes(first["cap"]), self._foo_uri)
            self.failUnlessEqual(first["type"], "directory")
            baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
            baz = [u for u in units[:-1] if to_bytes(u["cap"]) == self._baz_file_uri][0]
            self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
            self.failIfEqual(baz["storage-index"], None)
            self.failIfEqual(baz["verifycap"], None)
@ -2279,14 +2281,14 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
    def test_GET_DIRURL_uri(self):
        d = self.GET(self.public_url + "/foo?t=uri")
        def _check(res):
            self.failUnlessReallyEqual(to_str(res), self._foo_uri)
            self.failUnlessReallyEqual(to_bytes(res), self._foo_uri)
        d.addCallback(_check)
        return d

    def test_GET_DIRURL_readonly_uri(self):
        d = self.GET(self.public_url + "/foo?t=readonly-uri")
        def _check(res):
            self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
            self.failUnlessReallyEqual(to_bytes(res), self._foo_readonly_uri)
        d.addCallback(_check)
        return d

@ -2948,9 +2950,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            new_json = children[u"new.txt"]
            self.failUnlessEqual(new_json[0], "filenode")
            self.failUnless(new_json[1]["mutable"])
            self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
            self.failUnlessReallyEqual(to_bytes(new_json[1]["rw_uri"]), self._mutable_uri)
            ro_uri = self._mutable_node.get_readonly().to_string()
            self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
            self.failUnlessReallyEqual(to_bytes(new_json[1]["ro_uri"]), ro_uri)
        d.addCallback(_check_page_json)

        # and the JSON form of the file
@ -2960,9 +2962,9 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            parsed = json.loads(res)
            self.failUnlessEqual(parsed[0], "filenode")
            self.failUnless(parsed[1]["mutable"])
            self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
            self.failUnlessReallyEqual(to_bytes(parsed[1]["rw_uri"]), self._mutable_uri)
            ro_uri = self._mutable_node.get_readonly().to_string()
            self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
            self.failUnlessReallyEqual(to_bytes(parsed[1]["ro_uri"]), ro_uri)
        d.addCallback(_check_file_json)

        # and look at t=uri and t=readonly-uri
@ -3262,13 +3264,15 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
        res = yield self.get_operation_results(None, "123", "html")
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)
        soup = BeautifulSoup(res, 'html5lib')
        assert_soup_has_favicon(self, soup)

        res = yield self.GET("/operations/123/")
        # should be the same as without the slash
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)
        soup = BeautifulSoup(res, 'html5lib')
        assert_soup_has_favicon(self, soup)

        yield self.shouldFail2(error.Error, "one", "404 Not Found",
                               "No detailed results for SI bogus",
@ -3318,7 +3322,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
            self.failUnlessIn("Objects Unhealthy (after repair): <span>0</span>", res)
            self.failUnlessIn("Corrupt Shares (after repair): <span>0</span>", res)

            self.failUnlessIn(FAVICON_MARKUP, res)
            soup = BeautifulSoup(res, 'html5lib')
            assert_soup_has_favicon(self, soup)
        d.addCallback(_check_html)
        return d

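The import change above renames to_str to to_bytes. A minimal sketch of the helper, matching the definition that appears in the encodingutil hunk further below::

    def to_bytes(s):
        # None and bytes pass through unchanged; text is UTF-8 encoded.
        if s is None or isinstance(s, bytes):
            return s
        return s.encode("utf-8")

    assert to_bytes(None) is None
    assert to_bytes(b"URI:LIT:x") == b"URI:LIT:x"
    assert to_bytes(u"URI:LIT:x") == b"URI:LIT:x"
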
@ -1,3 +1,22 @@
"""
URIs (kinda sorta, really they're capabilities?).

Ported to Python 3.

Methods ending in to_string() are actually to_bytes(), possibly should be fixed
in follow-up port.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # Don't import bytes, to prevent leaks.
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import unicode, long

import re

@ -24,10 +43,10 @@ class BadURIError(CapConstraintError):
# - make variable and method names consistently use _uri for an URI string,
#   and _cap for a Cap object (decoded URI)

BASE32STR_128bits = '(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits)
BASE32STR_256bits = '(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits)
BASE32STR_128bits = b'(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits)
BASE32STR_256bits = b'(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits)

NUMBER='([0-9]+)'
NUMBER=b'([0-9]+)'


class _BaseURI(object):
@ -53,10 +72,10 @@ class _BaseURI(object):
@implementer(IURI, IImmutableFileURI)
class CHKFileURI(_BaseURI):

    BASE_STRING='URI:CHK:'
    STRING_RE=re.compile('^URI:CHK:'+BASE32STR_128bits+':'+
                         BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER+
                         '$')
    BASE_STRING=b'URI:CHK:'
    STRING_RE=re.compile(b'^URI:CHK:'+BASE32STR_128bits+b':'+
                         BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER+
                         b'$')

    def __init__(self, key, uri_extension_hash, needed_shares, total_shares,
                 size):
@ -82,7 +101,7 @@ class CHKFileURI(_BaseURI):
        assert isinstance(self.total_shares, int)
        assert isinstance(self.size, (int,long))

        return ('URI:CHK:%s:%s:%d:%d:%d' %
        return (b'URI:CHK:%s:%s:%d:%d:%d' %
                (base32.b2a(self.key),
                 base32.b2a(self.uri_extension_hash),
                 self.needed_shares,
@ -112,9 +131,9 @@ class CHKFileURI(_BaseURI):
@implementer(IVerifierURI)
class CHKFileVerifierURI(_BaseURI):

    BASE_STRING='URI:CHK-Verifier:'
    STRING_RE=re.compile('^URI:CHK-Verifier:'+BASE32STR_128bits+':'+
                         BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER)
    BASE_STRING=b'URI:CHK-Verifier:'
    STRING_RE=re.compile(b'^URI:CHK-Verifier:'+BASE32STR_128bits+b':'+
                         BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER)

    def __init__(self, storage_index, uri_extension_hash,
                 needed_shares, total_shares, size):
@ -138,7 +157,7 @@ class CHKFileVerifierURI(_BaseURI):
        assert isinstance(self.total_shares, int)
        assert isinstance(self.size, (int,long))

        return ('URI:CHK-Verifier:%s:%s:%d:%d:%d' %
        return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
                (si_b2a(self.storage_index),
                 base32.b2a(self.uri_extension_hash),
                 self.needed_shares,
@ -161,12 +180,12 @@ class CHKFileVerifierURI(_BaseURI):
@implementer(IURI, IImmutableFileURI)
class LiteralFileURI(_BaseURI):

    BASE_STRING='URI:LIT:'
    STRING_RE=re.compile('^URI:LIT:'+base32.BASE32STR_anybytes+'$')
    BASE_STRING=b'URI:LIT:'
    STRING_RE=re.compile(b'^URI:LIT:'+base32.BASE32STR_anybytes+b'$')

    def __init__(self, data=None):
        if data is not None:
            assert isinstance(data, str)
            assert isinstance(data, bytes)
            self.data = data

    @classmethod
@ -177,7 +196,7 @@ class LiteralFileURI(_BaseURI):
        return cls(base32.a2b(mo.group(1)))

    def to_string(self):
        return 'URI:LIT:%s' % base32.b2a(self.data)
        return b'URI:LIT:%s' % base32.b2a(self.data)

    def is_readonly(self):
        return True
@ -202,9 +221,9 @@ class LiteralFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class WriteableSSKFileURI(_BaseURI):

    BASE_STRING='URI:SSK:'
    STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+
                         BASE32STR_256bits+'$')
    BASE_STRING=b'URI:SSK:'
    STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+
                         BASE32STR_256bits+b'$')

    def __init__(self, writekey, fingerprint):
        self.writekey = writekey
@ -221,10 +240,10 @@ class WriteableSSKFileURI(_BaseURI):
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.writekey, str)
        assert isinstance(self.fingerprint, str)
        return 'URI:SSK:%s:%s' % (base32.b2a(self.writekey),
                                  base32.b2a(self.fingerprint))
        assert isinstance(self.writekey, bytes)
        assert isinstance(self.fingerprint, bytes)
        return b'URI:SSK:%s:%s' % (base32.b2a(self.writekey),
                                   base32.b2a(self.fingerprint))

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
@ -251,8 +270,8 @@ class WriteableSSKFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class ReadonlySSKFileURI(_BaseURI):

    BASE_STRING='URI:SSK-RO:'
    STRING_RE=re.compile('^URI:SSK-RO:'+BASE32STR_128bits+':'+BASE32STR_256bits+'$')
    BASE_STRING=b'URI:SSK-RO:'
    STRING_RE=re.compile(b'^URI:SSK-RO:'+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')

    def __init__(self, readkey, fingerprint):
        self.readkey = readkey
@ -268,10 +287,10 @@ class ReadonlySSKFileURI(_BaseURI):
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.readkey, str)
        assert isinstance(self.fingerprint, str)
        return 'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey),
                                     base32.b2a(self.fingerprint))
        assert isinstance(self.readkey, bytes)
        assert isinstance(self.fingerprint, bytes)
        return b'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey),
                                      base32.b2a(self.fingerprint))

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
@ -298,8 +317,8 @@ class ReadonlySSKFileURI(_BaseURI):
@implementer(IVerifierURI)
class SSKVerifierURI(_BaseURI):

    BASE_STRING='URI:SSK-Verifier:'
    STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'$')
    BASE_STRING=b'URI:SSK-Verifier:'
    STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')

    def __init__(self, storage_index, fingerprint):
        assert len(storage_index) == 16
@ -314,10 +333,10 @@ class SSKVerifierURI(_BaseURI):
        return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.storage_index, str)
        assert isinstance(self.fingerprint, str)
        return 'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index),
                                           base32.b2a(self.fingerprint))
        assert isinstance(self.storage_index, bytes)
        assert isinstance(self.fingerprint, bytes)
        return b'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index),
                                            base32.b2a(self.fingerprint))

    def is_readonly(self):
        return True
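The b'' prefixes added throughout these hunks matter because on Python 3 a compiled bytes pattern only matches bytes subjects. A small sketch of the same pattern-building style (URI:DEMO is a made-up scheme, not part of Tahoe-LAFS)::

    import re

    NUMBER = b'([0-9]+)'
    DEMO_RE = re.compile(b'^URI:DEMO:' + NUMBER + b'$')

    assert DEMO_RE.match(b'URI:DEMO:123').group(1) == b'123'
    try:
        DEMO_RE.match(u'URI:DEMO:123')  # bytes pattern, text subject
    except TypeError:
        pass  # raised on Python 3
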
@ -335,8 +354,8 @@ class SSKVerifierURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class WriteableMDMFFileURI(_BaseURI):

    BASE_STRING='URI:MDMF:'
    STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
    BASE_STRING=b'URI:MDMF:'
    STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')

    def __init__(self, writekey, fingerprint):
        self.writekey = writekey
@ -353,10 +372,10 @@ class WriteableMDMFFileURI(_BaseURI):
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.writekey, str)
        assert isinstance(self.fingerprint, str)
        ret = 'URI:MDMF:%s:%s' % (base32.b2a(self.writekey),
                                  base32.b2a(self.fingerprint))
        assert isinstance(self.writekey, bytes)
        assert isinstance(self.fingerprint, bytes)
        ret = b'URI:MDMF:%s:%s' % (base32.b2a(self.writekey),
                                   base32.b2a(self.fingerprint))
        return ret

    def __repr__(self):
@ -384,8 +403,8 @@ class WriteableMDMFFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class ReadonlyMDMFFileURI(_BaseURI):

    BASE_STRING='URI:MDMF-RO:'
    STRING_RE=re.compile('^' +BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
    BASE_STRING=b'URI:MDMF-RO:'
    STRING_RE=re.compile(b'^' +BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')

    def __init__(self, readkey, fingerprint):
        self.readkey = readkey
@ -402,10 +421,10 @@ class ReadonlyMDMFFileURI(_BaseURI):
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.readkey, str)
        assert isinstance(self.fingerprint, str)
        ret = 'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey),
                                     base32.b2a(self.fingerprint))
        assert isinstance(self.readkey, bytes)
        assert isinstance(self.fingerprint, bytes)
        ret = b'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey),
                                      base32.b2a(self.fingerprint))
        return ret

    def __repr__(self):
@ -433,8 +452,8 @@ class ReadonlyMDMFFileURI(_BaseURI):
@implementer(IVerifierURI)
class MDMFVerifierURI(_BaseURI):

    BASE_STRING='URI:MDMF-Verifier:'
    STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
    BASE_STRING=b'URI:MDMF-Verifier:'
    STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')

    def __init__(self, storage_index, fingerprint):
        assert len(storage_index) == 16
@ -449,10 +468,10 @@ class MDMFVerifierURI(_BaseURI):
        return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
        assert isinstance(self.storage_index, str)
        assert isinstance(self.fingerprint, str)
        ret = 'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index),
                                           base32.b2a(self.fingerprint))
        assert isinstance(self.storage_index, bytes)
        assert isinstance(self.fingerprint, bytes)
        ret = b'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index),
                                            base32.b2a(self.fingerprint))
        return ret

    def is_readonly(self):
@ -494,12 +513,12 @@ class _DirectoryBaseURI(_BaseURI):
        return self.BASE_STRING+bits

    def abbrev(self):
        return self._filenode_uri.to_string().split(':')[2][:5]
        return self._filenode_uri.to_string().split(b':')[2][:5]

    def abbrev_si(self):
        si = self._filenode_uri.get_storage_index()
        if si is None:
            return "<LIT>"
            return b"<LIT>"
        return base32.b2a(si)[:5]

    def is_mutable(self):
@ -518,8 +537,8 @@ class _DirectoryBaseURI(_BaseURI):
@implementer(IDirectoryURI)
class DirectoryURI(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=WriteableSSKFileURI

    def __init__(self, filenode_uri=None):
@ -537,8 +556,8 @@ class DirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI)
class ReadonlyDirectoryURI(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2-RO:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-RO:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=ReadonlySSKFileURI

    def __init__(self, filenode_uri=None):
@ -571,8 +590,8 @@ class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):


class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):
    BASE_STRING='URI:DIR2-CHK:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-CHK:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=CHKFileURI

    def get_verify_cap(self):
@ -581,8 +600,8 @@ class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):


class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
    BASE_STRING='URI:DIR2-LIT:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-LIT:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=LiteralFileURI

    def get_verify_cap(self):
@ -593,8 +612,8 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
@implementer(IDirectoryURI)
class MDMFDirectoryURI(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2-MDMF:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-MDMF:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=WriteableMDMFFileURI

    def __init__(self, filenode_uri=None):
@ -615,8 +634,8 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI)
class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2-MDMF-RO:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-MDMF-RO:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=ReadonlyMDMFFileURI

    def __init__(self, filenode_uri=None):
@ -653,8 +672,8 @@ def wrap_dirnode_cap(filecap):
@implementer(IVerifierURI)
class MDMFDirectoryURIVerifier(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2-MDMF-Verifier:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-MDMF-Verifier:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=MDMFVerifierURI

    def __init__(self, filenode_uri=None):
@ -678,8 +697,8 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
@implementer(IVerifierURI)
class DirectoryURIVerifier(_DirectoryBaseURI):

    BASE_STRING='URI:DIR2-Verifier:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-Verifier:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=SSKVerifierURI

    def __init__(self, filenode_uri=None):
@ -702,8 +721,8 @@ class DirectoryURIVerifier(_DirectoryBaseURI):

@implementer(IVerifierURI)
class ImmutableDirectoryURIVerifier(DirectoryURIVerifier):
    BASE_STRING='URI:DIR2-CHK-Verifier:'
    BASE_STRING_RE=re.compile('^'+BASE_STRING)
    BASE_STRING=b'URI:DIR2-CHK-Verifier:'
    BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
    INNER_URI_CLASS=CHKFileVerifierURI


@ -725,12 +744,15 @@ class UnknownURI(object):
        return None


ALLEGED_READONLY_PREFIX = 'ro.'
ALLEGED_IMMUTABLE_PREFIX = 'imm.'
ALLEGED_READONLY_PREFIX = b'ro.'
ALLEGED_IMMUTABLE_PREFIX = b'imm.'

def from_string(u, deep_immutable=False, name=u"<unknown name>"):
    if not isinstance(u, str):
        raise TypeError("URI must be str: %r" % (u,))
    """Create URI from either unicode or byte string."""
    if isinstance(u, unicode):
        u = u.encode("utf-8")
    if not isinstance(u, bytes):
        raise TypeError("URI must be unicode string or bytes: %r" % (u,))

    # We allow and check ALLEGED_READONLY_PREFIX or ALLEGED_IMMUTABLE_PREFIX
    # on all URIs, even though we would only strictly need to do so for caps of
@ -748,62 +770,62 @@ def from_string(u, deep_immutable=False, name=u"<unknown name>"):

    error = None
    try:
        if s.startswith('URI:CHK:'):
        if s.startswith(b'URI:CHK:'):
            return CHKFileURI.init_from_string(s)
        elif s.startswith('URI:CHK-Verifier:'):
        elif s.startswith(b'URI:CHK-Verifier:'):
            return CHKFileVerifierURI.init_from_string(s)
        elif s.startswith('URI:LIT:'):
        elif s.startswith(b'URI:LIT:'):
            return LiteralFileURI.init_from_string(s)
        elif s.startswith('URI:SSK:'):
        elif s.startswith(b'URI:SSK:'):
            if can_be_writeable:
                return WriteableSSKFileURI.init_from_string(s)
            kind = "URI:SSK file writecap"
        elif s.startswith('URI:SSK-RO:'):
        elif s.startswith(b'URI:SSK-RO:'):
            if can_be_mutable:
                return ReadonlySSKFileURI.init_from_string(s)
            kind = "URI:SSK-RO readcap to a mutable file"
        elif s.startswith('URI:SSK-Verifier:'):
        elif s.startswith(b'URI:SSK-Verifier:'):
            return SSKVerifierURI.init_from_string(s)
        elif s.startswith('URI:MDMF:'):
        elif s.startswith(b'URI:MDMF:'):
            if can_be_writeable:
                return WriteableMDMFFileURI.init_from_string(s)
            kind = "URI:MDMF file writecap"
        elif s.startswith('URI:MDMF-RO:'):
        elif s.startswith(b'URI:MDMF-RO:'):
            if can_be_mutable:
                return ReadonlyMDMFFileURI.init_from_string(s)
            kind = "URI:MDMF-RO readcap to a mutable file"
        elif s.startswith('URI:MDMF-Verifier:'):
        elif s.startswith(b'URI:MDMF-Verifier:'):
            return MDMFVerifierURI.init_from_string(s)
        elif s.startswith('URI:DIR2:'):
        elif s.startswith(b'URI:DIR2:'):
            if can_be_writeable:
                return DirectoryURI.init_from_string(s)
            kind = "URI:DIR2 directory writecap"
        elif s.startswith('URI:DIR2-RO:'):
        elif s.startswith(b'URI:DIR2-RO:'):
            if can_be_mutable:
                return ReadonlyDirectoryURI.init_from_string(s)
            kind = "URI:DIR2-RO readcap to a mutable directory"
        elif s.startswith('URI:DIR2-Verifier:'):
        elif s.startswith(b'URI:DIR2-Verifier:'):
            return DirectoryURIVerifier.init_from_string(s)
        elif s.startswith('URI:DIR2-CHK:'):
        elif s.startswith(b'URI:DIR2-CHK:'):
            return ImmutableDirectoryURI.init_from_string(s)
        elif s.startswith('URI:DIR2-CHK-Verifier:'):
        elif s.startswith(b'URI:DIR2-CHK-Verifier:'):
            return ImmutableDirectoryURIVerifier.init_from_string(s)
        elif s.startswith('URI:DIR2-LIT:'):
        elif s.startswith(b'URI:DIR2-LIT:'):
            return LiteralDirectoryURI.init_from_string(s)
        elif s.startswith('URI:DIR2-MDMF:'):
        elif s.startswith(b'URI:DIR2-MDMF:'):
            if can_be_writeable:
                return MDMFDirectoryURI.init_from_string(s)
            kind = "URI:DIR2-MDMF directory writecap"
        elif s.startswith('URI:DIR2-MDMF-RO:'):
        elif s.startswith(b'URI:DIR2-MDMF-RO:'):
            if can_be_mutable:
                return ReadonlyMDMFDirectoryURI.init_from_string(s)
            kind = "URI:DIR2-MDMF-RO readcap to a mutable directory"
        elif s.startswith('URI:DIR2-MDMF-Verifier:'):
        elif s.startswith(b'URI:DIR2-MDMF-Verifier:'):
            return MDMFDirectoryURIVerifier.init_from_string(s)
        elif s.startswith('x-tahoe-future-test-writeable:') and not can_be_writeable:
        elif s.startswith(b'x-tahoe-future-test-writeable:') and not can_be_writeable:
            # For testing how future writeable caps would behave in read-only contexts.
            kind = "x-tahoe-future-test-writeable: testing cap"
        elif s.startswith('x-tahoe-future-test-mutable:') and not can_be_mutable:
        elif s.startswith(b'x-tahoe-future-test-mutable:') and not can_be_mutable:
            # For testing how future mutable readcaps would behave in immutable contexts.
            kind = "x-tahoe-future-test-mutable: testing cap"
        else:
@ -829,18 +851,22 @@ def is_uri(s):
        return False

def is_literal_file_uri(s):
    if not isinstance(s, str):
    if isinstance(s, unicode):
        s = s.encode("utf-8")
    if not isinstance(s, bytes):
        return False
    return (s.startswith('URI:LIT:') or
            s.startswith(ALLEGED_READONLY_PREFIX + 'URI:LIT:') or
            s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:LIT:'))
    return (s.startswith(b'URI:LIT:') or
            s.startswith(ALLEGED_READONLY_PREFIX + b'URI:LIT:') or
            s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))

def has_uri_prefix(s):
    if not isinstance(s, str):
    if isinstance(s, unicode):
        s = s.encode("utf-8")
    if not isinstance(s, bytes):
        return False
    return (s.startswith("URI:") or
            s.startswith(ALLEGED_READONLY_PREFIX + 'URI:') or
            s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:'))
    return (s.startswith(b"URI:") or
            s.startswith(ALLEGED_READONLY_PREFIX + b'URI:') or
            s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:'))


# These take the same keyword arguments as from_string above.
@ -850,26 +876,26 @@ def from_string_dirnode(s, **kwargs):
    _assert(IDirnodeURI.providedBy(u))
    return u

registerAdapter(from_string_dirnode, str, IDirnodeURI)
registerAdapter(from_string_dirnode, bytes, IDirnodeURI)

def from_string_filenode(s, **kwargs):
    u = from_string(s, **kwargs)
    _assert(IFileURI.providedBy(u))
    return u

registerAdapter(from_string_filenode, str, IFileURI)
registerAdapter(from_string_filenode, bytes, IFileURI)

def from_string_mutable_filenode(s, **kwargs):
    u = from_string(s, **kwargs)
    _assert(IMutableFileURI.providedBy(u))
    return u
registerAdapter(from_string_mutable_filenode, str, IMutableFileURI)
registerAdapter(from_string_mutable_filenode, bytes, IMutableFileURI)

def from_string_verifier(s, **kwargs):
    u = from_string(s, **kwargs)
    _assert(IVerifierURI.providedBy(u))
    return u
registerAdapter(from_string_verifier, str, IVerifierURI)
registerAdapter(from_string_verifier, bytes, IVerifierURI)


def pack_extension(data):
@ -877,34 +903,36 @@ def pack_extension(data):
    for k in sorted(data.keys()):
        value = data[k]
        if isinstance(value, (int, long)):
            value = "%d" % value
        assert isinstance(value, str), k
        assert re.match(r'^[a-zA-Z_\-]+$', k)
        pieces.append(k + ':' + hashutil.netstring(value))
    uri_extension = ''.join(pieces)
            value = b"%d" % value
        if isinstance(k, unicode):
            k = k.encode("utf-8")
        assert isinstance(value, bytes), k
        assert re.match(br'^[a-zA-Z_\-]+$', k)
        pieces.append(k + b':' + hashutil.netstring(value))
    uri_extension = b''.join(pieces)
    return uri_extension

def unpack_extension(data):
    d = {}
    while data:
        colon = data.index(':')
        colon = data.index(b':')
        key = data[:colon]
        data = data[colon+1:]

        colon = data.index(':')
        colon = data.index(b':')
        number = data[:colon]
        length = int(number)
        data = data[colon+1:]

        value = data[:length]
        assert data[length] == ','
        assert data[length:length+1] == b','
        data = data[length+1:]

        d[key] = value

    # convert certain things to numbers
    for intkey in ('size', 'segment_size', 'num_segments',
                   'needed_shares', 'total_shares'):
    for intkey in (b'size', b'segment_size', b'num_segments',
                   b'needed_shares', b'total_shares'):
        if intkey in d:
            d[intkey] = int(d[intkey])
    return d
@ -912,9 +940,9 @@ def unpack_extension(data):

def unpack_extension_readable(data):
    unpacked = unpack_extension(data)
    unpacked["UEB_hash"] = hashutil.uri_extension_hash(data)
    unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
    for k in sorted(unpacked.keys()):
        if 'hash' in k:
        if b'hash' in k:
            unpacked[k] = base32.b2a(unpacked[k])
    return unpacked

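pack_extension() frames each value with hashutil.netstring(). Assuming the usual netstring framing of "<length>:<payload>,", this sketch shows why the packed extension is deterministic (keys are sorted) and parseable without a schema::

    def netstring(s):
        # assumed framing; mirrors what hashutil.netstring is understood to do
        assert isinstance(s, bytes)
        return b"%d:%s," % (len(s), s)

    def pack(data):
        return b''.join(k + b':' + netstring(v) for k, v in sorted(data.items()))

    assert pack({b'size': b'1024', b'codec': b'crs'}) == b'codec:3:crs,size:4:1024,'
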
@ -1,6 +1,15 @@
"""
Track the port to Python 3.

The two easiest ways to run the part of the test suite which is expected to
pass on Python 3 are::

    $ tox -e py36

and::

    $ trial allmydata.test.python3_tests

This module has been ported to Python 3.
"""

@ -11,7 +20,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
@ -23,17 +32,23 @@ PORTED_MODULES = [
    "allmydata.crypto.rsa",
    "allmydata.crypto.util",
    "allmydata.hashtree",
    "allmydata.immutable.happiness_upload",
    "allmydata.interfaces",
    "allmydata.monitor",
    "allmydata.storage.crawler",
    "allmydata.test.common_py3",
    "allmydata.uri",
    "allmydata.util._python3",
    "allmydata.util.abbreviate",
    "allmydata.util.assertutil",
    "allmydata.util.base32",
    "allmydata.util.base62",
    "allmydata.util.deferredutil",
    "allmydata.util.fileutil",
    "allmydata.util.dictutil",
    "allmydata.util.encodingutil",
    "allmydata.util.gcutil",
    "allmydata.util.happinessutil",
    "allmydata.util.hashutil",
    "allmydata.util.humanreadable",
    "allmydata.util.iputil",
@ -54,9 +69,12 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_base32",
    "allmydata.test.test_base62",
    "allmydata.test.test_codec",
    "allmydata.test.test_crawler",
    "allmydata.test.test_crypto",
    "allmydata.test.test_deferredutil",
    "allmydata.test.test_dictutil",
    "allmydata.test.test_encodingutil",
    "allmydata.test.test_happiness",
    "allmydata.test.test_hashtree",
    "allmydata.test.test_hashutil",
    "allmydata.test.test_humanreadable",
@ -70,10 +88,7 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_spans",
    "allmydata.test.test_statistics",
    "allmydata.test.test_time_format",
    "allmydata.test.test_uri",
    "allmydata.test.test_util",
    "allmydata.test.test_version",
]


if __name__ == '__main__':
    from subprocess import check_call
    check_call(["trial"] + PORTED_TEST_MODULES)

@ -1,9 +1,27 @@
"""
Functions used to convert inputs from whatever encoding used in the system to
unicode and back.

Ported to Python 3.

Once Python 2 support is dropped, most of this module will obsolete, since
Unicode is the default everywhere in Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2, PY3, native_str
if PY2:
    # We omit str() because that seems too tricky to get right.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401

from past.builtins import unicode

import sys, os, re, locale
import unicodedata
import warnings

from allmydata.util.assertutil import precondition, _assert
from twisted.python import usage
@ -62,13 +80,17 @@ def _reload():

    check_encoding(io_encoding)

    is_unicode_platform = sys.platform in ["win32", "darwin"]
    is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]

    # Despite the Unicode-mode FilePath support added to Twisted in
    # <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
    # Unicode-mode FilePaths with INotify on non-Windows platforms
    # due to <https://twistedmatrix.com/trac/ticket/7928>.
    use_unicode_filepath = sys.platform == "win32"
    # Unicode-mode FilePaths with INotify on non-Windows platforms due to
    # <https://twistedmatrix.com/trac/ticket/7928>. Supposedly 7928 is fixed,
    # though... and Tahoe-LAFS doesn't use inotify anymore!
    #
    # In the interest of not breaking anything, this logic is unchanged for
    # Python 2, but on Python 3 the paths are always unicode, like it or not.
    use_unicode_filepath = PY3 or sys.platform == "win32"

_reload()

@ -89,7 +111,10 @@ def argv_to_unicode(s):
    """
    Decode given argv element to unicode. If this fails, raise a UsageError.
    """
    precondition(isinstance(s, str), s)
    if isinstance(s, unicode):
        return s

    precondition(isinstance(s, bytes), s)

    try:
        return unicode(s, io_encoding)
@ -114,39 +139,49 @@ def unicode_to_argv(s, mangle=False):
    If the argument is to be passed to a different process, then the 'mangle' argument
    should be true; on Windows, this uses a mangled encoding that will be reversed by
    code in runner.py.

    On Python 3, just return the string unchanged, since argv is unicode.
    """
    precondition(isinstance(s, unicode), s)
    if PY3:
        warnings.warn("This will be unnecessary once Python 2 is dropped.",
                      DeprecationWarning)
        return s

    if mangle and sys.platform == "win32":
        # This must be the same as 'mangle' in bin/tahoe-script.template.
        return str(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s))
        return bytes(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s), io_encoding)
    else:
        return s.encode(io_encoding)

def unicode_to_url(s):
    """
    Encode an unicode object used in an URL.
    Encode an unicode object used in an URL to bytes.
    """
    # According to RFC 2718, non-ascii characters in URLs must be UTF-8 encoded.

    # FIXME
    return to_str(s)
    return to_bytes(s)
    #precondition(isinstance(s, unicode), s)
    #return s.encode('utf-8')

def to_str(s):
    if s is None or isinstance(s, str):
def to_bytes(s):
    """Convert unicode to bytes.

    None and bytes are passed through unchanged.
    """
    if s is None or isinstance(s, bytes):
        return s
    return s.encode('utf-8')

def from_utf8_or_none(s):
    precondition(isinstance(s, (NoneType, str)), s)
    precondition(isinstance(s, bytes) or s is None, s)
    if s is None:
        return s
    return s.decode('utf-8')

PRINTABLE_ASCII = re.compile(r'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(r'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)
PRINTABLE_ASCII = re.compile(br'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(br'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)

def is_printable_ascii(s):
    return PRINTABLE_ASCII.search(s) is not None
@ -154,20 +189,27 @@ def is_printable_ascii(s):
def unicode_to_output(s):
    """
    Encode an unicode object for representation on stdout or stderr.

    On Python 3 just returns the unicode string unchanged, since encoding is
    the responsibility of stdout/stderr, they expect Unicode by default.
    """
    precondition(isinstance(s, unicode), s)
    if PY3:
        warnings.warn("This will be unnecessary once Python 2 is dropped.",
                      DeprecationWarning)
        return s

    try:
        out = s.encode(io_encoding)
    except (UnicodeEncodeError, UnicodeDecodeError):
        raise UnicodeEncodeError(io_encoding, s, 0, 0,
                                 "A string could not be encoded as %s for output to the terminal:\n%r" %
                                 (io_encoding, repr(s)))
        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
                                 native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
                                            (io_encoding, repr(s))))

    if PRINTABLE_8BIT.search(out) is None:
        raise UnicodeEncodeError(io_encoding, s, 0, 0,
                                 "A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
                                 (io_encoding, repr(s)))
        raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
                                 native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
                                            (io_encoding, repr(s))))
    return out
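argv_to_unicode() now accepts either type. A sketch of the two-branch shape, written with Python 3 semantics (where str is the text type) and a hypothetical io_encoding default::

    def argv_to_unicode_sketch(s, io_encoding="utf-8"):
        if isinstance(s, str):
            return s                  # Python 3: argv is already text
        return s.decode(io_encoding)  # Python 2: decode the byte string

    assert argv_to_unicode_sketch(u"n\u00fc.txt") == u"n\u00fc.txt"
    assert argv_to_unicode_sketch(b"n\xc3\xbc.txt") == u"n\u00fc.txt"
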
@ -188,14 +230,17 @@ def _unicode_escape(m, quote_newlines):
    else:
        return u'\\x%02x' % (codepoint,)

def _str_escape(m, quote_newlines):
def _bytes_escape(m, quote_newlines):
    """
    Takes a re match on bytes, the result is escaped bytes of group(0).
    """
    c = m.group(0)
    if c == '"' or c == '$' or c == '`' or c == '\\':
        return '\\' + c
    elif c == '\n' and not quote_newlines:
    if c == b'"' or c == b'$' or c == b'`' or c == b'\\':
        return b'\\' + c
    elif c == b'\n' and not quote_newlines:
        return c
    else:
        return '\\x%02x' % (ord(c),)
        return b'\\x%02x' % (ord(c),)

MUST_DOUBLE_QUOTE_NL = re.compile(u'[^\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
MUST_DOUBLE_QUOTE    = re.compile(u'[^\\n\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
@ -205,7 +250,7 @@ ESCAPABLE_UNICODE = re.compile(u'([\uD800-\uDBFF][\uDC00-\uDFFF])|' # valid sur
                               u'[^ !#\\x25-\\x5B\\x5D-\\x5F\\x61-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]',
                               re.DOTALL)

ESCAPABLE_8BIT = re.compile( r'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)
ESCAPABLE_8BIT = re.compile( br'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)

def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
    """
@ -220,33 +265,53 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
    Python-compatible backslash escaping is used.

    If not explicitly given, quote_newlines is True when quotemarks is True.

    On Python 3, returns Unicode strings.
    """
    precondition(isinstance(s, (str, unicode)), s)
    precondition(isinstance(s, (bytes, unicode)), s)
    encoding = encoding or io_encoding

    if quote_newlines is None:
        quote_newlines = quotemarks

    if isinstance(s, str):
        try:
            s = s.decode('utf-8')
        except UnicodeDecodeError:
            return 'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _str_escape(m, quote_newlines), s),)
    def _encode(s):
        if isinstance(s, bytes):
            try:
                s = s.decode('utf-8')
            except UnicodeDecodeError:
                return b'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _bytes_escape(m, quote_newlines), s),)

    must_double_quote = quote_newlines and MUST_DOUBLE_QUOTE_NL or MUST_DOUBLE_QUOTE
    if must_double_quote.search(s) is None:
        try:
            out = s.encode(encoding or io_encoding)
            if quotemarks or out.startswith('"'):
                return "'%s'" % (out,)
            else:
                return out
        except (UnicodeDecodeError, UnicodeEncodeError):
            pass
        must_double_quote = quote_newlines and MUST_DOUBLE_QUOTE_NL or MUST_DOUBLE_QUOTE
        if must_double_quote.search(s) is None:
            try:
                out = s.encode(encoding)
                if quotemarks or out.startswith(b'"'):
                    return b"'%s'" % (out,)
                else:
                    return out
            except (UnicodeDecodeError, UnicodeEncodeError):
                pass

        escaped = ESCAPABLE_UNICODE.sub(lambda m: _unicode_escape(m, quote_newlines), s)
        return b'"%s"' % (escaped.encode(encoding, 'backslashreplace'),)

    result = _encode(s)
    if PY3:
        # On Python 3 half of what this function does is unnecessary, since
        # sys.stdout typically expects Unicode. To ensure no encode errors, one
        # can do:
        #
        # sys.stdout.reconfigure(encoding=sys.stdout.encoding, errors="backslashreplace")
        #
        # Although the problem is that doesn't work in Python 3.6, only 3.7 or
        # later... For now not thinking about it, just returning unicode since
        # that is the right thing to do on Python 3.
        result = result.decode(encoding)
    return result

    escaped = ESCAPABLE_UNICODE.sub(lambda m: _unicode_escape(m, quote_newlines), s)
    return '"%s"' % (escaped.encode(encoding or io_encoding, 'backslashreplace'),)

def quote_path(path, quotemarks=True):
    return quote_output("/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
    return quote_output(b"/".join(map(to_bytes, path)), quotemarks=quotemarks, quote_newlines=True)

def quote_local_unicode_path(path, quotemarks=True):
    precondition(isinstance(path, unicode), path)
@ -275,7 +340,7 @@ def extend_filepath(fp, segments):
    return fp

def to_filepath(path):
    precondition(isinstance(path, unicode if use_unicode_filepath else basestring),
    precondition(isinstance(path, unicode if use_unicode_filepath else (bytes, unicode)),
                 path=path)

    if isinstance(path, unicode) and not use_unicode_filepath:
@ -290,7 +355,7 @@ def to_filepath(path):
    return FilePath(path)

def _decode(s):
    precondition(isinstance(s, basestring), s=s)
    precondition(isinstance(s, (bytes, unicode)), s=s)

    if isinstance(s, bytes):
        return s.decode(filesystem_encoding)
@ -356,3 +421,9 @@ def listdir_unicode(path):

def listdir_filepath(fp):
    return listdir_unicode(unicode_from_filepath(fp))


# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.
def normalize(namex):
    return unicodedata.normalize('NFC', namex)

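The two cases _encode() distinguishes are easiest to see side by side. A deliberately simplified stand-in for the real function (it ignores quotemarks and the double-quote path, and is not the project's implementation)::

    def demo_quote(s):
        if isinstance(s, bytes):
            try:
                s = s.decode('utf-8')
            except UnicodeDecodeError:
                escaped = ''.join(
                    chr(c) if 0x20 <= c <= 0x7e and c not in b'"$`\\'
                    else '\\x%02x' % c
                    for c in s)
                return 'b"%s"' % escaped
        return "'%s'" % s

    assert demo_quote(u'good') == "'good'"
    assert demo_quote(b'caf\xc3\xa9') == u"'caf\u00e9'"  # valid UTF-8 decodes
    assert demo_quote(b'\xff\xfe') == 'b"\\xff\\xfe"'    # invalid UTF-8 escapes
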
@ -1,9 +1,19 @@
from __future__ import print_function

"""
Ported to Python3.

Futz with files like a pro.
"""

from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # open is not here because we want to use native strings on Py2
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import sys, os, stat, tempfile, time, binascii
import six
from collections import namedtuple
@ -253,6 +263,9 @@ def move_into_place(source, dest):
    os.rename(source, dest)

def write_atomically(target, contents, mode="b"):
    assert (
        isinstance(contents, bytes) and "b" in mode or
        isinstance(contents, str) and "t" in mode or mode == ""), (type(contents), mode)
    with open(target+".tmp", "w"+mode) as f:
        f.write(contents)
    move_into_place(target+".tmp", target)
@ -277,7 +290,7 @@ def put_file(path, inf):
        outf.write(data)

def precondition_abspath(path):
    if not isinstance(path, unicode):
    if not isinstance(path, str):
        raise AssertionError("an abspath must be a Unicode string")

    if sys.platform == "win32":
@ -309,7 +322,7 @@ def abspath_expanduser_unicode(path, base=None, long_path=True):
    abspath_expanduser_unicode.
    On Windows, the result will be a long path unless long_path is given as False.
    """
    if not isinstance(path, unicode):
    if not isinstance(path, str):
        raise AssertionError("paths must be Unicode strings")
    if base is not None and long_path:
        precondition_abspath(base)
@ -330,7 +343,10 @@ def abspath_expanduser_unicode(path, base=None, long_path=True):

    if not os.path.isabs(path):
        if base is None:
            path = os.path.join(os.getcwdu(), path)
            cwd = os.getcwd()
            if PY2:
                cwd = cwd.decode('utf8')
            path = os.path.join(cwd, path)
        else:
            path = os.path.join(base, path)

@ -415,7 +431,7 @@ ERROR_ENVVAR_NOT_FOUND = 203
def windows_getenv(name):
    # Based on <http://stackoverflow.com/questions/2608200/problems-with-umlauts-in-python-appdata-environvent-variable/2608368#2608368>,
    # with improved error handling. Returns None if there is no enivronment variable of the given name.
    if not isinstance(name, unicode):
    if not isinstance(name, str):
        raise AssertionError("name must be Unicode")

    n = GetEnvironmentVariableW(name, None, 0)

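write_atomically() relies on move_into_place(), that is, on rename() being atomic on POSIX. A self-contained sketch of the same pattern using only the standard library (function name here is illustrative)::

    import os, tempfile

    def write_atomically_sketch(target, contents):
        # Write a sibling temp file, then rename over the target, so a
        # reader sees either the complete old file or the complete new one.
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(target)))
        try:
            with os.fdopen(fd, "wb") as f:
                f.write(contents)
            os.replace(tmp, target)  # os.replace() also overwrites on Windows
        except BaseException:
            os.unlink(tmp)
            raise
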
@ -1,7 +1,18 @@
"""
I contain utilities useful for calculating servers_of_happiness, and for
reporting it in messages
reporting it in messages.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # We omit dict, just in case newdict breaks things.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401

from copy import deepcopy
from allmydata.immutable.happiness_upload import residual_network
@ -51,7 +62,7 @@ def shares_by_server(servermap):
    dictionary of sets of shares, indexed by peerids.
    """
    ret = {}
    for shareid, peers in servermap.iteritems():
    for shareid, peers in servermap.items():
        assert isinstance(peers, set)
        for peerid in peers:
            ret.setdefault(peerid, set()).add(shareid)
@ -146,7 +157,7 @@ def servers_of_happiness(sharemap):
    # The implementation here is an adapation of an algorithm described in
    # "Introduction to Algorithms", Cormen et al, 2nd ed., pp 658-662.
    dim = len(graph)
    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
    residual_graph, residual_function = residual_network(graph, flow_function)
    while augmenting_path_for(residual_graph):
        path = augmenting_path_for(residual_graph)
@ -169,7 +180,7 @@ def servers_of_happiness(sharemap):
    # our graph, so we can stop after summing flow across those. The
    # value of a flow computed in this way is the size of a maximum
    # matching on the bipartite graph described above.
    return sum([flow_function[0][v] for v in xrange(1, num_servers+1)])
    return sum([flow_function[0][v] for v in range(1, num_servers+1)])

def _flow_network_for(servermap):
    """
@ -198,14 +209,14 @@ def _flow_network_for(servermap):
    graph = [] # index -> [index], an adjacency list
    # Add an entry at the top (index 0) that has an edge to every server
    # in servermap
    graph.append(servermap.keys())
    graph.append(list(servermap.keys()))
    # For each server, add an entry that has an edge to every share that it
    # contains (or will contain).
    for k in servermap:
        graph.append(servermap[k])
    # For each share, add an entry that has an edge to the sink.
    sink_num = num_servers + num_shares + 1
    for i in xrange(num_shares):
    for i in range(num_shares):
        graph.append([sink_num])
    # Add an empty entry for the sink, which has no outbound edges.
    graph.append([])
@ -231,8 +242,8 @@ def _reindex(servermap, base_index):
    # Number the shares
    for k in ret:
        for shnum in ret[k]:
            if not shares.has_key(shnum):
            if shnum not in shares:
                shares[shnum] = num
                num += 1
        ret[k] = map(lambda x: shares[x], ret[k])
        ret[k] = [shares[x] for x in ret[k]]
    return (ret, len(shares))

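The .items() change in shares_by_server() is behavior-preserving; the function simply inverts the share-to-peers map. A quick check of that inversion::

    def shares_by_server(servermap):
        ret = {}
        for shareid, peers in servermap.items():
            for peerid in peers:
                ret.setdefault(peerid, set()).add(shareid)
        return ret

    assert shares_by_server({0: {"A", "B"}, 1: {"B"}}) == \
        {"A": {0}, "B": {0, 1}}
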
@ -20,10 +20,9 @@ from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from functools import reduce

from allmydata.util.mathutil import round_sigfigs
import math
from functools import reduce
import sys

def pr_file_loss(p_list, k):

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -7,17 +7,17 @@
  </head>
  <body>

    <h1>File Check-And-Repair Results for SI=<span n:render="storage_index" /></h1>
    <h1>File Check-And-Repair Results for SI=<span t:render="storage_index" /></h1>

    <div n:render="summary" />
    <div t:render="summary" />

    <div n:render="repair_results" />
    <div t:render="repair_results" />

    <div n:render="post_repair_results" />
    <div t:render="post_repair_results" />

    <div n:render="maybe_pre_repair_results" />
    <div t:render="maybe_pre_repair_results" />

    <div n:render="return" />
    <div t:render="return_to" />

  </body>
</html>

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
  <head>
    <title>Tahoe-LAFS - Check Results</title>
    <link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -7,17 +7,17 @@
  </head>
  <body>

    <h1>File Check Results for SI=<span n:render="storage_index" /></h1>
    <h1>File Check Results for SI=<span t:render="storage_index" /></h1>

    <div>
      <span n:render="summary" />
      <span t:render="summary" />
    </div>

    <div n:render="repair" />
    <div t:render="repair" />

    <div n:render="results" />
    <div t:render="results" />

    <div n:render="return" />
    <div t:render="return_to" />

  </body>
</html>

@ -1,12 +1,35 @@

import time
import json
from nevow import rend, inevow, tags as T
from twisted.web import http, html
from allmydata.web.common import getxmlfile, get_arg, get_root, WebError

from twisted.web import (
http,
html,
)
from twisted.python.filepath import FilePath
from twisted.web.template import (
Element,
XMLFile,
renderer,
renderElement,
tags,
)
from allmydata.web.common import (
get_arg,
get_root,
WebError,
MultiFormatResource,
SlotsSequenceElement,
)
from allmydata.web.operations import ReloadMixin
from allmydata.interfaces import ICheckAndRepairResults, ICheckResults
from allmydata.util import base32, dictutil
from allmydata.interfaces import (
ICheckAndRepairResults,
ICheckResults,
)
from allmydata.util import (
base32,
dictutil,
)


def json_check_counts(r):
@ -64,53 +87,64 @@ def json_check_and_repair_results(r):
return data

class ResultsBase(object):
# self.client must point to the Client, so we can get nicknames and
# self._client must point to the Client, so we can get nicknames and
# determine the permuted peer order

def _join_pathstring(self, path):
"""
:param tuple path: a path represented by a tuple, such as
``(u'some', u'dir', u'file')``.

:return: a string joined by path separators, such as
``u'some/dir/file'``.
"""
if path:
pathstring = "/".join(self._html(path))
else:
pathstring = "<root>"
return pathstring

def _render_results(self, ctx, cr):
def _render_results(self, req, cr):
assert ICheckResults(cr)
c = self.client
c = self._client
sb = c.get_storage_broker()
r = []
def add(name, value):
r.append(T.li[name + ": ", value])
r.append(tags.li(name + ": ", value))

add("Report", tags.pre("\n".join(self._html(cr.get_report()))))

add("Report", T.pre["\n".join(self._html(cr.get_report()))])
add("Share Counts",
"need %d-of-%d, have %d" % (cr.get_encoding_needed(),
cr.get_encoding_expected(),
cr.get_share_counter_good()))
add("Happiness Level", cr.get_happiness())
add("Hosts with good shares", cr.get_host_counter_good_shares())
add("Happiness Level", str(cr.get_happiness()))
add("Hosts with good shares", str(cr.get_host_counter_good_shares()))

if cr.get_corrupt_shares():
badsharemap = []
for (s, si, shnum) in cr.get_corrupt_shares():
d = T.tr[T.td["sh#%d" % shnum],
T.td[T.div(class_="nickname")[s.get_nickname()],
T.div(class_="nodeid")[T.tt[s.get_name()]]],
]
d = tags.tr(tags.td("sh#%d" % shnum),
tags.td(tags.div(s.get_nickname(), class_="nickname"),
tags.div(tags.tt(s.get_name()), class_="nodeid")),)
badsharemap.append(d)
add("Corrupt shares", T.table()[
T.tr[T.th["Share ID"],
T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]]],
badsharemap])
add("Corrupt shares",
tags.table(
tags.tr(tags.th("Share ID"),
tags.th((tags.div("Nickname"), tags.div("Node ID", class_="nodeid")), class_="nickname-and-peerid")),
badsharemap))
else:
add("Corrupt shares", "none")

add("Wrong Shares", cr.get_share_counter_wrong())
add("Wrong Shares", str(cr.get_share_counter_wrong()))

sharemap_data = []
shares_on_server = dictutil.DictOfSets()

# FIXME: The two tables below contain nickname-and-nodeid table column markup which is duplicated with each other, introducer.xhtml, and deep-check-results.xhtml. All of these (and any other presentations of nickname-and-nodeid) should be combined.
# FIXME: The two tables below contain nickname-and-nodeid
# table column markup which is duplicated with each other,
# introducer.xhtml, and deep-check-results.xhtml. All of these
# (and any other presentations of nickname-and-nodeid) should be combined.

for shareid in sorted(cr.get_sharemap().keys()):
servers = sorted(cr.get_sharemap()[shareid],
@ -119,19 +153,20 @@ class ResultsBase(object):
shares_on_server.add(s, shareid)
shareid_s = ""
if i == 0:
shareid_s = shareid
d = T.tr[T.td[shareid_s],
T.td[T.div(class_="nickname")[s.get_nickname()],
T.div(class_="nodeid")[T.tt[s.get_name()]]]
]
shareid_s = str(shareid)
d = tags.tr(tags.td(shareid_s),
tags.td(tags.div(s.get_nickname(), class_="nickname"),
tags.div(tags.tt(s.get_name()), class_="nodeid")))
sharemap_data.append(d)

add("Good Shares (sorted in share order)",
T.table()[T.tr[T.th["Share ID"], T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]]],
sharemap_data])
tags.table(tags.tr(tags.th("Share ID"),
tags.th(tags.div("Nickname"),
tags.div("Node ID", class_="nodeid"), class_="nickname-and-peerid")),
sharemap_data))


add("Recoverable Versions", cr.get_version_counter_recoverable())
add("Unrecoverable Versions", cr.get_version_counter_unrecoverable())
add("Recoverable Versions", str(cr.get_version_counter_recoverable()))
add("Unrecoverable Versions", str(cr.get_version_counter_unrecoverable()))

# this table is sorted by permuted order
permuted_servers = [s
@ -144,20 +179,23 @@ class ResultsBase(object):
for s in permuted_servers:
shareids = list(shares_on_server.get(s, []))
shareids.reverse()
shareids_s = [ T.tt[shareid, " "] for shareid in sorted(shareids) ]
d = T.tr[T.td[T.div(class_="nickname")[s.get_nickname()],
T.div(class_="nodeid")[T.tt[s.get_name()]]],
T.td[shareids_s],
]
shareids_s = [tags.tt(str(shareid), " ") for shareid in sorted(shareids)]

d = tags.tr(tags.td(tags.div(s.get_nickname(), class_="nickname"),
tags.div(tags.tt(s.get_name()), class_="nodeid")),
tags.td(shareids_s), )
servermap.append(d)
num_shares_left -= len(shareids)
if not num_shares_left:
break
add("Share Balancing (servers in permuted order)",
T.table()[T.tr[T.th(class_="nickname-and-peerid")[T.div["Nickname"], T.div(class_="nodeid")["Node ID"]], T.th["Share IDs"]],
servermap])

return T.ul[r]
add("Share Balancing (servers in permuted order)",
tags.table(tags.tr(tags.th(tags.div("Nickname"),
tags.div("Node ID", class_="nodeid"), class_="nickname-and-peerid"),
tags.th("Share IDs")),
servermap))

return tags.ul(r)

def _html(self, s):
if isinstance(s, (str, unicode)):
@ -165,91 +203,114 @@ class ResultsBase(object):
assert isinstance(s, (list, tuple))
return [html.escape(w) for w in s]

def want_json(self, ctx):
output = get_arg(inevow.IRequest(ctx), "output", "").lower()
if output.lower() == "json":
return True
return False

def _render_si_link(self, ctx, storage_index):
def _render_si_link(self, req, storage_index):
si_s = base32.b2a(storage_index)
req = inevow.IRequest(ctx)
ophandle = req.prepath[-1]
target = "%s/operations/%s/%s" % (get_root(ctx), ophandle, si_s)
output = get_arg(ctx, "output")
target = "%s/operations/%s/%s" % (get_root(req), ophandle, si_s)
output = get_arg(req, "output")
if output:
target = target + "?output=%s" % output
return T.a(href=target)[si_s]
return tags.a(si_s, href=target)

class LiteralCheckResultsRenderer(rend.Page, ResultsBase):
docFactory = getxmlfile("literal-check-results.xhtml")

class LiteralCheckResultsRenderer(MultiFormatResource, ResultsBase):

formatArgument = "output"

def __init__(self, client):
self.client = client
rend.Page.__init__(self, client)
"""
:param allmydata.interfaces.IStatsProducer client: stats provider.
"""
super(LiteralCheckResultsRenderer, self).__init__()
self._client = client

def renderHTTP(self, ctx):
if self.want_json(ctx):
return self.json(ctx)
return rend.Page.renderHTTP(self, ctx)
def render_HTML(self, req):
return renderElement(req, LiteralCheckResultsRendererElement())

def json(self, ctx):
inevow.IRequest(ctx).setHeader("content-type", "text/plain")
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_results(None)
return json.dumps(data, indent=1) + "\n"

def render_return(self, ctx, data):
req = inevow.IRequest(ctx)

class LiteralCheckResultsRendererElement(Element):

loader = XMLFile(FilePath(__file__).sibling("literal-check-results.xhtml"))

def __init__(self):
super(LiteralCheckResultsRendererElement, self).__init__()

@renderer
def return_to(self, req, tag):
return_to = get_arg(req, "return_to", None)
if return_to:
return T.div[T.a(href=return_to)["Return to file."]]
return tags.div(tags.a("Return to file.", href=return_to))
return ""


class CheckerBase(object):

def renderHTTP(self, ctx):
if self.want_json(ctx):
return self.json(ctx)
return rend.Page.renderHTTP(self, ctx)
@renderer
def storage_index(self, req, tag):
return self._results.get_storage_index_string()

def render_storage_index(self, ctx, data):
return self.r.get_storage_index_string()

def render_return(self, ctx, data):
req = inevow.IRequest(ctx)
@renderer
def return_to(self, req, tag):
return_to = get_arg(req, "return_to", None)
if return_to:
return T.div[T.a(href=return_to)["Return to file/directory."]]
return tags.div(tags.a("Return to file/directory.", href=return_to))
return ""

class CheckResultsRenderer(CheckerBase, rend.Page, ResultsBase):
docFactory = getxmlfile("check-results.xhtml")

class CheckResultsRenderer(MultiFormatResource):

formatArgument = "output"

def __init__(self, client, results):
self.client = client
self.r = ICheckResults(results)
rend.Page.__init__(self, results)
"""
:param allmydata.interfaces.IStatsProducer client: stats provider.
:param allmydata.interfaces.ICheckResults results: results of check/verify operation.
"""
super(CheckResultsRenderer, self).__init__()
self._client = client
self._results = ICheckResults(results)

def json(self, ctx):
inevow.IRequest(ctx).setHeader("content-type", "text/plain")
data = json_check_results(self.r)
def render_HTML(self, req):
return renderElement(req, CheckResultsRendererElement(self._client, self._results))

def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_results(self._results)
return json.dumps(data, indent=1) + "\n"

def render_summary(self, ctx, data):

class CheckResultsRendererElement(Element, CheckerBase, ResultsBase):

loader = XMLFile(FilePath(__file__).sibling("check-results.xhtml"))

def __init__(self, client, results):
super(CheckResultsRendererElement, self).__init__()
self._client = client
self._results = results

@renderer
def summary(self, req, tag):
results = []
if data.is_healthy():
if self._results.is_healthy():
results.append("Healthy")
elif data.is_recoverable():
elif self._results.is_recoverable():
results.append("Not Healthy!")
else:
results.append("Not Recoverable!")
results.append(" : ")
results.append(self._html(data.get_summary()))
return ctx.tag[results]
results.append(self._html(self._results.get_summary()))
return tag(results)

def render_repair(self, ctx, data):
if data.is_healthy():
@renderer
def repair(self, req, tag):
if self._results.is_healthy():
return ""

#repair = T.form(action=".", method="post",
# enctype="multipart/form-data")[
# T.fieldset[
@ -258,30 +319,52 @@ class CheckResultsRenderer(CheckerBase, rend.Page, ResultsBase):
# T.input(type="submit", value="Repair"),
# ]]
#return ctx.tag[repair]

return "" # repair button disabled until we make it work correctly,
# see #622 for details

def render_results(self, ctx, data):
cr = self._render_results(ctx, data)
return ctx.tag[cr]
@renderer
def results(self, req, tag):
cr = self._render_results(req, self._results)
return tag(cr)

class CheckAndRepairResultsRenderer(CheckerBase, rend.Page, ResultsBase):
docFactory = getxmlfile("check-and-repair-results.xhtml")
class CheckAndRepairResultsRenderer(MultiFormatResource):

formatArgument = "output"

def __init__(self, client, results):
self.client = client
self.r = None
"""
:param allmydata.interfaces.IStatsProducer client: stats provider.
:param allmydata.interfaces.ICheckResults results: check/verify results.
"""
super(CheckAndRepairResultsRenderer, self).__init__()
self._client = client
self._results = None
if results:
self.r = ICheckAndRepairResults(results)
rend.Page.__init__(self, results)
self._results = ICheckAndRepairResults(results)

def json(self, ctx):
inevow.IRequest(ctx).setHeader("content-type", "text/plain")
data = json_check_and_repair_results(self.r)
def render_HTML(self, req):
elem = CheckAndRepairResultsRendererElement(self._client, self._results)
return renderElement(req, elem)

def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_and_repair_results(self._results)
return json.dumps(data, indent=1) + "\n"

def render_summary(self, ctx, data):
cr = data.get_post_repair_results()

class CheckAndRepairResultsRendererElement(Element, CheckerBase, ResultsBase):

loader = XMLFile(FilePath(__file__).sibling("check-and-repair-results.xhtml"))

def __init__(self, client, results):
super(CheckAndRepairResultsRendererElement, self).__init__()
self._client = client
self._results = results

@renderer
def summary(self, req, tag):
cr = self._results.get_post_repair_results()
results = []
if cr.is_healthy():
results.append("Healthy")
@ -291,35 +374,44 @@ class CheckAndRepairResultsRenderer(CheckerBase, rend.Page, ResultsBase):
results.append("Not Recoverable!")
results.append(" : ")
results.append(self._html(cr.get_summary()))
return ctx.tag[results]
return tag(results)

def render_repair_results(self, ctx, data):
if data.get_repair_attempted():
if data.get_repair_successful():
return ctx.tag["Repair successful"]
@renderer
def repair_results(self, req, tag):
if self._results.get_repair_attempted():
if self._results.get_repair_successful():
return tag("Repair successful")
else:
return ctx.tag["Repair unsuccessful"]
return ctx.tag["No repair necessary"]
return tag("Repair unsuccessful")
return tag("No repair necessary")

def render_post_repair_results(self, ctx, data):
cr = self._render_results(ctx, data.get_post_repair_results())
return ctx.tag[T.div["Post-Repair Checker Results:"], cr]
@renderer
def post_repair_results(self, req, tag):
cr = self._render_results(req, self._results.get_post_repair_results())
return tag(tags.div("Post-Repair Checker Results:"), cr)

def render_maybe_pre_repair_results(self, ctx, data):
if data.get_repair_attempted():
cr = self._render_results(ctx, data.get_pre_repair_results())
return ctx.tag[T.div["Pre-Repair Checker Results:"], cr]
@renderer
def maybe_pre_repair_results(self, req, tag):
if self._results.get_repair_attempted():
cr = self._render_results(req, self._results.get_pre_repair_results())
return tag(tags.div("Pre-Repair Checker Results:"), cr)
return ""


class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
docFactory = getxmlfile("deep-check-results.xhtml")
class DeepCheckResultsRenderer(MultiFormatResource):

formatArgument = "output"

def __init__(self, client, monitor):
self.client = client
"""
:param allmydata.interfaces.IStatsProducer client: stats provider.
:param allmydata.monitor.IMonitor monitor: status, progress, and cancellation provider.
"""
super(DeepCheckResultsRenderer, self).__init__()
self._client = client
self.monitor = monitor

def childFactory(self, ctx, name):
def getChild(self, name, req):
if not name:
return self
# /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
@ -327,19 +419,18 @@ class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
si = base32.a2b(name)
r = self.monitor.get_status()
try:
return CheckResultsRenderer(self.client,
return CheckResultsRenderer(self._client,
r.get_results_for_storage_index(si))
except KeyError:
raise WebError("No detailed results for SI %s" % html.escape(name),
http.NOT_FOUND)

def renderHTTP(self, ctx):
if self.want_json(ctx):
return self.json(ctx)
return rend.Page.renderHTTP(self, ctx)
def render_HTML(self, req):
elem = DeepCheckResultsRendererElement(self.monitor)
return renderElement(req, elem)

def json(self, ctx):
inevow.IRequest(ctx).setHeader("content-type", "text/plain")
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = {}
data["finished"] = self.monitor.is_finished()
res = self.monitor.get_status()
@ -361,116 +452,170 @@ class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
data["stats"] = res.get_stats()
return json.dumps(data, indent=1) + "\n"

def render_root_storage_index(self, ctx, data):

class DeepCheckResultsRendererElement(Element, ResultsBase, ReloadMixin):

loader = XMLFile(FilePath(__file__).sibling("deep-check-results.xhtml"))

def __init__(self, monitor):
super(DeepCheckResultsRendererElement, self).__init__()
self.monitor = monitor

@renderer
def root_storage_index(self, req, tag):
if not self.monitor.get_status():
return ""
return self.monitor.get_status().get_root_storage_index_string()

def data_objects_checked(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-checked"]
def data_objects_healthy(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-healthy"]
def data_objects_unhealthy(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-unhealthy"]
def data_objects_unrecoverable(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-unrecoverable"]
def _get_monitor_counter(self, name):
if not self.monitor.get_status():
return ""
return str(self.monitor.get_status().get_counters().get(name))

def data_count_corrupt_shares(self, ctx, data):
return self.monitor.get_status().get_counters()["count-corrupt-shares"]
@renderer
def objects_checked(self, req, tag):
return self._get_monitor_counter("count-objects-checked")

def render_problems_p(self, ctx, data):
c = self.monitor.get_status().get_counters()
if c["count-objects-unhealthy"]:
return ctx.tag
@renderer
def objects_healthy(self, req, tag):
return self._get_monitor_counter("count-objects-healthy")

@renderer
def objects_unhealthy(self, req, tag):
return self._get_monitor_counter("count-objects-unhealthy")

@renderer
def objects_unrecoverable(self, req, tag):
return self._get_monitor_counter("count-objects-unrecoverable")

@renderer
def count_corrupt_shares(self, req, tag):
return self._get_monitor_counter("count-corrupt-shares")

@renderer
def problems_p(self, req, tag):
if self._get_monitor_counter("count-objects-unhealthy"):
return tag
return ""

def data_problems(self, ctx, data):
@renderer
def problems(self, req, tag):
all_objects = self.monitor.get_status().get_all_results()
problems = []

for path in sorted(all_objects.keys()):
cr = all_objects[path]
assert ICheckResults.providedBy(cr)
if not cr.is_healthy():
yield path, cr
summary_text = ""
summary = cr.get_summary()
if summary:
summary_text = ": " + summary
summary_text += " [SI: %s]" % cr.get_storage_index_string()
problems.append({
# Not sure self._join_pathstring(path) is the
# right thing to use here.
"problem": self._join_pathstring(path) + self._html(summary_text),
})

def render_problem(self, ctx, data):
path, cr = data
summary_text = ""
summary = cr.get_summary()
if summary:
summary_text = ": " + summary
summary_text += " [SI: %s]" % cr.get_storage_index_string()
return ctx.tag[self._join_pathstring(path), self._html(summary_text)]
return SlotsSequenceElement(tag, problems)


def render_servers_with_corrupt_shares_p(self, ctx, data):
if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
return ctx.tag
@renderer
def servers_with_corrupt_shares_p(self, req, tag):
if self._get_monitor_counter("count-corrupt-shares"):
return tag
return ""

def data_servers_with_corrupt_shares(self, ctx, data):
@renderer
def servers_with_corrupt_shares(self, req, tag):
servers = [s
for (s, storage_index, sharenum)
in self.monitor.get_status().get_corrupt_shares()]
servers.sort(key=lambda s: s.get_longname())
return servers

def render_server_problem(self, ctx, server):
data = [server.get_name()]
nickname = server.get_nickname()
if nickname:
data.append(" (%s)" % self._html(nickname))
return ctx.tag[data]
problems = []

for server in servers:
name = [server.get_name()]
nickname = server.get_nickname()
if nickname:
name.append(" (%s)" % self._html(nickname))
problems.append({"problem": name})

def render_corrupt_shares_p(self, ctx, data):
if self.monitor.get_status().get_counters()["count-corrupt-shares"]:
return ctx.tag
return SlotsSequenceElement(tag, problems)

@renderer
def corrupt_shares_p(self, req, tag):
if self._get_monitor_counter("count-corrupt-shares"):
return tag
return ""
def data_corrupt_shares(self, ctx, data):
return self.monitor.get_status().get_corrupt_shares()
def render_share_problem(self, ctx, data):
server, storage_index, sharenum = data
nickname = server.get_nickname()
ctx.fillSlots("serverid", server.get_name())
if nickname:
ctx.fillSlots("nickname", self._html(nickname))
ctx.fillSlots("si", self._render_si_link(ctx, storage_index))
ctx.fillSlots("shnum", str(sharenum))
return ctx.tag

def render_return(self, ctx, data):
req = inevow.IRequest(ctx)
@renderer
def corrupt_shares(self, req, tag):
shares = self.monitor.get_status().get_corrupt_shares()
problems = []

for share in shares:
server, storage_index, sharenum = share
nickname = server.get_nickname()
problem = {
"serverid": server.get_name(),
"nickname": self._html(nickname),
"si": self._render_si_link(req, storage_index),
"shnum": str(sharenum),
}
problems.append(problem)

return SlotsSequenceElement(tag, problems)

@renderer
def return_to(self, req, tag):
return_to = get_arg(req, "return_to", None)
if return_to:
return T.div[T.a(href=return_to)["Return to file/directory."]]
return tags.div(tags.a("Return to file/directory.", href=return_to))
return ""

def data_all_objects(self, ctx, data):
r = self.monitor.get_status().get_all_results()
for path in sorted(r.keys()):
yield (path, r[path])
@renderer
def all_objects(self, req, tag):
results = self.monitor.get_status().get_all_results()
objects = []

def render_object(self, ctx, data):
path, r = data
ctx.fillSlots("path", self._join_pathstring(path))
ctx.fillSlots("healthy", str(r.is_healthy()))
ctx.fillSlots("recoverable", str(r.is_recoverable()))
storage_index = r.get_storage_index()
ctx.fillSlots("storage_index", self._render_si_link(ctx, storage_index))
ctx.fillSlots("summary", self._html(r.get_summary()))
return ctx.tag
for path in sorted(results.keys()):
result = results.get(path)
storage_index = result.get_storage_index()
object = {
"path": self._join_pathstring(path),
"healthy": str(result.is_healthy()),
"recoverable": str(result.is_recoverable()),
"storage_index": self._render_si_link(req, storage_index),
"summary": self._html(result.get_summary()),
}
objects.append(object)

def render_runtime(self, ctx, data):
req = inevow.IRequest(ctx)
runtime = time.time() - req.processing_started_timestamp
return ctx.tag["runtime: %s seconds" % runtime]
return SlotsSequenceElement(tag, objects)

class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
docFactory = getxmlfile("deep-check-and-repair-results.xhtml")
@renderer
def runtime(self, req, tag):
runtime = 'unknown'
if hasattr(req, 'processing_started_timestamp'):
runtime = time.time() - req.processing_started_timestamp
return tag("runtime: %s seconds" % runtime)


class DeepCheckAndRepairResultsRenderer(MultiFormatResource):

formatArgument = "output"

def __init__(self, client, monitor):
self.client = client
"""
:param allmydata.interfaces.IStatsProducer client: stats provider.
:param allmydata.monitor.IMonitor monitor: status, progress, and cancellation provider.
"""
super(DeepCheckAndRepairResultsRenderer, self).__init__()
self._client = client
self.monitor = monitor

def childFactory(self, ctx, name):
def getChild(self, name, req):
if not name:
return self
# /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
@ -479,18 +624,17 @@ class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
s = self.monitor.get_status()
try:
results = s.get_results_for_storage_index(si)
return CheckAndRepairResultsRenderer(self.client, results)
return CheckAndRepairResultsRenderer(self._client, results)
except KeyError:
raise WebError("No detailed results for SI %s" % html.escape(name),
http.NOT_FOUND)

def renderHTTP(self, ctx):
if self.want_json(ctx):
return self.json(ctx)
return rend.Page.renderHTTP(self, ctx)
def render_HTML(self, req):
elem = DeepCheckAndRepairResultsRendererElement(self.monitor)
return renderElement(req, elem)

def json(self, ctx):
inevow.IRequest(ctx).setHeader("content-type", "text/plain")
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
res = self.monitor.get_status()
data = {}
data["finished"] = self.monitor.is_finished()
@ -531,119 +675,132 @@ class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
data["stats"] = res.get_stats()
return json.dumps(data, indent=1) + "\n"

def render_root_storage_index(self, ctx, data):
return self.monitor.get_status().get_root_storage_index_string()

def data_objects_checked(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-checked"]
class DeepCheckAndRepairResultsRendererElement(DeepCheckResultsRendererElement):
"""
The page generated here has several elements common to "deep check
results" page; hence the code reuse.
"""

def data_objects_healthy(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-healthy-pre-repair"]
def data_objects_unhealthy(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-unhealthy-pre-repair"]
def data_corrupt_shares(self, ctx, data):
return self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]
loader = XMLFile(FilePath(__file__).sibling("deep-check-and-repair-results.xhtml"))

def data_repairs_attempted(self, ctx, data):
return self.monitor.get_status().get_counters()["count-repairs-attempted"]
def data_repairs_successful(self, ctx, data):
return self.monitor.get_status().get_counters()["count-repairs-successful"]
def data_repairs_unsuccessful(self, ctx, data):
return self.monitor.get_status().get_counters()["count-repairs-unsuccessful"]
def __init__(self, monitor):
super(DeepCheckAndRepairResultsRendererElement, self).__init__(monitor)
self.monitor = monitor

def data_objects_healthy_post(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-healthy-post-repair"]
def data_objects_unhealthy_post(self, ctx, data):
return self.monitor.get_status().get_counters()["count-objects-unhealthy-post-repair"]
def data_corrupt_shares_post(self, ctx, data):
return self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]
@renderer
def objects_healthy(self, req, tag):
return self._get_monitor_counter("count-objects-healthy-pre-repair")

def render_pre_repair_problems_p(self, ctx, data):
c = self.monitor.get_status().get_counters()
if c["count-objects-unhealthy-pre-repair"]:
return ctx.tag
@renderer
def objects_unhealthy(self, req, tag):
return self._get_monitor_counter("count-objects-unhealthy-pre-repair")

@renderer
def corrupt_shares(self, req, tag):
return self._get_monitor_counter("count-corrupt-shares-pre-repair")

@renderer
def repairs_attempted(self, req, tag):
return self._get_monitor_counter("count-repairs-attempted")

@renderer
def repairs_successful(self, req, tag):
return self._get_monitor_counter("count-repairs-successful")

@renderer
def repairs_unsuccessful(self, req, tag):
return self._get_monitor_counter("count-repairs-unsuccessful")

@renderer
def objects_healthy_post(self, req, tag):
return self._get_monitor_counter("count-objects-healthy-post-repair")

@renderer
def objects_unhealthy_post(self, req, tag):
return self._get_monitor_counter("count-objects-unhealthy-post-repair")

@renderer
def corrupt_shares_post(self, req, tag):
return self._get_monitor_counter("count-corrupt-shares-post-repair")

@renderer
def pre_repair_problems_p(self, req, tag):
if self._get_monitor_counter("count-objects-unhealthy-pre-repair"):
return tag
return ""

def data_pre_repair_problems(self, ctx, data):
@renderer
def pre_repair_problems(self, req, tag):
all_objects = self.monitor.get_status().get_all_results()
problems = []

for path in sorted(all_objects.keys()):
r = all_objects[path]
assert ICheckAndRepairResults.providedBy(r)
cr = r.get_pre_repair_results()
if not cr.is_healthy():
yield path, cr
problem = self._join_pathstring(path), ": ", self._html(cr.get_summary())
problems.append({"problem": problem})

def render_problem(self, ctx, data):
path, cr = data
return ctx.tag[self._join_pathstring(path), ": ",
self._html(cr.get_summary())]
return SlotsSequenceElement(tag, problems)

def render_post_repair_problems_p(self, ctx, data):
c = self.monitor.get_status().get_counters()
if (c["count-objects-unhealthy-post-repair"]
or c["count-corrupt-shares-post-repair"]):
return ctx.tag
@renderer
def post_repair_problems_p(self, req, tag):
if (self._get_monitor_counter("count-objects-unhealthy-post-repair")
or self._get_monitor_counter("count-corrupt-shares-post-repair")):
return tag
return ""

def data_post_repair_problems(self, ctx, data):
@renderer
def post_repair_problems(self, req, tag):
all_objects = self.monitor.get_status().get_all_results()
problems = []

for path in sorted(all_objects.keys()):
r = all_objects[path]
assert ICheckAndRepairResults.providedBy(r)
cr = r.get_post_repair_results()
if not cr.is_healthy():
yield path, cr
problem = self._join_pathstring(path), ": ", self._html(cr.get_summary())
problems.append({"problem": problem})

def render_servers_with_corrupt_shares_p(self, ctx, data):
if self.monitor.get_status().get_counters()["count-corrupt-shares-pre-repair"]:
return ctx.tag
return ""
def data_servers_with_corrupt_shares(self, ctx, data):
return [] # TODO
def render_server_problem(self, ctx, data):
pass
return SlotsSequenceElement(tag, problems)


def render_remaining_corrupt_shares_p(self, ctx, data):
if self.monitor.get_status().get_counters()["count-corrupt-shares-post-repair"]:
return ctx.tag
return ""
def data_post_repair_corrupt_shares(self, ctx, data):
return [] # TODO

def render_share_problem(self, ctx, data):
pass


def render_return(self, ctx, data):
req = inevow.IRequest(ctx)
return_to = get_arg(req, "return_to", None)
if return_to:
return T.div[T.a(href=return_to)["Return to file/directory."]]
@renderer
def remaining_corrupt_shares_p(self, req, tag):
if self._get_monitor_counter("count-corrupt-shares-post-repair"):
return tag
return ""

def data_all_objects(self, ctx, data):
r = self.monitor.get_status().get_all_results()
for path in sorted(r.keys()):
yield (path, r[path])
@renderer
def post_repair_corrupt_shares(self, req, tag):
# TODO: this was not implemented before porting to
# twisted.web.template; leaving it as such.
#
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3371
corrupt = [{"share":"unimplemented"}]
return SlotsSequenceElement(tag, corrupt)

def render_object(self, ctx, data):
path, r = data
ctx.fillSlots("path", self._join_pathstring(path))
ctx.fillSlots("healthy_pre_repair",
str(r.get_pre_repair_results().is_healthy()))
ctx.fillSlots("recoverable_pre_repair",
str(r.get_pre_repair_results().is_recoverable()))
ctx.fillSlots("healthy_post_repair",
str(r.get_post_repair_results().is_healthy()))
storage_index = r.get_storage_index()
ctx.fillSlots("storage_index",
self._render_si_link(ctx, storage_index))
ctx.fillSlots("summary",
self._html(r.get_pre_repair_results().get_summary()))
return ctx.tag
@renderer
def all_objects(self, req, tag):
results = {}
if self.monitor.get_status():
results = self.monitor.get_status().get_all_results()
objects = []

for path in sorted(results.keys()):
result = results[path]
storage_index = result.get_storage_index()
obj = {
"path": self._join_pathstring(path),
"healthy_pre_repair": str(result.get_pre_repair_results().is_healthy()),
"recoverable_pre_repair": str(result.get_pre_repair_results().is_recoverable()),
"healthy_post_repair": str(result.get_post_repair_results().is_healthy()),
"storage_index": self._render_si_link(req, storage_index),
"summary": self._html(result.get_pre_repair_results().get_summary()),
}
objects.append(obj)

return SlotsSequenceElement(tag, objects)

def render_runtime(self, ctx, data):
req = inevow.IRequest(ctx)
runtime = time.time() - req.processing_started_timestamp
return ctx.tag["runtime: %s seconds" % runtime]

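(Aside: the renderers above replace nevow's renderHTTP()/want_json() pair with
MultiFormatResource, which picks a render_FORMAT method based on the request
argument named by formatArgument ("output" here). A minimal sketch of that
dispatch idea, assuming nothing about the real MultiFormatResource beyond what
the diff shows; the class below is invented for illustration:

from twisted.web.resource import Resource

class FormatDispatchingResource(Resource):
    formatArgument = "output"

    def render_GET(self, req):
        # e.g. ?output=json selects render_JSON; anything else falls back
        # to render_HTML.
        args = req.args.get(self.formatArgument.encode("ascii"), [b""])
        fmt = (args[0] or b"html").decode("ascii").upper()
        return getattr(self, "render_" + fmt, self.render_HTML)(req)

    def render_HTML(self, req):
        return b"<html>...</html>"

    def render_JSON(self, req):
        req.setHeader("content-type", "text/plain")
        return b"{}\n"
)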
@ -18,7 +18,7 @@ from allmydata.mutable.common import UnrecoverableFileError
from allmydata.util import abbreviate
from allmydata.util.hashutil import timing_safe_compare
from allmydata.util.time_format import format_time, format_delta
from allmydata.util.encodingutil import to_str, quote_output
from allmydata.util.encodingutil import to_bytes, quote_output


def get_filenode_metadata(filenode):
@ -133,8 +133,8 @@ def convert_children_json(nodemaker, children_json):
data = json.loads(children_json)
for (namex, (ctype, propdict)) in data.iteritems():
namex = unicode(namex)
writecap = to_str(propdict.get("rw_uri"))
readcap = to_str(propdict.get("ro_uri"))
writecap = to_bytes(propdict.get("rw_uri"))
readcap = to_bytes(propdict.get("ro_uri"))
metadata = propdict.get("metadata", {})
# name= argument is just for error reporting
childnode = nodemaker.create_from_cap(writecap, readcap, name=namex)

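(Aside: the to_str -> to_bytes swaps above exist because JSON decoding hands
back text, while capability strings are handled as bytes on Python 3. A hedged
sketch of the conversion involved; to_bytes_sketch is an invented stand-in,
not allmydata's exact to_bytes:

def to_bytes_sketch(s):
    if s is None:
        return None  # absent rw_uri/ro_uri entries pass through
    if isinstance(s, bytes):
        return s
    return s.encode("utf-8")

assert to_bytes_sketch(u"URI:DIR2:abc") == b"URI:DIR2:abc"
)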
@ -1,95 +1,106 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - Deep Check Results</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
<link href="/icon.png" rel="shortcut icon" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta n:render="refresh" />
<meta t:render="refresh" />
</head>
<body>

<h1>Deep-Check-And-Repair Results for root
SI=<span n:render="root_storage_index" /></h1>
SI=<span t:render="root_storage_index" /></h1>

<h2 n:render="reload" />
<h2 t:render="reload" />

<p>Counters:</p>
<ul>
<li>Objects Checked: <span n:render="data" n:data="objects_checked" /></li>
<li>Objects Checked: <span><t:transparent t:render="objects_checked" /></span></li>

<li>Objects Healthy (before repair): <span n:render="data" n:data="objects_healthy" /></li>
<li>Objects Unhealthy (before repair): <span n:render="data" n:data="objects_unhealthy" /></li>
<li>Corrupt Shares (before repair): <span n:render="data" n:data="corrupt_shares" /></li>
<li>Objects Healthy (before repair): <span><t:transparent t:render="objects_healthy" /></span></li>
<li>Objects Unhealthy (before repair): <span><t:transparent t:render="objects_unhealthy" /></span></li>
<li>Corrupt Shares (before repair): <span><t:transparent t:render="corrupt_shares" /></span></li>

<li>Repairs Attempted: <span n:render="data" n:data="repairs_attempted" /></li>
<li>Repairs Successful: <span n:render="data" n:data="repairs_successful" /></li>
<li>Repairs Unsuccessful: <span n:render="data" n:data="repairs_unsuccessful" /></li>
<li>Repairs Attempted: <span><t:transparent t:render="repairs_attempted" /></span></li>
<li>Repairs Successful: <span><t:transparent t:render="repairs_successful" /></span></li>
<li>Repairs Unsuccessful: <span><t:transparent t:render="repairs_unsuccessful" /></span></li>

<li>Objects Healthy (after repair): <span n:render="data" n:data="objects_healthy_post" /></li>
<li>Objects Unhealthy (after repair): <span n:render="data" n:data="objects_unhealthy_post" /></li>
<li>Corrupt Shares (after repair): <span n:render="data" n:data="corrupt_shares_post" /></li>
<li>Objects Healthy (after repair): <span><t:transparent t:render="objects_healthy_post" /></span></li>
<li>Objects Unhealthy (after repair): <span><t:transparent t:render="objects_unhealthy_post" /></span></li>
<li>Corrupt Shares (after repair): <span><t:transparent t:render="corrupt_shares_post" /></span></li>

</ul>

<div n:render="pre_repair_problems_p">
<div t:render="pre_repair_problems_p">
<h2>Files/Directories That Had Problems:</h2>

<ul n:render="sequence" n:data="pre_repair_problems">
<li n:pattern="item" n:render="problem"/>
<li n:pattern="empty">None</li>
<ul t:render="pre_repair_problems">
<li t:render="item">
<t:slot name="problem" />
</li>
<li t:render="empty">None</li>
</ul>
</div>


<div n:render="post_repair_problems_p">
<div t:render="post_repair_problems_p">
<h2>Files/Directories That Still Have Problems:</h2>
<ul n:render="sequence" n:data="post_repair_problems">
<li n:pattern="item" n:render="problem"/>
<li n:pattern="empty">None</li>
<ul t:render="post_repair_problems">
<li t:render="item">
<t:slot name="problem" />
</li>
<li t:render="empty">None</li>
</ul>
</div>

<div n:render="servers_with_corrupt_shares_p">
<div t:render="servers_with_corrupt_shares_p">
<h2>Servers on which corrupt shares were found</h2>
<ul n:render="sequence" n:data="servers_with_corrupt_shares">
<li n:pattern="item" n:render="server_problem"/>
<li n:pattern="empty">None</li>
<ul t:render="servers_with_corrupt_shares">
<li t:render="item">
<t:slot name="problem" />
</li>
<li t:render="empty">None</li>
</ul>
</div>

<div n:render="remaining_corrupt_shares_p">
<div t:render="remaining_corrupt_shares_p">
<h2>Remaining Corrupt Shares</h2>
<p>These shares need to be manually inspected and removed.</p>
<ul n:render="sequence" n:data="post_repair_corrupt_shares">
<li n:pattern="item" n:render="share_problem"/>
<li n:pattern="empty">None</li>
<ul t:render="post_repair_corrupt_shares">
<li t:render="item">
<t:slot name="share" />
</li>
<li t:render="empty">None</li>
</ul>
</div>

<div n:render="return" />
<div t:render="return_to" />

<div>
<table n:render="sequence" n:data="all_objects">
<tr n:pattern="header">
<td>Relative Path</td>
<td>Healthy Pre-Repair</td>
<td>Recoverable Pre-Repair</td>
<td>Healthy Post-Repair</td>
<td>Storage Index</td>
<td>Summary</td>
<table t:render="all_objects">
<tr t:render="header">
<th>Relative Path</th>
<th>Healthy Pre-Repair</th>
<th>Recoverable Pre-Repair</th>
<th>Healthy Post-Repair</th>
<th>Storage Index</th>
<th>Summary</th>
</tr>
<tr n:pattern="item" n:render="object">
<td><n:slot name="path"/></td>
<td><n:slot name="healthy_pre_repair"/></td>
<td><n:slot name="recoverable_pre_repair"/></td>
<td><n:slot name="healthy_post_repair"/></td>
<td><n:slot name="storage_index"/></td>
<td><n:slot name="summary"/></td>
<tr t:render="item">
<td><t:slot name="path"/></td>
<td><t:slot name="healthy_pre_repair"/></td>
<td><t:slot name="recoverable_pre_repair"/></td>
<td><t:slot name="healthy_post_repair"/></td>
<td><t:slot name="storage_index"/></td>
<td><t:slot name="summary"/></td>
</tr>
<tr t:render="empty">
<td>Nothing to report yet.</td>
</tr>
</table>
</div>

<div n:render="runtime" />
<div t:render="runtime" />

</body>
</html>

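(Aside: the <t:slot> markup above is filled by renderers that return
SlotsSequenceElement(tag, ...) with a list of dicts, one dict per rendered
row; an empty list renders the t:render="empty" pattern instead. A small
sketch with invented data, omitting the XMLFile loader for brevity:

from twisted.web.template import Element, renderer
from allmydata.web.common import SlotsSequenceElement

class ExampleElement(Element):
    @renderer
    def all_objects(self, req, tag):
        rows = [
            {"path": "some/dir/file", "healthy": "True"},
            {"path": "other/file", "healthy": "False"},
        ]
        # Each dict fills the named slots of one <tr t:render="item"> row.
        return SlotsSequenceElement(tag, rows)
)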
@ -1,87 +1,93 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - Deep Check Results</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
<link href="/icon.png" rel="shortcut icon" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta n:render="refresh" />
<meta t:render="refresh" />
</head>
<body>

<h1>Deep-Check Results for root SI=<span n:render="root_storage_index" /></h1>
<h1>Deep-Check Results for root SI=<span t:render="root_storage_index" /></h1>

<h2 n:render="reload" />
<h2 t:render="reload" />

<p>Counters:</p>
<ul>
<li>Objects Checked: <span n:render="data" n:data="objects_checked" /></li>
<li>Objects Healthy: <span n:render="data" n:data="objects_healthy" /></li>
<li>Objects Unhealthy: <span n:render="data" n:data="objects_unhealthy" /></li>
<li>Objects Unrecoverable: <span n:render="data" n:data="objects_unrecoverable" /></li>
<li>Corrupt Shares: <span n:render="data" n:data="count_corrupt_shares" /></li>

<li>Objects Checked: <span><t:transparent t:render="objects_checked" /></span></li>
<li>Objects Healthy: <span><t:transparent t:render="objects_healthy" /></span></li>
<li>Objects Unhealthy: <span><t:transparent t:render="objects_unhealthy" /></span></li>
<li>Objects Unrecoverable: <span><t:transparent t:render="objects_unrecoverable" /></span></li>
<li>Corrupt Shares: <span><t:transparent t:render="count_corrupt_shares" /></span></li>
</ul>

<div n:render="problems_p">
<div t:render="problems_p">
<h2>Files/Directories That Had Problems:</h2>

<ul n:render="sequence" n:data="problems">
<li n:pattern="item" n:render="problem"/>
<li n:pattern="empty">None</li>
<ul t:render="problems">
<li t:render="item">
<t:slot name="problem" />
</li>
<li t:render="empty">None</li>
</ul>
</div>


<div n:render="servers_with_corrupt_shares_p">
<div t:render="servers_with_corrupt_shares_p">
<h2>Servers on which corrupt shares were found</h2>
<ul n:render="sequence" n:data="servers_with_corrupt_shares">
<li n:pattern="item" n:render="server_problem"/>
<li n:pattern="empty">None</li>
<ul t:render="servers_with_corrupt_shares">
<li t:render="item">
<t:slot name="problem" />
</li>
<li t:render="empty">None</li>
</ul>
</div>

<div n:render="corrupt_shares_p">
<div t:render="corrupt_shares_p">
<h2>Corrupt Shares</h2>
<p>If repair fails, these shares need to be manually inspected and removed.</p>
<table n:render="sequence" n:data="corrupt_shares">
<tr n:pattern="header">
<td>Server</td>
<td>Server Nickname</td>
<td>Storage Index</td>
<td>Share Number</td>
<table t:render="corrupt_shares">
<tr t:render="header">
<th>Server</th>
<th>Server Nickname</th>
<th>Storage Index</th>
<th>Share Number</th>
</tr>
<tr n:pattern="item" n:render="share_problem">
<td><n:slot name="serverid"/></td>
<td><n:slot name="nickname"/></td>
<td><n:slot name="si"/></td>
<td><n:slot name="shnum"/></td>
<tr t:render="item">
<td><t:slot name="serverid"/></td>
<td><t:slot name="nickname"/></td>
<td><t:slot name="si"/></td>
<td><t:slot name="shnum"/></td>
</tr>
</table>
</div>

<div n:render="return" />
<div t:render="return_to" />

<div>
<h2>All Results</h2>
<table n:render="sequence" n:data="all_objects">
<tr n:pattern="header">
<td>Relative Path</td>
<td>Healthy</td>
<td>Recoverable</td>
<td>Storage Index</td>
<td>Summary</td>
<table t:render="all_objects">
<tr t:render="header">
<th>Relative Path</th>
<th>Healthy</th>
<th>Recoverable</th>
<th>Storage Index</th>
<th>Summary</th>
</tr>
<tr n:pattern="item" n:render="object">
<td><n:slot name="path"/></td>
<td><n:slot name="healthy"/></td>
<td><n:slot name="recoverable"/></td>
<td><tt><n:slot name="storage_index"/></tt></td>
<td><n:slot name="summary"/></td>
<tr t:render="item">
<td><t:slot name="path"/></td>
<td><t:slot name="healthy"/></td>
<td><t:slot name="recoverable"/></td>
<td><tt><t:slot name="storage_index"/></tt></td>
<td><t:slot name="summary"/></td>
</tr>
<tr t:render="empty">
<td>Nothing to report yet.</td>
</tr>
</table>
</div>

<div n:render="runtime" />
<div t:render="runtime" />

</body>
</html>

@ -22,7 +22,7 @@ from twisted.python.filepath import FilePath

from allmydata.util import base32
from allmydata.util.encodingutil import (
to_str,
to_bytes,
quote_output,
)
from allmydata.uri import (
@ -484,7 +484,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
to_dir = to_dir.decode(charset)
assert isinstance(to_dir, unicode)
to_path = to_dir.split(u"/")
to_root = self.client.nodemaker.create_from_cap(to_str(to_path[0]))
to_root = self.client.nodemaker.create_from_cap(to_bytes(to_path[0]))
if not IDirectoryNode.providedBy(to_root):
raise WebError("to_dir is not a directory", http.BAD_REQUEST)
d = to_root.get_child_at_path(to_path[1:])

@ -261,7 +261,7 @@ class MoreInfoElement(Element):
@renderer
def deep_check_form(self, req, tag):
ophandle = base32.b2a(os.urandom(16))
deep_check = T.form(action=".", method="post",
deep_check = T.form(action=req.path, method="post",
enctype="multipart/form-data")(
T.fieldset(
T.input(type="hidden", name="t", value="start-deep-check"),
@ -287,7 +287,7 @@ class MoreInfoElement(Element):
@renderer
def deep_size_form(self, req, tag):
ophandle = base32.b2a(os.urandom(16))
deep_size = T.form(action=".", method="post",
deep_size = T.form(action=req.path, method="post",
enctype="multipart/form-data")(
T.fieldset(
T.input(type="hidden", name="t", value="start-deep-size"),
@ -300,7 +300,7 @@ class MoreInfoElement(Element):
@renderer
def deep_stats_form(self, req, tag):
ophandle = base32.b2a(os.urandom(16))
deep_stats = T.form(action=".", method="post",
deep_stats = T.form(action=req.path, method="post",
enctype="multipart/form-data")(
T.fieldset(
T.input(type="hidden", name="t", value="start-deep-stats"),
@ -313,7 +313,7 @@ class MoreInfoElement(Element):
@renderer
def manifest_form(self, req, tag):
ophandle = base32.b2a(os.urandom(16))
manifest = T.form(action=".", method="post",
manifest = T.form(action=req.path, method="post",
enctype="multipart/form-data")(
T.fieldset(
T.input(type="hidden", name="t", value="start-manifest"),

@ -1,4 +1,4 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>Tahoe-LAFS - Check Results</title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
@ -11,7 +11,7 @@

<div>Literal files are always healthy: their data is contained in the URI</div>

<div n:render="return" />
<div t:render="return_to" />

</body>
</html>

tox.ini
@ -49,9 +49,8 @@ commands =
tahoe --version

[testenv:py36]
# On macOS, git inside of ratchet.sh needs $HOME.
passenv = HOME
commands = {toxinidir}/misc/python3/ratchet.sh
commands =
trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata.test.python3_tests}

[testenv:integration]
setenv =
@ -210,7 +209,9 @@ extras =
deps =
{[testenv]deps}
packaging
pyinstaller
# PyInstaller 4.0 drops Python 2 support. When we finish porting to
# Python 3 we can reconsider this constraint.
pyinstaller < 4.0
# Setting PYTHONHASHSEED to a known value assists with reproducible builds.
# See https://pyinstaller.readthedocs.io/en/stable/advanced-topics.html#creating-a-reproducible-build
setenv=PYTHONHASHSEED=1

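(Aside: the PYTHONHASHSEED comment above matters because, with hash
randomization enabled, str hashes differ between interpreter runs, which can
leak into build outputs. A quick demonstration that pinning the seed makes
them stable; this snippet is illustrative, not part of the build:

import subprocess
import sys

code = "print(hash('tahoe'))"
outputs = {
    subprocess.check_output([sys.executable, "-c", code],
                            env={"PYTHONHASHSEED": "1"})
    for _ in range(2)
}
assert len(outputs) == 1  # identical across runs once the seed is pinned
)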