commit c21acc64f0: merge master
@@ -1,5 +1,6 @@
ARG TAG
FROM centos:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
    git \
    sudo \
    make automake gcc gcc-c++ \
    python2 \
    python2-devel \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-devel \
    libffi-devel \
    openssl-devel \
    libyaml \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -1,5 +1,6 @@
ARG TAG
FROM debian:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -8,22 +9,22 @@ ENV BUILD_SRC_ROOT /tmp/project

RUN apt-get --quiet update && \
    apt-get --quiet --yes install \
    git \
    lsb-release \
    git \
    lsb-release \
    sudo \
    build-essential \
    python2.7 \
    python2.7-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
    virtualenv
    build-essential \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
    virtualenv

# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

# Only the integration tests currently need this but it doesn't hurt to always
# have it present and it's simpler than building a whole extra image just for
@@ -1,5 +1,6 @@
ARG TAG
FROM fedora:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
    git \
    sudo \
    make automake gcc gcc-c++ \
    python \
    python-devel \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-devel \
    libffi-devel \
    openssl-devel \
    libyaml-devel \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -1,49 +0,0 @@
ARG TAG
FROM vbatts/slackware:${TAG}

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
# This will get updated by the CircleCI checkout step.
ENV BUILD_SRC_ROOT /tmp/project

# Be careful with slackpkg. If the package name given doesn't match anything,
# slackpkg still claims to succeed but you're totally screwed. Slackware
# updates versions of packaged software so including too much version prefix
# is a good way to have your install commands suddenly begin not installing
# anything.
RUN slackpkg update && \
    slackpkg install \
    openssh-7 git-2 \
    ca-certificates \
    sudo-1 \
    make-4 \
    automake-1 \
    kernel-headers \
    glibc-2 \
    binutils-2 \
    gcc-5 \
    gcc-g++-5 \
    python-2 \
    libffi-3 \
    libyaml-0 \
    sqlite-3 \
    icu4c-56 \
    libmpc-1 </dev/null && \
    slackpkg upgrade \
    openssl-1 </dev/null

# neither virtualenv nor pip is packaged.
# do it the hard way.
# and it is extra hard since it is slackware.
RUN slackpkg install \
    cyrus-sasl-2 \
    curl-7 </dev/null && \
    curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
    python get-pip.py && \
    pip install virtualenv

# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
@@ -1,5 +1,6 @@
ARG TAG
FROM ubuntu:${TAG}
ARG PYTHON_VERSION

ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@@ -13,8 +14,8 @@ RUN apt-get --quiet update && \
    apt-get --quiet --yes install \
    sudo \
    build-essential \
    python2.7 \
    python2.7-dev \
    python${PYTHON_VERSION} \
    python${PYTHON_VERSION}-dev \
    libffi-dev \
    libssl-dev \
    libyaml-dev \
@@ -26,4 +27,4 @@ RUN apt-get --quiet update && \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}

RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
@@ -11,10 +11,13 @@ workflows:
requires:
- "debian-9"

- "ubuntu-18.04"
- "ubuntu-20.04"
- "ubuntu-18.04":
requires:
- "ubuntu-20.04"
- "ubuntu-16.04":
requires:
- "ubuntu-18.04"
- "ubuntu-20.04"

- "fedora-29"
- "fedora-28":
@@ -23,13 +26,14 @@ workflows:

- "centos-8"

- "slackware-14.2"

- "nixos-19.09"

# Test against PyPy 2.7
- "pypy2.7-buster"

# Just one Python 3.6 configuration while the port is in-progress.
- "python3.6"

# Other assorted tasks and configurations
- "lint"
- "pyinstaller"
@@ -65,11 +69,12 @@ workflows:
- "build-image-debian-9"
- "build-image-ubuntu-16.04"
- "build-image-ubuntu-18.04"
- "build-image-ubuntu-20.04"
- "build-image-fedora-28"
- "build-image-fedora-29"
- "build-image-centos-8"
- "build-image-slackware-14.2"
- "build-image-pypy-2.7-buster"
- "build-image-python36-ubuntu"


jobs:
@@ -117,7 +122,7 @@ jobs:

debian-9: &DEBIAN
docker:
- image: "tahoelafsci/debian:9"
- image: "tahoelafsci/debian:9-py2.7"
user: "nobody"

environment: &UTF_8_ENVIRONMENT
@@ -194,14 +199,14 @@ jobs:
debian-8:
<<: *DEBIAN
docker:
- image: "tahoelafsci/debian:8"
- image: "tahoelafsci/debian:8-py2.7"
user: "nobody"


pypy2.7-buster:
<<: *DEBIAN
docker:
- image: "tahoelafsci/pypy:2.7-buster"
- image: "tahoelafsci/pypy:buster-py2"
user: "nobody"

environment:
@@ -257,20 +262,38 @@ jobs:
ubuntu-16.04:
<<: *DEBIAN
docker:
- image: "tahoelafsci/ubuntu:16.04"
- image: "tahoelafsci/ubuntu:16.04-py2.7"
user: "nobody"


ubuntu-18.04:
ubuntu-18.04: &UBUNTU_18_04
<<: *DEBIAN
docker:
- image: "tahoelafsci/ubuntu:18.04"
- image: "tahoelafsci/ubuntu:18.04-py2.7"
user: "nobody"


python3.6:
<<: *UBUNTU_18_04
docker:
- image: "tahoelafsci/ubuntu:18.04-py3"
user: "nobody"

environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py36"


ubuntu-20.04:
<<: *DEBIAN
docker:
- image: "tahoelafsci/ubuntu:20.04"
user: "nobody"


centos-8: &RHEL_DERIV
docker:
- image: "tahoelafsci/centos:8"
- image: "tahoelafsci/centos:8-py2"
user: "nobody"

environment: *UTF_8_ENVIRONMENT
@@ -292,37 +315,17 @@ jobs:
fedora-28:
<<: *RHEL_DERIV
docker:
- image: "tahoelafsci/fedora:28"
- image: "tahoelafsci/fedora:28-py"
user: "nobody"


fedora-29:
<<: *RHEL_DERIV
docker:
- image: "tahoelafsci/fedora:29"
- image: "tahoelafsci/fedora:29-py"
user: "nobody"


slackware-14.2:
docker:
- image: "tahoelafsci/slackware:14.2"
user: "nobody"

environment: *UTF_8_ENVIRONMENT

# pip cannot install packages if the working directory is not readable.
# We want to run a lot of steps as nobody instead of as root.
working_directory: "/tmp/project"

steps:
- "checkout"
- run: *SETUP_VIRTUALENV
- run: *RUN_TESTS
- store_test_results: *STORE_TEST_RESULTS
- store_artifacts: *STORE_TEST_LOG
- store_artifacts: *STORE_OTHER_ARTIFACTS
- run: *SUBMIT_COVERAGE

nixos-19.09:
docker:
# Run in a highly Nix-capable environment.
@@ -386,8 +389,9 @@ jobs:
- image: "docker:17.05.0-ce-git"

environment:
DISTRO: "tahoelafsci/<DISTRO>:foo"
TAG: "tahoelafsci/distro:<TAG>"
DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
TAG: "tahoelafsci/distro:<TAG>-py2"
PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION}"

steps:
- "checkout"
@@ -439,13 +443,14 @@ jobs:
docker \
  build \
  --build-arg TAG=${TAG} \
  -t tahoelafsci/${DISTRO}:${TAG} \
  --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
  -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
  -f ~/project/.circleci/Dockerfile.${DISTRO} \
  ~/project/
- run:
name: "Push image"
command: |
docker push tahoelafsci/${DISTRO}:${TAG}
docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}


build-image-debian-8:
@@ -454,6 +459,7 @@ jobs:
environment:
DISTRO: "debian"
TAG: "8"
PYTHON_VERSION: "2.7"


build-image-debian-9:
@@ -462,6 +468,7 @@ jobs:
environment:
DISTRO: "debian"
TAG: "9"
PYTHON_VERSION: "2.7"


build-image-ubuntu-16.04:
@@ -470,6 +477,7 @@ jobs:
environment:
DISTRO: "ubuntu"
TAG: "16.04"
PYTHON_VERSION: "2.7"


build-image-ubuntu-18.04:
@@ -478,6 +486,24 @@ jobs:
environment:
DISTRO: "ubuntu"
TAG: "18.04"
PYTHON_VERSION: "2.7"


build-image-python36-ubuntu:
<<: *BUILD_IMAGE

environment:
DISTRO: "ubuntu"
TAG: "18.04"
PYTHON_VERSION: "3"


build-image-ubuntu-20.04:
<<: *BUILD_IMAGE

environment:
DISTRO: "ubuntu"
TAG: "20.04"


build-image-centos-8:
@@ -486,6 +512,7 @@ jobs:
environment:
DISTRO: "centos"
TAG: "8"
PYTHON_VERSION: "2"


build-image-fedora-28:
@@ -494,6 +521,8 @@ jobs:
environment:
DISTRO: "fedora"
TAG: "28"
# The default on Fedora (this version anyway) is still Python 2.
PYTHON_VERSION: ""


build-image-fedora-29:
@@ -504,17 +533,13 @@ jobs:
TAG: "29"


build-image-slackware-14.2:
<<: *BUILD_IMAGE

environment:
DISTRO: "slackware"
TAG: "14.2"


build-image-pypy-2.7-buster:
<<: *BUILD_IMAGE

environment:
DISTRO: "pypy"
TAG: "2.7-buster"
TAG: "buster"
# We only have Python 2 for PyPy right now so there's no support for
# setting up PyPy 3 in the image building toolchain. This value is just
# for constructing the right Docker image tag.
PYTHON_VERSION: "2"
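The build-image-* jobs above all feed DISTRO, TAG and PYTHON_VERSION into the same docker build step, which tags the result as tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}. As a rough, illustrative sketch of that naming scheme (not part of the commit):

# Illustrative only: the image-tag scheme used by the build-image-* jobs.
def image_name(distro, tag, python_version):
    """Mirror -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} from the config."""
    return "tahoelafsci/{}:{}-py{}".format(distro, tag, python_version)

# Matches the job images referenced above, including Fedora's empty PYTHON_VERSION.
assert image_name("ubuntu", "18.04", "2.7") == "tahoelafsci/ubuntu:18.04-py2.7"
assert image_name("fedora", "29", "") == "tahoelafsci/fedora:29-py"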
@@ -36,8 +36,9 @@ PIP="${BOOTSTRAP_VENV}/bin/pip"
# Tell pip where it can find any existing wheels.
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"

# Populate the wheelhouse, if necessary.
"${PIP}" \
# Populate the wheelhouse, if necessary. zfec 1.5.3 can only be built with a
# UTF-8 environment so make sure we have one, at least for this invocation.
LANG="en_US.UTF-8" "${PIP}" \
  wheel \
  --wheel-dir "${WHEELHOUSE_PATH}" \
  "${PROJECT_ROOT}"[test] \
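The wheelhouse hunk only changes how pip is invoked: zfec 1.5.3 needs a UTF-8 locale to build, so LANG is forced for that single command. A hedged Python equivalent of the same idea (paths and names are placeholders, not project code):

# Illustrative only: run "pip wheel" with a UTF-8 locale just for this invocation.
import os
import subprocess

def populate_wheelhouse(pip, wheelhouse_path, project_root):
    # zfec 1.5.3 can only be built in a UTF-8 environment.
    env = dict(os.environ, LANG="en_US.UTF-8")
    subprocess.check_call(
        [pip, "wheel", "--wheel-dir", wheelhouse_path, project_root + "[test]"],
        env=env,
    )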
@@ -81,7 +81,16 @@ ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
  ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"

if [ -n "${ARTIFACTS}" ]; then
  if [ ! -e "${SUBUNIT2}" ]; then
    echo "subunitv2 output file does not exist: ${SUBUNIT2}"
    exit 1
  fi

  # Create a junitxml results area.
  mkdir -p "$(dirname "${JUNITXML}")"
  ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
  # Always succeed even if subunit2junitxml fails. subunit2junitxml signals
  # failure if the stream it is processing contains test failures. This is
  # not what we care about. If we cared about it, the test command above
  # would have signalled failure already and we wouldn't be here.
  "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || true
fi
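The point of this hunk is that a failing exit status from subunit2junitxml, which merely reflects test failures the tox step already reported, must not fail the CI step. A small, assumed sketch of the same tolerance expressed in Python (not taken from the repository):

# Sketch only: convert a subunit v2 stream to junitxml and deliberately ignore
# the converter's exit status, since it signals "tests failed", not "conversion failed".
import subprocess

def subunit_to_junitxml(subunit2junitxml, subunit_path, junitxml_path):
    with open(subunit_path, "rb") as src, open(junitxml_path, "wb") as dst:
        subprocess.run([subunit2junitxml], stdin=src, stdout=dst)  # exit code intentionally unused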
.travis.yml (40 lines removed)
@@ -1,40 +0,0 @@
sudo: false
language: python
cache: pip
dist: xenial
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
git:
depth: 1000

env:
global:
- TAHOE_LAFS_HYPOTHESIS_PROFILE=ci

install:
- pip install --upgrade tox setuptools virtualenv
- echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py

script:
- |
  set -eo pipefail
  tox -e ${T}

notifications:
email: false
irc:
channels: "chat.freenode.net#tahoe-lafs"
on_success: always # for testing
on_failure: always
template:
- "%{repository}#%{build_number} [%{branch}: %{commit} by %{author}] %{message}"
- "Changes: %{compare_url} | Details: %{build_url}"

matrix:
include:
- os: linux
python: '3.6'
env: T=py36

fast_finish: true
@@ -27,12 +27,54 @@ allmydata.test.test_base62.Base62.test_known_values
allmydata.test.test_base62.Base62.test_num_octets_that_encode_to_this_many_chars
allmydata.test.test_base62.Base62.test_odd_sizes
allmydata.test.test_base62.Base62.test_roundtrip
allmydata.test.test_crypto.TestEd25519.test_deserialize_private_not_bytes
allmydata.test.test_crypto.TestEd25519.test_deserialize_public_not_bytes
allmydata.test.test_crypto.TestEd25519.test_key_serialization
allmydata.test.test_crypto.TestEd25519.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestEd25519.test_signature_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signature_not_bytes
allmydata.test.test_crypto.TestEd25519.test_signed_data_not_bytes
allmydata.test.test_crypto.TestEd25519.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_no_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_long_input
allmydata.test.test_crypto.TestRegression.test_aes_with_iv_process_short_input
allmydata.test.test_crypto.TestRegression.test_decode_ed15519_keypair
allmydata.test.test_crypto.TestRegression.test_decode_rsa_keypair
allmydata.test.test_crypto.TestRegression.test_encrypt_data_not_bytes
allmydata.test.test_crypto.TestRegression.test_incorrect_iv_size
allmydata.test.test_crypto.TestRegression.test_iv_not_bytes
allmydata.test.test_crypto.TestRegression.test_key_incorrect_size
allmydata.test.test_crypto.TestRegression.test_old_start_up_test
allmydata.test.test_crypto.TestRsa.test_keys
allmydata.test.test_crypto.TestRsa.test_sign_invalid_pubkey
allmydata.test.test_crypto.TestRsa.test_verify_invalid_pubkey
allmydata.test.test_crypto.TestUtil.test_remove_prefix_bad
allmydata.test.test_crypto.TestUtil.test_remove_prefix_entire_string
allmydata.test.test_crypto.TestUtil.test_remove_prefix_good
allmydata.test.test_crypto.TestUtil.test_remove_prefix_partial
allmydata.test.test_crypto.TestUtil.test_remove_prefix_zero
allmydata.test.test_deferredutil.DeferredUtilTests.test_failure
allmydata.test.test_deferredutil.DeferredUtilTests.test_gather_results
allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_dictutil.DictUtil.test_auxdict
allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
allmydata.test.test_happiness.Happiness.test_100
allmydata.test.test_happiness.Happiness.test_calc_happy
allmydata.test.test_happiness.Happiness.test_everything_broken
allmydata.test.test_happiness.Happiness.test_hypothesis0
allmydata.test.test_happiness.Happiness.test_hypothesis_0
allmydata.test.test_happiness.Happiness.test_hypothesis_1
allmydata.test.test_happiness.Happiness.test_placement_1
allmydata.test.test_happiness.Happiness.test_placement_simple
allmydata.test.test_happiness.Happiness.test_redistribute
allmydata.test.test_happiness.Happiness.test_unhappy
allmydata.test.test_happiness.HappinessUtils.test_residual_0
allmydata.test.test_happiness.HappinessUtils.test_trivial_flow_graph
allmydata.test.test_happiness.HappinessUtils.test_trivial_maximum_graph
allmydata.test.test_happiness.PlacementTests.test_hypothesis_unhappy
allmydata.test.test_happiness.PlacementTests.test_more_hypothesis
allmydata.test.test_hashtree.Complete.test_create
allmydata.test.test_hashtree.Complete.test_dump
allmydata.test.test_hashtree.Complete.test_needed_hashes
@@ -49,6 +91,8 @@ allmydata.test.test_hashutil.HashUtilTests.test_sha256d
allmydata.test.test_hashutil.HashUtilTests.test_sha256d_truncated
allmydata.test.test_hashutil.HashUtilTests.test_timing_safe_compare
allmydata.test.test_humanreadable.HumanReadable.test_repr
allmydata.test.test_iputil.GcUtil.test_gc_after_allocations
allmydata.test.test_iputil.GcUtil.test_release_delays_gc
allmydata.test.test_iputil.ListAddresses.test_get_local_ip_for
allmydata.test.test_iputil.ListAddresses.test_list_async
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_cygwin
@@ -57,6 +101,14 @@ allmydata.test.test_iputil.ListAddresses.test_list_async_mock_ip_addr
allmydata.test.test_iputil.ListAddresses.test_list_async_mock_route
allmydata.test.test_iputil.ListenOnUsed.test_random_port
allmydata.test.test_iputil.ListenOnUsed.test_specific_port
allmydata.test.test_log.Log.test_default_facility
allmydata.test.test_log.Log.test_err
allmydata.test.test_log.Log.test_grandparent_id
allmydata.test.test_log.Log.test_no_prefix
allmydata.test.test_log.Log.test_numming
allmydata.test.test_log.Log.test_parent_id
allmydata.test.test_log.Log.test_with_bytes_prefix
allmydata.test.test_log.Log.test_with_prefix
allmydata.test.test_netstring.Netstring.test_encode
allmydata.test.test_netstring.Netstring.test_extra
allmydata.test.test_netstring.Netstring.test_nested
@@ -11,9 +11,8 @@ cd "../.."

# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
subunit2pyunit < "$base/results.subunit2"
trial --reporter=subunitv2-file allmydata
subunit2junitxml < "${SUBUNITREPORTER_OUTPUT_PATH}" > "$base/results.xml"
set -e

# Okay, now we're clear.
@@ -33,6 +32,14 @@ set -e
if [ $TERM = 'dumb' ]; then
  export TERM=ansi
fi
git diff "$tracking_filename"

exit $code
echo "The ${tracking_filename} diff is:"
echo "================================="
# "git diff" gets pretty confused in this execution context when trying to
# write to stdout. Somehow it fails with SIGTTOU.
git diff -- "${tracking_filename}" > tracking.diff
cat tracking.diff
echo "================================="

echo "Exiting with code ${code} from ratchet.py."
exit ${code}
newsfragments/3323.removed (new file)
@@ -0,0 +1 @@
Slackware 14.2 is no longer a Tahoe-LAFS supported platform.

newsfragments/3326.minor (new empty file)

newsfragments/3328.installation (new file)
@@ -0,0 +1 @@
Tahoe-LAFS now supports Ubuntu 20.04.

newsfragments/3336.minor (new empty file)
newsfragments/3361.minor (new empty file)
newsfragments/3364.minor (new empty file)
newsfragments/3365.minor (new empty file)
newsfragments/3366.minor (new empty file)
newsfragments/3368.minor (new empty file)
newsfragments/3370.minor (new empty file)
newsfragments/3372.minor (new empty file)
newsfragments/3375.minor (new empty file)
@@ -7,6 +7,8 @@ bdist_egg = update_version bdist_egg
bdist_wheel = update_version bdist_wheel

[flake8]
# For now, only use pyflakes errors; flake8 is still helpful because it allows
# ignoring specific errors/warnings when needed.
select = F
# Enforce all pyflakes constraints, and also prohibit tabs for indentation.
# Reference:
# https://flake8.pycqa.org/en/latest/user/error-codes.html
# https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes
select = F, W191
@@ -5,4 +5,15 @@ For the most part, these functions use and return objects that are
documented in the `cryptography` library -- however, code inside Tahoe
should only use these functions and not rely on features of any
objects that `cryptography` documents.

Ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
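This __future__/builtins preamble is the porting boilerplate that every module touched by this commit gains. Condensed for illustration (the real modules import the full list shown in the hunk):

# The same compatibility header, shown standalone.  Under Python 2 the `future`
# package's builtins module supplies Python-3-like replacements; under Python 3
# the branch never runs, so the header is harmless there.
from __future__ import absolute_import, division, print_function, unicode_literals

from future.utils import PY2
if PY2:
    # Subset shown for brevity; the project imports the complete list.
    from builtins import filter, map, zip, bytes, dict, list, object, range, str  # noqa: F401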
@@ -6,7 +6,17 @@ These functions use and return objects that are documented in the
`cryptography` library -- however, code inside Tahoe should only use
functions from allmydata.crypto.aes and not rely on features of any
objects that `cryptography` documents.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import six
@@ -13,7 +13,18 @@ cut-and-pasteability. The base62 encoding is shorter than the base32 form,
but the minor usability improvement is not worth the documentation and
specification confusion of using a non-standard encoding. So we stick with
base32.

Ported to Python 3.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import six
@@ -1,6 +1,16 @@
"""
Exceptions raise by allmydata.crypto.* modules

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401


class BadSignature(Exception):
@@ -9,8 +9,17 @@ features of any objects that `cryptography` documents.

That is, the public and private keys are opaque objects; DO NOT depend
on any of their methods.
"""

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
@@ -1,6 +1,16 @@
"""
Utilities used by allmydata.crypto modules

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from allmydata.crypto.error import BadPrefixError
@@ -45,6 +45,8 @@ Written by Connelly Barnes in 2005 and released into the
public domain with no warranty of any kind, either expressed
or implied. It probably won't make your computer catch on fire,
or eat your children, but it might. Use at your own risk.

Ported to Python 3.
"""

from __future__ import absolute_import
@@ -54,7 +56,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from allmydata.util import mathutil # from the pyutil library
@@ -1,5 +1,20 @@
"""
Algorithms for figuring out happiness, the number of unique nodes the data is
on.

from Queue import PriorityQueue
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # We omit dict, just in case newdict breaks things for external Python 2 code.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401

from queue import PriorityQueue


def augmenting_path_for(graph):
@@ -35,9 +50,9 @@ def bfs(graph, s):
    GRAY = 1
    # BLACK vertices are those we have seen and explored
    BLACK = 2
    color = [WHITE for i in xrange(len(graph))]
    predecessor = [None for i in xrange(len(graph))]
    distance = [-1 for i in xrange(len(graph))]
    color = [WHITE for i in range(len(graph))]
    predecessor = [None for i in range(len(graph))]
    distance = [-1 for i in range(len(graph))]
    queue = [s] # vertices that we haven't explored yet.
    color[s] = GRAY
    distance[s] = 0
@@ -58,9 +73,9 @@ def residual_network(graph, f):
    flow network represented by my graph and f arguments. graph is a
    flow network in adjacency-list form, and f is a flow in graph.
    """
    new_graph = [[] for i in xrange(len(graph))]
    cf = [[0 for s in xrange(len(graph))] for sh in xrange(len(graph))]
    for i in xrange(len(graph)):
    new_graph = [[] for i in range(len(graph))]
    cf = [[0 for s in range(len(graph))] for sh in range(len(graph))]
    for i in range(len(graph)):
        for v in graph[i]:
            if f[i][v] == 1:
                # We add an edge (v, i) with cf[v,i] = 1. This means
@@ -135,7 +150,7 @@ def _compute_maximum_graph(graph, shareIndices):
        return {}

    dim = len(graph)
    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
    residual_graph, residual_function = residual_network(graph, flow_function)

    while augmenting_path_for(residual_graph):
@@ -260,9 +275,9 @@ def _servermap_flow_graph(peers, shares, servermap):
    #print "share_to_index %s" % share_to_index
    #print "servermap %s" % servermap
    for peer in peers:
        if servermap.has_key(peer):
        if peer in servermap:
            for s in servermap[peer]:
                if share_to_index.has_key(s):
                if s in share_to_index:
                    indexedShares.append(share_to_index[s])
        graph.insert(peer_to_index[peer], indexedShares)
    for share in shares:
@@ -373,7 +388,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
    new_mappings = _calculate_mappings(new_peers, new_shares)
    #print "new_peers %s" % new_peers
    #print "new_mappings %s" % new_mappings
    mappings = dict(readonly_mappings.items() + existing_mappings.items() + new_mappings.items())
    mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
    homeless_shares = set()
    for share in mappings:
        if mappings[share] is None:
@@ -384,7 +399,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
        mappings, homeless_shares,
        {
            k: v
            for k, v in peers_to_shares.items()
            for k, v in list(peers_to_shares.items())
            if k not in readonly_peers
        }
    )
@@ -401,5 +416,5 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):

    return {
        k: v.pop() if v else next(peer_iter)
        for k, v in mappings.items()
        for k, v in list(mappings.items())
    }
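The happiness_upload.py hunks replace three Python-2-only idioms with forms that behave the same on both interpreters: xrange() becomes range(), d.has_key(k) becomes k in d, and concatenating .items() results is wrapped in list() because Python 3 returns non-concatenable views. A self-contained illustration of those substitutions (not Tahoe code):

# Illustration only: the three Python 2 -> 2+3 substitutions made above.
def merge_mappings(readonly, existing, new):
    # dict(a.items() + b.items()) fails on Python 3; list() restores concatenation.
    return dict(list(readonly.items()) + list(existing.items()) + list(new.items()))

servermap = {"peer1": {0, 1}}
assert "peer1" in servermap                         # replaces servermap.has_key("peer1")
matrix = [[0 for _ in range(3)] for _ in range(3)]  # replaces xrange()
assert merge_mappings({"a": 1}, {"b": 2}, {"a": 3}) == {"a": 3, "b": 2}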
@@ -11,7 +11,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import os
import time
@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from datetime import timedelta

@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import base64

@@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import chr as byteschr
@@ -1,4 +1,14 @@
import six
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from future.utils import native_bytes

import unittest

from base64 import b64decode
@@ -37,17 +47,18 @@ class TestRegression(unittest.TestCase):
        # priv = rsa.generate(2048)
        # priv_str = b64encode(priv.serialize())
        # pub_str = b64encode(priv.get_verifying_key().serialize())
        RSA_2048_PRIV_KEY = six.b(b64decode(f.read().strip()))
        RSA_2048_PRIV_KEY = b64decode(f.read().strip())
        assert isinstance(RSA_2048_PRIV_KEY, native_bytes)

    with RESOURCE_DIR.child('pycryptopp-rsa-2048-sig.txt').open('r') as f:
        # Signature created using `RSA_2048_PRIV_KEY` via:
        #
        # sig = priv.sign(b'test')
        RSA_2048_SIG = six.b(b64decode(f.read().strip()))
        RSA_2048_SIG = b64decode(f.read().strip())

    with RESOURCE_DIR.child('pycryptopp-rsa-2048-pub.txt').open('r') as f:
        # The public key corresponding to `RSA_2048_PRIV_KEY`.
        RSA_2048_PUB_KEY = six.b(b64decode(f.read().strip()))
        RSA_2048_PUB_KEY = b64decode(f.read().strip())

    def test_old_start_up_test(self):
        """
@@ -283,7 +294,7 @@ class TestEd25519(unittest.TestCase):
        private_key, public_key = ed25519.create_signing_keypair()
        private_key_str = ed25519.string_from_signing_key(private_key)

        self.assertIsInstance(private_key_str, six.string_types)
        self.assertIsInstance(private_key_str, native_bytes)

        private_key2, public_key2 = ed25519.signing_keypair_from_string(private_key_str)

@@ -299,7 +310,7 @@ class TestEd25519(unittest.TestCase):

        # ditto, but for the verifying keys
        public_key_str = ed25519.string_from_verifying_key(public_key)
        self.assertIsInstance(public_key_str, six.string_types)
        self.assertIsInstance(public_key_str, native_bytes)

        public_key2 = ed25519.verifying_key_from_string(public_key_str)
        self.assertEqual(
@@ -403,7 +414,7 @@ class TestRsa(unittest.TestCase):
        priv_key, pub_key = rsa.create_signing_keypair(2048)
        priv_key_str = rsa.der_string_from_signing_key(priv_key)

        self.assertIsInstance(priv_key_str, six.string_types)
        self.assertIsInstance(priv_key_str, native_bytes)

        priv_key2, pub_key2 = rsa.create_signing_keypair_from_string(priv_key_str)
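The test_crypto.py changes drop the six.b() wrappers because base64.b64decode() already returns native bytes on both Python 2 and Python 3 (six.b() expects a text string, so the old wrapping would break under Python 3), and the assertions now check native_bytes rather than six.string_types. An illustrative check of that premise, not repository code:

# Illustration: b64decode() round-trips bytes and yields the native bytes type.
from base64 import b64decode, b64encode

raw = b"\x00\x01binary key material"
assert b64decode(b64encode(raw)) == raw
assert isinstance(b64decode(b64encode(raw)), bytes)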
@@ -11,7 +11,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest
from twisted.internet import defer, reactor
@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest

@@ -1,5 +1,15 @@
# -*- coding: utf-8 -*-

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    # We omit dict, just in case newdict breaks things.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest
from hypothesis import given
from hypothesis.strategies import text, sets

@@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401


from twisted.trial import unittest

@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest

@@ -11,7 +11,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import long
@@ -11,9 +11,10 @@ from __future__ import unicode_literals

from future.utils import PY2, native_str
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import re, errno, subprocess, os, socket
import gc

from twisted.trial import unittest

@@ -21,7 +22,7 @@ from tenacity import retry, stop_after_attempt

from foolscap.api import Tub

from allmydata.util import iputil
from allmydata.util import iputil, gcutil
import allmydata.test.common_py3 as testutil
from allmydata.util.namespace import Namespace

@@ -228,3 +229,35 @@ class ListenOnUsed(unittest.TestCase):
        s.close()
        port2 = iputil.listenOnUnused(tub, port)
        self.assertEqual(port, port2)


class GcUtil(unittest.TestCase):
    """Tests for allmydata.util.gcutil, which is used only by listenOnUnused."""

    def test_gc_after_allocations(self):
        """The resource tracker triggers allocations every 26 allocations."""
        tracker = gcutil._ResourceTracker()
        collections = []
        self.patch(gc, "collect", lambda: collections.append(1))
        for _ in range(2):
            for _ in range(25):
                tracker.allocate()
            self.assertEqual(len(collections), 0)
            tracker.allocate()
            self.assertEqual(len(collections), 1)
            del collections[:]

    def test_release_delays_gc(self):
        """Releasing a file descriptor resource delays GC collection."""
        tracker = gcutil._ResourceTracker()
        collections = []
        self.patch(gc, "collect", lambda: collections.append(1))
        for _ in range(2):
            tracker.allocate()
        for _ in range(3):
            tracker.release()
        for _ in range(25):
            tracker.allocate()
        self.assertEqual(len(collections), 0)
        tracker.allocate()
        self.assertEqual(len(collections), 1)
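The new GcUtil tests exercise allmydata.util.gcutil._ResourceTracker, which, judging only from the tests above, forces a gc.collect() on every 26th net allocation and lets release() push that threshold further away. The following is merely a sketch written to satisfy that described behaviour; it is an assumption, not the actual gcutil implementation:

# Sketch of a tracker consistent with the tests above (assumed, not Tahoe's code).
import gc

class ResourceTrackerSketch(object):
    THRESHOLD = 26  # gc.collect() on every 26th net allocation, per the test docstring

    def __init__(self):
        self._counter = 0

    def allocate(self):
        self._counter += 1
        if self._counter >= self.THRESHOLD:
            self._counter = 0
            gc.collect()

    def release(self):
        # Releasing a resource defers the next forced collection.
        self._counter = max(0, self._counter - 1)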
src/allmydata/test/test_log.py (new file, 156 lines)
@@ -0,0 +1,156 @@
"""
Tests for allmydata.util.log.

Ported to Python 3.
"""

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest
from twisted.python.failure import Failure

from foolscap.logging import log

from allmydata.util import log as tahoe_log


class SampleError(Exception):
    pass


class Log(unittest.TestCase):
    def setUp(self):
        self.messages = []

        def msg(msg, facility, parent, *args, **kwargs):
            self.messages.append((msg, facility, parent, args, kwargs))
            return "msg{}".format(len(self.messages))

        self.patch(log, "msg", msg)

    def test_err(self):
        """Logging with log.err() causes tests to fail."""
        try:
            raise SampleError("simple sample")
        except:
            f = Failure()
        tahoe_log.err(format="intentional sample error",
                      failure=f, level=tahoe_log.OPERATIONAL, umid="wO9UoQ")
        result = self.flushLoggedErrors(SampleError)
        self.assertEqual(len(result), 1)

    def test_default_facility(self):
        """
        If facility is passed to PrefixingLogMixin.__init__, it is used as
        default facility.
        """
        class LoggingObject1(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject1(facility="defaultfac")
        obj.log("hello")
        obj.log("world", facility="override")
        self.assertEqual(self.messages[-2][1], "defaultfac")
        self.assertEqual(self.messages[-1][1], "override")

    def test_with_prefix(self):
        """
        If prefix is passed to PrefixingLogMixin.__init__, it is used in
        message rendering.
        """
        class LoggingObject4(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject4("fac", prefix="pre1")
        obj.log("hello")
        obj.log("world")
        self.assertEqual(self.messages[-2][0], '<LoggingObject4 #1>(pre1): hello')
        self.assertEqual(self.messages[-1][0], '<LoggingObject4 #1>(pre1): world')

    def test_with_bytes_prefix(self):
        """
        If bytes prefix is passed to PrefixingLogMixin.__init__, it is used in
        message rendering.
        """
        class LoggingObject5(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject5("fac", prefix=b"pre1")
        obj.log("hello")
        obj.log("world")
        self.assertEqual(self.messages[-2][0], '<LoggingObject5 #1>(pre1): hello')
        self.assertEqual(self.messages[-1][0], '<LoggingObject5 #1>(pre1): world')

    def test_no_prefix(self):
        """
        If no prefix is passed to PrefixingLogMixin.__init__, it is not used in
        message rendering.
        """
        class LoggingObject2(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject2()
        obj.log("hello")
        obj.log("world")
        self.assertEqual(self.messages[-2][0], '<LoggingObject2 #1>: hello')
        self.assertEqual(self.messages[-1][0], '<LoggingObject2 #1>: world')

    def test_numming(self):
        """
        Objects inheriting from PrefixingLogMixin get a unique number from a
        class-specific counter.
        """
        class LoggingObject3(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject3()
        obj2 = LoggingObject3()
        obj.log("hello")
        obj2.log("world")
        self.assertEqual(self.messages[-2][0], '<LoggingObject3 #1>: hello')
        self.assertEqual(self.messages[-1][0], '<LoggingObject3 #2>: world')

    def test_parent_id(self):
        """
        The parent message id can be passed in, otherwise the first message's
        id is used as the parent.

        This logic is pretty bogus, but that's what the code does.
        """
        class LoggingObject1(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject1()
        result = obj.log("zero")
        self.assertEqual(result, "msg1")
        obj.log("one", parent="par1")
        obj.log("two", parent="par2")
        obj.log("three")
        obj.log("four")
        self.assertEqual([m[2] for m in self.messages],
                         [None, "par1", "par2", "msg1", "msg1"])

    def test_grandparent_id(self):
        """
        If grandparent message id is given, it's used as parent id of the first
        message.
        """
        class LoggingObject1(tahoe_log.PrefixingLogMixin):
            pass

        obj = LoggingObject1(grandparentmsgid="grand")
        result = obj.log("zero")
        self.assertEqual(result, "msg1")
        obj.log("one", parent="par1")
        obj.log("two", parent="par2")
        obj.log("three")
        obj.log("four")
        self.assertEqual([m[2] for m in self.messages],
                         ["grand", "par1", "par2", "msg1", "msg1"])
@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest

@@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.trial import unittest
from twisted.internet import defer, reactor
@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import gc

@@ -10,7 +10,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.python.modules import (
    getModule,

@@ -9,7 +9,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import long

@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from six.moves import StringIO # native string StringIO

@@ -8,7 +8,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import long
@@ -15,14 +15,11 @@ import os, time, sys
import yaml

from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.python.failure import Failure

from allmydata.util import idlib, mathutil
from allmydata.util import fileutil
from allmydata.util import limiter, pollmixin
from allmydata.util import pollmixin
from allmydata.util import yamlutil
from allmydata.util import log as tahoe_log
from allmydata.util.fileutil import EncryptedTemporaryFile
from allmydata.test.common_util import ReallyEqualMixin

@@ -441,81 +438,6 @@ class PollMixinTests(unittest.TestCase):
        return d


class Limiter(unittest.TestCase):

    def job(self, i, foo):
        self.calls.append( (i, foo) )
        self.simultaneous += 1
        self.peak_simultaneous = max(self.simultaneous, self.peak_simultaneous)
        d = defer.Deferred()
        def _done():
            self.simultaneous -= 1
            d.callback("done %d" % i)
        reactor.callLater(1.0, _done)
        return d

    def bad_job(self, i, foo):
        raise ValueError("bad_job %d" % i)

    def test_limiter(self):
        self.calls = []
        self.simultaneous = 0
        self.peak_simultaneous = 0
        l = limiter.ConcurrencyLimiter()
        dl = []
        for i in range(20):
            dl.append(l.add(self.job, i, foo=str(i)))
        d = defer.DeferredList(dl, fireOnOneErrback=True)
        def _done(res):
            self.failUnlessEqual(self.simultaneous, 0)
            self.failUnless(self.peak_simultaneous <= 10)
            self.failUnlessEqual(len(self.calls), 20)
            for i in range(20):
                self.failUnless( (i, str(i)) in self.calls)
        d.addCallback(_done)
        return d

    def test_errors(self):
        self.calls = []
        self.simultaneous = 0
        self.peak_simultaneous = 0
        l = limiter.ConcurrencyLimiter()
        dl = []
        for i in range(20):
            dl.append(l.add(self.job, i, foo=str(i)))
        d2 = l.add(self.bad_job, 21, "21")
        d = defer.DeferredList(dl, fireOnOneErrback=True)
        def _most_done(res):
            results = []
            for (success, result) in res:
                self.failUnlessEqual(success, True)
                results.append(result)
            results.sort()
            expected_results = ["done %d" % i for i in range(20)]
            expected_results.sort()
            self.failUnlessEqual(results, expected_results)
            self.failUnless(self.peak_simultaneous <= 10)
            self.failUnlessEqual(len(self.calls), 20)
            for i in range(20):
                self.failUnless( (i, str(i)) in self.calls)
            def _good(res):
                self.fail("should have failed, not got %s" % (res,))
            def _err(f):
                f.trap(ValueError)
                self.failUnless("bad_job 21" in str(f))
            d2.addCallbacks(_good, _err)
            return d2
        d.addCallback(_most_done)
        def _all_done(res):
            self.failUnlessEqual(self.simultaneous, 0)
            self.failUnless(self.peak_simultaneous <= 10)
            self.failUnlessEqual(len(self.calls), 20)
            for i in range(20):
                self.failUnless( (i, str(i)) in self.calls)
        d.addCallback(_all_done)
        return d


ctr = [0]
class EqButNotIs(object):
    def __init__(self, x):
@@ -540,20 +462,6 @@ class EqButNotIs(object):
        return self.x == other


class SampleError(Exception):
    pass

class Log(unittest.TestCase):
    def test_err(self):
        try:
            raise SampleError("simple sample")
        except:
            f = Failure()
        tahoe_log.err(format="intentional sample error",
                      failure=f, level=tahoe_log.OPERATIONAL, umid="wO9UoQ")
        self.flushLoggedErrors(SampleError)


class YAML(unittest.TestCase):
    def test_convert(self):
        data = yaml.safe_dump(["str", u"unicode", u"\u1234nicode"])
@@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import sys
import pkg_resources
@ -11,11 +11,20 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
    "allmydata.crypto",
    "allmydata.crypto.aes",
    "allmydata.crypto.ed25519",
    "allmydata.crypto.error",
    "allmydata.crypto.rsa",
    "allmydata.crypto.util",
    "allmydata.hashtree",
    "allmydata.immutable.happiness_upload",
    "allmydata.test.common_py3",
    "allmydata.util._python3",
    "allmydata.util.abbreviate",
    "allmydata.util.assertutil",
    "allmydata.util.base32",
@ -23,32 +32,35 @@ PORTED_MODULES = [
    "allmydata.util.deferredutil",
    "allmydata.util.fileutil",
    "allmydata.util.dictutil",
    "allmydata.util.gcutil",
    "allmydata.util.hashutil",
    "allmydata.util.humanreadable",
    "allmydata.util.iputil",
    "allmydata.util.log",
    "allmydata.util.mathutil",
    "allmydata.util.namespace",
    "allmydata.util.netstring",
    "allmydata.util.observer",
    "allmydata.util.pipeline",
    "allmydata.util.pollmixin",
    "allmydata.util._python3",
    "allmydata.util.spans",
    "allmydata.util.statistics",
    "allmydata.util.time_format",
    "allmydata.test.common_py3",
]

PORTED_TEST_MODULES = [
    "allmydata.test.test_abbreviate",
    "allmydata.test.test_base32",
    "allmydata.test.test_base62",
    "allmydata.test.test_crypto",
    "allmydata.test.test_deferredutil",
    "allmydata.test.test_dictutil",
    "allmydata.test.test_happiness",
    "allmydata.test.test_hashtree",
    "allmydata.test.test_hashutil",
    "allmydata.test.test_humanreadable",
    "allmydata.test.test_iputil",
    "allmydata.test.test_log",
    "allmydata.test.test_netstring",
    "allmydata.test.test_observer",
    "allmydata.test.test_pipeline",
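PORTED_MODULES and PORTED_TEST_MODULES appear to act as a porting ratchet: a module is added once it works on Python 3, and CI then holds that ground. The real driver is misc/python3/ratchet.sh (wired into tox.ini further down); the snippet below is only a sketch of the idea and is not code from the patch.

# Hypothetical sketch of a ratchet check; the project's actual mechanism is
# misc/python3/ratchet.sh and may differ from this.
import subprocess
import sys

from allmydata.util._python3 import PORTED_TEST_MODULES

# Run only the test modules already known to pass on Python 3, so that a
# regression in any previously ported module fails the build.
sys.exit(subprocess.call(["trial"] + PORTED_TEST_MODULES))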
@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import re
from datetime import timedelta

@ -14,7 +14,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401


# The API importers expect:

@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

if PY2:
    def backwardscompat_bytes(b):

@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

if PY2:
    import string

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import time

@ -13,7 +13,7 @@ if PY2:
    # IMPORTANT: We deliberately don't import dict. The issue is that we're
    # subclassing dict, so we'd end up exposing Python 3 dict APIs to lots of
    # code that doesn't support it.
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401


class DictOfSets(dict):
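The dictutil comment above is the clearest statement in this series of why certain builtins are excluded from the backport import. A minimal illustration of the hazard follows; the method body is invented for the example and is not part of the patch.

# If dictutil did "from builtins import dict" on Python 2, the base class
# below would silently become the future package's dict backport, and every
# caller of DictOfSets would be handed Python-3-style dict behaviour that,
# as the comment above says, much of the surrounding code doesn't support.
class DictOfSets(dict):
    def add(self, key, value):
        # Invented body, for illustration only.
        if key in self:
            self[key].add(value)
        else:
            self[key] = set([value])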
@ -7,7 +7,17 @@ Helpers for managing garbage collection.
a result. Register allocation and release of *bare* file descriptors with
this object (file objects, socket objects, etc, have their own integration
with the garbage collector and don't need to bother with this).

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

__all__ = [
    "fileDescriptorResource",
@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import chr as byteschr

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import os
from reprlib import Repr

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2, native_str
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import os, re, socket, subprocess, errno
from sys import platform
@ -1,40 +0,0 @@

from twisted.internet import defer
from foolscap.api import eventually

class ConcurrencyLimiter(object):
    """I implement a basic concurrency limiter. Add work to it in the form of
    (callable, args, kwargs) tuples. No more than LIMIT callables will be
    outstanding at any one time.
    """

    def __init__(self, limit=10):
        self.limit = limit
        self.pending = []
        self.active = 0

    def __repr__(self):
        return "<Limiter with %d/%d/%d>" % (self.active, len(self.pending),
                                            self.limit)

    def add(self, cb, *args, **kwargs):
        d = defer.Deferred()
        task = (cb, args, kwargs, d)
        self.pending.append(task)
        self.maybe_start_task()
        return d

    def maybe_start_task(self):
        if self.active >= self.limit:
            return
        if not self.pending:
            return
        (cb, args, kwargs, done_d) = self.pending.pop(0)
        self.active += 1
        d = defer.maybeDeferred(cb, *args, **kwargs)
        d.addBoth(self._done, done_d)

    def _done(self, res, done_d):
        self.active -= 1
        eventually(done_d.callback, res)
        eventually(self.maybe_start_task)
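The deleted helper above caps the number of simultaneously outstanding calls at limit and hands each caller a Deferred that fires with the callable's result once it has run, which is exactly what the tests near the top of this diff exercise. For comparison only (this is not part of the patch), Twisted's own DeferredSemaphore provides the same "at most N outstanding calls" behaviour:

# Sketch, not from the patch: the equivalent limit using Twisted's built-in
# DeferredSemaphore, mirroring the shape of the tests shown earlier.
from twisted.internet import defer

def run_limited(job, count=20, limit=10):
    sem = defer.DeferredSemaphore(limit)   # at most `limit` concurrent runs
    work = [sem.run(job, i, foo=str(i)) for i in range(count)]
    return defer.DeferredList(work, fireOnOneErrback=True)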
@ -1,4 +1,18 @@
from allmydata.util import nummedobj
"""
Logging utilities.

Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from pyutil import nummedobj

from foolscap.logging import log
from twisted.python import log as tw_log
@ -36,8 +50,8 @@ class LogMixin(object):
    def log(self, msg, facility=None, parent=None, *args, **kwargs):
        if facility is None:
            facility = self._facility
        pmsgid = None
        if parent is None:
            pmsgid = parent
        if pmsgid is None:
            pmsgid = self._parentmsgid
        if pmsgid is None:
            pmsgid = self._grandparentmsgid
@ -54,6 +68,8 @@ class PrefixingLogMixin(nummedobj.NummedObj, LogMixin):
        LogMixin.__init__(self, facility, grandparentmsgid)

        if prefix:
            if isinstance(prefix, bytes):
                prefix = prefix.decode("utf-8", errors="replace")
            self._prefix = "%s(%s): " % (self.__repr__(), prefix)
        else:
            self._prefix = "%s: " % (self.__repr__(),)
@ -13,7 +13,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401


# The API importers expect:

@ -10,7 +10,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from past.builtins import long
@ -1,42 +0,0 @@
import collections, itertools, functools

objnums = collections.defaultdict(itertools.count)


@functools.total_ordering
class NummedObj(object):
    """
    This is useful for nicer debug printouts. Instead of objects of the same class being
    distinguished from one another by their memory address, they each get a unique number, which
    can be read as "the first object of this class", "the second object of this class", etc. This
    is especially useful because separate runs of a program will yield identical debug output,
    (assuming that the objects get created in the same order in each run). This makes it possible
    to diff outputs from separate runs to see what changed, without having to ignore a difference
    on every line due to different memory addresses of objects.
    """

    def __init__(self, klass=None):
        """
        @param klass: in which class are you counted? If default value of `None', then self.__class__ will be used.
        """
        if klass is None:
            klass = self.__class__
        self._classname = klass.__name__

        self._objid = objnums[self._classname].next()

    def __repr__(self):
        return "<%s #%d>" % (self._classname, self._objid,)

    def __lt__(self, other):
        if isinstance(other, NummedObj):
            return (self._objid, self._classname,) < (other._objid, other._classname,)
        return NotImplemented

    def __eq__(self, other):
        if isinstance(other, NummedObj):
            return (self._objid, self._classname,) == (other._objid, other._classname,)
        return NotImplemented

    def __hash__(self):
        return id(self)
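NummedObj is not gone from the project, only from this copy: the log.py hunk above switches to the identically named class from the pyutil package. The snippet below only illustrates what the numbered repr buys in debug output; the counter start and exact formatting are assumptions about pyutil's implementation rather than facts from the patch.

# Illustration only; assumes pyutil's NummedObj keeps roughly the same repr
# shape as the deleted copy above.
from pyutil import nummedobj

class Fetcher(nummedobj.NummedObj):
    pass

# Two instances print as something like "<Fetcher #0> <Fetcher #1>", so debug
# logs from separate runs stay diffable instead of varying by memory address.
print(Fetcher(), Fetcher())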
@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import weakref
from twisted.internet import defer

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from twisted.internet import defer
from twisted.python.failure import Failure

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import time
from twisted.internet import task

@ -5,7 +5,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401


class Spans(object):

@ -18,7 +18,7 @@ from __future__ import print_function

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

from allmydata.util.mathutil import round_sigfigs
import math

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from future.utils import native_str

import calendar, datetime, re, time

@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

import re


@ -11,7 +11,7 @@ from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, int, list, object, range, str, max, min # noqa: F401
    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401

__all__ = [
    "PackagingError",
tox.ini

@ -49,8 +49,8 @@ commands =
    tahoe --version

[testenv:py36]
# git inside of ratchet.sh needs $HOME.
passenv = HOME
# On macOS, git inside of ratchet.sh needs $HOME.
passenv = {[testenv]passenv} HOME
commands = {toxinidir}/misc/python3/ratchet.sh

[testenv:integration]

@ -77,6 +77,8 @@ commands =
    coverage xml

[testenv:codechecks]
# On macOS, git inside of towncrier needs $HOME.
passenv = HOME
whitelist_externals =
    /bin/mv
commands =

@ -208,7 +210,9 @@ extras =
deps =
    {[testenv]deps}
    packaging
    pyinstaller
    # PyInstaller 4.0 drops Python 2 support. When we finish porting to
    # Python 3 we can reconsider this constraint.
    pyinstaller < 4.0
# Setting PYTHONHASHSEED to a known value assists with reproducible builds.
# See https://pyinstaller.readthedocs.io/en/stable/advanced-topics.html#creating-a-reproducible-build
setenv=PYTHONHASHSEED=1