Merge branch '3376.encodingutil-python-3' into 3377.configutil-connection_status-python-3

This commit is contained in:
Itamar Turner-Trauring 2020-08-13 15:53:02 -04:00
commit 6e24defe4b
29 changed files with 547 additions and 356 deletions

View File

@ -1,5 +1,6 @@
ARG TAG
FROM centos:${TAG}
ARG PYTHON_VERSION
ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@ -11,8 +12,8 @@ RUN yum install --assumeyes \
git \
sudo \
make automake gcc gcc-c++ \
python2 \
python2-devel \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-devel \
libffi-devel \
openssl-devel \
libyaml \
@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@ -1,5 +1,6 @@
ARG TAG
FROM debian:${TAG}
ARG PYTHON_VERSION
ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@ -8,22 +9,22 @@ ENV BUILD_SRC_ROOT /tmp/project
RUN apt-get --quiet update && \
apt-get --quiet --yes install \
git \
lsb-release \
git \
lsb-release \
sudo \
build-essential \
python2.7 \
python2.7-dev \
libffi-dev \
libssl-dev \
libyaml-dev \
virtualenv
build-essential \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-dev \
libffi-dev \
libssl-dev \
libyaml-dev \
virtualenv
# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
# Only the integration tests currently need this but it doesn't hurt to always
# have it present and it's simpler than building a whole extra image just for

View File

@ -1,5 +1,6 @@
ARG TAG
FROM fedora:${TAG}
ARG PYTHON_VERSION
ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@ -11,8 +12,8 @@ RUN yum install --assumeyes \
git \
sudo \
make automake gcc gcc-c++ \
python \
python-devel \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-devel \
libffi-devel \
openssl-devel \
libyaml-devel \
@ -23,4 +24,4 @@ RUN yum install --assumeyes \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@ -1,49 +0,0 @@
ARG TAG
FROM vbatts/slackware:${TAG}
ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
# This will get updated by the CircleCI checkout step.
ENV BUILD_SRC_ROOT /tmp/project
# Be careful with slackpkg. If the package name given doesn't match anything,
# slackpkg still claims to succeed but you're totally screwed. Slackware
# updates versions of packaged software so including too much version prefix
# is a good way to have your install commands suddenly begin not installing
# anything.
RUN slackpkg update && \
slackpkg install \
openssh-7 git-2 \
ca-certificates \
sudo-1 \
make-4 \
automake-1 \
kernel-headers \
glibc-2 \
binutils-2 \
gcc-5 \
gcc-g++-5 \
python-2 \
libffi-3 \
libyaml-0 \
sqlite-3 \
icu4c-56 \
libmpc-1 </dev/null && \
slackpkg upgrade \
openssl-1 </dev/null
# neither virtualenv nor pip is packaged.
# do it the hard way.
# and it is extra hard since it is slackware.
RUN slackpkg install \
cyrus-sasl-2 \
curl-7 </dev/null && \
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
python get-pip.py && \
pip install virtualenv
# Get the project source. This is better than it seems. CircleCI will
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"

View File

@ -1,5 +1,6 @@
ARG TAG
FROM ubuntu:${TAG}
ARG PYTHON_VERSION
ENV WHEELHOUSE_PATH /tmp/wheelhouse
ENV VIRTUALENV_PATH /tmp/venv
@ -13,8 +14,8 @@ RUN apt-get --quiet update && \
apt-get --quiet --yes install \
sudo \
build-essential \
python2.7 \
python2.7-dev \
python${PYTHON_VERSION} \
python${PYTHON_VERSION}-dev \
libffi-dev \
libssl-dev \
libyaml-dev \
@ -26,4 +27,4 @@ RUN apt-get --quiet update && \
# *update* this checkout on each job run, saving us more time per-job.
COPY . ${BUILD_SRC_ROOT}
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@ -26,13 +26,14 @@ workflows:
- "centos-8"
- "slackware-14.2"
- "nixos-19.09"
# Test against PyPy 2.7
- "pypy2.7-buster"
# Just one Python 3.6 configuration while the port is in-progress.
- "python3.6"
# Other assorted tasks and configurations
- "lint"
- "pyinstaller"
@ -72,8 +73,8 @@ workflows:
- "build-image-fedora-28"
- "build-image-fedora-29"
- "build-image-centos-8"
- "build-image-slackware-14.2"
- "build-image-pypy-2.7-buster"
- "build-image-python36-ubuntu"
jobs:
@ -121,7 +122,7 @@ jobs:
debian-9: &DEBIAN
docker:
- image: "tahoelafsci/debian:9"
- image: "tahoelafsci/debian:9-py2.7"
user: "nobody"
environment: &UTF_8_ENVIRONMENT
@ -198,14 +199,14 @@ jobs:
debian-8:
<<: *DEBIAN
docker:
- image: "tahoelafsci/debian:8"
- image: "tahoelafsci/debian:8-py2.7"
user: "nobody"
pypy2.7-buster:
<<: *DEBIAN
docker:
- image: "tahoelafsci/pypy:2.7-buster"
- image: "tahoelafsci/pypy:buster-py2"
user: "nobody"
environment:
@ -261,17 +262,28 @@ jobs:
ubuntu-16.04:
<<: *DEBIAN
docker:
- image: "tahoelafsci/ubuntu:16.04"
- image: "tahoelafsci/ubuntu:16.04-py2.7"
user: "nobody"
ubuntu-18.04:
ubuntu-18.04: &UBUNTU_18_04
<<: *DEBIAN
docker:
- image: "tahoelafsci/ubuntu:18.04"
- image: "tahoelafsci/ubuntu:18.04-py2.7"
user: "nobody"
python3.6:
<<: *UBUNTU_18_04
docker:
- image: "tahoelafsci/ubuntu:18.04-py3"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py36"
ubuntu-20.04:
<<: *DEBIAN
docker:
@ -281,7 +293,7 @@ jobs:
centos-8: &RHEL_DERIV
docker:
- image: "tahoelafsci/centos:8"
- image: "tahoelafsci/centos:8-py2"
user: "nobody"
environment: *UTF_8_ENVIRONMENT
@ -303,37 +315,17 @@ jobs:
fedora-28:
<<: *RHEL_DERIV
docker:
- image: "tahoelafsci/fedora:28"
- image: "tahoelafsci/fedora:28-py"
user: "nobody"
fedora-29:
<<: *RHEL_DERIV
docker:
- image: "tahoelafsci/fedora:29"
- image: "tahoelafsci/fedora:29-py"
user: "nobody"
slackware-14.2:
docker:
- image: "tahoelafsci/slackware:14.2"
user: "nobody"
environment: *UTF_8_ENVIRONMENT
# pip cannot install packages if the working directory is not readable.
# We want to run a lot of steps as nobody instead of as root.
working_directory: "/tmp/project"
steps:
- "checkout"
- run: *SETUP_VIRTUALENV
- run: *RUN_TESTS
- store_test_results: *STORE_TEST_RESULTS
- store_artifacts: *STORE_TEST_LOG
- store_artifacts: *STORE_OTHER_ARTIFACTS
- run: *SUBMIT_COVERAGE
nixos-19.09:
docker:
# Run in a highly Nix-capable environment.
@ -397,8 +389,9 @@ jobs:
- image: "docker:17.05.0-ce-git"
environment:
DISTRO: "tahoelafsci/<DISTRO>:foo"
TAG: "tahoelafsci/distro:<TAG>"
DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
TAG: "tahoelafsci/distro:<TAG>-py2"
PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION>"
steps:
- "checkout"
@ -450,13 +443,14 @@ jobs:
docker \
build \
--build-arg TAG=${TAG} \
-t tahoelafsci/${DISTRO}:${TAG} \
--build-arg PYTHON_VERSION=${PYTHON_VERSION} \
-t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
-f ~/project/.circleci/Dockerfile.${DISTRO} \
~/project/
- run:
name: "Push image"
command: |
docker push tahoelafsci/${DISTRO}:${TAG}
docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
build-image-debian-8:
@ -465,6 +459,7 @@ jobs:
environment:
DISTRO: "debian"
TAG: "8"
PYTHON_VERSION: "2.7"
build-image-debian-9:
@ -473,6 +468,7 @@ jobs:
environment:
DISTRO: "debian"
TAG: "9"
PYTHON_VERSION: "2.7"
build-image-ubuntu-16.04:
@ -481,6 +477,7 @@ jobs:
environment:
DISTRO: "ubuntu"
TAG: "16.04"
PYTHON_VERSION: "2.7"
build-image-ubuntu-18.04:
@ -489,6 +486,16 @@ jobs:
environment:
DISTRO: "ubuntu"
TAG: "18.04"
PYTHON_VERSION: "2.7"
build-image-python36-ubuntu:
<<: *BUILD_IMAGE
environment:
DISTRO: "ubuntu"
TAG: "18.04"
PYTHON_VERSION: "3"
build-image-ubuntu-20.04:
@ -505,6 +512,7 @@ jobs:
environment:
DISTRO: "centos"
TAG: "8"
PYTHON_VERSION: "2"
build-image-fedora-28:
@ -513,6 +521,8 @@ jobs:
environment:
DISTRO: "fedora"
TAG: "28"
# The default on Fedora (this version anyway) is still Python 2.
PYTHON_VERSION: ""
build-image-fedora-29:
@ -523,17 +533,13 @@ jobs:
TAG: "29"
build-image-slackware-14.2:
<<: *BUILD_IMAGE
environment:
DISTRO: "slackware"
TAG: "14.2"
build-image-pypy-2.7-buster:
<<: *BUILD_IMAGE
environment:
DISTRO: "pypy"
TAG: "2.7-buster"
TAG: "buster"
# We only have Python 2 for PyPy right now so there's no support for
# setting up PyPy 3 in the image building toolchain. This value is just
# for constructing the right Docker image tag.
PYTHON_VERSION: "2"

View File

@ -36,8 +36,9 @@ PIP="${BOOTSTRAP_VENV}/bin/pip"
# Tell pip where it can find any existing wheels.
export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
# Populate the wheelhouse, if necessary.
"${PIP}" \
# Populate the wheelhouse, if necessary. zfec 1.5.3 can only be built with a
# UTF-8 environment so make sure we have one, at least for this invocation.
LANG="en_US.UTF-8" "${PIP}" \
wheel \
--wheel-dir "${WHEELHOUSE_PATH}" \
"${PROJECT_ROOT}"[test] \

View File

@ -81,7 +81,16 @@ ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
${TAHOE_LAFS_TOX_ARGS} || "${alternative}"
if [ -n "${ARTIFACTS}" ]; then
if [ ! -e "${SUBUNIT2}" ]; then
echo "subunitv2 output file does not exist: ${SUBUNIT2}"
exit 1
fi
# Create a junitxml results area.
mkdir -p "$(dirname "${JUNITXML}")"
${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
# Always succeed even if subunit2junitxml fails. subunit2junitxml signals
# failure if the stream it is processing contains test failures. This is
# not what we care about. If we cared about it, the test command above
# would have signalled failure already and we wouldn't be here.
"${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || true
fi

View File

@ -1,40 +0,0 @@
sudo: false
language: python
cache: pip
dist: xenial
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
git:
depth: 1000
env:
global:
- TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
install:
- pip install --upgrade tox setuptools virtualenv
- echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py
script:
- |
set -eo pipefail
tox -e ${T}
notifications:
email: false
irc:
channels: "chat.freenode.net#tahoe-lafs"
on_success: always # for testing
on_failure: always
template:
- "%{repository}#%{build_number} [%{branch}: %{commit} by %{author}] %{message}"
- "Changes: %{compare_url} | Details: %{build_url}"
matrix:
include:
- os: linux
python: '3.6'
env: T=py36
fast_finish: true

View File

@ -9,6 +9,9 @@ allmydata.test.test_abbreviate.Abbreviate.test_abbrev_time_year
allmydata.test.test_abbreviate.Abbreviate.test_parse_space
allmydata.test.test_abbreviate.Abbreviate.test_space
allmydata.test.test_abbreviate.Abbreviate.test_time
allmydata.test.test_backupdb.BackupDB.test_basic
allmydata.test.test_backupdb.BackupDB.test_upgrade_v1_v2
allmydata.test.test_backupdb.BackupDB.test_wrong_version
allmydata.test.test_base32.Base32.test_a2b
allmydata.test.test_base32.Base32.test_a2b_b2a_match_Pythons
allmydata.test.test_base32.Base32.test_b2a
@ -70,6 +73,74 @@ allmydata.test.test_deferredutil.DeferredUtilTests.test_success
allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
allmydata.test.test_dictutil.DictUtil.test_auxdict
allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
allmydata.test.test_encodingutil.EncodingUtilErrors.test_argv_to_unicode
allmydata.test.test_encodingutil.EncodingUtilErrors.test_get_io_encoding
allmydata.test.test_encodingutil.EncodingUtilErrors.test_get_io_encoding_not_from_stdout
allmydata.test.test_encodingutil.EncodingUtilErrors.test_no_unicode_normalization
allmydata.test.test_encodingutil.EncodingUtilErrors.test_unicode_to_output
allmydata.test.test_encodingutil.FilePaths.test_extend_filepath
allmydata.test.test_encodingutil.FilePaths.test_to_filepath
allmydata.test.test_encodingutil.FilePaths.test_unicode_from_filepath
allmydata.test.test_encodingutil.FilePaths.test_unicode_segments_from
allmydata.test.test_encodingutil.MacOSXLeopard.test_argv_to_unicode
allmydata.test.test_encodingutil.MacOSXLeopard.test_listdir_unicode
allmydata.test.test_encodingutil.MacOSXLeopard.test_unicode_platform_py3
allmydata.test.test_encodingutil.MacOSXLeopard.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.MacOSXLeopard.test_unicode_to_output
allmydata.test.test_encodingutil.MacOSXLeopard.test_unicode_to_url
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_argv_to_unicode
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_listdir_unicode
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_unicode_platform_py3
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_unicode_to_output
allmydata.test.test_encodingutil.MacOSXLeopard7bit.test_unicode_to_url
allmydata.test.test_encodingutil.OpenBSD.test_argv_to_unicode
allmydata.test.test_encodingutil.OpenBSD.test_listdir_unicode
allmydata.test.test_encodingutil.OpenBSD.test_unicode_platform_py3
allmydata.test.test_encodingutil.OpenBSD.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.OpenBSD.test_unicode_to_output
allmydata.test.test_encodingutil.OpenBSD.test_unicode_to_url
allmydata.test.test_encodingutil.QuoteOutput.test_quote_output_ascii
allmydata.test.test_encodingutil.QuoteOutput.test_quote_output_default
allmydata.test.test_encodingutil.QuoteOutput.test_quote_output_latin1
allmydata.test.test_encodingutil.QuoteOutput.test_quote_output_utf8
allmydata.test.test_encodingutil.QuotePaths.test_quote_filepath
allmydata.test.test_encodingutil.QuotePaths.test_quote_path
allmydata.test.test_encodingutil.StdlibUnicode.test_mkdir_open_exists_abspath_listdir_expanduser
allmydata.test.test_encodingutil.TestToFromStr.test_from_utf8_or_none
allmydata.test.test_encodingutil.TestToFromStr.test_to_str
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_argv_to_unicode
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_listdir_unicode
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_unicode_platform_py3
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_unicode_to_output
allmydata.test.test_encodingutil.UbuntuKarmicLatin1.test_unicode_to_url
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_argv_to_unicode
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_listdir_unicode
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_unicode_platform_py3
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_unicode_to_output
allmydata.test.test_encodingutil.UbuntuKarmicUTF8.test_unicode_to_url
allmydata.test.test_encodingutil.Windows.test_argv_to_unicode
allmydata.test.test_encodingutil.Windows.test_unicode_platform_py3
allmydata.test.test_encodingutil.Windows.test_unicode_to_argv_py3
allmydata.test.test_encodingutil.Windows.test_unicode_to_output
allmydata.test.test_encodingutil.Windows.test_unicode_to_url
allmydata.test.test_happiness.Happiness.test_100
allmydata.test.test_happiness.Happiness.test_calc_happy
allmydata.test.test_happiness.Happiness.test_everything_broken
allmydata.test.test_happiness.Happiness.test_hypothesis0
allmydata.test.test_happiness.Happiness.test_hypothesis_0
allmydata.test.test_happiness.Happiness.test_hypothesis_1
allmydata.test.test_happiness.Happiness.test_placement_1
allmydata.test.test_happiness.Happiness.test_placement_simple
allmydata.test.test_happiness.Happiness.test_redistribute
allmydata.test.test_happiness.Happiness.test_unhappy
allmydata.test.test_happiness.HappinessUtils.test_residual_0
allmydata.test.test_happiness.HappinessUtils.test_trivial_flow_graph
allmydata.test.test_happiness.HappinessUtils.test_trivial_maximum_graph
allmydata.test.test_happiness.PlacementTests.test_hypothesis_unhappy
allmydata.test.test_happiness.PlacementTests.test_more_hypothesis
allmydata.test.test_hashtree.Complete.test_create
allmydata.test.test_hashtree.Complete.test_dump
allmydata.test.test_hashtree.Complete.test_needed_hashes
@ -143,6 +214,29 @@ allmydata.test.test_time_format.TimeFormat.test_format_time_y2038
allmydata.test.test_time_format.TimeFormat.test_iso_utc
allmydata.test.test_time_format.TimeFormat.test_parse_date
allmydata.test.test_time_format.TimeFormat.test_parse_duration
allmydata.test.test_util.FileUtil.test_abspath_expanduser_unicode
allmydata.test.test_util.FileUtil.test_create_long_path
allmydata.test.test_util.FileUtil.test_disk_stats
allmydata.test.test_util.FileUtil.test_disk_stats_avail_nonnegative
allmydata.test.test_util.FileUtil.test_du
allmydata.test.test_util.FileUtil.test_encrypted_tempfile
allmydata.test.test_util.FileUtil.test_get_pathinfo
allmydata.test.test_util.FileUtil.test_get_pathinfo_symlink
allmydata.test.test_util.FileUtil.test_make_dirs_with_absolute_mode
allmydata.test.test_util.FileUtil.test_remove_if_possible
allmydata.test.test_util.FileUtil.test_rename
allmydata.test.test_util.FileUtil.test_rename_no_overwrite
allmydata.test.test_util.FileUtil.test_replace_file
allmydata.test.test_util.FileUtil.test_rm_dir
allmydata.test.test_util.FileUtil.test_windows_expanduser_win7
allmydata.test.test_util.FileUtil.test_windows_expanduser_xp
allmydata.test.test_util.FileUtil.test_write_atomically
allmydata.test.test_util.IDLib.test_nodeid_b2a
allmydata.test.test_util.Math.test_round_sigfigs
allmydata.test.test_util.PollMixinTests.test_PollMixin_False_then_True
allmydata.test.test_util.PollMixinTests.test_PollMixin_True
allmydata.test.test_util.PollMixinTests.test_timeout
allmydata.test.test_util.YAML.test_convert
allmydata.test.test_version.CheckRequirement.test_cross_check
allmydata.test.test_version.CheckRequirement.test_cross_check_unparseable_versions
allmydata.test.test_version.CheckRequirement.test_extract_openssl_version

View File

@ -9,11 +9,10 @@ base=$(pwd)
# Actually, though, trial outputs some things that are only gitignored in the project root.
cd "../.."
# Since both of the next calls are expected to exit non-0, relax our guard.
set +e
SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
set -e
export SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2"
# Since the next two calls are expected to exit non-0, relax our guard.
trial --reporter=subunitv2-file allmydata || true
subunit2junitxml < "${SUBUNITREPORTER_OUTPUT_PATH}" > "$base/results.xml" || true
# Okay, now we're clear.
cd "$base"
@ -32,6 +31,14 @@ set -e
if [ $TERM = 'dumb' ]; then
export TERM=ansi
fi
git diff "$tracking_filename"
exit $code
echo "The ${tracking_filename} diff is:"
echo "================================="
# "git diff" gets pretty confused in this execution context when trying to
# write to stdout. Somehow it fails with SIGTTOU.
git diff -- "${tracking_filename}" > tracking.diff
cat tracking.diff
echo "================================="
echo "Exiting with code ${code} from ratchet.py."
exit ${code}

View File

@ -0,0 +1 @@
Slackware 14.2 is no longer a Tahoe-LAFS supported platform.

0
newsfragments/3336.minor Normal file
View File

0
newsfragments/3358.minor Normal file
View File

0
newsfragments/3370.minor Normal file
View File

0
newsfragments/3376.minor Normal file
View File

View File

@ -741,7 +741,7 @@ class _Client(node.Node, pollmixin.PollMixin):
private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
public_key_str = ed25519.string_from_verifying_key(public_key)
self.config.write_config_file("node.pubkey", public_key_str + "\n")
self.config.write_config_file("node.pubkey", public_key_str + "\n", "w")
self._node_private_key = private_key
self._node_public_key = public_key

View File

@ -1,5 +1,5 @@
"""Directory Node implementation."""
import time, unicodedata
import time
from zope.interface import implementer
from twisted.internet import defer
@ -18,7 +18,7 @@ from allmydata.check_results import DeepCheckResults, \
DeepCheckAndRepairResults
from allmydata.monitor import Monitor
from allmydata.util import hashutil, base32, log
from allmydata.util.encodingutil import quote_output
from allmydata.util.encodingutil import quote_output, normalize
from allmydata.util.assertutil import precondition
from allmydata.util.netstring import netstring, split_netstring
from allmydata.util.consumer import download_to_data
@ -101,12 +101,6 @@ def update_metadata(metadata, new_metadata, now):
return metadata
# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.
def normalize(namex):
return unicodedata.normalize('NFC', namex)
# TODO: {Deleter,MetadataSetter,Adder}.modify all start by unpacking the
# contents and end by repacking them. It might be better to apply them to
# the unpacked contents.

View File

@ -1,5 +1,20 @@
"""
Algorithms for figuring out happiness, the number of unique nodes the data is
on.
from Queue import PriorityQueue
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# We omit dict, just in case newdict breaks things for external Python 2 code.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
from queue import PriorityQueue
def augmenting_path_for(graph):
@ -35,9 +50,9 @@ def bfs(graph, s):
GRAY = 1
# BLACK vertices are those we have seen and explored
BLACK = 2
color = [WHITE for i in xrange(len(graph))]
predecessor = [None for i in xrange(len(graph))]
distance = [-1 for i in xrange(len(graph))]
color = [WHITE for i in range(len(graph))]
predecessor = [None for i in range(len(graph))]
distance = [-1 for i in range(len(graph))]
queue = [s] # vertices that we haven't explored yet.
color[s] = GRAY
distance[s] = 0
@ -58,9 +73,9 @@ def residual_network(graph, f):
flow network represented by my graph and f arguments. graph is a
flow network in adjacency-list form, and f is a flow in graph.
"""
new_graph = [[] for i in xrange(len(graph))]
cf = [[0 for s in xrange(len(graph))] for sh in xrange(len(graph))]
for i in xrange(len(graph)):
new_graph = [[] for i in range(len(graph))]
cf = [[0 for s in range(len(graph))] for sh in range(len(graph))]
for i in range(len(graph)):
for v in graph[i]:
if f[i][v] == 1:
# We add an edge (v, i) with cf[v,i] = 1. This means
@ -135,7 +150,7 @@ def _compute_maximum_graph(graph, shareIndices):
return {}
dim = len(graph)
flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
flow_function = [[0 for sh in range(dim)] for s in range(dim)]
residual_graph, residual_function = residual_network(graph, flow_function)
while augmenting_path_for(residual_graph):
@ -260,9 +275,9 @@ def _servermap_flow_graph(peers, shares, servermap):
#print "share_to_index %s" % share_to_index
#print "servermap %s" % servermap
for peer in peers:
if servermap.has_key(peer):
if peer in servermap:
for s in servermap[peer]:
if share_to_index.has_key(s):
if s in share_to_index:
indexedShares.append(share_to_index[s])
graph.insert(peer_to_index[peer], indexedShares)
for share in shares:
@ -373,7 +388,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
new_mappings = _calculate_mappings(new_peers, new_shares)
#print "new_peers %s" % new_peers
#print "new_mappings %s" % new_mappings
mappings = dict(readonly_mappings.items() + existing_mappings.items() + new_mappings.items())
mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
homeless_shares = set()
for share in mappings:
if mappings[share] is None:
@ -384,7 +399,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
mappings, homeless_shares,
{
k: v
for k, v in peers_to_shares.items()
for k, v in list(peers_to_shares.items())
if k not in readonly_peers
}
)
@ -401,5 +416,5 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
return {
k: v.pop() if v else next(peer_iter)
for k, v in mappings.items()
for k, v in list(mappings.items())
}

View File

@ -13,11 +13,17 @@ from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import unicode
import os
import time
import signal
from twisted.internet import reactor
from twisted.trial import unittest
from ..util.assertutil import precondition
from ..util.encodingutil import unicode_platform, get_filesystem_encoding
class TimezoneMixin(object):
@ -65,3 +71,20 @@ class SignalMixin(object):
if self.sigchldHandler:
signal.signal(signal.SIGCHLD, self.sigchldHandler)
return super(SignalMixin, self).tearDown()
class ReallyEqualMixin(object):
def failUnlessReallyEqual(self, a, b, msg=None):
self.assertEqual(a, b, msg)
self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
def skip_if_cannot_represent_filename(u):
precondition(isinstance(u, unicode))
enc = get_filesystem_encoding()
if not unicode_platform():
try:
u.encode(enc)
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")

View File

@ -9,22 +9,17 @@ from twisted.python import failure
from twisted.trial import unittest
from ..util.assertutil import precondition
from allmydata.util.encodingutil import (unicode_platform, get_filesystem_encoding,
get_io_encoding)
from ..scripts import runner
from .common_py3 import SignalMixin
from allmydata.util.encodingutil import get_io_encoding
from future.utils import PY2
if PY2: # XXX this is a hack that makes some tests pass on Python3, remove
# in the future
from ..scripts import runner
# Imported for backwards compatibility:
from .common_py3 import (
SignalMixin, skip_if_cannot_represent_filename, ReallyEqualMixin,
)
def skip_if_cannot_represent_filename(u):
precondition(isinstance(u, unicode))
enc = get_filesystem_encoding()
if not unicode_platform():
try:
u.encode(enc)
except UnicodeEncodeError:
raise unittest.SkipTest("A non-ASCII filename could not be encoded on this platform.")
def skip_if_cannot_represent_argv(u):
precondition(isinstance(u, unicode))
try:
@ -84,12 +79,6 @@ def flip_one_bit(s, offset=0, size=None):
return result
class ReallyEqualMixin(object):
def failUnlessReallyEqual(self, a, b, msg=None):
self.assertEqual(a, b, msg)
self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
class StallMixin(object):
def stall(self, res=None, delay=1):
d = defer.Deferred()
@ -183,3 +172,11 @@ except ImportError:
os.chmod(path, stat.S_IWRITE | stat.S_IEXEC | stat.S_IREAD)
make_readonly = _make_readonly
make_accessible = _make_accessible
__all__ = [
"make_readonly", "make_accessible", "TestMixin", "ShouldFailMixin",
"StallMixin", "skip_if_cannot_represent_argv", "run_cli", "parse_cli",
"DevNullDictionary", "insecurerandstr", "flip_bit", "flip_one_bit",
"SignalMixin", "skip_if_cannot_represent_filename", "ReallyEqualMixin"
]

View File

@ -1,4 +1,14 @@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2, PY3
if PY2:
# We don't import str because omg way too ambiguous in this context.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
from past.builtins import unicode
lumiere_nfc = u"lumi\u00E8re"
Artonwall_nfc = u"\u00C4rtonwall.mp3"
@ -43,8 +53,10 @@ if __name__ == "__main__":
for fname in TEST_FILENAMES:
open(os.path.join(tmpdir, fname), 'w').close()
# Use Unicode API under Windows or MacOS X
if sys.platform in ('win32', 'darwin'):
# On Python 2, listing directories returns unicode under Windows or
# MacOS X if the input is unicode. On Python 3, it always returns
# Unicode.
if PY2 and sys.platform in ('win32', 'darwin'):
dirlist = os.listdir(unicode(tmpdir))
else:
dirlist = os.listdir(tmpdir)
@ -59,20 +71,22 @@ if __name__ == "__main__":
import os, sys, locale
from unittest import skipIf
from twisted.trial import unittest
from twisted.python.filepath import FilePath
from allmydata.test.common_util import ReallyEqualMixin
from allmydata.test.common_py3 import (
ReallyEqualMixin, skip_if_cannot_represent_filename,
)
from allmydata.util import encodingutil, fileutil
from allmydata.util.encodingutil import argv_to_unicode, unicode_to_url, \
unicode_to_output, quote_output, quote_path, quote_local_unicode_path, \
quote_filepath, unicode_platform, listdir_unicode, FilenameEncodingError, \
get_io_encoding, get_filesystem_encoding, to_str, from_utf8_or_none, _reload, \
to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from
from allmydata.dirnode import normalize
from .common_util import skip_if_cannot_represent_filename
to_filepath, extend_filepath, unicode_from_filepath, unicode_segments_from, \
unicode_to_argv
from twisted.python import usage
@ -90,7 +104,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):
mock_stdout.encoding = 'cp65001'
_reload()
self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
self.assertEqual(get_io_encoding(), 'utf-8')
mock_stdout.encoding = 'koi8-r'
expected = sys.platform == "win32" and 'utf-8' or 'koi8-r'
@ -122,7 +136,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):
preferredencoding = None
_reload()
self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
self.assertEqual(get_io_encoding(), 'utf-8')
def test_argv_to_unicode(self):
encodingutil.io_encoding = 'utf-8'
@ -150,6 +164,7 @@ class EncodingUtilErrors(ReallyEqualMixin, unittest.TestCase):
# The following tests apply only to platforms that don't store filenames as
# Unicode entities on the filesystem.
class EncodingUtilNonUnicodePlatform(unittest.TestCase):
@skipIf(PY3, "Python 3 is always Unicode, regardless of OS.")
def setUp(self):
# Mock sys.platform because unicode_platform() uses it
self.original_platform = sys.platform
@ -211,7 +226,7 @@ class EncodingUtil(ReallyEqualMixin):
self.failUnlessReallyEqual(argv_to_unicode(argv), argu)
def test_unicode_to_url(self):
self.failUnless(unicode_to_url(lumiere_nfc), "lumi\xc3\xa8re")
self.failUnless(unicode_to_url(lumiere_nfc), b"lumi\xc3\xa8re")
def test_unicode_to_output(self):
if 'argv' not in dir(self):
@ -224,7 +239,18 @@ class EncodingUtil(ReallyEqualMixin):
_reload()
self.failUnlessReallyEqual(unicode_to_output(lumiere_nfc), self.argv)
def test_unicode_platform(self):
@skipIf(PY3, "Python 2 only.")
def test_unicode_to_argv_py2(self):
"""unicode_to_argv() converts to bytes on Python 2."""
self.assertEqual(unicode_to_argv("abc"), u"abc".encode(self.io_encoding))
@skipIf(PY2, "Python 3 only.")
def test_unicode_to_argv_py3(self):
"""unicode_to_argv() is noop on Python 3."""
self.assertEqual(unicode_to_argv("abc"), "abc")
@skipIf(PY3, "Python 3 only.")
def test_unicode_platform_py2(self):
matrix = {
'linux2': False,
'linux3': False,
@ -236,6 +262,11 @@ class EncodingUtil(ReallyEqualMixin):
_reload()
self.failUnlessReallyEqual(unicode_platform(), matrix[self.platform])
@skipIf(PY2, "Python 3 isn't Python 2.")
def test_unicode_platform_py3(self):
_reload()
self.failUnlessReallyEqual(unicode_platform(), True)
def test_listdir_unicode(self):
if 'dirlist' not in dir(self):
return
@ -248,7 +279,14 @@ class EncodingUtil(ReallyEqualMixin):
% (self.filesystem_encoding,))
def call_os_listdir(path):
return self.dirlist
if PY2:
return self.dirlist
else:
# Python 3 always lists unicode filenames:
return [d.decode(self.filesystem_encoding) if isinstance(d, bytes)
else d
for d in self.dirlist]
self.patch(os, 'listdir', call_os_listdir)
def call_sys_getfilesystemencoding():
@ -258,7 +296,7 @@ class EncodingUtil(ReallyEqualMixin):
_reload()
filenames = listdir_unicode(u'/dummy')
self.failUnlessEqual(set([normalize(fname) for fname in filenames]),
self.failUnlessEqual(set([encodingutil.normalize(fname) for fname in filenames]),
set(TEST_FILENAMES))
@ -278,12 +316,16 @@ class StdlibUnicode(unittest.TestCase):
fn = lumiere_nfc + u'/' + lumiere_nfc + u'.txt'
open(fn, 'wb').close()
self.failUnless(os.path.exists(fn))
self.failUnless(os.path.exists(os.path.join(os.getcwdu(), fn)))
if PY2:
getcwdu = os.getcwdu
else:
getcwdu = os.getcwd
self.failUnless(os.path.exists(os.path.join(getcwdu(), fn)))
filenames = listdir_unicode(lumiere_nfc)
# We only require that the listing includes a filename that is canonically equivalent
# to lumiere_nfc (on Mac OS X, it will be the NFD equivalent).
self.failUnlessIn(lumiere_nfc + ".txt", set([normalize(fname) for fname in filenames]))
self.failUnlessIn(lumiere_nfc + u".txt", set([encodingutil.normalize(fname) for fname in filenames]))
expanded = fileutil.expanduser(u"~/" + lumiere_nfc)
self.failIfIn(u"~", expanded)
@ -314,59 +356,70 @@ class QuoteOutput(ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(quote_output(inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
if out[0:2] == 'b"':
pass
elif isinstance(inp, str):
self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quote_newlines=quote_newlines), out)
self.failUnlessReallyEqual(quote_output(unicode(inp), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
elif isinstance(inp, bytes):
try:
unicode_inp = inp.decode("utf-8")
except UnicodeDecodeError:
# Some things decode on Python 2, but not Python 3...
return
self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quote_newlines=quote_newlines), out)
self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
else:
self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quote_newlines=quote_newlines), out)
self.failUnlessReallyEqual(quote_output(inp.encode('utf-8'), encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
try:
bytes_inp = inp.encode('utf-8')
except UnicodeEncodeError:
# Some things encode on Python 2, but not Python 3, e.g.
# surrogates like u"\uDC00\uD800"...
return
self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quote_newlines=quote_newlines), out)
self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
def _test_quote_output_all(self, enc):
def check(inp, out, optional_quotes=False, quote_newlines=None):
self._check(inp, out, enc, optional_quotes, quote_newlines)
# optional single quotes
check("foo", "'foo'", True)
check("\\", "'\\'", True)
check("$\"`", "'$\"`'", True)
check("\n", "'\n'", True, quote_newlines=False)
check(b"foo", b"'foo'", True)
check(b"\\", b"'\\'", True)
check(b"$\"`", b"'$\"`'", True)
check(b"\n", b"'\n'", True, quote_newlines=False)
# mandatory single quotes
check("\"", "'\"'")
check(b"\"", b"'\"'")
# double quotes
check("'", "\"'\"")
check("\n", "\"\\x0a\"", quote_newlines=True)
check("\x00", "\"\\x00\"")
check(b"'", b"\"'\"")
check(b"\n", b"\"\\x0a\"", quote_newlines=True)
check(b"\x00", b"\"\\x00\"")
# invalid Unicode and astral planes
check(u"\uFDD0\uFDEF", "\"\\ufdd0\\ufdef\"")
check(u"\uDC00\uD800", "\"\\udc00\\ud800\"")
check(u"\uDC00\uD800\uDC00", "\"\\udc00\\U00010000\"")
check(u"\uD800\uDC00", "\"\\U00010000\"")
check(u"\uD800\uDC01", "\"\\U00010001\"")
check(u"\uD801\uDC00", "\"\\U00010400\"")
check(u"\uDBFF\uDFFF", "\"\\U0010ffff\"")
check(u"'\uDBFF\uDFFF", "\"'\\U0010ffff\"")
check(u"\"\uDBFF\uDFFF", "\"\\\"\\U0010ffff\"")
check(u"\uFDD0\uFDEF", b"\"\\ufdd0\\ufdef\"")
check(u"\uDC00\uD800", b"\"\\udc00\\ud800\"")
check(u"\uDC00\uD800\uDC00", b"\"\\udc00\\U00010000\"")
check(u"\uD800\uDC00", b"\"\\U00010000\"")
check(u"\uD800\uDC01", b"\"\\U00010001\"")
check(u"\uD801\uDC00", b"\"\\U00010400\"")
check(u"\uDBFF\uDFFF", b"\"\\U0010ffff\"")
check(u"'\uDBFF\uDFFF", b"\"'\\U0010ffff\"")
check(u"\"\uDBFF\uDFFF", b"\"\\\"\\U0010ffff\"")
# invalid UTF-8
check("\xFF", "b\"\\xff\"")
check("\x00\"$\\`\x80\xFF", "b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")
check(b"\xFF", b"b\"\\xff\"")
check(b"\x00\"$\\`\x80\xFF", b"b\"\\x00\\\"\\$\\\\\\`\\x80\\xff\"")
def test_quote_output_ascii(self, enc='ascii'):
def check(inp, out, optional_quotes=False, quote_newlines=None):
self._check(inp, out, enc, optional_quotes, quote_newlines)
self._test_quote_output_all(enc)
check(u"\u00D7", "\"\\xd7\"")
check(u"'\u00D7", "\"'\\xd7\"")
check(u"\"\u00D7", "\"\\\"\\xd7\"")
check(u"\u2621", "\"\\u2621\"")
check(u"'\u2621", "\"'\\u2621\"")
check(u"\"\u2621", "\"\\\"\\u2621\"")
check(u"\n", "'\n'", True, quote_newlines=False)
check(u"\n", "\"\\x0a\"", quote_newlines=True)
check(u"\u00D7", b"\"\\xd7\"")
check(u"'\u00D7", b"\"'\\xd7\"")
check(u"\"\u00D7", b"\"\\\"\\xd7\"")
check(u"\u2621", b"\"\\u2621\"")
check(u"'\u2621", b"\"'\\u2621\"")
check(u"\"\u2621", b"\"\\\"\\u2621\"")
check(u"\n", b"'\n'", True, quote_newlines=False)
check(u"\n", b"\"\\x0a\"", quote_newlines=True)
def test_quote_output_latin1(self, enc='latin1'):
def check(inp, out, optional_quotes=False, quote_newlines=None):
@ -411,43 +464,43 @@ def win32_other(win32, other):
class QuotePaths(ReallyEqualMixin, unittest.TestCase):
def test_quote_path(self):
self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), "'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), "'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=False), "foo/bar")
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), '"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), '"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), '"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'bar']), b"'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=True), b"'foo/bar'")
self.failUnlessReallyEqual(quote_path([u'foo', u'bar'], quotemarks=False), b"foo/bar")
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar']), b'"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=True), b'"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_path([u'foo', u'\nbar'], quotemarks=False), b'"foo/\\x0abar"')
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo"),
win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=True),
win32_other("'C:\\foo'", "'\\\\?\\C:\\foo'"))
win32_other(b"'C:\\foo'", b"'\\\\?\\C:\\foo'"))
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\C:\\foo", quotemarks=False),
win32_other("C:\\foo", "\\\\?\\C:\\foo"))
win32_other(b"C:\\foo", b"\\\\?\\C:\\foo"))
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar"),
win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=True),
win32_other("'\\\\foo\\bar'", "'\\\\?\\UNC\\foo\\bar'"))
win32_other(b"'\\\\foo\\bar'", b"'\\\\?\\UNC\\foo\\bar'"))
self.failUnlessReallyEqual(quote_local_unicode_path(u"\\\\?\\UNC\\foo\\bar", quotemarks=False),
win32_other("\\\\foo\\bar", "\\\\?\\UNC\\foo\\bar"))
win32_other(b"\\\\foo\\bar", b"\\\\?\\UNC\\foo\\bar"))
def test_quote_filepath(self):
foo_bar_fp = FilePath(win32_other(u'C:\\foo\\bar', u'/foo/bar'))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp),
win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=True),
win32_other("'C:\\foo\\bar'", "'/foo/bar'"))
win32_other(b"'C:\\foo\\bar'", b"'/foo/bar'"))
self.failUnlessReallyEqual(quote_filepath(foo_bar_fp, quotemarks=False),
win32_other("C:\\foo\\bar", "/foo/bar"))
win32_other(b"C:\\foo\\bar", b"/foo/bar"))
if sys.platform == "win32":
foo_longfp = FilePath(u'\\\\?\\C:\\foo')
self.failUnlessReallyEqual(quote_filepath(foo_longfp),
"'C:\\foo'")
b"'C:\\foo'")
self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=True),
"'C:\\foo'")
b"'C:\\foo'")
self.failUnlessReallyEqual(quote_filepath(foo_longfp, quotemarks=False),
"C:\\foo")
b"C:\\foo")
class FilePaths(ReallyEqualMixin, unittest.TestCase):
@ -501,23 +554,23 @@ class FilePaths(ReallyEqualMixin, unittest.TestCase):
class UbuntuKarmicUTF8(EncodingUtil, unittest.TestCase):
uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
argv = 'lumi\xc3\xa8re'
argv = b'lumi\xc3\xa8re'
platform = 'linux2'
filesystem_encoding = 'UTF-8'
io_encoding = 'UTF-8'
dirlist = ['test_file', '\xc3\x84rtonwall.mp3', 'Blah blah.txt']
dirlist = [b'test_file', b'\xc3\x84rtonwall.mp3', b'Blah blah.txt']
class UbuntuKarmicLatin1(EncodingUtil, unittest.TestCase):
uname = 'Linux korn 2.6.31-14-generic #48-Ubuntu SMP Fri Oct 16 14:05:01 UTC 2009 x86_64'
argv = 'lumi\xe8re'
argv = b'lumi\xe8re'
platform = 'linux2'
filesystem_encoding = 'ISO-8859-1'
io_encoding = 'ISO-8859-1'
dirlist = ['test_file', 'Blah blah.txt', '\xc4rtonwall.mp3']
dirlist = [b'test_file', b'Blah blah.txt', b'\xc4rtonwall.mp3']
class Windows(EncodingUtil, unittest.TestCase):
uname = 'Windows XP 5.1.2600 x86 x86 Family 15 Model 75 Step ping 2, AuthenticAMD'
argv = 'lumi\xc3\xa8re'
argv = b'lumi\xc3\xa8re'
platform = 'win32'
filesystem_encoding = 'mbcs'
io_encoding = 'utf-8'
@ -525,7 +578,7 @@ class Windows(EncodingUtil, unittest.TestCase):
class MacOSXLeopard(EncodingUtil, unittest.TestCase):
uname = 'Darwin g5.local 9.8.0 Darwin Kernel Version 9.8.0: Wed Jul 15 16:57:01 PDT 2009; root:xnu-1228.15.4~1/RELEASE_PPC Power Macintosh powerpc'
output = 'lumi\xc3\xa8re'
output = b'lumi\xc3\xa8re'
platform = 'darwin'
filesystem_encoding = 'utf-8'
io_encoding = 'UTF-8'
@ -548,14 +601,14 @@ class OpenBSD(EncodingUtil, unittest.TestCase):
class TestToFromStr(ReallyEqualMixin, unittest.TestCase):
def test_to_str(self):
self.failUnlessReallyEqual(to_str("foo"), "foo")
self.failUnlessReallyEqual(to_str("lumi\xc3\xa8re"), "lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str("\xFF"), "\xFF") # passes through invalid UTF-8 -- is this what we want?
self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), "lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str(b"foo"), b"foo")
self.failUnlessReallyEqual(to_str(b"lumi\xc3\xa8re"), b"lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str(b"\xFF"), b"\xFF") # passes through invalid UTF-8 -- is this what we want?
self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), b"lumi\xc3\xa8re")
self.failUnlessReallyEqual(to_str(None), None)
def test_from_utf8_or_none(self):
self.failUnlessRaises(AssertionError, from_utf8_or_none, u"foo")
self.failUnlessReallyEqual(from_utf8_or_none("lumi\xc3\xa8re"), u"lumi\u00E8re")
self.failUnlessReallyEqual(from_utf8_or_none(b"lumi\xc3\xa8re"), u"lumi\u00E8re")
self.failUnlessReallyEqual(from_utf8_or_none(None), None)
self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, "\xFF")
self.failUnlessRaises(UnicodeDecodeError, from_utf8_or_none, b"\xFF")

View File

@ -1,5 +1,15 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# We omit dict, just in case newdict breaks things.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from hypothesis import given
from hypothesis.strategies import text, sets

View File

@ -1,5 +1,15 @@
from __future__ import print_function
"""
Ported to Python3.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import six
import os, time, sys
import yaml
@ -19,7 +29,7 @@ if six.PY3:
class IDLib(unittest.TestCase):
def test_nodeid_b2a(self):
self.failUnlessEqual(idlib.nodeid_b2a("\x00"*20), "a"*32)
self.failUnlessEqual(idlib.nodeid_b2a(b"\x00"*20), "a"*32)
class MyList(list):
@ -85,10 +95,10 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
basedir = "util/FileUtil/test_write_atomically"
fileutil.make_dirs(basedir)
fn = os.path.join(basedir, "here")
fileutil.write_atomically(fn, "one")
self.failUnlessEqual(fileutil.read(fn), "one")
fileutil.write_atomically(fn, "two", mode="") # non-binary
self.failUnlessEqual(fileutil.read(fn), "two")
fileutil.write_atomically(fn, b"one", "b")
self.failUnlessEqual(fileutil.read(fn), b"one")
fileutil.write_atomically(fn, u"two", mode="") # non-binary
self.failUnlessEqual(fileutil.read(fn), b"two")
def test_rename(self):
basedir = "util/FileUtil/test_rename"
@ -111,20 +121,20 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)
# when only dest exists
fileutil.write(dest_path, "dest")
fileutil.write(dest_path, b"dest")
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)
self.failUnlessEqual(fileutil.read(dest_path), "dest")
self.failUnlessEqual(fileutil.read(dest_path), b"dest")
# when both exist
fileutil.write(source_path, "source")
fileutil.write(source_path, b"source")
self.failUnlessRaises(OSError, fileutil.rename_no_overwrite, source_path, dest_path)
self.failUnlessEqual(fileutil.read(source_path), "source")
self.failUnlessEqual(fileutil.read(dest_path), "dest")
self.failUnlessEqual(fileutil.read(source_path), b"source")
self.failUnlessEqual(fileutil.read(dest_path), b"dest")
# when only source exists
os.remove(dest_path)
fileutil.rename_no_overwrite(source_path, dest_path)
self.failUnlessEqual(fileutil.read(dest_path), "source")
self.failUnlessEqual(fileutil.read(dest_path), b"source")
self.failIf(os.path.exists(source_path))
def test_replace_file(self):
@ -138,21 +148,21 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)
# when only replaced exists
fileutil.write(replaced_path, "foo")
fileutil.write(replaced_path, b"foo")
self.failUnlessRaises(fileutil.ConflictError, fileutil.replace_file, replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "foo")
self.failUnlessEqual(fileutil.read(replaced_path), b"foo")
# when both replaced and replacement exist
fileutil.write(replacement_path, "bar")
fileutil.write(replacement_path, b"bar")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failUnlessEqual(fileutil.read(replaced_path), b"bar")
self.failIf(os.path.exists(replacement_path))
# when only replacement exists
os.remove(replaced_path)
fileutil.write(replacement_path, "bar")
fileutil.write(replacement_path, b"bar")
fileutil.replace_file(replaced_path, replacement_path)
self.failUnlessEqual(fileutil.read(replaced_path), "bar")
self.failUnlessEqual(fileutil.read(replaced_path), b"bar")
self.failIf(os.path.exists(replacement_path))
def test_du(self):
@ -170,13 +180,15 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
self.failUnlessEqual(10+11+12+13, used)
def test_abspath_expanduser_unicode(self):
self.failUnlessRaises(AssertionError, fileutil.abspath_expanduser_unicode, "bytestring")
self.failUnlessRaises(AssertionError, fileutil.abspath_expanduser_unicode, b"bytestring")
saved_cwd = os.path.normpath(os.getcwdu())
saved_cwd = os.path.normpath(os.getcwd())
if PY2:
saved_cwd = saved_cwd.decode("utf8")
abspath_cwd = fileutil.abspath_expanduser_unicode(u".")
abspath_cwd_notlong = fileutil.abspath_expanduser_unicode(u".", long_path=False)
self.failUnless(isinstance(saved_cwd, unicode), saved_cwd)
self.failUnless(isinstance(abspath_cwd, unicode), abspath_cwd)
self.failUnless(isinstance(saved_cwd, str), saved_cwd)
self.failUnless(isinstance(abspath_cwd, str), abspath_cwd)
if sys.platform == "win32":
self.failUnlessReallyEqual(abspath_cwd, fileutil.to_windows_long_path(saved_cwd))
else:
@ -237,10 +249,10 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
os.chdir(cwd)
for upath in (u'', u'fuu', u'f\xf9\xf9', u'/fuu', u'U:\\', u'~'):
uabspath = fileutil.abspath_expanduser_unicode(upath)
self.failUnless(isinstance(uabspath, unicode), uabspath)
self.failUnless(isinstance(uabspath, str), uabspath)
uabspath_notlong = fileutil.abspath_expanduser_unicode(upath, long_path=False)
self.failUnless(isinstance(uabspath_notlong, unicode), uabspath_notlong)
self.failUnless(isinstance(uabspath_notlong, str), uabspath_notlong)
finally:
os.chdir(saved_cwd)
@ -293,9 +305,9 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
fileutil.remove(long_path)
self.addCleanup(_cleanup)
fileutil.write(long_path, "test")
fileutil.write(long_path, b"test")
self.failUnless(os.path.exists(long_path))
self.failUnlessEqual(fileutil.read(long_path), "test")
self.failUnlessEqual(fileutil.read(long_path), b"test")
_cleanup()
self.failIf(os.path.exists(long_path))
@ -353,7 +365,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
# create a file
f = os.path.join(basedir, "1.txt")
fileutil.write(f, "a"*10)
fileutil.write(f, b"a"*10)
fileinfo = fileutil.get_pathinfo(f)
self.failUnlessTrue(fileinfo.isfile)
self.failUnlessTrue(fileinfo.exists)
@ -381,7 +393,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
fileutil.make_dirs(basedir)
f = os.path.join(basedir, "1.txt")
fileutil.write(f, "a"*10)
fileutil.write(f, b"a"*10)
# create a symlink pointing to 1.txt
slname = os.path.join(basedir, "linkto1.txt")
@ -394,7 +406,7 @@ class FileUtil(ReallyEqualMixin, unittest.TestCase):
def test_encrypted_tempfile(self):
f = EncryptedTemporaryFile()
f.write("foobar")
f.write(b"foobar")
f.close()
@ -409,7 +421,7 @@ class PollMixinTests(unittest.TestCase):
def test_PollMixin_False_then_True(self):
i = iter([False, True])
d = self.pm.poll(check_f=i.next,
d = self.pm.poll(check_f=lambda: next(i),
pollinterval=0.1)
return d
@ -454,6 +466,6 @@ class YAML(unittest.TestCase):
def test_convert(self):
data = yaml.safe_dump(["str", u"unicode", u"\u1234nicode"])
back = yamlutil.safe_load(data)
self.failUnlessEqual(type(back[0]), unicode)
self.failUnlessEqual(type(back[1]), unicode)
self.failUnlessEqual(type(back[2]), unicode)
self.assertIsInstance(back[0], str)
self.assertIsInstance(back[1], str)
self.assertIsInstance(back[2], str)

View File

@ -22,6 +22,7 @@ PORTED_MODULES = [
"allmydata.crypto.rsa",
"allmydata.crypto.util",
"allmydata.hashtree",
"allmydata.immutable.happiness_upload",
"allmydata.test.common_py3",
"allmydata.util._python3",
"allmydata.util.abbreviate",
@ -31,7 +32,9 @@ PORTED_MODULES = [
"allmydata.util.configutil",
"allmydata.util.connection_status",
"allmydata.util.deferredutil",
"allmydata.util.fileutil",
"allmydata.util.dictutil",
"allmydata.util.encodingutil",
"allmydata.util.gcutil",
"allmydata.util.hashutil",
"allmydata.util.humanreadable",
@ -57,6 +60,8 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_crypto",
"allmydata.test.test_deferredutil",
"allmydata.test.test_dictutil",
"allmydata.test.test_encodingutil",
"allmydata.test.test_happiness",
"allmydata.test.test_hashtree",
"allmydata.test.test_hashutil",
"allmydata.test.test_humanreadable",
@ -69,10 +74,10 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
"allmydata.test.test_time_format",
"allmydata.test.test_util",
"allmydata.test.test_version",
]
if __name__ == '__main__':
from subprocess import check_call
check_call(["trial"] + PORTED_TEST_MODULES)

View File

@ -1,9 +1,26 @@
"""
Functions used to convert inputs from whatever encoding used in the system to
unicode and back.
Ported to Python 3.
Once Python 2 support is dropped, most of this module will obsolete, since
Unicode is the default everywhere in Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2, PY3, native_str
if PY2:
# We omit str() because that seems too tricky to get right.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min # noqa: F401
from past.builtins import unicode
import sys, os, re, locale
import unicodedata
from allmydata.util.assertutil import precondition, _assert
from twisted.python import usage
@ -62,13 +79,14 @@ def _reload():
check_encoding(io_encoding)
is_unicode_platform = sys.platform in ["win32", "darwin"]
is_unicode_platform = PY3 or sys.platform in ["win32", "darwin"]
# Despite the Unicode-mode FilePath support added to Twisted in
# <https://twistedmatrix.com/trac/ticket/7805>, we can't yet use
# Unicode-mode FilePaths with INotify on non-Windows platforms
# due to <https://twistedmatrix.com/trac/ticket/7928>.
use_unicode_filepath = sys.platform == "win32"
# due to <https://twistedmatrix.com/trac/ticket/7928>. Supposedly
# 7928 is fixed, though...
use_unicode_filepath = PY3 or sys.platform == "win32"
_reload()
@ -89,7 +107,10 @@ def argv_to_unicode(s):
"""
Decode given argv element to unicode. If this fails, raise a UsageError.
"""
precondition(isinstance(s, str), s)
if isinstance(s, unicode):
return s
precondition(isinstance(s, bytes), s)
try:
return unicode(s, io_encoding)
@ -114,18 +135,22 @@ def unicode_to_argv(s, mangle=False):
If the argument is to be passed to a different process, then the 'mangle' argument
should be true; on Windows, this uses a mangled encoding that will be reversed by
code in runner.py.
On Python 3, just return the string unchanged, since argv is unicode.
"""
precondition(isinstance(s, unicode), s)
if PY3:
return s
if mangle and sys.platform == "win32":
# This must be the same as 'mangle' in bin/tahoe-script.template.
return str(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s))
return bytes(re.sub(u'[^\\x20-\\x7F]', lambda m: u'\x7F%x;' % (ord(m.group(0)),), s), io_encoding)
else:
return s.encode(io_encoding)
def unicode_to_url(s):
"""
Encode an unicode object used in an URL.
Encode an unicode object used in an URL to bytes.
"""
# According to RFC 2718, non-ascii characters in URLs must be UTF-8 encoded.
@ -134,19 +159,19 @@ def unicode_to_url(s):
#precondition(isinstance(s, unicode), s)
#return s.encode('utf-8')
def to_str(s):
if s is None or isinstance(s, str):
def to_str(s): # TODO rename to to_bytes
if s is None or isinstance(s, bytes):
return s
return s.encode('utf-8')
def from_utf8_or_none(s):
precondition(isinstance(s, (NoneType, str)), s)
precondition(isinstance(s, bytes) or s is None, s)
if s is None:
return s
return s.decode('utf-8')
PRINTABLE_ASCII = re.compile(r'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(r'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)
PRINTABLE_ASCII = re.compile(br'^[\n\r\x20-\x7E]*$', re.DOTALL)
PRINTABLE_8BIT = re.compile(br'^[\n\r\x20-\x7E\x80-\xFF]*$', re.DOTALL)
def is_printable_ascii(s):
return PRINTABLE_ASCII.search(s) is not None
@ -160,14 +185,14 @@ def unicode_to_output(s):
try:
out = s.encode(io_encoding)
except (UnicodeEncodeError, UnicodeDecodeError):
raise UnicodeEncodeError(io_encoding, s, 0, 0,
"A string could not be encoded as %s for output to the terminal:\n%r" %
(io_encoding, repr(s)))
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string could not be encoded as %s for output to the terminal:\n%r" %
(io_encoding, repr(s))))
if PRINTABLE_8BIT.search(out) is None:
raise UnicodeEncodeError(io_encoding, s, 0, 0,
"A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
(io_encoding, repr(s)))
raise UnicodeEncodeError(native_str(io_encoding), s, 0, 0,
native_str("A string encoded as %s for output to the terminal contained unsafe bytes:\n%r" %
(io_encoding, repr(s))))
return out
@ -188,14 +213,17 @@ def _unicode_escape(m, quote_newlines):
else:
return u'\\x%02x' % (codepoint,)
def _str_escape(m, quote_newlines):
def _str_escape(m, quote_newlines): # TODO rename to _bytes_escape
"""
Takes a re match on bytes, the result is escaped bytes of group(0).
"""
c = m.group(0)
if c == '"' or c == '$' or c == '`' or c == '\\':
return '\\' + c
elif c == '\n' and not quote_newlines:
if c == b'"' or c == b'$' or c == b'`' or c == b'\\':
return b'\\' + c
elif c == b'\n' and not quote_newlines:
return c
else:
return '\\x%02x' % (ord(c),)
return b'\\x%02x' % (ord(c),)
MUST_DOUBLE_QUOTE_NL = re.compile(u'[^\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
MUST_DOUBLE_QUOTE = re.compile(u'[^\\n\\x20-\\x26\\x28-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]', re.DOTALL)
@ -205,7 +233,7 @@ ESCAPABLE_UNICODE = re.compile(u'([\uD800-\uDBFF][\uDC00-\uDFFF])|' # valid sur
u'[^ !#\\x25-\\x5B\\x5D-\\x5F\\x61-\\x7E\u00A0-\uD7FF\uE000-\uFDCF\uFDF0-\uFFFC]',
re.DOTALL)
ESCAPABLE_8BIT = re.compile( r'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)
ESCAPABLE_8BIT = re.compile( br'[^ !#\x25-\x5B\x5D-\x5F\x61-\x7E]', re.DOTALL)
def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
"""
@ -221,32 +249,32 @@ def quote_output(s, quotemarks=True, quote_newlines=None, encoding=None):
If not explicitly given, quote_newlines is True when quotemarks is True.
"""
precondition(isinstance(s, (str, unicode)), s)
precondition(isinstance(s, (bytes, unicode)), s)
if quote_newlines is None:
quote_newlines = quotemarks
if isinstance(s, str):
if isinstance(s, bytes):
try:
s = s.decode('utf-8')
except UnicodeDecodeError:
return 'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _str_escape(m, quote_newlines), s),)
return b'b"%s"' % (ESCAPABLE_8BIT.sub(lambda m: _str_escape(m, quote_newlines), s),)
must_double_quote = quote_newlines and MUST_DOUBLE_QUOTE_NL or MUST_DOUBLE_QUOTE
if must_double_quote.search(s) is None:
try:
out = s.encode(encoding or io_encoding)
if quotemarks or out.startswith('"'):
return "'%s'" % (out,)
if quotemarks or out.startswith(b'"'):
return b"'%s'" % (out,)
else:
return out
except (UnicodeDecodeError, UnicodeEncodeError):
pass
escaped = ESCAPABLE_UNICODE.sub(lambda m: _unicode_escape(m, quote_newlines), s)
return '"%s"' % (escaped.encode(encoding or io_encoding, 'backslashreplace'),)
return b'"%s"' % (escaped.encode(encoding or io_encoding, 'backslashreplace'),)
def quote_path(path, quotemarks=True):
return quote_output("/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
return quote_output(b"/".join(map(to_str, path)), quotemarks=quotemarks, quote_newlines=True)
def quote_local_unicode_path(path, quotemarks=True):
precondition(isinstance(path, unicode), path)
@ -275,7 +303,7 @@ def extend_filepath(fp, segments):
return fp
def to_filepath(path):
precondition(isinstance(path, unicode if use_unicode_filepath else basestring),
precondition(isinstance(path, unicode if use_unicode_filepath else (bytes, unicode)),
path=path)
if isinstance(path, unicode) and not use_unicode_filepath:
@ -290,7 +318,7 @@ def to_filepath(path):
return FilePath(path)
def _decode(s):
precondition(isinstance(s, basestring), s=s)
precondition(isinstance(s, (bytes, unicode)), s=s)
if isinstance(s, bytes):
return s.decode(filesystem_encoding)
@ -356,3 +384,9 @@ def listdir_unicode(path):
def listdir_filepath(fp):
return listdir_unicode(unicode_from_filepath(fp))
# 'x' at the end of a variable name indicates that it holds a Unicode string that may not
# be NFC-normalized.
def normalize(namex):
return unicodedata.normalize('NFC', namex)

View File

@ -1,9 +1,19 @@
from __future__ import print_function
"""
Ported to Python3.
Futz with files like a pro.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# open is not here because we want to use native strings on Py2
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import sys, os, stat, tempfile, time, binascii
import six
from collections import namedtuple
@ -253,6 +263,9 @@ def move_into_place(source, dest):
os.rename(source, dest)
def write_atomically(target, contents, mode="b"):
assert (
isinstance(contents, bytes) and "b" in mode or
isinstance(contents, str) and "t" in mode or mode == ""), (type(contents), mode)
with open(target+".tmp", "w"+mode) as f:
f.write(contents)
move_into_place(target+".tmp", target)
@ -277,7 +290,7 @@ def put_file(path, inf):
outf.write(data)
def precondition_abspath(path):
if not isinstance(path, unicode):
if not isinstance(path, str):
raise AssertionError("an abspath must be a Unicode string")
if sys.platform == "win32":
@ -309,7 +322,7 @@ def abspath_expanduser_unicode(path, base=None, long_path=True):
abspath_expanduser_unicode.
On Windows, the result will be a long path unless long_path is given as False.
"""
if not isinstance(path, unicode):
if not isinstance(path, str):
raise AssertionError("paths must be Unicode strings")
if base is not None and long_path:
precondition_abspath(base)
@ -330,7 +343,10 @@ def abspath_expanduser_unicode(path, base=None, long_path=True):
if not os.path.isabs(path):
if base is None:
path = os.path.join(os.getcwdu(), path)
cwd = os.getcwd()
if PY2:
cwd = cwd.decode('utf8')
path = os.path.join(cwd, path)
else:
path = os.path.join(base, path)
@ -415,7 +431,7 @@ ERROR_ENVVAR_NOT_FOUND = 203
def windows_getenv(name):
# Based on <http://stackoverflow.com/questions/2608200/problems-with-umlauts-in-python-appdata-environvent-variable/2608368#2608368>,
# with improved error handling. Returns None if there is no enivronment variable of the given name.
if not isinstance(name, unicode):
if not isinstance(name, str):
raise AssertionError("name must be Unicode")
n = GetEnvironmentVariableW(name, None, 0)

View File

@ -20,10 +20,9 @@ from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from functools import reduce
from allmydata.util.mathutil import round_sigfigs
import math
from functools import reduce
import sys
def pr_file_loss(p_list, k):

View File

@ -50,7 +50,7 @@ commands =
[testenv:py36]
# On macOS, git inside of ratchet.sh needs $HOME.
passenv = HOME
passenv = {[testenv]passenv} HOME
commands = {toxinidir}/misc/python3/ratchet.sh
[testenv:integration]