Merge remote-tracking branch 'origin/master' into 3376.encodingutil-python-3

Itamar Turner-Trauring 2020-08-12 10:15:41 -04:00
commit 3c5f4f7ddf
18 changed files with 156 additions and 174 deletions

View File

@@ -1,5 +1,6 @@
 ARG TAG
 FROM centos:${TAG}
+ARG PYTHON_VERSION

 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
     git \
     sudo \
     make automake gcc gcc-c++ \
-    python2 \
-    python2-devel \
+    python${PYTHON_VERSION} \
+    python${PYTHON_VERSION}-devel \
    libffi-devel \
    openssl-devel \
    libyaml \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@@ -1,5 +1,6 @@
 ARG TAG
 FROM debian:${TAG}
+ARG PYTHON_VERSION

 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -8,22 +9,22 @@ ENV BUILD_SRC_ROOT /tmp/project
 RUN apt-get --quiet update && \
     apt-get --quiet --yes install \
     git \
     lsb-release \
     sudo \
     build-essential \
-    python2.7 \
-    python2.7-dev \
+    python${PYTHON_VERSION} \
+    python${PYTHON_VERSION}-dev \
     libffi-dev \
     libssl-dev \
     libyaml-dev \
     virtualenv

 # Get the project source. This is better than it seems. CircleCI will
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

 # Only the integration tests currently need this but it doesn't hurt to always
 # have it present and it's simpler than building a whole extra image just for

View File

@@ -1,5 +1,6 @@
 ARG TAG
 FROM fedora:${TAG}
+ARG PYTHON_VERSION

 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
     git \
     sudo \
     make automake gcc gcc-c++ \
-    python \
-    python-devel \
+    python${PYTHON_VERSION} \
+    python${PYTHON_VERSION}-devel \
     libffi-devel \
     openssl-devel \
     libyaml-devel \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@@ -1,49 +0,0 @@
-ARG TAG
-FROM vbatts/slackware:${TAG}
-
-ENV WHEELHOUSE_PATH /tmp/wheelhouse
-ENV VIRTUALENV_PATH /tmp/venv
-# This will get updated by the CircleCI checkout step.
-ENV BUILD_SRC_ROOT /tmp/project
-
-# Be careful with slackpkg. If the package name given doesn't match anything,
-# slackpkg still claims to succeed but you're totally screwed. Slackware
-# updates versions of packaged software so including too much version prefix
-# is a good way to have your install commands suddenly begin not installing
-# anything.
-RUN slackpkg update && \
-    slackpkg install \
-    openssh-7 git-2 \
-    ca-certificates \
-    sudo-1 \
-    make-4 \
-    automake-1 \
-    kernel-headers \
-    glibc-2 \
-    binutils-2 \
-    gcc-5 \
-    gcc-g++-5 \
-    python-2 \
-    libffi-3 \
-    libyaml-0 \
-    sqlite-3 \
-    icu4c-56 \
-    libmpc-1 </dev/null && \
-    slackpkg upgrade \
-    openssl-1 </dev/null
-
-# neither virtualenv nor pip is packaged.
-# do it the hard way.
-# and it is extra hard since it is slackware.
-RUN slackpkg install \
-    cyrus-sasl-2 \
-    curl-7 </dev/null && \
-    curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
-    python get-pip.py && \
-    pip install virtualenv
-
-# Get the project source. This is better than it seems. CircleCI will
-# *update* this checkout on each job run, saving us more time per-job.
-COPY . ${BUILD_SRC_ROOT}
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"

View File

@@ -1,5 +1,6 @@
 ARG TAG
 FROM ubuntu:${TAG}
+ARG PYTHON_VERSION

 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -13,8 +14,8 @@ RUN apt-get --quiet update && \
     apt-get --quiet --yes install \
     sudo \
     build-essential \
-    python2.7 \
-    python2.7-dev \
+    python${PYTHON_VERSION} \
+    python${PYTHON_VERSION}-dev \
     libffi-dev \
     libssl-dev \
     libyaml-dev \
@@ -26,4 +27,4 @@ RUN apt-get --quiet update && \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"

View File

@@ -26,13 +26,14 @@ workflows:
       - "centos-8"
-      - "slackware-14.2"
       - "nixos-19.09"

       # Test against PyPy 2.7
       - "pypy2.7-buster"

+      # Just one Python 3.6 configuration while the port is in-progress.
+      - "python3.6"
+
       # Other assorted tasks and configurations
       - "lint"
       - "pyinstaller"
@@ -72,8 +73,8 @@ workflows:
       - "build-image-fedora-28"
       - "build-image-fedora-29"
       - "build-image-centos-8"
-      - "build-image-slackware-14.2"
       - "build-image-pypy-2.7-buster"
+      - "build-image-python36-ubuntu"

 jobs:
@@ -121,7 +122,7 @@ jobs:
   debian-9: &DEBIAN
     docker:
-      - image: "tahoelafsci/debian:9"
+      - image: "tahoelafsci/debian:9-py2.7"
         user: "nobody"

     environment: &UTF_8_ENVIRONMENT
@@ -198,14 +199,14 @@ jobs:
   debian-8:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/debian:8"
+      - image: "tahoelafsci/debian:8-py2.7"
         user: "nobody"

   pypy2.7-buster:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/pypy:2.7-buster"
+      - image: "tahoelafsci/pypy:buster-py2"
         user: "nobody"

     environment:
@@ -261,17 +262,28 @@ jobs:
   ubuntu-16.04:
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/ubuntu:16.04"
+      - image: "tahoelafsci/ubuntu:16.04-py2.7"
         user: "nobody"

-  ubuntu-18.04:
+  ubuntu-18.04: &UBUNTU_18_04
     <<: *DEBIAN
     docker:
-      - image: "tahoelafsci/ubuntu:18.04"
+      - image: "tahoelafsci/ubuntu:18.04-py2.7"
         user: "nobody"

+  python3.6:
+    <<: *UBUNTU_18_04
+    docker:
+      - image: "tahoelafsci/ubuntu:18.04-py3"
+        user: "nobody"
+
+    environment:
+      <<: *UTF_8_ENVIRONMENT
+      TAHOE_LAFS_TOX_ENVIRONMENT: "py36"
+
   ubuntu-20.04:
     <<: *DEBIAN
     docker:
@@ -281,7 +293,7 @@ jobs:
   centos-8: &RHEL_DERIV
     docker:
-      - image: "tahoelafsci/centos:8"
+      - image: "tahoelafsci/centos:8-py2"
         user: "nobody"

     environment: *UTF_8_ENVIRONMENT
@@ -303,37 +315,17 @@ jobs:
   fedora-28:
     <<: *RHEL_DERIV
     docker:
-      - image: "tahoelafsci/fedora:28"
+      - image: "tahoelafsci/fedora:28-py"
         user: "nobody"

   fedora-29:
     <<: *RHEL_DERIV
     docker:
-      - image: "tahoelafsci/fedora:29"
+      - image: "tahoelafsci/fedora:29-py"
         user: "nobody"

-  slackware-14.2:
-    docker:
-      - image: "tahoelafsci/slackware:14.2"
-        user: "nobody"
-
-    environment: *UTF_8_ENVIRONMENT
-
-    # pip cannot install packages if the working directory is not readable.
-    # We want to run a lot of steps as nobody instead of as root.
-    working_directory: "/tmp/project"
-
-    steps:
-      - "checkout"
-      - run: *SETUP_VIRTUALENV
-      - run: *RUN_TESTS
-      - store_test_results: *STORE_TEST_RESULTS
-      - store_artifacts: *STORE_TEST_LOG
-      - store_artifacts: *STORE_OTHER_ARTIFACTS
-      - run: *SUBMIT_COVERAGE
-
   nixos-19.09:
     docker:
       # Run in a highly Nix-capable environment.
@@ -397,8 +389,9 @@ jobs:
       - image: "docker:17.05.0-ce-git"

     environment:
-      DISTRO: "tahoelafsci/<DISTRO>:foo"
-      TAG: "tahoelafsci/distro:<TAG>"
+      DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
+      TAG: "tahoelafsci/distro:<TAG>-py2"
+      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION>"

     steps:
       - "checkout"
@@ -450,13 +443,14 @@ jobs:
            docker \
                build \
                --build-arg TAG=${TAG} \
-               -t tahoelafsci/${DISTRO}:${TAG} \
+               --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
+               -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
                -f ~/project/.circleci/Dockerfile.${DISTRO} \
                ~/project/

      - run:
          name: "Push image"
          command: |
-           docker push tahoelafsci/${DISTRO}:${TAG}
+           docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}

   build-image-debian-8:
@@ -465,6 +459,7 @@ jobs:
     environment:
       DISTRO: "debian"
       TAG: "8"
+      PYTHON_VERSION: "2.7"

   build-image-debian-9:
@@ -473,6 +468,7 @@ jobs:
     environment:
       DISTRO: "debian"
       TAG: "9"
+      PYTHON_VERSION: "2.7"

   build-image-ubuntu-16.04:
@@ -481,6 +477,7 @@ jobs:
     environment:
       DISTRO: "ubuntu"
       TAG: "16.04"
+      PYTHON_VERSION: "2.7"

   build-image-ubuntu-18.04:
@@ -489,6 +486,16 @@ jobs:
     environment:
       DISTRO: "ubuntu"
       TAG: "18.04"
+      PYTHON_VERSION: "2.7"
+
+  build-image-python36-ubuntu:
+    <<: *BUILD_IMAGE
+    environment:
+      DISTRO: "ubuntu"
+      TAG: "18.04"
+      PYTHON_VERSION: "3"

   build-image-ubuntu-20.04:
@@ -505,6 +512,7 @@ jobs:
     environment:
       DISTRO: "centos"
       TAG: "8"
+      PYTHON_VERSION: "2"

   build-image-fedora-28:
@@ -513,6 +521,8 @@ jobs:
     environment:
       DISTRO: "fedora"
       TAG: "28"
+      # The default on Fedora (this version anyway) is still Python 2.
+      PYTHON_VERSION: ""

   build-image-fedora-29:
@@ -523,17 +533,13 @@ jobs:
       TAG: "29"

-  build-image-slackware-14.2:
-    <<: *BUILD_IMAGE
-    environment:
-      DISTRO: "slackware"
-      TAG: "14.2"
-
   build-image-pypy-2.7-buster:
     <<: *BUILD_IMAGE
     environment:
       DISTRO: "pypy"
-      TAG: "2.7-buster"
+      TAG: "buster"
+      # We only have Python 2 for PyPy right now so there's no support for
+      # setting up PyPy 3 in the image building toolchain. This value is just
+      # for constructing the right Docker image tag.
+      PYTHON_VERSION: "2"
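
To make the renamed image tags above concrete: the build job now produces images named tahoelafsci/<DISTRO>:<TAG>-py<PYTHON_VERSION>, which is why the test jobs reference tags like "tahoelafsci/ubuntu:18.04-py2.7" and why Fedora's empty PYTHON_VERSION yields "tahoelafsci/fedora:28-py". A minimal Python sketch of that naming convention (the helper name is illustrative, not part of the repository):

    # image_tag_sketch.py -- hypothetical helper mirroring the tag convention above
    def image_name(distro, tag, python_version):
        """Build the Docker image name the CI jobs expect."""
        return "tahoelafsci/{}:{}-py{}".format(distro, tag, python_version)

    assert image_name("ubuntu", "18.04", "2.7") == "tahoelafsci/ubuntu:18.04-py2.7"
    assert image_name("centos", "8", "2") == "tahoelafsci/centos:8-py2"
    # Fedora's PYTHON_VERSION is the empty string, matching the "fedora:28-py" tag.
    assert image_name("fedora", "28", "") == "tahoelafsci/fedora:28-py"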

View File

@@ -36,8 +36,9 @@ PIP="${BOOTSTRAP_VENV}/bin/pip"
 # Tell pip where it can find any existing wheels.
 export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"

-# Populate the wheelhouse, if necessary.
-"${PIP}" \
+# Populate the wheelhouse, if necessary. zfec 1.5.3 can only be built with a
+# UTF-8 environment so make sure we have one, at least for this invocation.
+LANG="en_US.UTF-8" "${PIP}" \
     wheel \
     --wheel-dir "${WHEELHOUSE_PATH}" \
     "${PROJECT_ROOT}"[test] \

View File

@@ -81,7 +81,16 @@ ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
     ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"

 if [ -n "${ARTIFACTS}" ]; then
+    if [ ! -e "${SUBUNIT2}" ]; then
+        echo "subunitv2 output file does not exist: ${SUBUNIT2}"
+        exit 1
+    fi
+
     # Create a junitxml results area.
     mkdir -p "$(dirname "${JUNITXML}")"
-    ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
+    # Always succeed even if subunit2junitxml fails. subunit2junitxml signals
+    # failure if the stream it is processing contains test failures. This is
+    # not what we care about. If we cared about it, the test command above
+    # would have signalled failure already and we wouldn't be here.
+    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || true
 fi

View File

@ -1,40 +0,0 @@
sudo: false
language: python
cache: pip
dist: xenial
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
git:
depth: 1000
env:
global:
- TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
install:
- pip install --upgrade tox setuptools virtualenv
- echo $PATH; which python; which pip; which tox
- python misc/build_helpers/show-tool-versions.py
script:
- |
set -eo pipefail
tox -e ${T}
notifications:
email: false
irc:
channels: "chat.freenode.net#tahoe-lafs"
on_success: always # for testing
on_failure: always
template:
- "%{repository}#%{build_number} [%{branch}: %{commit} by %{author}] %{message}"
- "Changes: %{compare_url} | Details: %{build_url}"
matrix:
include:
- os: linux
python: '3.6'
env: T=py36
fast_finish: true

View File

@@ -57,6 +57,21 @@ allmydata.test.test_deferredutil.DeferredUtilTests.test_success
 allmydata.test.test_deferredutil.DeferredUtilTests.test_wait_for_delayed_calls
 allmydata.test.test_dictutil.DictUtil.test_auxdict
 allmydata.test.test_dictutil.DictUtil.test_dict_of_sets
+allmydata.test.test_happiness.Happiness.test_100
+allmydata.test.test_happiness.Happiness.test_calc_happy
+allmydata.test.test_happiness.Happiness.test_everything_broken
+allmydata.test.test_happiness.Happiness.test_hypothesis0
+allmydata.test.test_happiness.Happiness.test_hypothesis_0
+allmydata.test.test_happiness.Happiness.test_hypothesis_1
+allmydata.test.test_happiness.Happiness.test_placement_1
+allmydata.test.test_happiness.Happiness.test_placement_simple
+allmydata.test.test_happiness.Happiness.test_redistribute
+allmydata.test.test_happiness.Happiness.test_unhappy
+allmydata.test.test_happiness.HappinessUtils.test_residual_0
+allmydata.test.test_happiness.HappinessUtils.test_trivial_flow_graph
+allmydata.test.test_happiness.HappinessUtils.test_trivial_maximum_graph
+allmydata.test.test_happiness.PlacementTests.test_hypothesis_unhappy
+allmydata.test.test_happiness.PlacementTests.test_more_hypothesis
 allmydata.test.test_hashtree.Complete.test_create
 allmydata.test.test_hashtree.Complete.test_dump
 allmydata.test.test_hashtree.Complete.test_needed_hashes

View File

@@ -11,8 +11,8 @@ cd "../.."
 # Since both of the next calls are expected to exit non-0, relax our guard.
 set +e
-SUBUNITREPORTER_OUTPUT_PATH="$base/results.subunit2" trial --reporter subunitv2-file allmydata
-subunit2junitxml < "$base/results.subunit2" > "$base/results.xml"
+trial --reporter=subunitv2-file allmydata
+subunit2junitxml < "${SUBUNITREPORTER_OUTPUT_PATH}" > "$base/results.xml"
 set -e

 # Okay, now we're clear.
@@ -32,6 +32,14 @@ set -e
 if [ $TERM = 'dumb' ]; then
   export TERM=ansi
 fi

-git diff "$tracking_filename"
-
-exit $code
+echo "The ${tracking_filename} diff is:"
+echo "================================="
+# "git diff" gets pretty confused in this execution context when trying to
+# write to stdout. Somehow it fails with SIGTTOU.
+git diff -- "${tracking_filename}" > tracking.diff
+cat tracking.diff
+echo "================================="
+
+echo "Exiting with code ${code} from ratchet.py."
+exit ${code}
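
For context on the hunks above: this script runs the Python 3 test suite, converts the subunit2 stream to results.xml, and then the ratchet.py it wraps compares the names of currently passing tests against the tracked baseline (the ratchet-passing file extended earlier in this commit) and hands back the exit code used here. The following is a minimal sketch of that kind of ratchet comparison, assuming one test name per line in each input file; it is not the project's actual ratchet.py, and the file names and function names are illustrative only.

    # ratchet_sketch.py -- hypothetical, simplified ratchet check
    import sys

    def read_names(path):
        """Return the set of non-blank lines (test names) in a file."""
        with open(path) as f:
            return set(line.strip() for line in f if line.strip())

    def ratchet(baseline_path, passing_path):
        """Return 1 if any test in the baseline is no longer passing, else 0."""
        baseline = read_names(baseline_path)
        passing = read_names(passing_path)
        for name in sorted(baseline - passing):
            print("regressed:", name)
        for name in sorted(passing - baseline):
            print("newly passing, add to baseline:", name)
        return 1 if baseline - passing else 0

    if __name__ == "__main__":
        # Usage (hypothetical): python ratchet_sketch.py ratchet-passing now-passing.txt
        sys.exit(ratchet(sys.argv[1], sys.argv[2]))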

View File

@@ -0,0 +1 @@
+Slackware 14.2 is no longer a Tahoe-LAFS supported platform.

newsfragments/3336.minor Normal file
View File

newsfragments/3370.minor Normal file
View File

View File

@@ -1,5 +1,20 @@
+"""
+Algorithms for figuring out happiness, the number of unique nodes the data is
+on.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # We omit dict, just in case newdict breaks things for external Python 2 code.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401
+
-from Queue import PriorityQueue
+from queue import PriorityQueue


 def augmenting_path_for(graph):
@@ -35,9 +50,9 @@ def bfs(graph, s):
     GRAY = 1
     # BLACK vertices are those we have seen and explored
     BLACK = 2
-    color = [WHITE for i in xrange(len(graph))]
-    predecessor = [None for i in xrange(len(graph))]
-    distance = [-1 for i in xrange(len(graph))]
+    color = [WHITE for i in range(len(graph))]
+    predecessor = [None for i in range(len(graph))]
+    distance = [-1 for i in range(len(graph))]
     queue = [s] # vertices that we haven't explored yet.
     color[s] = GRAY
     distance[s] = 0
@@ -58,9 +73,9 @@ def residual_network(graph, f):
     flow network represented by my graph and f arguments. graph is a
     flow network in adjacency-list form, and f is a flow in graph.
     """
-    new_graph = [[] for i in xrange(len(graph))]
-    cf = [[0 for s in xrange(len(graph))] for sh in xrange(len(graph))]
-    for i in xrange(len(graph)):
+    new_graph = [[] for i in range(len(graph))]
+    cf = [[0 for s in range(len(graph))] for sh in range(len(graph))]
+    for i in range(len(graph)):
         for v in graph[i]:
             if f[i][v] == 1:
                 # We add an edge (v, i) with cf[v,i] = 1. This means
@@ -135,7 +150,7 @@ def _compute_maximum_graph(graph, shareIndices):
         return {}

     dim = len(graph)
-    flow_function = [[0 for sh in xrange(dim)] for s in xrange(dim)]
+    flow_function = [[0 for sh in range(dim)] for s in range(dim)]
     residual_graph, residual_function = residual_network(graph, flow_function)

     while augmenting_path_for(residual_graph):
@@ -260,9 +275,9 @@ def _servermap_flow_graph(peers, shares, servermap):
     #print "share_to_index %s" % share_to_index
     #print "servermap %s" % servermap
     for peer in peers:
-        if servermap.has_key(peer):
+        if peer in servermap:
             for s in servermap[peer]:
-                if share_to_index.has_key(s):
+                if s in share_to_index:
                     indexedShares.append(share_to_index[s])
         graph.insert(peer_to_index[peer], indexedShares)
     for share in shares:
@@ -373,7 +388,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
     new_mappings = _calculate_mappings(new_peers, new_shares)
     #print "new_peers %s" % new_peers
     #print "new_mappings %s" % new_mappings
-    mappings = dict(readonly_mappings.items() + existing_mappings.items() + new_mappings.items())
+    mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
     homeless_shares = set()
     for share in mappings:
         if mappings[share] is None:
@@ -384,7 +399,7 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
         mappings, homeless_shares,
         {
             k: v
-            for k, v in peers_to_shares.items()
+            for k, v in list(peers_to_shares.items())
             if k not in readonly_peers
         }
     )
@@ -401,5 +416,5 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
     return {
         k: v.pop() if v else next(peer_iter)
-        for k, v in mappings.items()
+        for k, v in list(mappings.items())
     }
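
The changes in this module follow a small set of Python 2/3 compatibility idioms: xrange() becomes range(), dict.has_key(k) becomes k in d, and .items() results are wrapped in list() before being concatenated, because they are views rather than lists on Python 3. A minimal, self-contained sketch of those idioms (the variable names are illustrative only, not taken from the module above):

    # py23_idioms_sketch.py -- hypothetical examples of the porting idioms applied above
    colors = {"a": 1, "b": 2}
    extras = {"c": 3}

    # xrange() is gone on Python 3; range() works on both (and the future builtins
    # import used above makes range() lazy on Python 2 as well).
    squares = [i * i for i in range(4)]
    assert squares == [0, 1, 4, 9]

    # dict.has_key() was removed in Python 3; membership tests use "in".
    assert "a" in colors and "z" not in colors

    # On Python 3, .items() returns a view, so two views cannot be added directly;
    # wrapping each in list() works on both Python 2 and Python 3.
    merged = dict(list(colors.items()) + list(extras.items()))
    assert merged == {"a": 1, "b": 2, "c": 3}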

View File

@@ -1,5 +1,15 @@
 # -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # We omit dict, just in case newdict breaks things.
+    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from hypothesis import given
 from hypothesis.strategies import text, sets

View File

@@ -22,6 +22,7 @@ PORTED_MODULES = [
     "allmydata.crypto.rsa",
     "allmydata.crypto.util",
     "allmydata.hashtree",
+    "allmydata.immutable.happiness_upload",
     "allmydata.test.common_py3",
     "allmydata.util._python3",
     "allmydata.util.abbreviate",
@@ -53,6 +54,7 @@ PORTED_TEST_MODULES = [
     "allmydata.test.test_crypto",
     "allmydata.test.test_deferredutil",
     "allmydata.test.test_dictutil",
+    "allmydata.test.test_happiness",
     "allmydata.test.test_hashtree",
     "allmydata.test.test_hashutil",
     "allmydata.test.test_humanreadable",

View File

@@ -50,7 +50,7 @@ commands =
 [testenv:py36]
 # On macOS, git inside of ratchet.sh needs $HOME.
-passenv = HOME
+passenv = {[testenv]passenv} HOME
 commands = {toxinidir}/misc/python3/ratchet.sh

 [testenv:integration]