Merge master
appveyor.yml (deleted):

@@ -1,95 +0,0 @@
-# adapted from https://packaging.python.org/en/latest/appveyor/
-
-environment:
-
-  matrix:
-
-    # For Python versions available on Appveyor, see
-    # http://www.appveyor.com/docs/installed-software#python
-    - PYTHON: "C:\\Python27"
-    - PYTHON: "C:\\Python27-x64"
-      # DISTUTILS_USE_SDK: "1"
-      # TOX_TESTENV_PASSENV: "DISTUTILS_USE_SDK INCLUDE LIB"
-
-install:
-  - |
-    %PYTHON%\python.exe -m pip install -U pip
-    %PYTHON%\python.exe -m pip install wheel tox==3.9.0 virtualenv
-
-# note:
-# %PYTHON% has: python.exe
-# %PYTHON%\Scripts has: pip.exe, tox.exe (and others installed by bare pip)
-
-# We have a custom "build" system.  We don't need MSBuild or whatever.
-build: off
-
-# Do not build feature branch with open pull requests.  This is documented but
-# it's not clear it does anything.
-skip_branch_with_pr: true
-
-# This, perhaps, is effective.
-branches:
-  # whitelist
-  only:
-    - 'master'
-
-skip_commits:
-  files:
-    # The Windows builds are unaffected by news fragments.
-    - 'newsfragments/*'
-    # Also, all this build junk.
-    - '.circleci/*'
-    - '.lgtm.yml'
-    - '.travis.yml'
-
-# we run from C:\projects\tahoe-lafs
-
-test_script:
-  # Put your test command here.
-  # Note that you must use the environment variable %PYTHON% to refer to
-  # the interpreter you're using - Appveyor does not do anything special
-  # to put the Python version you want to use on PATH.
-  - |
-    %PYTHON%\Scripts\tox.exe -e coverage
-    %PYTHON%\Scripts\tox.exe -e pyinstaller
-  # To verify that the resultant PyInstaller-generated binary executes
-  # cleanly (i.e., that it terminates with an exit code of 0 and isn't
-  # failing due to import/packaging-related errors, etc.).
-  - dist\Tahoe-LAFS\tahoe.exe --version
-
-after_test:
-  # This builds the main tahoe wheel, and wheels for all dependencies.
-  # Again, you only need build.cmd if you're building C extensions for
-  # 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct
-  # interpreter.  If _trial_temp still exists, the "pip wheel" fails on
-  # _trial_temp\local_dir (not sure why).
-  - |
-    copy _trial_temp\test.log trial_test_log.txt
-    rd /s /q _trial_temp
-    %PYTHON%\python.exe setup.py bdist_wheel
-    %PYTHON%\python.exe -m pip wheel -w dist .
-  - |
-    %PYTHON%\python.exe -m pip install codecov "coverage ~= 4.5"
-    %PYTHON%\python.exe -m coverage xml -o coverage.xml -i
-    %PYTHON%\python.exe -m codecov -X search -X gcov -f coverage.xml
-
-artifacts:
-  # bdist_wheel puts your built wheel in the dist directory
-  # "pip wheel -w dist ." puts all the dependency wheels there too
-  # this gives us a zipfile with everything
-  - path: 'dist\*'
-  - path: trial_test_log.txt
-    name: Trial test.log
-  - path: eliot.log
-    name: Eliot test log
-
-on_failure:
-  # Artifacts are not normally uploaded when the job fails.  To get the test
-  # logs, we have to push them ourselves.
-  - ps: Push-AppveyorArtifact _trial_temp\test.log -Filename trial.log
-  - ps: Push-AppveyorArtifact eliot.log -Filename eliot.log
-
-#on_success:
-#  You can use this step to upload your artifacts to a public website.
-#  See Appveyor's documentation for more details.  Or you can simply
-#  access your wheels from the Appveyor "artifacts" tab for your build.
.circleci/Dockerfile.centos:

@@ -1,5 +1,6 @@
 ARG TAG
 FROM centos:${TAG}
+ARG PYTHON_VERSION
 
 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
     git \
     sudo \
     make automake gcc gcc-c++ \
-    python2 \
+    python${PYTHON_VERSION} \
-    python2-devel \
+    python${PYTHON_VERSION}-devel \
     libffi-devel \
     openssl-devel \
     libyaml \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
.circleci/Dockerfile.debian:

@@ -1,5 +1,6 @@
 ARG TAG
 FROM debian:${TAG}
+ARG PYTHON_VERSION
 
 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -12,8 +13,8 @@ RUN apt-get --quiet update && \
     lsb-release \
     sudo \
     build-essential \
-    python2.7 \
+    python${PYTHON_VERSION} \
-    python2.7-dev \
+    python${PYTHON_VERSION}-dev \
     libffi-dev \
     libssl-dev \
     libyaml-dev \
@@ -23,7 +24,7 @@ RUN apt-get --quiet update && \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
 
 # Only the integration tests currently need this but it doesn't hurt to always
 # have it present and it's simpler than building a whole extra image just for
.circleci/Dockerfile.fedora:

@@ -1,5 +1,6 @@
 ARG TAG
 FROM fedora:${TAG}
+ARG PYTHON_VERSION
 
 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -11,8 +12,8 @@ RUN yum install --assumeyes \
     git \
     sudo \
     make automake gcc gcc-c++ \
-    python \
+    python${PYTHON_VERSION} \
-    python-devel \
+    python${PYTHON_VERSION}-devel \
     libffi-devel \
     openssl-devel \
     libyaml-devel \
@@ -23,4 +24,4 @@ RUN yum install --assumeyes \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
.circleci/Dockerfile.slackware (deleted):

@@ -1,49 +0,0 @@
-ARG TAG
-FROM vbatts/slackware:${TAG}
-
-ENV WHEELHOUSE_PATH /tmp/wheelhouse
-ENV VIRTUALENV_PATH /tmp/venv
-# This will get updated by the CircleCI checkout step.
-ENV BUILD_SRC_ROOT /tmp/project
-
-# Be careful with slackpkg.  If the package name given doesn't match anything,
-# slackpkg still claims to succeed but you're totally screwed.  Slackware
-# updates versions of packaged software so including too much version prefix
-# is a good way to have your install commands suddenly begin not installing
-# anything.
-RUN slackpkg update && \
-    slackpkg install \
-    openssh-7 git-2 \
-    ca-certificates \
-    sudo-1 \
-    make-4 \
-    automake-1 \
-    kernel-headers \
-    glibc-2 \
-    binutils-2 \
-    gcc-5 \
-    gcc-g++-5 \
-    python-2 \
-    libffi-3 \
-    libyaml-0 \
-    sqlite-3 \
-    icu4c-56 \
-    libmpc-1 </dev/null && \
-    slackpkg upgrade \
-    openssl-1 </dev/null
-
-# neither virtualenv nor pip is packaged.
-# do it the hard way.
-# and it is extra hard since it is slackware.
-RUN slackpkg install \
-    cyrus-sasl-2 \
-    curl-7 </dev/null && \
-    curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
-    python get-pip.py && \
-    pip install virtualenv
-
-# Get the project source.  This is better than it seems.  CircleCI will
-# *update* this checkout on each job run, saving us more time per-job.
-COPY . ${BUILD_SRC_ROOT}
-
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
.circleci/Dockerfile.ubuntu:

@@ -1,5 +1,6 @@
 ARG TAG
 FROM ubuntu:${TAG}
+ARG PYTHON_VERSION
 
 ENV WHEELHOUSE_PATH /tmp/wheelhouse
 ENV VIRTUALENV_PATH /tmp/venv
@@ -13,8 +14,8 @@ RUN apt-get --quiet update && \
     apt-get --quiet --yes install \
     sudo \
     build-essential \
-    python2.7 \
+    python${PYTHON_VERSION} \
-    python2.7-dev \
+    python${PYTHON_VERSION}-dev \
     libffi-dev \
     libssl-dev \
     libyaml-dev \
@@ -26,4 +27,4 @@ RUN apt-get --quiet update && \
 # *update* this checkout on each job run, saving us more time per-job.
 COPY . ${BUILD_SRC_ROOT}
 
-RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python2.7"
+RUN "${BUILD_SRC_ROOT}"/.circleci/prepare-image.sh "${WHEELHOUSE_PATH}" "${VIRTUALENV_PATH}" "${BUILD_SRC_ROOT}" "python${PYTHON_VERSION}"
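All five Dockerfiles now take the interpreter as a build argument, so one file per distribution can produce both a Python 2 and a Python 3 image. A minimal sketch of building one such image locally, assuming Docker is installed and the command runs from a tahoe-lafs checkout; the DISTRO/TAG/PYTHON_VERSION values mirror the environment the CircleCI build-image jobs set:

    #!/usr/bin/env bash
    set -euo pipefail
    DISTRO="ubuntu"
    TAG="18.04"
    PYTHON_VERSION="2.7"
    # Same invocation as the "Build image" step in .circleci/config.yml.
    docker build \
        --build-arg TAG="${TAG}" \
        --build-arg PYTHON_VERSION="${PYTHON_VERSION}" \
        -t "tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}" \
        -f ".circleci/Dockerfile.${DISTRO}" \
        .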
.circleci/config.yml:

@@ -1,8 +1,17 @@
 # https://circleci.com/docs/2.0/
 
-version: 2
+# We use version 2.1 of CircleCI's configuration format (the docs are still at
+# the 2.0 link) in order to have access to Windows executors.  This means we
+# can't use dots in job names anymore.  They have a new "parameters" feature
+# that is supposed to remove the need to have version numbers in job names (the
+# source of our dots), but switching to that is going to be a bigger refactor:
+#
+#   https://discuss.circleci.com/t/v2-1-job-name-validation/31123
+#   https://circleci.com/docs/2.0/reusing-config/
+#
+version: 2.1
 
 workflows:
-  version: 2
   ci:
     jobs:
       # Platforms
@@ -11,10 +20,13 @@ workflows:
           requires:
            - "debian-9"
 
-      - "ubuntu-18.04"
+      - "ubuntu-20-04"
-      - "ubuntu-16.04":
+      - "ubuntu-18-04":
           requires:
-            - "ubuntu-18.04"
+            - "ubuntu-20-04"
+      - "ubuntu-16-04":
+          requires:
+            - "ubuntu-20-04"
 
       - "fedora-29"
       - "fedora-28":
@@ -23,12 +35,13 @@ workflows:
 
       - "centos-8"
 
-      - "slackware-14.2"
+      - "nixos-19-09"
 
-      - "nixos-19.09"
-
       # Test against PyPy 2.7
-      - "pypy2.7-buster"
+      - "pypy27-buster"
 
+      # Just one Python 3.6 configuration while the port is in-progress.
+      - "python36"
+
       # Other assorted tasks and configurations
       - "lint"
@@ -63,13 +76,14 @@ workflows:
     jobs:
       - "build-image-debian-8"
      - "build-image-debian-9"
-      - "build-image-ubuntu-16.04"
+      - "build-image-ubuntu-16-04"
-      - "build-image-ubuntu-18.04"
+      - "build-image-ubuntu-18-04"
+      - "build-image-ubuntu-20-04"
      - "build-image-fedora-28"
      - "build-image-fedora-29"
      - "build-image-centos-8"
-      - "build-image-slackware-14.2"
+      - "build-image-pypy27-buster"
-      - "build-image-pypy-2.7-buster"
+      - "build-image-python36-ubuntu"
 
 
 jobs:
@@ -117,7 +131,7 @@ jobs:
 
   debian-9: &DEBIAN
     docker:
-      - image: "tahoelafsci/debian:9"
+      - image: "tahoelafsci/debian:9-py2.7"
        user: "nobody"
 
    environment: &UTF_8_ENVIRONMENT
@@ -194,20 +208,20 @@ jobs:
  debian-8:
    <<: *DEBIAN
    docker:
-      - image: "tahoelafsci/debian:8"
+      - image: "tahoelafsci/debian:8-py2.7"
        user: "nobody"
 
 
-  pypy2.7-buster:
+  pypy27-buster:
    <<: *DEBIAN
    docker:
-      - image: "tahoelafsci/pypy:2.7-buster"
+      - image: "tahoelafsci/pypy:buster-py2"
        user: "nobody"
 
    environment:
      <<: *UTF_8_ENVIRONMENT
-      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27-coverage"
-      ALLOWED_FAILURE: "yes"
+      # We don't do coverage since it makes PyPy far too slow:
+      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
 
 
  c-locale:
@@ -255,23 +269,45 @@ jobs:
      - run: *RUN_TESTS
 
 
-  ubuntu-16.04:
+  ubuntu-16-04:
    <<: *DEBIAN
    docker:
-      - image: "tahoelafsci/ubuntu:16.04"
+      - image: "tahoelafsci/ubuntu:16.04-py2.7"
        user: "nobody"
 
 
-  ubuntu-18.04:
+  ubuntu-18-04: &UBUNTU_18_04
    <<: *DEBIAN
    docker:
-      - image: "tahoelafsci/ubuntu:18.04"
+      - image: "tahoelafsci/ubuntu:18.04-py2.7"
+        user: "nobody"
+
+
+  python36:
+    <<: *UBUNTU_18_04
+    docker:
+      - image: "tahoelafsci/ubuntu:18.04-py3"
+        user: "nobody"
+
+    environment:
+      <<: *UTF_8_ENVIRONMENT
+      # The default trial args include --rterrors which is incompatible with
+      # this reporter on Python 3.  So drop that and just specify the
+      # reporter.
+      TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
+      TAHOE_LAFS_TOX_ENVIRONMENT: "py36-coverage"
+
+
+  ubuntu-20-04:
+    <<: *DEBIAN
+    docker:
+      - image: "tahoelafsci/ubuntu:20.04"
        user: "nobody"
 
 
  centos-8: &RHEL_DERIV
    docker:
-      - image: "tahoelafsci/centos:8"
+      - image: "tahoelafsci/centos:8-py2"
        user: "nobody"
 
    environment: *UTF_8_ENVIRONMENT
@@ -293,38 +329,18 @@ jobs:
  fedora-28:
    <<: *RHEL_DERIV
    docker:
-      - image: "tahoelafsci/fedora:28"
+      - image: "tahoelafsci/fedora:28-py"
        user: "nobody"
 
 
  fedora-29:
    <<: *RHEL_DERIV
    docker:
-      - image: "tahoelafsci/fedora:29"
+      - image: "tahoelafsci/fedora:29-py"
        user: "nobody"
 
 
-  slackware-14.2:
+  nixos-19-09:
    docker:
-      - image: "tahoelafsci/slackware:14.2"
-        user: "nobody"
-
-    environment: *UTF_8_ENVIRONMENT
-
-    # pip cannot install packages if the working directory is not readable.
-    # We want to run a lot of steps as nobody instead of as root.
-    working_directory: "/tmp/project"
-
-    steps:
-      - "checkout"
-      - run: *SETUP_VIRTUALENV
-      - run: *RUN_TESTS
-      - store_test_results: *STORE_TEST_RESULTS
-      - store_artifacts: *STORE_TEST_LOG
-      - store_artifacts: *STORE_OTHER_ARTIFACTS
-      - run: *SUBMIT_COVERAGE
-
-  nixos-19.09:
-    docker:
      # Run in a highly Nix-capable environment.
      - image: "nixorg/nix:circleci"
@@ -387,8 +403,9 @@ jobs:
      - image: "docker:17.05.0-ce-git"
 
    environment:
-      DISTRO: "tahoelafsci/<DISTRO>:foo"
-      TAG: "tahoelafsci/distro:<TAG>"
+      DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
+      TAG: "tahoelafsci/distro:<TAG>-py2"
+      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION}"
 
    steps:
      - "checkout"
@@ -440,13 +457,14 @@ jobs:
          docker \
            build \
            --build-arg TAG=${TAG} \
-            -t tahoelafsci/${DISTRO}:${TAG} \
+            --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
+            -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
            -f ~/project/.circleci/Dockerfile.${DISTRO} \
            ~/project/
      - run:
          name: "Push image"
          command: |
-            docker push tahoelafsci/${DISTRO}:${TAG}
+            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
 
 
  build-image-debian-8:
@@ -455,6 +473,7 @@ jobs:
    environment:
      DISTRO: "debian"
      TAG: "8"
+      PYTHON_VERSION: "2.7"
 
 
  build-image-debian-9:
@@ -463,22 +482,43 @@ jobs:
    environment:
      DISTRO: "debian"
      TAG: "9"
+      PYTHON_VERSION: "2.7"
 
 
-  build-image-ubuntu-16.04:
+  build-image-ubuntu-16-04:
    <<: *BUILD_IMAGE
 
    environment:
      DISTRO: "ubuntu"
      TAG: "16.04"
+      PYTHON_VERSION: "2.7"
 
 
-  build-image-ubuntu-18.04:
+  build-image-ubuntu-18-04:
    <<: *BUILD_IMAGE
 
    environment:
      DISTRO: "ubuntu"
      TAG: "18.04"
+      PYTHON_VERSION: "2.7"
+
+
+  build-image-python36-ubuntu:
+    <<: *BUILD_IMAGE
+
+    environment:
+      DISTRO: "ubuntu"
+      TAG: "18.04"
+      PYTHON_VERSION: "3"
+
+
+  build-image-ubuntu-20-04:
+    <<: *BUILD_IMAGE
+
+    environment:
+      DISTRO: "ubuntu"
+      TAG: "20.04"
+      PYTHON_VERSION: "2.7"
 
 
  build-image-centos-8:
@@ -487,6 +527,7 @@ jobs:
    environment:
      DISTRO: "centos"
      TAG: "8"
+      PYTHON_VERSION: "2"
 
 
  build-image-fedora-28:
@@ -495,6 +536,8 @@ jobs:
    environment:
      DISTRO: "fedora"
      TAG: "28"
+      # The default on Fedora (this version anyway) is still Python 2.
+      PYTHON_VERSION: ""
 
 
  build-image-fedora-29:
@@ -505,17 +548,13 @@ jobs:
      TAG: "29"
 
 
-  build-image-slackware-14.2:
+  build-image-pypy27-buster:
    <<: *BUILD_IMAGE
 
    environment:
-      DISTRO: "slackware"
-      TAG: "14.2"
-
-
-  build-image-pypy-2.7-buster:
-    <<: *BUILD_IMAGE
-
-    environment:
      DISTRO: "pypy"
-      TAG: "2.7-buster"
+      TAG: "buster"
+      # We only have Python 2 for PyPy right now so there's no support for
+      # setting up PyPy 3 in the image building toolchain.  This value is just
+      # for constructing the right Docker image tag.
+      PYTHON_VERSION: "2"
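Since the 2.1 format rejects job names containing dots, every renamed job above can be checked before pushing. A quick sanity check, assuming the CircleCI local CLI is installed:

    # Validate the reworked configuration locally (hypothetical local check;
    # requires the "circleci" CLI from CircleCI).
    circleci config validate .circleci/config.yml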
.circleci/populate-wheelhouse.sh:

@@ -36,8 +36,9 @@ PIP="${BOOTSTRAP_VENV}/bin/pip"
 # Tell pip where it can find any existing wheels.
 export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
 
-# Populate the wheelhouse, if necessary.
-"${PIP}" \
+# Populate the wheelhouse, if necessary.  zfec 1.5.3 can only be built with a
+# UTF-8 environment so make sure we have one, at least for this invocation.
+LANG="en_US.UTF-8" "${PIP}" \
     wheel \
     --wheel-dir "${WHEELHOUSE_PATH}" \
     "${PROJECT_ROOT}"[test] \
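The LANG prefix here sets the locale for that single pip invocation only, leaving the surrounding shell untouched. A minimal sketch of the same trick in isolation, assuming pip is available; the wheelhouse path is illustrative:

    # Build just the zfec wheel with a UTF-8 locale scoped to this one command.
    LANG="en_US.UTF-8" pip wheel --wheel-dir /tmp/wheelhouse zfec==1.5.3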
.circleci/run-tests.sh:

@@ -65,9 +65,13 @@ TIMEOUT="timeout --kill-after 1m 15m"
 # Send the output directly to a file because transporting the binary subunit2
 # via tox and then scraping it out is hideous and failure prone.
 export SUBUNITREPORTER_OUTPUT_PATH="${SUBUNIT2}"
-export TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file --rterrors"
+export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
 export PIP_NO_INDEX="1"
 
+# Make output unbuffered, so progress reports from subunitv2-file get streamed
+# and notify CircleCI we're still alive.
+export PYTHONUNBUFFERED=1
+
 if [ "${ALLOWED_FAILURE}" = "yes" ]; then
     alternative="true"
 else
@@ -81,7 +85,12 @@ ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
     ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"
 
 if [ -n "${ARTIFACTS}" ]; then
+    if [ ! -e "${SUBUNIT2}" ]; then
+        echo "subunitv2 output file does not exist: ${SUBUNIT2}"
+        exit 1
+    fi
+
     # Create a junitxml results area.
     mkdir -p "$(dirname "${JUNITXML}")"
-    ${BOOTSTRAP_VENV}/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
+    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
 fi
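The key change is the `${VAR:-default}` expansion: a job (such as the new python36 job) can now export its own TAHOE_LAFS_TRIAL_ARGS and the script falls back to the old default only when the variable is unset or empty. A minimal sketch of the pattern:

    # With nothing exported, the default applies...
    unset TAHOE_LAFS_TRIAL_ARGS
    export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
    echo "${TAHOE_LAFS_TRIAL_ARGS}"   # --reporter=subunitv2-file --rterrors
    # ...but a caller-provided value wins.
    TAHOE_LAFS_TRIAL_ARGS="--reporter=subunitv2-file"
    export TAHOE_LAFS_TRIAL_ARGS="${TAHOE_LAFS_TRIAL_ARGS:---reporter=subunitv2-file --rterrors}"
    echo "${TAHOE_LAFS_TRIAL_ARGS}"   # --reporter=subunitv2-file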
.codecov.yml (new file, 34 lines):

@@ -0,0 +1,34 @@
+# Override defaults for codecov.io checks.
+#
+# Documentation is at https://docs.codecov.io/docs/codecov-yaml;
+# reference is at https://docs.codecov.io/docs/codecovyml-reference.
+#
+# To validate this file, use:
+#
+#   curl --data-binary @.codecov.yml https://codecov.io/validate
+#
+# Codecov's defaults seem to leave red marks in GitHub CI checks in a
+# rather arbitrary manner, probably because of non-determinism in
+# coverage (see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2891)
+# and maybe because computers are bad with floating point numbers.
+
+# Allow coverage percentage a precision of zero decimals, and round to
+# the nearest number (for example, 89.957 to 90; 89.497 to 89%).
+# Coverage above 90% is good, below 80% is bad.
+coverage:
+  round: nearest
+  range: 80..90
+  precision: 0
+
+  # Aim for a target test coverage of 90% in codecov/project check (do
+  # not allow project coverage to drop below that), and allow
+  # codecov/patch a threshold of 1% (allow coverage in changes to drop
+  # by that much, and no less).  That should be good enough for us.
+  status:
+    project:
+      default:
+        target: 90%
+        threshold: 1%
+    patch:
+      default:
+        threshold: 1%
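As the file's own comment suggests, the configuration can be checked against Codecov's validator before committing; requires curl and network access:

    # Validate the new codecov configuration from the repository root.
    curl --data-binary @.codecov.yml https://codecov.io/validate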
.coveragerc:

@@ -10,3 +10,7 @@ omit =
     */allmydata/_version.py
 parallel = True
 branch = True
+
+[report]
+show_missing = True
+skip_covered = True

.github/workflows/ci.yml:

@@ -49,8 +49,8 @@ jobs:
       - name: Display tool versions
         run: python misc/build_helpers/show-tool-versions.py
 
-      - name: Run "tox -e coverage"
+      - name: Run "tox -e py27-coverage"
-        run: tox -e coverage
+        run: tox -e py27-coverage
 
       - name: Upload eliot.log in case of failure
         uses: actions/upload-artifact@v1
@@ -72,6 +72,7 @@ jobs:
       matrix:
         os:
           - macos-latest
+          - windows-latest
         python-version:
           - 2.7
@@ -162,7 +163,7 @@ jobs:
       - name: Install Python packages
         run: |
-          pip install --upgrade codecov tox setuptools
+          pip install --upgrade tox
           pip list
 
       - name: Display tool versions
@@ -170,3 +171,13 @@ jobs:
 
       - name: Run "tox -e pyinstaller"
         run: tox -e pyinstaller
+
+      # This step is to ensure there are no packaging/import errors.
+      - name: Test PyInstaller executable
+        run: dist/Tahoe-LAFS/tahoe --version
+
+      - name: Upload PyInstaller package
+        uses: actions/upload-artifact@v2
+        with:
+          name: Tahoe-LAFS-${{ matrix.os }}-Python-${{ matrix.python-version }}
+          path: dist/Tahoe-LAFS-*-*.*
.gitignore:

@@ -1,4 +1,4 @@
-venv
+venv*
 
 # vim swap files
 *.swp
@@ -9,6 +9,7 @@ venv
 *~
 *.DS_Store
 .*.kate-swp
+*.bak
 
 /build/
 /support/
@@ -36,6 +37,7 @@ zope.interface-*.egg
 /tahoe-deps/
 /tahoe-deps.tar.gz
 /.coverage
+/.coverage.*
 /.coverage.el
 /coverage-html/
 /miscaptures.txt
@@ -43,7 +45,11 @@ zope.interface-*.egg
 /.tox/
 /docs/_build/
 /coverage.xml
+/.pre-commit-config.local.yaml
 /.hypothesis/
+/eliot.log
+/misc/python3/results.xml
+/misc/python3/results.subunit2
 
 # This is the plaintext of the private environment needed for some CircleCI
 # operations.  It's never supposed to be checked in.
.pre-commit-config.yaml (new file, 15 lines):

@@ -0,0 +1,15 @@
+repos:
+  - repo: local
+    hooks:
+      - id: codechecks
+        name: codechecks
+        stages: ["commit"]
+        entry: "tox -e codechecks"
+        language: system
+        pass_filenames: false
+      - id: test
+        name: test
+        stages: ["push"]
+        entry: "make test"
+        language: system
+        pass_filenames: false
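Both hooks are "local" system hooks, so they can also be exercised once, without installing anything into .git/hooks. A sketch, assuming the tox-created virtualenv from the Makefile changes below provides the pre-commit tool:

    # Run the commit-stage hooks against the whole tree...
    ./.tox/py36/bin/pre-commit run --all-files
    # ...and the push-stage "test" hook explicitly.
    ./.tox/py36/bin/pre-commit run --hook-stage push test --all-files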
.travis.yml (deleted, 77 lines):

@@ -1,77 +0,0 @@
-sudo: false
-language: python
-cache: pip
-dist: trusty
-before_cache:
-  - rm -f $HOME/.cache/pip/log/debug.log
-git:
-  depth: 1000
-
-env:
-  global:
-    - TAHOE_LAFS_HYPOTHESIS_PROFILE=ci
-
-install:
-  # ~/.local/bin is on $PATH by default, but on OS-X, --user puts it elsewhere
-  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then export PATH=$HOME/Library/Python/2.7/bin:$PATH; fi
-  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then wget https://bootstrap.pypa.io/get-pip.py && sudo python ./get-pip.py; fi
-  - pip list
-  - if [ "${TRAVIS_OS_NAME}" = "osx" ]; then pip install --user --upgrade codecov tox setuptools; fi
-  - if [ "${TRAVIS_OS_NAME}" = "linux" ]; then pip install --upgrade codecov tox setuptools; fi
-  - echo $PATH; which python; which pip; which tox
-  - python misc/build_helpers/show-tool-versions.py
-
-script:
-  - |
-    set -eo pipefail
-    if [ "${T}" = "py35" ]; then
-      python3 -m compileall -f -x tahoe-depgraph.py .
-    else
-      tox -e ${T}
-    fi
-    # To verify that the resultant PyInstaller-generated binary executes
-    # cleanly (i.e., that it terminates with an exit code of 0 and isn't
-    # failing due to import/packaging-related errors, etc.).
-    if [ "${T}" = "pyinstaller" ]; then dist/Tahoe-LAFS/tahoe --version; fi
-
-after_success:
-  - if [ "${T}" = "coverage" ]; then codecov; fi
-
-notifications:
-  email: false
-  irc:
-    channels: "chat.freenode.net#tahoe-lafs"
-    on_success: always # for testing
-    on_failure: always
-    template:
-      - "%{repository}#%{build_number} [%{branch}: %{commit} by %{author}] %{message}"
-      - "Changes: %{compare_url} | Details: %{build_url}"
-
-matrix:
-  include:
-    - os: linux
-      python: '2.7'
-      env: T=coverage LANG=en_US.UTF-8
-    - os: linux
-      python: '2.7'
-      env: T=codechecks LANG=en_US.UTF-8
-    - os: linux
-      python: '2.7'
-      env: T=pyinstaller LANG=en_US.UTF-8
-    - os: linux
-      python: '2.7'
-      env: T=py27 LANG=C
-    - os: osx
-      python: '2.7'
-      env: T=py27 LANG=en_US.UTF-8
-      language: generic # "python" is not available on OS-X
-    - os: osx
-      python: '2.7'
-      env: T=pyinstaller LANG=en_US.UTF-8
-      language: generic # "python" is not available on OS-X
-    # this is a "lint" job that checks for python3 compatibility
-    - os: linux
-      python: '3.5'
-      env: T=py35
-
-  fast_finish: true
Makefile:

@@ -1,16 +1,54 @@
+# Tahoe LFS Development and maintenance tasks
+#
 # NOTE: this Makefile requires GNU make
 
+### Defensive settings for make:
+#     https://tech.davis-hansson.com/p/make/
+SHELL := bash
+.ONESHELL:
+.SHELLFLAGS := -xeu -o pipefail -c
+.SILENT:
+.DELETE_ON_ERROR:
+MAKEFLAGS += --warn-undefined-variables
+MAKEFLAGS += --no-builtin-rules
+
+# Local target variables
+VCS_HOOK_SAMPLES=$(wildcard .git/hooks/*.sample)
+VCS_HOOKS=$(VCS_HOOK_SAMPLES:%.sample=%)
+PYTHON=python
+export PYTHON
+PYFLAKES=flake8
+export PYFLAKES
+SOURCES=src/allmydata static misc setup.py
+APPNAME=tahoe-lafs
+
+
+# Top-level, phony targets
+
+.PHONY: default
 default:
 	@echo "no default target"
 
-PYTHON=python
-export PYTHON
-PYFLAKES=pyflakes
-export PYFLAKES
-
-SOURCES=src/allmydata static misc setup.py
-APPNAME=tahoe-lafs
+.PHONY: install-vcs-hooks
+## Install the VCS hooks to run linters on commit and all tests on push
+install-vcs-hooks: .git/hooks/pre-commit .git/hooks/pre-push
+.PHONY: uninstall-vcs-hooks
+## Remove the VCS hooks
+uninstall-vcs-hooks: .tox/create-venvs.log
+	"./$(dir $(<))py36/bin/pre-commit" uninstall || true
+	"./$(dir $(<))py36/bin/pre-commit" uninstall -t pre-push || true
+
+.PHONY: test
+## Run all tests and code reports
+test: .tox/create-venvs.log
+	# Run codechecks first since it takes the least time to report issues early.
+	tox --develop -e codechecks
+	# Run all the test environments in parallel to reduce run-time
+	tox --develop -p auto -e 'py27,py36,pypy27'
+.PHONY: test-py3-all
+## Run all tests under Python 3
+test-py3-all: .tox/create-venvs.log
+	tox --develop -e py36 allmydata
 
 # This is necessary only if you want to automatically produce a new
 # _version.py file from the current git history (without doing a build).
@@ -18,20 +56,16 @@ APPNAME=tahoe-lafs
 make-version:
 	$(PYTHON) ./setup.py update_version
 
-.built:
-	$(MAKE) build
-
-src/allmydata/_version.py:
-	$(MAKE) make-version
-
 # Build OS X pkg packages.
-.PHONY: build-osx-pkg test-osx-pkg upload-osx-pkg
+.PHONY: build-osx-pkg
 build-osx-pkg:
 	misc/build_helpers/build-osx-pkg.sh $(APPNAME)
 
+.PHONY: test-osx-pkg
 test-osx-pkg:
 	$(PYTHON) misc/build_helpers/test-osx-pkg.py
 
+.PHONY: upload-osx-pkg
 upload-osx-pkg:
 	# [Failure instance: Traceback: <class 'OpenSSL.SSL.Error'>: [('SSL routines', 'ssl3_read_bytes', 'tlsv1 alert unknown ca'), ('SSL routines', 'ssl3_write_bytes', 'ssl handshake failure')]
 	#
@@ -42,29 +76,12 @@ upload-osx-pkg:
 	#   echo not uploading tahoe-lafs-osx-pkg because this is not trunk but is branch \"${BB_BRANCH}\" ; \
 	# fi
 
-# code coverage-based testing is disabled temporarily, as we switch to tox.
-# This will eventually be added to a tox environment.  The following comments
-# and variable settings are retained as notes for that future effort.
-
-## # code coverage: install the "coverage" package from PyPI, do "make
-## # test-coverage" to do a unit test run with coverage-gathering enabled, then
-## # use "make coverage-output" to generate an HTML report.  Also see "make
-## # .coverage.el" and misc/coding_tools/coverage.el for Emacs integration.
-##
-## # This might need to be python-coverage on Debian-based distros.
-## COVERAGE=coverage
-##
-## COVERAGEARGS=--branch --source=src/allmydata
-##
-## # --include appeared in coverage-3.4
-## COVERAGE_OMIT=--include '$(CURDIR)/src/allmydata/*' --omit '$(CURDIR)/src/allmydata/test/*'
-
-
 .PHONY: code-checks
 #code-checks: build version-and-path check-interfaces check-miscaptures -find-trailing-spaces -check-umids pyflakes
 code-checks: check-interfaces check-debugging check-miscaptures -find-trailing-spaces -check-umids pyflakes
 
 .PHONY: check-interfaces
+check-interfaces:
 	$(PYTHON) misc/coding_tools/check-interfaces.py 2>&1 |tee violations.txt
 	@echo
@@ -184,10 +201,11 @@ clean:
 	rm -f *.pkg
 
 .PHONY: distclean
-distclean: clean
+distclean: clean uninstall-vcs-hooks
 	rm -rf src/*.egg-info
 	rm -f src/allmydata/_version.py
 	rm -f src/allmydata/_appname.py
+	rm -rf ./.tox/
 
 
 .PHONY: find-trailing-spaces
@@ -220,3 +238,15 @@ tarballs: # delegated to tox, so setup.py can update setuptools if needed
 .PHONY: upload-tarballs
 upload-tarballs:
 	@if [ "X${BB_BRANCH}" = "Xmaster" ] || [ "X${BB_BRANCH}" = "X" ]; then for f in dist/*; do flappclient --furlfile ~/.tahoe-tarball-upload.furl upload-file $$f; done ; else echo not uploading tarballs because this is not trunk but is branch \"${BB_BRANCH}\" ; fi
+
+
+# Real targets
+
+src/allmydata/_version.py:
+	$(MAKE) make-version
+
+.tox/create-venvs.log: tox.ini setup.py
+	tox --notest -p all | tee -a "$(@)"
+
+$(VCS_HOOKS): .tox/create-venvs.log .pre-commit-config.yaml
+	"./$(dir $(<))py36/bin/pre-commit" install --hook-type $(@:.git/hooks/%=%)
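Taken together, the new phony targets give a simple developer workflow; a sketch, assuming GNU make and tox are on PATH in a checkout:

    make install-vcs-hooks    # wire the pre-commit/pre-push hooks into .git/hooks
    make test                 # codechecks first, then py27/py36/pypy27 in parallel
    make uninstall-vcs-hooks  # remove the hooks again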
README.rst:

@@ -10,7 +10,8 @@ function correctly, preserving your privacy and security.
 For full documentation, please see
 http://tahoe-lafs.readthedocs.io/en/latest/ .
 
-|readthedocs|  |travis|  |circleci|  |codecov|
+|Contributor Covenant|  |readthedocs|  |travis|  |circleci|  |codecov|
 
 
 INSTALLING
 ==========
@@ -105,3 +106,7 @@ slides.
 .. |codecov| image:: https://codecov.io/github/tahoe-lafs/tahoe-lafs/coverage.svg?branch=master
     :alt: test coverage percentage
     :target: https://codecov.io/github/tahoe-lafs/tahoe-lafs?branch=master
+
+.. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg
+    :alt: code of conduct
+    :target: docs/CODE_OF_CONDUCT.md
docs/CODE_OF_CONDUCT.md (new file, 54 lines):

@@ -0,0 +1,54 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of
+fostering an open and welcoming community, we pledge to respect all people who
+contribute through reporting issues, posting feature requests, updating
+documentation, submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project a harassment-free
+experience for everyone, regardless of level of experience, gender, gender
+identity and expression, sexual orientation, disability, personal appearance,
+body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information, such as physical or electronic
+  addresses, without explicit permission
+* Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to
+fairly and consistently applying these principles to every aspect of managing
+this project.  Project maintainers who do not follow or enforce the Code of
+Conduct may be permanently removed from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting a project maintainer (see below).  All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances.  Maintainers are
+obligated to maintain confidentiality with regard to the reporter of an
+incident.
+
+The following community members have made themselves available for conduct issues:
+
+- Jean-Paul Calderone (jean-paul at leastauthority dot com)
+- meejah (meejah at meejah dot ca)
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 1.3.0, available at
+[http://contributor-covenant.org/version/1/3/0/][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/3/0/
docs/developer-guide.rst (new file, 89 lines):

@@ -0,0 +1,89 @@
+Developer Guide
+===============
+
+
+Pre-commit Checks
+-----------------
+
+This project is configured for use with `pre-commit`_ to install `VCS/git hooks`_ which
+perform some static code analysis checks and other code checks to catch common errors
+before each commit and to run the full self-test suite to find less obvious regressions
+before each push to a remote.
+
+For example::
+
+  tahoe-lafs $ make install-vcs-hooks
+  ...
+  + ./.tox//py36/bin/pre-commit install --hook-type pre-commit
+  pre-commit installed at .git/hooks/pre-commit
+  + ./.tox//py36/bin/pre-commit install --hook-type pre-push
+  pre-commit installed at .git/hooks/pre-push
+  tahoe-lafs $ python -c "import pathlib; pathlib.Path('src/allmydata/tabbed.py').write_text('def foo():\\n\\tpass\\n')"
+  tahoe-lafs $ git add src/allmydata/tabbed.py
+  tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
+  ...
+  codechecks...............................................................Failed
+  - hook id: codechecks
+  - exit code: 1
+
+  GLOB sdist-make: ./tahoe-lafs/setup.py
+  codechecks inst-nodeps: ...
+  codechecks installed: ...
+  codechecks run-test-pre: PYTHONHASHSEED='...'
+  codechecks run-test: commands[0] | flake8 src static misc setup.py
+  src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
+  ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
+  ___________________________________ summary ____________________________________
+  ERROR:   codechecks: commands failed
+  ...
+
+To uninstall::
+
+  tahoe-lafs $ make uninstall-vcs-hooks
+  ...
+  + ./.tox/py36/bin/pre-commit uninstall
+  pre-commit uninstalled
+  + ./.tox/py36/bin/pre-commit uninstall -t pre-push
+  pre-push uninstalled
+
+Note that running the full self-test suite takes several minutes so expect pushing to
+take some time.  If you can't or don't want to wait for the hooks in some cases, use the
+``--no-verify`` option to ``$ git commit ...`` or ``$ git push ...``.  Alternatively,
+see the `pre-commit`_ documentation and CLI help output and use the committed
+`pre-commit configuration`_ as a starting point to write a local, uncommitted
+``../.pre-commit-config.local.yaml`` configuration to use instead.  For example::
+
+  tahoe-lafs $ ./.tox/py36/bin/pre-commit --help
+  tahoe-lafs $ ./.tox/py36/bin/pre-commit install --help
+  tahoe-lafs $ cp "./.pre-commit-config.yaml" "./.pre-commit-config.local.yaml"
+  tahoe-lafs $ editor "./.pre-commit-config.local.yaml"
+  ...
+  tahoe-lafs $ ./.tox/py36/bin/pre-commit install -c "./.pre-commit-config.local.yaml" -t pre-push
+  pre-commit installed at .git/hooks/pre-push
+  tahoe-lafs $ git commit -a -m "Add a file that violates flake8"
+  [3398.pre-commit 29f8f43d2] Add a file that violates flake8
+   1 file changed, 2 insertions(+)
+   create mode 100644 src/allmydata/tabbed.py
+  tahoe-lafs $ git push
+  ...
+  codechecks...............................................................Failed
+  - hook id: codechecks
+  - exit code: 1
+
+  GLOB sdist-make: ./tahoe-lafs/setup.py
+  codechecks inst-nodeps: ...
+  codechecks installed: ...
+  codechecks run-test-pre: PYTHONHASHSEED='...'
+  codechecks run-test: commands[0] | flake8 src static misc setup.py
+  src/allmydata/tabbed.py:2:1: W191 indentation contains tabs
+  ERROR: InvocationError for command ./tahoe-lafs/.tox/codechecks/bin/flake8 src static misc setup.py (exited with code 1)
+  ___________________________________ summary ____________________________________
+  ERROR:   codechecks: commands failed
+  ...
+
+  error: failed to push some refs to 'github.com:jaraco/tahoe-lafs.git'
+
+
+.. _`pre-commit`: https://pre-commit.com
+.. _`VCS/git hooks`: `pre-commit`_
+.. _`pre-commit configuration`: ../.pre-commit-config.yaml
docs/release-checklist.rst:

@@ -36,7 +36,7 @@ people are Release Maintainers:
 - [ ] documentation is ready (see above)
 - [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
 - [ ] build code locally:
-      tox -e py27,codechecks,coverage,deprecations,docs,integration,upcoming-deprecations
+      tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations
 - [ ] created tarballs (they'll be in dist/ for later comparison)
       tox -e tarballs
 - [ ] release version is reporting itself as intended version
docs/index.rst:

@@ -24,6 +24,7 @@ Contents:
 
    known_issues
    ../.github/CONTRIBUTING
+   CODE_OF_CONDUCT
 
    servers
    helper
@@ -38,6 +39,8 @@ Contents:
    write_coordination
    backupdb
 
+   developer-guide
+
    anonymity-configuration
 
    nodekeys
integration/conftest.py:

@@ -264,7 +264,10 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques
 
 
 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    'Tor tests are unstable on Windows')
 def chutney(reactor, temp_dir):
 
     chutney_dir = join(temp_dir, 'chutney')
     mkdir(chutney_dir)
@@ -283,18 +286,39 @@ def chutney(reactor, temp_dir):
         proto,
         'git',
         (
-            'git', 'clone', '--depth=1',
+            'git', 'clone',
             'https://git.torproject.org/chutney.git',
             chutney_dir,
         ),
         env=environ,
     )
     pytest_twisted.blockon(proto.done)
+
+    # XXX: Here we reset Chutney to the last revision known to work
+    # with Python 2, as a workaround for Chutney moving to Python 3.
+    # When this is no longer necessary, we will have to drop this and
+    # add '--depth=1' back to the above 'git clone' subprocess.
+    proto = _DumpOutputProtocol(None)
+    reactor.spawnProcess(
+        proto,
+        'git',
+        (
+            'git', '-C', chutney_dir,
+            'reset', '--hard',
+            '99bd06c7554b9113af8c0877b6eca4ceb95dcbaa'
+        ),
+        env=environ,
+    )
+    pytest_twisted.blockon(proto.done)
+
     return chutney_dir
 
 
 @pytest.fixture(scope='session')
+@pytest.mark.skipif(sys.platform.startswith('win'),
+                    reason='Tor tests are unstable on Windows')
 def tor_network(reactor, temp_dir, chutney, request):
 
     # this is the actual "chutney" script at the root of a chutney checkout
     chutney_dir = chutney
     chut = join(chutney_dir, 'chutney')
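The fixture's new clone-then-pin behaviour is equivalent to these plain git commands (a sketch; /tmp/chutney stands in for the per-session temporary directory, and the URL and revision are the ones the fixture uses):

    # Full clone (no --depth=1, so the pinned revision is reachable)...
    git clone https://git.torproject.org/chutney.git /tmp/chutney
    # ...then pin to the last revision known to work with Python 2.
    git -C /tmp/chutney reset --hard 99bd06c7554b9113af8c0877b6eca4ceb95dcbaa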
integration/test_tor.py:

@@ -10,12 +10,21 @@ from six.moves import StringIO
 from twisted.internet.protocol import ProcessProtocol
 from twisted.internet.error import ProcessExitedAlready, ProcessDone
 from twisted.internet.defer import inlineCallbacks, Deferred
+
+import pytest
 import pytest_twisted
 
 import util
 
 # see "conftest.py" for the fixtures (e.g. "tor_network")
 
+# XXX: Integration tests that involve Tor do not run reliably on
+# Windows.  They are skipped for now, in order to reduce CI noise.
+#
+# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3347
+if sys.platform.startswith('win'):
+    pytest.skip('Skipping Tor tests on Windows', allow_module_level=True)
+
 @pytest_twisted.inlineCallbacks
 def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
     yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
@@ -219,23 +219,21 @@ def test_status(alice):
     found_upload = False
     found_download = False
     for href in hrefs:
-        if href.startswith(u"/") or not href:
+        if href == u"/" or not href:
             continue
-        resp = requests.get(
-            util.node_url(alice.node_dir, u"status/{}".format(href)),
-        )
-        if href.startswith(u'up'):
+        resp = requests.get(util.node_url(alice.node_dir, href))
+        if href.startswith(u"/status/up"):
             assert "File Upload Status" in resp.content
             if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                 found_upload = True
-        elif href.startswith(u'down'):
+        elif href.startswith(u"/status/down"):
             assert "File Download Status" in resp.content
             if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                 found_download = True

         # download the specialized event information
         resp = requests.get(
-            util.node_url(alice.node_dir, u"status/{}/event_json".format(href)),
+            util.node_url(alice.node_dir, u"{}/event_json".format(href)),
         )
         js = json.loads(resp.content)
         # there's usually just one "read" operation, but this can handle many ..
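The change above reflects that the hrefs scraped from the status page are now absolute paths such as "/status/up-0", so they are joined against the node root directly instead of being re-prefixed with "status/". An illustrative sketch using the standard library's urljoin rather than Tahoe's util.node_url (the node URL here is made up):

    from urllib.parse import urljoin

    node_root = "http://127.0.0.1:3456/"  # hypothetical node URL
    href = "/status/up-0"                 # absolute path scraped from the page
    print(urljoin(node_root, href))                  # .../status/up-0
    print(urljoin(node_root, href + "/event_json"))  # .../status/up-0/event_json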
@@ -156,6 +156,6 @@ for pkg in sorted(platform_independent_pkgs):
 print('</table>')

 # The document does validate, but not when it is included at the bottom of a directory listing.
-#print '<hr>'
-#print '<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>'
+#print('<hr>')
+#print('<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>')
 print('</body></html>')
@@ -1,5 +1,7 @@
 #! /usr/bin/python

+from __future__ import print_function
+
 import math
 from allmydata.util import statistics
 from numpy import array, matrix, dot
@@ -72,11 +74,11 @@ class ReliabilityModel(object):

         repair = self.build_repair_matrix(k, N, R)

-        #print "DECAY:", decay
-        #print "OLD-POST-REPAIR:", old_post_repair
-        #print "NEW-POST-REPAIR:", decay * repair
-        #print "REPAIR:", repair
-        #print "DIFF:", (old_post_repair - decay * repair)
+        #print("DECAY:", decay)
+        #print("OLD-POST-REPAIR:", old_post_repair)
+        #print("NEW-POST-REPAIR:", decay * repair)
+        #print("REPAIR:", repair)
+        #print("DIFF:", (old_post_repair - decay * repair))

         START = array([0]*N + [1])
         DEAD = array([1]*k + [0]*(1+N-k))
@@ -85,9 +87,9 @@ class ReliabilityModel(object):
                                     [N-i for i in range(k, R)] +
                                     [0]*(1+N-R))
         assert REPAIR_newshares.shape[0] == N+1
-        #print "START", START
-        #print "REPAIRp", REPAIRp
-        #print "REPAIR_newshares", REPAIR_newshares
+        #print("START", START)
+        #print("REPAIRp", REPAIRp)
+        #print("REPAIR_newshares", REPAIR_newshares)

         unmaintained_state = START
         maintained_state = START
@@ -141,15 +143,15 @@ class ReliabilityModel(object):
         # return "%dy.%dm" % (int(seconds/YEAR), int( (seconds%YEAR)/MONTH))
         #needed_repairs_total = sum(needed_repairs)
         #needed_new_shares_total = sum(needed_new_shares)
-        #print "at 2y:"
-        #print " unmaintained", unmaintained_state
-        #print " maintained", maintained_state
-        #print " number of repairs", needed_repairs_total
-        #print " new shares generated", needed_new_shares_total
+        #print("at 2y:")
+        #print(" unmaintained", unmaintained_state)
+        #print(" maintained", maintained_state)
+        #print(" number of repairs", needed_repairs_total)
+        #print(" new shares generated", needed_new_shares_total)
         #repair_rate_inv = report_span / needed_repairs_total
-        #print " avg repair rate: once every %s" % yandm(repair_rate_inv)
-        #print " avg repair download: one share every %s" % yandm(repair_rate_inv/k)
-        #print " avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total)
+        #print(" avg repair rate: once every %s" % yandm(repair_rate_inv))
+        #print(" avg repair download: one share every %s" % yandm(repair_rate_inv/k))
+        #print(" avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total))

         return report
@@ -1,3 +1,4 @@
+from __future__ import print_function

 import unittest
 from allmydata import provisioning
@@ -99,7 +100,7 @@ class Reliability(unittest.TestCase):
         self.failUnlessEqual(len(r.samples), 20)

         last_row = r.samples[-1]
-        #print last_row
+        #print(last_row)
         (when, unmaintained_shareprobs, maintained_shareprobs,
          P_repaired_last_check_period,
          cumulative_number_of_repairs,
misc/python3/audit-dict-for-loops.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+"""
+The following code is valid in Python 2:
+
+    for x in my_dict.keys():
+        if something(x):
+            del my_dict[x]
+
+But it is broken in Python 3.
+
+One solution is:
+
+    for x in list(my_dict.keys()):
+        if something(x):
+            del my_dict[x]
+
+Some but not all code in Tahoe has been changed to that. In other cases, the
+code was left unchanged, since there was no `del`.
+
+However, some mistakes may have slipped through.
+
+To help catch cases that were incorrectly ported, this script runs futurize
+on all ported modules, which should convert them into the `list()` form.
+You can then look at git diffs to see if any of the impacted loops would be
+buggy without the newly added `list()`.
+"""
+
+import os
+from subprocess import check_call
+
+from allmydata.util import _python3
+
+
+def fix_potential_issue():
+    for module in _python3.PORTED_MODULES + _python3.PORTED_TEST_MODULES:
+        filename = "src/" + module.replace(".", "/") + ".py"
+        if not os.path.exists(filename):
+            # Package, probably
+            filename = "src/" + module.replace(".", "/") + "/__init__.py"
+        check_call(["futurize", "-f", "lib2to3.fixes.fix_dict", "-w", filename])
+    print(
+        "All loops converted. Check the diff to see if any need to be committed."
+    )
+
+
+if __name__ == "__main__":
+    fix_potential_issue()
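For context, a quick demonstration (not part of the script) of the failure mode being audited: in Python 3, mutating a dict while iterating over its keys() view raises RuntimeError, while iterating over a list() copy is safe.

    my_dict = {1: 'a', 2: 'b', 3: 'c'}
    try:
        for x in my_dict.keys():
            if x % 2:
                del my_dict[x]
    except RuntimeError as e:
        # "dictionary changed size during iteration"
        print("direct iteration failed:", e)

    my_dict = {1: 'a', 2: 'b', 3: 'c'}
    for x in list(my_dict.keys()):
        if x % 2:
            del my_dict[x]
    print("with list():", my_dict)  # {2: 'b'}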
@@ -60,7 +60,8 @@ class mymf(modulefinder.ModuleFinder):
             self._depgraph[last_caller.__name__].add(fqname)
         return r

-    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
+    def load_module(self, fqname, fp, pathname, additional_info):
+        (suffix, mode, type) = additional_info
         r = modulefinder.ModuleFinder.load_module(
             self, fqname, fp, pathname, (suffix, mode, type))
         if r is not None:
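This rewrite is the standard port for PEP 3113, which removed tuple unpacking in function signatures from Python 3: the tuple parameter becomes a single argument that is unpacked in the body. A minimal sketch with hypothetical function names:

    # Python 2 only -- a syntax error on Python 3:
    #     def load(name, (suffix, mode, kind)): ...

    # Portable form: accept one argument, unpack it explicitly.
    def load(name, additional_info):
        (suffix, mode, kind) = additional_info
        return name, suffix, mode, kind

    print(load("mod", (".py", "r", 1)))  # ('mod', '.py', 'r', 1)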
@@ -71,7 +72,7 @@ class mymf(modulefinder.ModuleFinder):
         return {
             'depgraph': {
                 name: dict.fromkeys(deps, 1)
-                for name, deps in self._depgraph.iteritems()},
+                for name, deps in self._depgraph.items()},
             'types': self._types,
         }
@@ -101,20 +102,25 @@ def main(target):
             filepath = path
         moduleNames.append(reflect.filenameToModuleName(filepath))

-    with tempfile.NamedTemporaryFile() as tmpfile:
+    with tempfile.NamedTemporaryFile("w") as tmpfile:
         for moduleName in moduleNames:
             tmpfile.write('import %s\n' % moduleName)
         tmpfile.flush()
         mf.run_script(tmpfile.name)

-    with open('tahoe-deps.json', 'wb') as outfile:
+    with open('tahoe-deps.json', 'w') as outfile:
         json_dump(mf.as_json(), outfile)
         outfile.write('\n')

-    ported_modules_path = os.path.join(target, "src", "allmydata", "ported-modules.txt")
-    with open(ported_modules_path) as ported_modules:
-        port_status = dict.fromkeys((line.strip() for line in ported_modules), "ported")
-    with open('tahoe-ported.json', 'wb') as outfile:
+    ported_modules_path = os.path.join(target, "src", "allmydata", "util", "_python3.py")
+    with open(ported_modules_path) as f:
+        ported_modules = {}
+        exec(f.read(), ported_modules, ported_modules)
+    port_status = dict.fromkeys(
+        ported_modules["PORTED_MODULES"] + ported_modules["PORTED_TEST_MODULES"],
+        "ported"
+    )
+    with open('tahoe-ported.json', 'w') as outfile:
         json_dump(port_status, outfile)
         outfile.write('\n')
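The new code reads the list of ported modules by exec-ing the source of _python3.py into a fresh namespace and pulling the top-level constants out of it, instead of parsing a plain-text file. A self-contained sketch of the pattern (the inline source string stands in for the real file):

    # Hypothetical stand-in for the contents of _python3.py.
    source = 'PORTED_MODULES = ["allmydata.util.base32"]\nPORTED_TEST_MODULES = []\n'

    namespace = {}
    exec(source, namespace, namespace)  # populate namespace with top-level names
    port_status = dict.fromkeys(
        namespace["PORTED_MODULES"] + namespace["PORTED_TEST_MODULES"], "ported")
    print(port_status)  # {'allmydata.util.base32': 'ported'}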
@@ -74,7 +74,7 @@ class B(object):
             count += 1
             inline = self.inf.readline()

-        # print self.stats
+        # print(self.stats)

         benchutil.print_bench_footer(UNITS_PER_SECOND=1000000)
         print("(microseconds)")
@@ -89,9 +89,9 @@ def scan(root):
     num_files = 0
     num_dirs = 0
     for absroot, dirs, files in os.walk(root):
-        #print absroot
-        #print " %d files" % len(files)
-        #print " %d subdirs" % len(dirs)
+        #print(absroot)
+        #print(" %d files" % len(files))
+        #print(" %d subdirs" % len(dirs))
         num_files += len(files)
         num_dirs += len(dirs)
         stringsize = len(''.join(files) + ''.join(dirs))
@@ -146,8 +146,8 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
             lg_q = lg(q_cand)
             lg_pforge = [lg_px[x] + (lg_q*x - lg_K2)*q_cand for x in xrange(1, j)]
             if max(lg_pforge) < -L_hash + lg(j) and lg_px[j-1] + 1.0 < -L_hash:
-                #print "K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f" \
-                #    % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3)
+                #print("K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f"
+                #      % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3))
                 q = q_cand
                 break
@@ -268,7 +268,7 @@ def search():
         trees[y] = (h, c_y, (dau, tri))

     #for x in xrange(1, K_max+1):
-    #    print x, trees[x]
+    #    print(x, trees[x])

     candidates = []
     progress = 0
@@ -130,8 +130,8 @@ class Ring(object):
         # used is actual per-server ciphertext
         usedpf = [1.0*u/numfiles for u in used]
         # usedpf is actual per-server-per-file ciphertext
-        #print "min/max usage: %s/%s" % (abbreviate_space(used[-1]),
-        #                                abbreviate_space(used[0]))
+        #print("min/max usage: %s/%s" % (abbreviate_space(used[-1]),
+        #                                abbreviate_space(used[0])))
         avg_usage_per_file = avg_space_per_file/len(self.servers)
         # avg_usage_per_file is expected per-server-per-file ciphertext
         spreadpf = usedpf[0] - usedpf[-1]
@@ -146,7 +146,7 @@ class Ring(object):
               abbreviate_space(avg_usage_per_file) ), end=' ')
         print("spread-pf: %s (%.2f%%)" % (
             abbreviate_space(spreadpf), 100.0*spreadpf/avg_usage_per_file), end=' ')
-        #print "average_usage:", abbreviate_space(average_usagepf)
+        #print("average_usage:", abbreviate_space(average_usagepf))
         print("stddev: %s (%.2f%%)" % (abbreviate_space(std_deviation),
                                        100.0*sd_of_total))
         if self.SHOW_MINMAX:
@@ -176,14 +176,14 @@ def do_run(ring, opts):
     for filenum in count(0):
         #used = list(reversed(sorted([s.used for s in ring.servers])))
        #used = [s.used for s in ring.servers]
-        #print used
+        #print(used)
         si = myhash(fileseed+str(filenum)).hexdigest()
         filesize = make_up_a_file_size(si)
         sharesize = filesize / opts["k"]
         if filenum%4000==0 and filenum > 1:
             ring.dump_usage(filenum, avg_space_per_file)
         servers = ring.servers_for_si(si)
-        #print ring.show_servers(servers[:opts["N"]])
+        #print(ring.show_servers(servers[:opts["N"]]))
         remaining_shares = opts["N"]
         index = 0
         server_was_full = False
@@ -59,7 +59,7 @@ def go(permutedpeerlist):
                 server.full_at_tick = tick
                 fullservers += 1
                 if fullservers == len(servers):
-                    # print "Couldn't place share -- all servers full. Stopping."
+                    # print("Couldn't place share -- all servers full. Stopping.")
                     return (servers, doubled_up_shares)

         i += 1
@@ -96,9 +96,9 @@ class Sizes(object):
             # means storing (and eventually transmitting) more hashes. This
             # count includes all the low-level share hashes and the root.
             hash_nodes = (num_leaves*k - 1) / (k - 1)
-            #print "hash_depth", d
-            #print "num_leaves", num_leaves
-            #print "hash_nodes", hash_nodes
+            #print("hash_depth", d)
+            #print("num_leaves", num_leaves)
+            #print("hash_nodes", hash_nodes)
             # the storage overhead is this
             self.share_storage_overhead = 32 * (hash_nodes - 1)
             # the transmission overhead is smaller: if we actually transmit
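As an aside, the node-count expression above is the closed form for a complete k-ary tree: summing the levels 1 + k + k^2 + ... + L (with L leaves) gives (L*k - 1) / (k - 1) nodes in total. A quick illustrative check, not part of the diff:

    # Verify (L*k - 1) / (k - 1) against a level-by-level count for complete
    # k-ary trees (every internal node has exactly k children).
    for k in (2, 3, 4):
        for depth in range(1, 5):
            leaves = k ** depth
            nodes = sum(k ** d for d in range(depth + 1))
            assert nodes == (leaves * k - 1) // (k - 1)
    print("node-count formula checks out")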
New newsfragment files:

newsfragments/1792.feature: PyPy is now a supported platform.
newsfragments/2755.other: The Tahoe-LAFS project has adopted a formal code of conduct.
newsfragments/3312.bugfix: Make directory page links work.
newsfragments/3313.minor: Replace nevow with twisted.web in web.operations.OphandleTable
newsfragments/3315.minor: Replace nevow with twisted.web in web.operations.ReloadMixin
newsfragments/3316.minor: Port checker result pages' rendering from nevow to twisted web templates.
newsfragments/3317.feature: allmydata.testing.web, a new module, now offers a supported Python API for testing Tahoe-LAFS web API clients.
newsfragments/3323.removed: Slackware 14.2 is no longer a Tahoe-LAFS supported platform.
newsfragments/3328.installation: Tahoe-LAFS now supports Ubuntu 20.04.
newsfragments/3348.bugfix: Use last known revision of Chutney that is known to work with Python 2 for Tor integration tests.
newsfragments/3349.bugfix: Mutable files now use RSA exponent 65537
newsfragments/3355.other: The "coverage" tox environment has been replaced by the "py27-coverage" and "py36-coverage" environments.

New empty (placeholder) newsfragments: 3247.minor, 3254.minor, 3287.minor,
3288.minor, 3289.minor, 3290.minor, 3291.minor, 3304.minor, 3308.minor,
3309.minor, 3320.minor, 3324.other, 3325.minor, 3326.minor, 3329.minor,
3330.minor, 3331.minor, 3332.minor, 3333.minor, 3334.minor, 3335.minor,
3336.minor, 3338.minor, 3339.minor, 3340.minor, 3341.minor, 3342.minor,
3343.minor, 3344.minor, 3346.minor, 3351.minor, 3353.minor, 3354.minor,
3356.minor, 3357.minor, 3358.minor, 3359.minor, 3361.minor, 3364.minor,
3365.minor, 3366.minor, 3367.minor, 3368.minor, 3370.minor, 3372.minor,
3373.minor, 3374.minor, 3375.minor, 3376.minor, 3377.minor, 3378.minor,
3380.minor, 3381.minor
Some files were not shown because too many files have changed in this diff.