tahoe-lafs/.circleci/config.yml

# https://circleci.com/docs/2.0/
# We use version 2.1 of CircleCI's configuration format (the docs are still at
# the 2.0 link) in order to have access to Windows executors. This means we
# can't use dots in job names anymore. They have a new "parameters" feature
# that is supposed to remove the need to have version numbers in job names (the
# source of our dots), but switching to that is going to be a bigger refactor:
#
# https://discuss.circleci.com/t/v2-1-job-name-validation/31123
# https://circleci.com/docs/2.0/reusing-config/
#
version: 2.1

workflows:
  ci:
    jobs:
      # Start with jobs testing various platforms.

      # Every job that pulls a Docker image from Docker Hub needs to provide
      # credentials for that pull operation to avoid being subjected to
      # unauthenticated pull limits shared across all of CircleCI. Use this
      # first job to define a yaml anchor that can be used to supply a
      # CircleCI job context which makes Docker Hub credentials available in
      # the environment.
      #
      # Contexts are managed in the CircleCI web interface:
      #
      #   https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
      - "debian-9": &DOCKERHUB_CONTEXT
          context: "dockerhub-auth"

      - "debian-10":
          <<: *DOCKERHUB_CONTEXT
          requires:
            - "debian-9"

      - "ubuntu-20-04":
          <<: *DOCKERHUB_CONTEXT
      - "ubuntu-18-04":
          <<: *DOCKERHUB_CONTEXT
          requires:
            - "ubuntu-20-04"
      - "ubuntu-16-04":
          <<: *DOCKERHUB_CONTEXT
          requires:
            - "ubuntu-20-04"

      - "fedora-29":
          <<: *DOCKERHUB_CONTEXT
      - "fedora-28":
          <<: *DOCKERHUB_CONTEXT
          requires:
            - "fedora-29"

      - "centos-8":
          <<: *DOCKERHUB_CONTEXT

      - "nixos-19-09":
          <<: *DOCKERHUB_CONTEXT

      # Test against PyPy 2.7
      - "pypy27-buster":
          <<: *DOCKERHUB_CONTEXT

      # Just one Python 3.6 configuration while the port is in-progress.
      - "python36":
          <<: *DOCKERHUB_CONTEXT

      # Other assorted tasks and configurations
      - "lint":
          <<: *DOCKERHUB_CONTEXT
      - "pyinstaller":
          <<: *DOCKERHUB_CONTEXT
      - "deprecations":
          <<: *DOCKERHUB_CONTEXT
      - "c-locale":
          <<: *DOCKERHUB_CONTEXT
      # Any locale other than C or UTF-8.
      - "another-locale":
          <<: *DOCKERHUB_CONTEXT

      - "integration":
          <<: *DOCKERHUB_CONTEXT
          requires:
            # If the unit test suite doesn't pass, don't bother running the
            # integration tests.
            - "debian-9"

      - "typechecks":
          <<: *DOCKERHUB_CONTEXT
      - "docs":
          <<: *DOCKERHUB_CONTEXT

  images:
    # Build the Docker images used by the ci jobs. This makes the ci jobs
    # faster and takes various spurious failures out of the critical path.
    triggers:
      # Build once a day
      - schedule:
          cron: "0 0 * * *"
          filters:
            branches:
              only:
                - "master"

    jobs:
      - "build-image-debian-10":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-debian-9":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-ubuntu-16-04":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-ubuntu-18-04":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-ubuntu-20-04":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-fedora-28":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-fedora-29":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-centos-8":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-pypy27-buster":
          <<: *DOCKERHUB_CONTEXT
      - "build-image-python36-ubuntu":
          <<: *DOCKERHUB_CONTEXT

jobs:
  dockerhub-auth-template:
    # This isn't a real job. It doesn't get scheduled as part of any
    # workflow. Instead, it's just a place we can hang a yaml anchor to
    # finish the Docker Hub authentication configuration. Workflow jobs using
    # the DOCKERHUB_CONTEXT anchor will have access to the environment
    # variables used here. These variables will allow the Docker Hub image
    # pull to be authenticated and hopefully avoid hitting any rate limits.
    docker: &DOCKERHUB_AUTH
      - image: "null"
        auth:
          username: $DOCKERHUB_USERNAME
          password: $DOCKERHUB_PASSWORD

    steps:
      - run:
          name: "CircleCI YAML schema conformity"
          command: |
            # This isn't a real command. We have to have something in this
            # space, though, or the CircleCI yaml schema validator gets angry.
            # Since this job is never scheduled this step is never run so the
            # actual value here is irrelevant.

  lint:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "circleci/python:2"

    steps:
      - "checkout"

      - run:
          name: "Install tox"
          command: |
            pip install --user tox

      - run:
          name: "Static-ish code checks"
          command: |
            ~/.local/bin/tox -e codechecks

  pyinstaller:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "circleci/python:2"

    steps:
      - "checkout"

      - run:
          name: "Install tox"
          command: |
            pip install --user tox

      - run:
          name: "Make PyInstaller executable"
          command: |
            ~/.local/bin/tox -e pyinstaller

      - run:
          # To verify that the resultant PyInstaller-generated binary executes
          # cleanly (i.e., that it terminates with an exit code of 0 and isn't
          # failing due to import/packaging-related errors, etc.).
          name: "Test PyInstaller executable"
          command: |
            dist/Tahoe-LAFS/tahoe --version

  debian-9: &DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/debian:9-py2.7"
        user: "nobody"

    environment: &UTF_8_ENVIRONMENT
      # In general, the test suite is not allowed to fail while the job
      # succeeds. But you can set this to "yes" if you want it to be
      # otherwise.
      ALLOWED_FAILURE: "no"
      # Tell Hypothesis which configuration we want it to use.
      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
      # Tell the C runtime things about character encoding (mainly to do with
      # filenames and argv).
      LANG: "en_US.UTF-8"
      # Select a tox environment to run for this job.
      TAHOE_LAFS_TOX_ENVIRONMENT: "py27"
      # Additional arguments to pass to tox.
      TAHOE_LAFS_TOX_ARGS: ""
      # The path in which test artifacts will be placed.
      ARTIFACTS_OUTPUT_PATH: "/tmp/artifacts"
      # Convince all of our pip invocations to look at the cached wheelhouse
      # we maintain.
      WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
      PIP_FIND_LINKS: "file:///tmp/wheelhouse"
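      # For example (purely illustrative), with PIP_FIND_LINKS set as above, a
      # pip invocation inside the job such as
      #
      #   /tmp/venv/bin/pip install zfec
      #
      # will consider wheels already present in /tmp/wheelhouse before going
      # out to PyPI, which is what keeps the per-job setup time down.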
      # Upload the coverage report.
      UPLOAD_COVERAGE: ""

    # pip cannot install packages if the working directory is not readable.
    # We want to run a lot of steps as nobody instead of as root.
    working_directory: "/tmp/project"

    steps:
      - "checkout"
      - run: &SETUP_VIRTUALENV
          name: "Setup virtualenv"
          command: |
            /tmp/project/.circleci/setup-virtualenv.sh \
                "/tmp/venv" \
                "/tmp/project" \
                "${WHEELHOUSE_PATH}" \
                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
                "${TAHOE_LAFS_TOX_ARGS}"

      - run: &RUN_TESTS
          name: "Run test suite"
          command: |
            /tmp/project/.circleci/run-tests.sh \
                "/tmp/venv" \
                "/tmp/project" \
                "${ALLOWED_FAILURE}" \
                "${ARTIFACTS_OUTPUT_PATH}" \
                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
                "${TAHOE_LAFS_TOX_ARGS}"
          # trial output gets directed straight to a log. avoid the circleci
          # timeout while the test suite runs.
          no_output_timeout: "20m"
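      # Likewise, .circleci/run-tests.sh is assumed (for illustration only) to
      # boil down to roughly:
      #
      #   VENV=$1; PROJECT=$2; ALLOWED_FAILURE=$3; ARTIFACTS=$4; TOX_ENV=$5; TOX_ARGS=$6
      #   "${VENV}/bin/tox" -c "${PROJECT}/tox.ini" --workdir /tmp \
      #       -e "${TOX_ENV}" ${TOX_ARGS}
      #   status=$?
      #   # Honor ALLOWED_FAILURE: such a job stays green even when tests fail.
      #   if [ "${ALLOWED_FAILURE}" = "yes" ]; then exit 0; else exit ${status}; fi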

      - store_test_results: &STORE_TEST_RESULTS
          path: "/tmp/artifacts/junit"

      - store_artifacts: &STORE_TEST_LOG
          # Despite passing --workdir /tmp to tox above, it still runs trial
          # in the project source checkout.
          path: "/tmp/project/_trial_temp/test.log"

      - store_artifacts: &STORE_OTHER_ARTIFACTS
          # Store any other artifacts, too. This is handy to allow other jobs
          # sharing most of the definition of this one to be able to
          # contribute artifacts easily.
          path: "/tmp/artifacts"

      - run: &SUBMIT_COVERAGE
          name: "Submit coverage results"
          command: |
            if [ -n "${UPLOAD_COVERAGE}" ]; then
              /tmp/venv/bin/codecov
            fi
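      # Note that coverage submission is opt-in: the UTF_8_ENVIRONMENT anchor
      # leaves UPLOAD_COVERAGE empty, so this step is a no-op unless a job
      # overrides UPLOAD_COVERAGE with a non-empty value (e.g. "yes") in its
      # environment.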

  debian-10:
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/debian:10-py2.7"
        user: "nobody"

  pypy27-buster:
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/pypy:buster-py2"
        user: "nobody"

    environment:
      <<: *UTF_8_ENVIRONMENT
      # We don't do coverage since it makes PyPy far too slow:
      TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
      # Since we didn't collect it, don't upload it.
      UPLOAD_COVERAGE: ""

  c-locale:
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      LANG: "C"

  another-locale:
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      # aka "Latin 1"
      LANG: "en_US.ISO-8859-1"

  deprecations:
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      # Select the deprecations tox environments.
      TAHOE_LAFS_TOX_ENVIRONMENT: "deprecations,upcoming-deprecations"
      # Put the logs somewhere we can report them.
      TAHOE_LAFS_WARNINGS_LOG: "/tmp/artifacts/deprecation-warnings.log"
      # The deprecations tox environments don't do coverage measurement.
      UPLOAD_COVERAGE: ""

  integration:
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      # Select the integration tests tox environments.
      TAHOE_LAFS_TOX_ENVIRONMENT: "integration"
      # Disable artifact collection because py.test can't produce any.
      ARTIFACTS_OUTPUT_PATH: ""

    steps:
      - "checkout"
      # DRY, YAML-style. See the debian-9 steps.
      - run: *SETUP_VIRTUALENV
      - run: *RUN_TESTS

  ubuntu-16-04:
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:16.04-py2.7"
        user: "nobody"

  ubuntu-18-04: &UBUNTU_18_04
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:18.04-py2.7"
        user: "nobody"

  python36:
    <<: *UBUNTU_18_04
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:18.04-py3"
        user: "nobody"

    environment:
      <<: *UTF_8_ENVIRONMENT
      # The default trial args include --rterrors which is incompatible with
      # this reporter on Python 3. So drop that and just specify the
      # reporter.
      TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
      TAHOE_LAFS_TOX_ENVIRONMENT: "py36"

  ubuntu-20-04:
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:20.04"
        user: "nobody"

  centos-8: &RHEL_DERIV
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/centos:8-py2"
        user: "nobody"

    environment: *UTF_8_ENVIRONMENT

    # pip cannot install packages if the working directory is not readable.
    # We want to run a lot of steps as nobody instead of as root.
    working_directory: "/tmp/project"

    steps:
      - "checkout"
      - run: *SETUP_VIRTUALENV
      - run: *RUN_TESTS
      - store_test_results: *STORE_TEST_RESULTS
      - store_artifacts: *STORE_TEST_LOG
      - store_artifacts: *STORE_OTHER_ARTIFACTS
      - run: *SUBMIT_COVERAGE

  fedora-28:
    <<: *RHEL_DERIV
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/fedora:28-py"
        user: "nobody"

  fedora-29:
    <<: *RHEL_DERIV
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/fedora:29-py"
        user: "nobody"

  nixos-19-09:
    docker:
      # Run in a highly Nix-capable environment.
      - <<: *DOCKERHUB_AUTH
        image: "nixorg/nix:circleci"

    environment:
      NIX_PATH: "nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-19.09-small.tar.gz"

    steps:
      - "checkout"
      - "run":
          name: "Build and Test"
          command: |
            # CircleCI build environment looks like it has a zillion and a
            # half cores. Don't let Nix autodetect this high core count
            # because it blows up memory usage and fails the test run. Pick a
            # number of cores that suits the build environment we're paying
            # for (the free one!).
            #
            # Also, let it run more than one job at a time because we have to
            # build a couple simple little dependencies that don't take
            # advantage of multiple cores and we get a little speedup by doing
            # them in parallel.
            nix-build --cores 3 --max-jobs 2 nix/
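    # (For reference: --cores caps the parallelism available to a single
    # derivation's build, while --max-jobs caps how many derivations Nix
    # builds concurrently.)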

  typechecks:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:18.04-py3"

    steps:
      - "checkout"
      - run:
          name: "Validate Types"
          command: |
            /tmp/venv/bin/tox -e typechecks

  docs:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:18.04-py3"

    steps:
      - "checkout"
      - run:
          name: "Build documentation"
          command: |
            /tmp/venv/bin/tox -e docs

  build-image: &BUILD_IMAGE
    # This is a template for a job to build a Docker image that has as much of
    # the setup as we can manage already done and baked in. This cuts down on
    # the per-job setup time the actual testing jobs have to perform - by
    # perhaps 10% - 20%.
    #
    # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "docker:17.05.0-ce-git"

    environment:
      # The placeholder values below are overridden by each concrete
      # build-image-* job; they only illustrate where each variable ends up in
      # the resulting image name.
      DISTRO: "tahoelafsci/<DISTRO>:foo-py2"
      TAG: "tahoelafsci/distro:<TAG>-py2"
      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION>"

    steps:
      - "checkout"
      - "setup_remote_docker"
      - run:
          name: "Log in to Dockerhub"
          command: |
            docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
      - run:
          name: "Build image"
          command: |
            docker \
                build \
                --build-arg TAG=${TAG} \
                --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
                -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
                -f ~/project/.circleci/Dockerfile.${DISTRO} \
                ~/project/
      - run:
          name: "Push image"
          command: |
            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
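    # The pushed images follow the tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
    # naming scheme used above, so (as an illustration, not part of CI) a test
    # environment can be poked at locally with something like:
    #
    #   docker run --rm -it tahoelafsci/debian:9-py2.7 /bin/bash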

  build-image-debian-10:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "debian"
      TAG: "10"
      PYTHON_VERSION: "2.7"

  build-image-debian-9:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "debian"
      TAG: "9"
      PYTHON_VERSION: "2.7"

  build-image-ubuntu-16-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "16.04"
      PYTHON_VERSION: "2.7"

  build-image-ubuntu-18-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "18.04"
      PYTHON_VERSION: "2.7"

  build-image-python36-ubuntu:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "18.04"
      PYTHON_VERSION: "3"

  build-image-ubuntu-20-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "20.04"
      PYTHON_VERSION: "2.7"

  build-image-centos-8:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "centos"
      TAG: "8"
      PYTHON_VERSION: "2"

  build-image-fedora-28:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "fedora"
      TAG: "28"
      # The default on Fedora (this version anyway) is still Python 2.
      PYTHON_VERSION: ""

  build-image-fedora-29:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "fedora"
      TAG: "29"

  build-image-pypy27-buster:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "pypy"
      TAG: "buster"
      # We only have Python 2 for PyPy right now so there's no support for
      # setting up PyPy 3 in the image building toolchain. This value is just
      # for constructing the right Docker image tag.
      PYTHON_VERSION: "2"