# https://circleci.com/docs/2.0/
# We use version 2.1 of CircleCI's configuration format (the docs are still at
# the 2.0 link) in order to have access to Windows executors. This means we
# can't use dots in job names anymore. They have a new "parameters" feature
# that is supposed to remove the need to have version numbers in job names (the
# source of our dots), but switching to that is going to be a bigger refactor:
#
# https://discuss.circleci.com/t/v2-1-job-name-validation/31123
# https://circleci.com/docs/2.0/reusing-config/
#
version: 2.1
# Every job that pushes an image to Docker Hub or pulls one from it must
# authenticate to it. Define a couple of YAML anchors that can be used to
# supply the necessary credentials.
# First is a CircleCI job context which makes Docker Hub credentials available
# in the environment.
#
# Contexts are managed in the CircleCI web interface:
#
# https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
dockerhub-context-template: &DOCKERHUB_CONTEXT
context: "dockerhub-auth"
# Required environment for using the coveralls tool to upload partial coverage
# reports and then finish the process.
coveralls-environment: &COVERALLS_ENVIRONMENT
COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
# Next is a Docker executor template that gets the credentials from the
# environment and supplies them to the executor.
dockerhub-auth-template: &DOCKERHUB_AUTH
- auth:
username: $DOCKERHUB_USERNAME
password: $DOCKERHUB_PASSWORD
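# A minimal sketch of how these anchors are consumed (the job name here is
# hypothetical; the real jobs and executors below follow this pattern):
#
#   workflow job entries merge in the context:
#     - "some-job":
#         <<: *DOCKERHUB_CONTEXT
#
#   Docker executors merge in the credentials:
#     docker:
#       - <<: *DOCKERHUB_AUTH
#         image: "cimg/python:3.9"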
# A template that can be shared between the two different image-building
# workflows.
.images: &IMAGES
jobs:
- "build-image-debian-11":
<<: *DOCKERHUB_CONTEXT
- "build-image-ubuntu-20-04":
<<: *DOCKERHUB_CONTEXT
- "build-image-ubuntu-22-04":
<<: *DOCKERHUB_CONTEXT
- "build-image-fedora-35":
<<: *DOCKERHUB_CONTEXT
- "build-image-oraclelinux-8":
<<: *DOCKERHUB_CONTEXT
# Restore later as PyPy38
#- "build-image-pypy27-buster":
# <<: *DOCKERHUB_CONTEXT
parameters:
# Control whether the image-building workflow runs as part of this pipeline.
# Generally we do not want this to run because we don't need our
# dependencies to move around all the time and because building the image
# takes a couple minutes.
#
# An easy way to trigger a pipeline with this set to true is with the
# rebuild-images.sh tool in this directory. You can also do so via the
# CircleCI web UI.
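#
# For reference, triggering such a pipeline by hand through the CircleCI v2
# API looks roughly like this (CIRCLE_TOKEN is assumed to be a personal API
# token; adjust the branch as needed):
#
#   curl -X POST https://circleci.com/api/v2/project/gh/tahoe-lafs/tahoe-lafs/pipeline \
#     --header "Circle-Token: $CIRCLE_TOKEN" \
#     --header "Content-Type: application/json" \
#     --data '{"branch": "master", "parameters": {"build-images": true, "run-tests": false}}'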
build-images:
default: false
type: "boolean"
# Control whether the test-running workflow runs as part of this pipeline.
# Generally we do want this to run because running the tests is the primary
# purpose of this pipeline.
run-tests:
default: true
type: "boolean"
workflows:
ci:
when: "<< pipeline.parameters.run-tests >>"
jobs:
# Start with jobs testing various platforms.
- "debian-11":
{}
- "ubuntu-20-04":
{}
- "ubuntu-22-04":
{}
# Equivalent to RHEL 8; CentOS 8 is dead.
- "oraclelinux-8":
{}
- "nixos":
name: "<<matrix.pythonVersion>>"
nixpkgs: "nixpkgs-unstable"
matrix:
parameters:
pythonVersion:
- "python39"
- "python310"
- "python311"
# Eventually, test against PyPy 3.8
#- "pypy27-buster":
# {}
# Other assorted tasks and configurations
- "codechecks":
{}
- "pyinstaller":
{}
- "c-locale":
{}
# Any locale other than C or UTF-8.
- "another-locale":
{}
- "windows-server-2022":
name: "Windows Server 2022, CPython <<matrix.pythonVersion>>"
matrix:
parameters:
# Run the job for a number of CPython versions. These are the
# two versions installed on the version of the Windows VM image
# we specify (in the executor). This is handy since it means we
# don't have to do any Python installation work. We pin the
# Windows VM image so these shouldn't shuffle around beneath us
# but if we want to update that image or get different versions
# of Python, we probably have to do something here.
pythonVersion:
- "3.9"
- "3.12"
- "integration":
# Run even the slow integration tests here. We need the `--` to
# sneak past tox and get to pytest.
tox-args: "-- --runslow integration"
requires:
# If the unit test suite doesn't pass, don't bother running the
# integration tests.
- "debian-11"
- "typechecks":
{}
- "docs":
{}
- "finish-coverage-report":
requires:
# Referencing the job by "alias" (as CircleCI calls the mapping
# key) instead of the value of its "name" property causes us to
# require every instance of the job from its matrix expansion. So
# this requirement is enough to require every Windows Server 2022
# job.
- "windows-server-2022"
images:
<<: *IMAGES
# Build as part of the workflow but only if requested.
when: "<< pipeline.parameters.build-images >>"
jobs:
finish-coverage-report:
docker:
- <<: *DOCKERHUB_AUTH
image: "python:3-slim"
steps:
- run:
name: "Indicate completion to coveralls.io"
environment:
<<: *COVERALLS_ENVIRONMENT
command: |
pip install coveralls==3.3.1
python -m coveralls --finish
codechecks:
docker:
- <<: *DOCKERHUB_AUTH
image: "cimg/python:3.9"
steps:
- "checkout"
- run: &INSTALL_TOX
name: "Install tox"
command: |
pip install --user 'tox~=3.0'
- run:
name: "Static-ish code checks"
command: |
~/.local/bin/tox -e codechecks
windows-server-2022:
parameters:
pythonVersion:
description: >-
An argument to pass to the `py` launcher to choose a Python version.
type: "string"
default: ""
executor: "windows"
environment:
# Tweak Hypothesis to make its behavior more suitable for the CI
# environment. This should improve reproducibility and lessen the
# effects of variable compute resources.
TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
# Tell pip where its download cache lives. This must agree with the
# "save_cache" step below or caching won't really work right.
PIP_CACHE_DIR: "pip-cache"
# And tell pip where it can find our cached wheelhouse for fast wheel
# installation, even for projects that don't distribute wheels. This
# must also agree with the "save_cache" step below.
PIP_FIND_LINKS: "wheelhouse"
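# Together these are roughly equivalent to passing the corresponding pip
# options on every invocation, e.g.:
#
#   python -m pip install --cache-dir pip-cache --find-links wheelhouse .[test]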
steps:
- "checkout"
# If possible, restore a pip download cache to save us from having to
# download all our Python dependencies from PyPI.
- "restore_cache":
keys:
# The download cache and/or the wheelhouse may contain Python
# version-specific binary packages so include the Python version
# in this key, as well as the canonical source of our
# dependencies.
- &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}"
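# With pythonVersion "3.9", for example, this key renders to something like
# "pip-packages-v1-3.9-<checksum of setup.py>", so the cache is invalidated
# whenever the declared dependencies change.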
- "run":
name: "Fix $env:PATH"
command: |
# The Python this job is parameterized on is not necessarily the one at
# the front of $env:PATH. Modify $env:PATH so that it is, letting us just
# say "python" in the rest of the steps. Also get the
# related Scripts directory so tools from packages we install are
# also available.
$p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
$q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"
New-Item $Profile.CurrentUserAllHosts -Force
# $p gets "python" on PATH and $q gets tools from packages we
# install. Note we carefully construct the string so that
# $env:PATH is not substituted now but $p and $q are. ` is the
# PowerShell string escape character.
Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""
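# After substitution the appended profile line looks roughly like this
# (the paths are illustrative, not literal):
#   $env:PATH = "C:\Python39;C:\Python39\Scripts;$env:PATH"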
- "run":
# It's faster to install a wheel than a source package. If we don't
# have a cached wheelhouse then build all of the wheels and dump
# them into a directory where they can become a cached wheelhouse.
# We would have built these wheels during installation anyway so it
# doesn't cost us anything extra and saves us effort next time.
name: "(Maybe) Build Wheels"
command: |
python -m pip install setuptools # Some Pythons for Windows do not come with setuptools
python setup.py update_version # Cheat to win a race about writing _version.py
if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) {
echo "Found populated wheelhouse, skipping wheel building."
} else {
python -m pip install wheel
python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test]
}
- "save_cache":
paths:
# Make sure this agrees with PIP_CACHE_DIR in the environment.
- "pip-cache"
- "wheelhouse"
key: *CACHE_KEY
- "run":
name: "Install Dependencies"
environment:
# By this point we should no longer need an index.
PIP_NO_INDEX: "1"
command: |
python -m pip install .[testenv] .[test]
- "run":
name: "Display tool versions"
command: |
python misc/build_helpers/show-tool-versions.py
- "run":
name: "Run Unit Tests"
environment:
# Configure the results location for the subunitv2-file reporter
# from subunitreporter
SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2"
# Try to get prompt output from the reporter to avoid no-output
# timeouts.
PYTHONUNBUFFERED: "1"
command: |
# Run the test suite under coverage measurement using the
# parameterized version of Python, writing subunitv2-format
# results to the file given in the environment.
python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
- "run":
name: "Upload Coverage"
environment:
<<: *COVERALLS_ENVIRONMENT
# Mark the data as just one piece of many because we have more
# than one instance of this job (two on Windows now, some on other
# platforms later) which collects and reports coverage. This is
# necessary to cause Coveralls to merge multiple coverage results
# into a single report. Note the merge only happens when we
# "finish" a particular build, as identified by its "build_num"
# (aka "service_number").
COVERALLS_PARALLEL: "true"
command: |
python -m pip install coveralls==3.3.1
# .coveragerc sets parallel = True so we don't have a `.coverage`
# file but a `.coverage.<unique stuff>` file (or maybe more than
# one, but probably not). coveralls can't work with these so
# merge them before invoking it.
python -m coverage combine
# Now coveralls will be able to find the data, so have it do the
# upload. Also, have it strip the system config-specific prefix
# from all of the source paths.
$prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))"
python -m coveralls --basedir $prefix
- "run":
name: "Convert Result Log"
command: |
# subunit2junitxml exits with error if the result stream it is
# converting has test failures in it! So this step might fail.
# Since the step in which we actually _ran_ the tests won't fail
# even if there are test failures, this is a good thing for now.
subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2
- "store_test_results":
path: "test-results.xml"
- "store_artifacts":
path: "_trial_temp/test.log"
- "store_artifacts":
path: "eliot.log"
- "store_artifacts":
path: ".coverage"
pyinstaller:
docker:
- <<: *DOCKERHUB_AUTH
image: "cimg/python:3.9"
steps:
- "checkout"
- run:
<<: *INSTALL_TOX
- run:
name: "Make PyInstaller executable"
command: |
~/.local/bin/tox -e pyinstaller
- run:
# To verify that the resultant PyInstaller-generated binary executes
# cleanly (i.e., that it terminates with an exit code of 0 and isn't
# failing due to import/packaging-related errors, etc.).
name: "Test PyInstaller executable"
command: |
dist/Tahoe-LAFS/tahoe --version
debian-11: &DEBIAN
environment: &UTF_8_ENVIRONMENT
# In general, a failing test suite causes the job to fail. Set this to
# "yes" if you want the job to succeed even when the test suite fails.
ALLOWED_FAILURE: "no"
# Tell Hypothesis which configuration we want it to use.
TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
# Tell the C runtime things about character encoding (mainly to do with
# filenames and argv).
LANG: "en_US.UTF-8"
# Select a tox environment to run for this job.
TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
# Additional arguments to pass to tox.
TAHOE_LAFS_TOX_ARGS: ""
# The path in which test artifacts will be placed.
ARTIFACTS_OUTPUT_PATH: "/tmp/artifacts"
# Convince all of our pip invocations to look at the cached wheelhouse
# we maintain.
WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
PIP_FIND_LINKS: "file:///tmp/wheelhouse"
# Upload the coverage report.
UPLOAD_COVERAGE: ""
# pip cannot install packages if the working directory is not readable.
# We want to run a lot of steps as nobody instead of as root.
working_directory: "/tmp/project"
steps:
- "checkout"
- run: &SETUP_VIRTUALENV
name: "Setup virtualenv"
command: |
/tmp/project/.circleci/setup-virtualenv.sh \
"/tmp/venv" \
"/tmp/project" \
"${WHEELHOUSE_PATH}" \
"${TAHOE_LAFS_TOX_ENVIRONMENT}" \
"${TAHOE_LAFS_TOX_ARGS}"
- run: &RUN_TESTS
name: "Run test suite"
command: |
/tmp/project/.circleci/run-tests.sh \
"/tmp/venv" \
"/tmp/project" \
"${ALLOWED_FAILURE}" \
"${ARTIFACTS_OUTPUT_PATH}" \
"${TAHOE_LAFS_TOX_ENVIRONMENT}" \
"${TAHOE_LAFS_TOX_ARGS}"
# Trial output gets directed straight to a log, so raise the no-output
# timeout to keep CircleCI from killing the job while the test suite runs.
no_output_timeout: "20m"
- store_test_results: &STORE_TEST_RESULTS
path: "/tmp/artifacts/junit"
- store_artifacts: &STORE_TEST_LOG
# Despite passing --workdir /tmp to tox above, it still runs trial
# in the project source checkout.
path: "/tmp/project/_trial_temp/test.log"
- store_artifacts: &STORE_ELIOT_LOG
# Despite passing --workdir /tmp to tox above, it still runs trial
# in the project source checkout.
path: "/tmp/project/eliot.log"
- store_artifacts: &STORE_OTHER_ARTIFACTS
# Store any other artifacts, too. This is handy to allow other jobs
# sharing most of the definition of this one to be able to
# contribute artifacts easily.
path: "/tmp/artifacts"
- run: &SUBMIT_COVERAGE
name: "Submit coverage results"
command: |
if [ -n "${UPLOAD_COVERAGE}" ]; then
echo "TODO: Need a new coverage solution, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4011"
fi
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/debian:11-py3.9"
user: "nobody"
# Restore later using PyPy3.8
# pypy27-buster:
# <<: *DEBIAN
# docker:
# - <<: *DOCKERHUB_AUTH
# image: "tahoelafsci/pypy:buster-py2"
# user: "nobody"
# environment:
# <<: *UTF_8_ENVIRONMENT
# # We don't do coverage since it makes PyPy far too slow:
# TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
# # Since we didn't collect it, don't upload it.
# UPLOAD_COVERAGE: ""
c-locale:
<<: *DEBIAN
environment:
<<: *UTF_8_ENVIRONMENT
LANG: "C"
another-locale:
<<: *DEBIAN
environment:
<<: *UTF_8_ENVIRONMENT
# aka "Latin 1"
LANG: "en_US.ISO-8859-1"
integration:
<<: *DEBIAN
parameters:
tox-args:
description: >-
Additional arguments to pass to the tox command.
type: "string"
default: ""
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/debian:11-py3.9"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
# Select the integration tests tox environments.
TAHOE_LAFS_TOX_ENVIRONMENT: "integration"
# Disable artifact collection because py.test can't produce any.
ARTIFACTS_OUTPUT_PATH: ""
# Pass on anything we got in our parameters.
TAHOE_LAFS_TOX_ARGS: "<< parameters.tox-args >>"
steps:
- "checkout"
# DRY, YAML-style. See the debian-11 steps.
- run: *SETUP_VIRTUALENV
- run: *RUN_TESTS
ubuntu-20-04:
<<: *DEBIAN
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:20.04-py3.9"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
ubuntu-22-04:
<<: *DEBIAN
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:22.04-py3.10"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py310"
oraclelinux-8: &RHEL_DERIV
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/oraclelinux:8-py3.8"
user: "nobody"
environment:
<<: *UTF_8_ENVIRONMENT
TAHOE_LAFS_TOX_ENVIRONMENT: "py38"
# pip cannot install packages if the working directory is not readable.
# We want to run a lot of steps as nobody instead of as root.
working_directory: "/tmp/project"
steps:
- "checkout"
- run: *SETUP_VIRTUALENV
- run: *RUN_TESTS
- store_test_results: *STORE_TEST_RESULTS
- store_artifacts: *STORE_TEST_LOG
- store_artifacts: *STORE_ELIOT_LOG
- store_artifacts: *STORE_OTHER_ARTIFACTS
- run: *SUBMIT_COVERAGE
fedora-35:
<<: *RHEL_DERIV
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/fedora:35-py3"
user: "nobody"
nixos:
parameters:
nixpkgs:
description: >-
Reference the name of a flake-managed nixpkgs input (see `nix flake
metadata` and flake.nix)
type: "string"
pythonVersion:
description: >-
Reference the name of a Python package in nixpkgs to use.
type: "string"
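# For example "python39", "python310", or "python311", matching the
# matrix values used in the ci workflow above.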
executor: "nix"
steps:
- "nix-build":
nixpkgs: "<<parameters.nixpkgs>>"
pythonVersion: "<<parameters.pythonVersion>>"
buildSteps:
- "run":
name: "Unit Test"
command: |
source .circleci/lib.sh
# Translate the nixpkgs selection into a flake reference we
# can use to override the default nixpkgs input.
NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
cache_if_able nix run \
--override-input nixpkgs "$NIXPKGS" \
.#<<parameters.pythonVersion>>-unittest -- \
--jobs $UNITTEST_CORES \
allmydata
typechecks:
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:20.04-py3.9"
steps:
- "checkout"
- run:
name: "Validate Types"
command: |
/tmp/venv/bin/tox -e typechecks
docs:
docker:
- <<: *DOCKERHUB_AUTH
image: "tahoelafsci/ubuntu:20.04-py3.9"
steps:
- "checkout"
- run:
name: "Build documentation"
command: |
/tmp/venv/bin/tox -e docs
build-image: &BUILD_IMAGE
# This is a template for a job to build a Docker image that has as much of
# the setup as we can manage already done and baked in. This cuts down on
# the per-job setup time the actual testing jobs have to perform - by
# perhaps 10% - 20%.
#
# https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
docker:
- <<: *DOCKERHUB_AUTH
# CircleCI build images; https://github.com/CircleCI-Public/cimg-base
# for details.
image: "cimg/base:2022.01"
environment:
# Placeholder values; each concrete build-image-* job below overrides these.
DISTRO: "tahoelafsci/<DISTRO>:foo-py3.9"
TAG: "tahoelafsci/distro:<TAG>-py3.9"
PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION>"
steps:
- "checkout"
- setup_remote_docker:
version: "20.10.11"
- run:
name: "Log in to Dockerhub"
command: |
docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
- run:
name: "Build image"
command: |
docker \
build \
--build-arg TAG=${TAG} \
--build-arg PYTHON_VERSION=${PYTHON_VERSION} \
-t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
-f ~/project/.circleci/Dockerfile.${DISTRO} \
~/project/
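# As a concrete example, build-image-debian-11 below supplies DISTRO=debian,
# TAG=11 and PYTHON_VERSION=3.9, so this expands to roughly:
#   docker build --build-arg TAG=11 --build-arg PYTHON_VERSION=3.9 \
#     -t tahoelafsci/debian:11-py3.9 -f ~/project/.circleci/Dockerfile.debian ~/project/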
- run:
name: "Push image"
command: |
docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
build-image-debian-11:
<<: *BUILD_IMAGE
environment:
DISTRO: "debian"
TAG: "11"
PYTHON_VERSION: "3.9"
build-image-ubuntu-20-04:
<<: *BUILD_IMAGE
environment:
DISTRO: "ubuntu"
TAG: "20.04"
PYTHON_VERSION: "3.9"
build-image-ubuntu-22-04:
<<: *BUILD_IMAGE
environment:
DISTRO: "ubuntu"
TAG: "22.04"
PYTHON_VERSION: "3.10"
build-image-oraclelinux-8:
<<: *BUILD_IMAGE
environment:
DISTRO: "oraclelinux"
TAG: "8"
PYTHON_VERSION: "3.8"
build-image-fedora-35:
<<: *BUILD_IMAGE
environment:
DISTRO: "fedora"
TAG: "35"
PYTHON_VERSION: "3"
# build-image-pypy27-buster:
# <<: *BUILD_IMAGE
# environment:
# DISTRO: "pypy"
# TAG: "buster"
# # We only have Python 2 for PyPy right now so there's no support for
# # setting up PyPy 3 in the image building toolchain. This value is just
# # for constructing the right Docker image tag.
# PYTHON_VERSION: "2"
executors:
windows:
# Choose a Windows environment that most closely matches our testing
# requirements and goals.
# https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
machine:
image: "windows-server-2022-gui:current"
shell: "powershell.exe -ExecutionPolicy Bypass"
resource_class: "windows.large"
nix:
docker:
# Run in a highly Nix-capable environment.
- <<: *DOCKERHUB_AUTH
image: "nixos/nix:2.16.1"
environment:
# CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
# to push to CACHIX_NAME. CACHIX_NAME tells cachix which cache to push
# to.
CACHIX_NAME: "tahoe-lafs-opensource"
# Let us use features marked "experimental". For example, most/all of
# the `nix <subcommand>` forms.
NIX_CONFIG: "experimental-features = nix-command flakes"
commands:
nix-build:
parameters:
nixpkgs:
description: >-
Reference the name of a flake-managed nixpkgs input (see `nix flake
metadata` and flake.nix)
type: "string"
pythonVersion:
description: >-
Reference the name of a Python package in nixpkgs to use.
type: "string"
buildSteps:
description: >-
The build steps to execute after setting up the build environment.
type: "steps"
steps:
- "run":
# Get cachix for Nix-friendly caching.
name: "Install Basic Dependencies"
command: |
# Get some build environment dependencies and let them float on a
# certain release branch. These aren't involved in the actual
# package build (only in CI environment setup) so the fact that
# they float shouldn't hurt reproducibility.
NIXPKGS="nixpkgs/nixos-23.05"
nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp
# Activate our cachix cache for "binary substitution". This sets
# up configuration that lets Nix download something from the cache
# instead of building it locally, if possible.
cachix use "${CACHIX_NAME}"
- "checkout"
- "run":
# The Nix package doesn't know how to do this part, unfortunately.
name: "Generate version"
command: |
nix-shell \
-p 'python3.withPackages (ps: [ ps.setuptools ])' \
--run 'python setup.py update_version'
- "run":
name: "Build Package"
command: |
source .circleci/lib.sh
NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
cache_if_able nix build \
--verbose \
--print-build-logs \
--cores "$DEPENDENCY_CORES" \
--override-input nixpkgs "$NIXPKGS" \
.#<<parameters.pythonVersion>>-tahoe-lafs
- steps: "<<parameters.buildSteps>>"