source: trunk/.circleci/config.yml

Last change on this file was a95a6b8, checked in by Jean-Paul Calderone <exarkun@…>, at 2023-08-09T21:04:26Z

note motivation for our choice of these python versions

  • Property mode set to 100644
File size: 25.8 KB
Line 
# https://circleci.com/docs/2.0/

# We use version 2.1 of CircleCI's configuration format (the docs are still at
# the 2.0 link) in order to have access to Windows executors. This means we
# can't use dots in job names anymore. They have a new "parameters" feature
# that is supposed to remove the need to have version numbers in job names (the
# source of our dots), but switching to that is going to be a bigger refactor:
#
#   https://discuss.circleci.com/t/v2-1-job-name-validation/31123
#   https://circleci.com/docs/2.0/reusing-config/
#
version: 2.1

# Every job that pushes a Docker image from Docker Hub must authenticate to
# it.  Define a couple yaml anchors that can be used to supply the necessary
# credentials.

# First is a CircleCI job context which makes Docker Hub credentials available
# in the environment.
#
# Contexts are managed in the CircleCI web interface:
#
#  https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
dockerhub-context-template: &DOCKERHUB_CONTEXT
  context: "dockerhub-auth"

# Required environment for using the coveralls tool to upload partial coverage
# reports and then finish the process.
#
# NOTE(review): this token is a secret committed to version control.  Consider
# rotating it and supplying it via a CircleCI context or project environment
# variable instead.
coveralls-environment: &COVERALLS_ENVIRONMENT
  COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"

# Next is a Docker executor template that gets the credentials from the
# environment and supplies them to the executor.
dockerhub-auth-template: &DOCKERHUB_AUTH
  - auth:
      username: $DOCKERHUB_USERNAME
      password: $DOCKERHUB_PASSWORD

# A template that can be shared between the two different image-building
# workflows.
.images: &IMAGES
  jobs:
    - "build-image-debian-11":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-ubuntu-20-04":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-ubuntu-22-04":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-fedora-35":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-oraclelinux-8":
        <<: *DOCKERHUB_CONTEXT
    # Restore later as PyPy38
    #- "build-image-pypy27-buster":
    #    <<: *DOCKERHUB_CONTEXT

parameters:
  # Whether the image-building workflow should run as part of this pipeline.
  # Usually it should not: we don't want our dependencies to move around all
  # the time, and building the images takes a couple of minutes.
  #
  # The rebuild-images.sh tool in this directory is an easy way to trigger a
  # pipeline with this flag enabled; the CircleCI web UI can do so as well.
  build-images:
    type: "boolean"
    default: false

  # Whether the test-running workflow should run as part of this pipeline.
  # Usually it should: running the tests is the primary purpose of this
  # pipeline.
  run-tests:
    type: "boolean"
    default: true

workflows:
  ci:
    when: "<< pipeline.parameters.run-tests >>"
    jobs:
      # Jobs testing the various supported platforms come first.
      - "debian-11":
          {}

      - "ubuntu-20-04":
          {}

      - "ubuntu-22-04":
          {}

      # Equivalent to RHEL 8; CentOS 8 is dead.
      - "oraclelinux-8":
          {}

      - "nixos":
          name: "<<matrix.pythonVersion>>"
          nixpkgs: "nixpkgs-unstable"
          matrix:
            parameters:
              pythonVersion:
                - "python39"
                - "python310"
                - "python311"

      # Eventually, test against PyPy 3.8
      #- "pypy27-buster":
      #    {}

      # Other assorted tasks and configurations
      - "codechecks":
          {}
      - "pyinstaller":
          {}
      - "c-locale":
          {}
      # Any locale other than C or UTF-8.
      - "another-locale":
          {}

      - "windows-server-2022":
          name: "Windows Server 2022, CPython <<matrix.pythonVersion>>"
          matrix:
            parameters:
              # Run the job for a number of CPython versions.  These are the
              # two versions installed on the Windows VM image we specify (in
              # the executor), which is handy because it spares us any Python
              # installation work.  The Windows VM image is pinned so these
              # shouldn't shuffle around beneath us, but updating that image
              # or wanting different Pythons probably means changing this
              # list.
              pythonVersion:
                - "3.9"
                - "3.11"

      - "integration":
          # Run even the slow integration tests here.  We need the `--` to
          # sneak past tox and get to pytest.
          tox-args: "-- --runslow integration"
          requires:
            # If the unit test suite doesn't pass, don't bother running the
            # integration tests.
            - "debian-11"

      - "typechecks":
          {}
      - "docs":
          {}

      - "finish-coverage-report":
          requires:
            # Referencing the job by "alias" (as CircleCI calls the mapping
            # key) instead of by the value of its "name" property requires
            # every instance of the job from its matrix expansion.  So this
            # single requirement covers every Windows Server 2022 job.
            - "windows-server-2022"

  images:
    <<: *IMAGES

    # Build as part of the workflow but only if requested.
    when: "<< pipeline.parameters.build-images >>"

jobs:
  finish-coverage-report:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "python:3-slim"

    steps:
      - run:
          # Tell coveralls.io that every parallel coverage upload for this
          # build has been submitted so it can produce the merged report.
          name: "Indicate completion to coveralls.io"
          environment:
            <<: *COVERALLS_ENVIRONMENT
          command: |
            pip install coveralls==3.3.1
            python -m coveralls --finish

  codechecks:
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "cimg/python:3.9"

    steps:
      - "checkout"

      - run: &INSTALL_TOX
          name: "Install tox"
          command: |
            pip install --user 'tox~=3.0'

      - run:
          name: "Static-ish code checks"
          command: |
            ~/.local/bin/tox -e codechecks

197  windows-server-2022:
198    parameters:
199      pythonVersion:
200        description: >-
201          An argument to pass to the `py` launcher to choose a Python version.
202        type: "string"
203        default: ""
204
205    executor: "windows"
206    environment:
207      # Tweak Hypothesis to make its behavior more suitable for the CI
208      # environment.  This should improve reproducibility and lessen the
209      # effects of variable compute resources.
210      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
211
212      # Tell pip where its download cache lives.  This must agree with the
213      # "save_cache" step below or caching won't really work right.
214      PIP_CACHE_DIR: "pip-cache"
215
216      # And tell pip where it can find out cached wheelhouse for fast wheel
217      # installation, even for projects that don't distribute wheels.  This
218      # must also agree with the "save_cache" step below.
219      PIP_FIND_LINKS: "wheelhouse"
220
221    steps:
222      - "checkout"
223
224      # If possible, restore a pip download cache to save us from having to
225      # download all our Python dependencies from PyPI.
226      - "restore_cache":
227          keys:
228            # The download cache and/or the wheelhouse may contain Python
229            # version-specific binary packages so include the Python version
230            # in this key, as well as the canonical source of our
231            # dependencies.
232            - &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}"
233
234      - "run":
235          name: "Fix $env:PATH"
236          command: |
237            # The Python this job is parameterized is not necessarily the one
238            # at the front of $env:PATH.  Modify $env:PATH so that it is so we
239            # can just say "python" in the rest of the steps.  Also get the
240            # related Scripts directory so tools from packages we install are
241            # also available.
242            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
243            $q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"
244
245            New-Item $Profile.CurrentUserAllHosts -Force
246            # $p gets "python" on PATH and $q gets tools from packages we
247            # install.  Note we carefully construct the string so that
248            # $env:PATH is not substituted now but $p and $q are.  ` is the
249            # PowerShell string escape character.
250            Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""
251
252      - "run":
253          name: "Display tool versions"
254          command: |
255            python misc/build_helpers/show-tool-versions.py
256
257      - "run":
258          # It's faster to install a wheel than a source package.  If we don't
259          # have a cached wheelhouse then build all of the wheels and dump
260          # them into a directory where they can become a cached wheelhouse.
261          # We would have built these wheels during installation anyway so it
262          # doesn't cost us anything extra and saves us effort next time.
263          name: "(Maybe) Build Wheels"
264          command: |
265            if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) {
266              echo "Found populated wheelhouse, skipping wheel building."
267            } else {
268              python -m pip install wheel
269              python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test]
270            }
271
272      - "save_cache":
273          paths:
274            # Make sure this agrees with PIP_CACHE_DIR in the environment.
275            - "pip-cache"
276            - "wheelhouse"
277          key: *CACHE_KEY
278
279      - "run":
280          name: "Install Dependencies"
281          environment:
282            # By this point we should no longer need an index.
283            PIP_NO_INDEX: "1"
284          command: |
285            python -m pip install .[testenv] .[test]
286
287      - "run":
288          name: "Run Unit Tests"
289          environment:
290            # Configure the results location for the subunitv2-file reporter
291            # from subunitreporter
292            SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2"
293
294            # Try to get prompt output from the reporter to avoid no-output
295            # timeouts.
296            PYTHONUNBUFFERED: "1"
297
298          command: |
299            # Run the test suite under coverage measurement using the
300            # parameterized version of Python, writing subunitv2-format
301            # results to the file given in the environment.
302            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
303
304      - "run":
305          name: "Upload Coverage"
306          environment:
307            <<: *COVERALLS_ENVIRONMENT
308            # Mark the data as just one piece of many because we have more
309            # than one instance of this job (two on Windows now, some on other
310            # platforms later) which collects and reports coverage.  This is
311            # necessary to cause Coveralls to merge multiple coverage results
312            # into a single report.  Note the merge only happens when we
313            # "finish" a particular build, as identified by its "build_num"
314            # (aka "service_number").
315            COVERALLS_PARALLEL: "true"
316          command: |
317            python -m pip install coveralls==3.3.1
318
319            # .coveragerc sets parallel = True so we don't have a `.coverage`
320            # file but a `.coverage.<unique stuff>` file (or maybe more than
321            # one, but probably not).  coveralls can't work with these so
322            # merge them before invoking it.
323            python -m coverage combine
324
325            # Now coveralls will be able to find the data, so have it do the
326            # upload.  Also, have it strip the system config-specific prefix
327            # from all of the source paths.
328            $prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))"
329            python -m coveralls --basedir $prefix
330
331      - "run":
332          name: "Convert Result Log"
333          command: |
334            # subunit2junitxml exits with error if the result stream it is
335            # converting has test failures in it!  So this step might fail.
336            # Since the step in which we actually _ran_ the tests won't fail
337            # even if there are test failures, this is a good thing for now.
338            subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2
339
340      - "store_test_results":
341          path: "test-results.xml"
342
343      - "store_artifacts":
344          path: "_trial_temp/test.log"
345
346      - "store_artifacts":
347          path: "eliot.log"
348
349      - "store_artifacts":
350          path: ".coverage"
351
352  pyinstaller:
353    docker:
354      - <<: *DOCKERHUB_AUTH
355        image: "cimg/python:3.9"
356
357    steps:
358      - "checkout"
359
360      - run:
361          <<: *INSTALL_TOX
362
363      - run:
364          name: "Make PyInstaller executable"
365          command: |
366            ~/.local/bin/tox -e pyinstaller
367
368      - run:
369          # To verify that the resultant PyInstaller-generated binary executes
370          # cleanly (i.e., that it terminates with an exit code of 0 and isn't
371          # failing due to import/packaging-related errors, etc.).
372          name: "Test PyInstaller executable"
373          command: |
374            dist/Tahoe-LAFS/tahoe --version
375
376  debian-11: &DEBIAN
377    environment: &UTF_8_ENVIRONMENT
378      # In general, the test suite is not allowed to fail while the job
379      # succeeds.  But you can set this to "yes" if you want it to be
380      # otherwise.
381      ALLOWED_FAILURE: "no"
382      # Tell Hypothesis which configuration we want it to use.
383      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
384      # Tell the C runtime things about character encoding (mainly to do with
385      # filenames and argv).
386      LANG: "en_US.UTF-8"
387      # Select a tox environment to run for this job.
388      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
389      # Additional arguments to pass to tox.
390      TAHOE_LAFS_TOX_ARGS: ""
391      # The path in which test artifacts will be placed.
392      ARTIFACTS_OUTPUT_PATH: "/tmp/artifacts"
393      # Convince all of our pip invocations to look at the cached wheelhouse
394      # we maintain.
395      WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
396      PIP_FIND_LINKS: "file:///tmp/wheelhouse"
397      # Upload the coverage report.
398      UPLOAD_COVERAGE: ""
399
400    # pip cannot install packages if the working directory is not readable.
401    # We want to run a lot of steps as nobody instead of as root.
402    working_directory: "/tmp/project"
403
404    steps:
405      - "checkout"
406      - run: &SETUP_VIRTUALENV
407          name: "Setup virtualenv"
408          command: |
409            /tmp/project/.circleci/setup-virtualenv.sh \
410                "/tmp/venv" \
411                "/tmp/project" \
412                "${WHEELHOUSE_PATH}" \
413                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
414                "${TAHOE_LAFS_TOX_ARGS}"
415
416      - run: &RUN_TESTS
417          name: "Run test suite"
418          command: |
419            /tmp/project/.circleci/run-tests.sh \
420                "/tmp/venv" \
421                "/tmp/project" \
422                "${ALLOWED_FAILURE}" \
423                "${ARTIFACTS_OUTPUT_PATH}" \
424                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
425                "${TAHOE_LAFS_TOX_ARGS}"
426          # trial output gets directed straight to a log.  avoid the circleci
427          # timeout while the test suite runs.
428          no_output_timeout: "20m"
429
430      - store_test_results: &STORE_TEST_RESULTS
431          path: "/tmp/artifacts/junit"
432
433      - store_artifacts: &STORE_TEST_LOG
434          # Despite passing --workdir /tmp to tox above, it still runs trial
435          # in the project source checkout.
436          path: "/tmp/project/_trial_temp/test.log"
437
438      - store_artifacts: &STORE_ELIOT_LOG
439          # Despite passing --workdir /tmp to tox above, it still runs trial
440          # in the project source checkout.
441          path: "/tmp/project/eliot.log"
442
443      - store_artifacts: &STORE_OTHER_ARTIFACTS
444          # Store any other artifacts, too.  This is handy to allow other jobs
445          # sharing most of the definition of this one to be able to
446          # contribute artifacts easily.
447          path: "/tmp/artifacts"
448
449      - run: &SUBMIT_COVERAGE
450          name: "Submit coverage results"
451          command: |
452            if [ -n "${UPLOAD_COVERAGE}" ]; then
453              echo "TODO: Need a new coverage solution, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4011"
454            fi
455
456    docker:
457      - <<: *DOCKERHUB_AUTH
458        image: "tahoelafsci/debian:11-py3.9"
459        user: "nobody"
460
461
462  # Restore later using PyPy3.8
463  # pypy27-buster:
464  #   <<: *DEBIAN
465  #   docker:
466  #     - <<: *DOCKERHUB_AUTH
467  #       image: "tahoelafsci/pypy:buster-py2"
468  #       user: "nobody"
469  #   environment:
470  #     <<: *UTF_8_ENVIRONMENT
471  #     # We don't do coverage since it makes PyPy far too slow:
472  #     TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
473  #     # Since we didn't collect it, don't upload it.
474  #     UPLOAD_COVERAGE: ""
475
476  c-locale:
477    <<: *DEBIAN
478
479    environment:
480      <<: *UTF_8_ENVIRONMENT
481      LANG: "C"
482
483
484  another-locale:
485    <<: *DEBIAN
486
487    environment:
488      <<: *UTF_8_ENVIRONMENT
489      # aka "Latin 1"
490      LANG: "en_US.ISO-8859-1"
491
492  integration:
493    <<: *DEBIAN
494
495    parameters:
496      tox-args:
497        description: >-
498          Additional arguments to pass to the tox command.
499        type: "string"
500        default: ""
501
502    docker:
503      - <<: *DOCKERHUB_AUTH
504        image: "tahoelafsci/debian:11-py3.9"
505        user: "nobody"
506
507    environment:
508      <<: *UTF_8_ENVIRONMENT
509      # Select the integration tests tox environments.
510      TAHOE_LAFS_TOX_ENVIRONMENT: "integration"
511      # Disable artifact collection because py.test can't produce any.
512      ARTIFACTS_OUTPUT_PATH: ""
513
514      # Pass on anything we got in our parameters.
515      TAHOE_LAFS_TOX_ARGS: "<< parameters.tox-args >>"
516
517    steps:
518      - "checkout"
519      # DRY, YAML-style.  See the debian-9 steps.
520      - run: *SETUP_VIRTUALENV
521      - run: *RUN_TESTS
522
523  ubuntu-20-04:
524    <<: *DEBIAN
525    docker:
526      - <<: *DOCKERHUB_AUTH
527        image: "tahoelafsci/ubuntu:20.04-py3.9"
528        user: "nobody"
529    environment:
530      <<: *UTF_8_ENVIRONMENT
531      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
532
533  ubuntu-22-04:
534    <<: *DEBIAN
535    docker:
536      - <<: *DOCKERHUB_AUTH
537        image: "tahoelafsci/ubuntu:22.04-py3.10"
538        user: "nobody"
539    environment:
540      <<: *UTF_8_ENVIRONMENT
541      TAHOE_LAFS_TOX_ENVIRONMENT: "py310"
542
543  oraclelinux-8: &RHEL_DERIV
544    docker:
545      - <<: *DOCKERHUB_AUTH
546        image: "tahoelafsci/oraclelinux:8-py3.8"
547        user: "nobody"
548
549    environment:
550      <<: *UTF_8_ENVIRONMENT
551      TAHOE_LAFS_TOX_ENVIRONMENT: "py38"
552
553    # pip cannot install packages if the working directory is not readable.
554    # We want to run a lot of steps as nobody instead of as root.
555    working_directory: "/tmp/project"
556
557    steps:
558      - "checkout"
559      - run: *SETUP_VIRTUALENV
560      - run: *RUN_TESTS
561      - store_test_results: *STORE_TEST_RESULTS
562      - store_artifacts: *STORE_TEST_LOG
563      - store_artifacts: *STORE_ELIOT_LOG
564      - store_artifacts: *STORE_OTHER_ARTIFACTS
565      - run: *SUBMIT_COVERAGE
566
567  fedora-35:
568    <<: *RHEL_DERIV
569    docker:
570      - <<: *DOCKERHUB_AUTH
571        image: "tahoelafsci/fedora:35-py3"
572        user: "nobody"
573
574  nixos:
575    parameters:
576      nixpkgs:
577        description: >-
578          Reference the name of a flake-managed nixpkgs input (see `nix flake
579          metadata` and flake.nix)
580        type: "string"
581      pythonVersion:
582        description: >-
583          Reference the name of a Python package in nixpkgs to use.
584        type: "string"
585
586    executor: "nix"
587
588    steps:
589      - "nix-build":
590          nixpkgs: "<<parameters.nixpkgs>>"
591          pythonVersion: "<<parameters.pythonVersion>>"
592          buildSteps:
593            - "run":
594                name: "Unit Test"
595                command: |
596                  source .circleci/lib.sh
597
598                  # Translate the nixpkgs selection into a flake reference we
599                  # can use to override the default nixpkgs input.
600                  NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
601
602                  cache_if_able nix run \
603                    --override-input nixpkgs "$NIXPKGS" \
604                    .#<<parameters.pythonVersion>>-unittest -- \
605                    --jobs $UNITTEST_CORES \
606                    allmydata
607
608  typechecks:
609    docker:
610      - <<: *DOCKERHUB_AUTH
611        image: "tahoelafsci/ubuntu:20.04-py3.9"
612
613    steps:
614      - "checkout"
615      - run:
616          name: "Validate Types"
617          command: |
618            /tmp/venv/bin/tox -e typechecks
619
620  docs:
621    docker:
622      - <<: *DOCKERHUB_AUTH
623        image: "tahoelafsci/ubuntu:20.04-py3.9"
624
625    steps:
626      - "checkout"
627      - run:
628          name: "Build documentation"
629          command: |
630            /tmp/venv/bin/tox -e docs
631
632  build-image: &BUILD_IMAGE
633    # This is a template for a job to build a Docker image that has as much of
634    # the setup as we can manage already done and baked in.  This cuts down on
635    # the per-job setup time the actual testing jobs have to perform - by
636    # perhaps 10% - 20%.
637    #
638    # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
639    docker:
640      - <<: *DOCKERHUB_AUTH
641        # CircleCI build images; https://github.com/CircleCI-Public/cimg-base
642        # for details.
643        image: "cimg/base:2022.01"
644
645    environment:
646      DISTRO: "tahoelafsci/<DISTRO>:foo-py3.9"
647      TAG: "tahoelafsci/distro:<TAG>-py3.9"
648      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION}"
649
650    steps:
651      - "checkout"
652      - setup_remote_docker:
653          version: "20.10.11"
654      - run:
655          name: "Log in to Dockerhub"
656          command: |
657            docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
658      - run:
659          name: "Build image"
660          command: |
661            docker \
662                build \
663                --build-arg TAG=${TAG} \
664                --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
665                -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
666                -f ~/project/.circleci/Dockerfile.${DISTRO} \
667                ~/project/
668      - run:
669          name: "Push image"
670          command: |
671            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
672
673
674  build-image-debian-11:
675    <<: *BUILD_IMAGE
676
677    environment:
678      DISTRO: "debian"
679      TAG: "11"
680      PYTHON_VERSION: "3.9"
681
682
683  build-image-ubuntu-20-04:
684    <<: *BUILD_IMAGE
685
686    environment:
687      DISTRO: "ubuntu"
688      TAG: "20.04"
689      PYTHON_VERSION: "3.9"
690
691
692  build-image-ubuntu-22-04:
693    <<: *BUILD_IMAGE
694
695    environment:
696      DISTRO: "ubuntu"
697      TAG: "22.04"
698      PYTHON_VERSION: "3.10"
699
700
701  build-image-oraclelinux-8:
702    <<: *BUILD_IMAGE
703
704    environment:
705      DISTRO: "oraclelinux"
706      TAG: "8"
707      PYTHON_VERSION: "3.8"
708
709  build-image-fedora-35:
710    <<: *BUILD_IMAGE
711
712    environment:
713      DISTRO: "fedora"
714      TAG: "35"
715      PYTHON_VERSION: "3"
716
717  # build-image-pypy27-buster:
718  #   <<: *BUILD_IMAGE
719  #   environment:
720  #     DISTRO: "pypy"
721  #     TAG: "buster"
722  #     # We only have Python 2 for PyPy right now so there's no support for
723  #     # setting up PyPy 3 in the image building toolchain.  This value is just
724  #     # for constructing the right Docker image tag.
725  #     PYTHON_VERSION: "2"
726
executors:
  windows:
    # Choose the Windows environment that most closely matches our testing
    # requirements and goals.
    # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
    machine:
      image: "windows-server-2022-gui:2023.06.1"
      shell: "powershell.exe -ExecutionPolicy Bypass"
    resource_class: "windows.large"

  nix:
    docker:
      # Run in a highly Nix-capable environment.
      - <<: *DOCKERHUB_AUTH
        image: "nixos/nix:2.16.1"
    environment:
      # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
      # to push to CACHIX_NAME.  CACHIX_NAME tells cachix which cache to push
      # to.
      CACHIX_NAME: "tahoe-lafs-opensource"
      # Let us use features marked "experimental".  For example, most/all of
      # the `nix <subcommand>` forms.
      NIX_CONFIG: "experimental-features = nix-command flakes"

commands:
  nix-build:
    parameters:
      nixpkgs:
        description: >-
          Reference the name of a flake-managed nixpkgs input (see `nix flake
          metadata` and flake.nix)
        type: "string"
      pythonVersion:
        description: >-
          Reference the name of a Python package in nixpkgs to use.
        type: "string"
      buildSteps:
        description: >-
          The build steps to execute after setting up the build environment.
        type: "steps"

    steps:
      - "run":
          # Get cachix for Nix-friendly caching.
          name: "Install Basic Dependencies"
          command: |
            # Get some build environment dependencies and let them float on a
            # certain release branch.  These aren't involved in the actual
            # package build (only in CI environment setup) so the fact that
            # they float shouldn't hurt reproducibility.
            NIXPKGS="nixpkgs/nixos-23.05"
            nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp

            # Activate our cachix cache for "binary substitution".  This sets
            # up configuration that lets Nix download something from the
            # cache instead of building it locally, if possible.
            cachix use "${CACHIX_NAME}"

      - "checkout"

      - "run":
          # The Nix package doesn't know how to do this part, unfortunately.
          name: "Generate version"
          command: |
            nix-shell \
              -p 'python3.withPackages (ps: [ ps.setuptools ])' \
              --run 'python setup.py update_version'

      - "run":
          name: "Build Package"
          command: |
            source .circleci/lib.sh
            NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
            cache_if_able nix build \
              --verbose \
              --print-build-logs \
              --cores "$DEPENDENCY_CORES" \
              --override-input nixpkgs "$NIXPKGS" \
              .#<<parameters.pythonVersion>>-tahoe-lafs

      - steps: "<<parameters.buildSteps>>"
Note: See TracBrowser for help on using the repository browser.