source: trunk/.circleci/config.yml

Last change on this file was aedd948, checked in by Florian Sesser <florian@…>, at 2024-11-08T13:26:47Z

grammar

  • Property mode set to 100644
File size: 26.0 KB
Line 
1# https://circleci.com/docs/2.0/
2
3# We use version 2.1 of CircleCI's configuration format (the docs are still at
4# the 2.0 link) in order to have access to Windows executors. This means we
5# can't use dots in job names anymore. They have a new "parameters" feature
6# that is supposed to remove the need to have version numbers in job names (the
7# source of our dots), but switching to that is going to be a bigger refactor:
8#
9#   https://discuss.circleci.com/t/v2-1-job-name-validation/31123
10#   https://circleci.com/docs/2.0/reusing-config/
11#
12version: 2.1
13
14# Every job that pushes a Docker image from Docker Hub must authenticate to
15# it.  Define a couple yaml anchors that can be used to supply the necessary
16# credentials.
17
18# First is a CircleCI job context which makes Docker Hub credentials available
19# in the environment.
20#
21# Contexts are managed in the CircleCI web interface:
22#
23#  https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
24dockerhub-context-template: &DOCKERHUB_CONTEXT
25  context: "dockerhub-auth"
26
27# Required environment for using the coveralls tool to upload partial coverage
29# reports and then finish the process.  NOTE: this repo token is a secret committed in plaintext; it should be rotated and supplied via a CircleCI context or project environment variable instead.
29coveralls-environment: &COVERALLS_ENVIRONMENT
30  COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"
31
32# Next is a Docker executor template that gets the credentials from the
33# environment and supplies them to the executor.
34dockerhub-auth-template: &DOCKERHUB_AUTH
35  - auth:
36      username: $DOCKERHUB_USERNAME
37      password: $DOCKERHUB_PASSWORD
38
39# A template that can be shared between the two different image-building
40# workflows.
41.images: &IMAGES
42  jobs:
43    - "build-image-debian-11":
44        <<: *DOCKERHUB_CONTEXT
45    - "build-image-ubuntu-20-04":
46        <<: *DOCKERHUB_CONTEXT
47    - "build-image-ubuntu-22-04":
48        <<: *DOCKERHUB_CONTEXT
49    - "build-image-fedora-35":
50        <<: *DOCKERHUB_CONTEXT
51    - "build-image-oraclelinux-8":
52        <<: *DOCKERHUB_CONTEXT
53    # Restore later as PyPy38
54    #- "build-image-pypy27-buster":
55    #    <<: *DOCKERHUB_CONTEXT
56
57parameters:
58  # Control whether the image-building workflow runs as part of this pipeline.
59  # Generally we do not want this to run because we don't need our
60  # dependencies to move around all the time and because building the image
61  # takes a couple minutes.
62  #
63  # An easy way to trigger a pipeline with this set to true is with the
64  # rebuild-images.sh tool in this directory.  You can also do so via the
65  # CircleCI web UI.
66  build-images:
67    default: false
68    type: "boolean"
69
70  # Control whether the test-running workflow runs as part of this pipeline.
71  # Generally we do want this to run because running the tests is the primary
72  # purpose of this pipeline.
73  run-tests:
74    default: true
75    type: "boolean"
76
77workflows:
78  ci:
79    when: "<< pipeline.parameters.run-tests >>"
80    jobs:
81      # Start with jobs testing various platforms.
82      - "debian-11":
83          {}
84
85      - "ubuntu-20-04":
86          {}
87
88      - "ubuntu-22-04":
89          {}
90
91      # Equivalent to RHEL 8; CentOS 8 is dead.
92      - "oraclelinux-8":
93          {}
94
95      - "nixos":
96          name: "<<matrix.pythonVersion>>"
97          nixpkgs: "nixpkgs-unstable"
98          matrix:
99            parameters:
100              pythonVersion:
101                - "python39"
102                - "python310"
103                - "python311"
104
105      # Eventually, test against PyPy 3.8
106      #- "pypy27-buster":
107      #    {}
108
109      # Other assorted tasks and configurations
110      - "codechecks":
111          {}
112      - "pyinstaller":
113          {}
114      - "c-locale":
115          {}
116      # Any locale other than C or UTF-8.
117      - "another-locale":
118          {}
119
120      - "windows-server-2022":
121          name: "Windows Server 2022, CPython <<matrix.pythonVersion>>"
122          matrix:
123            parameters:
124              # Run the job for a number of CPython versions.  These are the
125              # two versions installed on the version of the Windows VM image
126              # we specify (in the executor).  This is handy since it means we
127              # don't have to do any Python installation work.  We pin the
128              # Windows VM image so these shouldn't shuffle around beneath us
129              # but if we want to update that image or get different versions
130              # of Python, we probably have to do something here.
131              pythonVersion:
132                - "3.9"
133                - "3.12"
134
135      - "integration":
136          # Run even the slow integration tests here.  We need the `--` to
137          # sneak past tox and get to pytest.
138          tox-args: "-- --runslow integration"
139          requires:
140            # If the unit test suite doesn't pass, don't bother running the
141            # integration tests.
142            - "debian-11"
143
144      - "typechecks":
145          {}
146      - "docs":
147          {}
148
149      - "finish-coverage-report":
150          requires:
151            # Referencing the job by "alias" (as CircleCI calls the mapping
152            # key) instead of the value of its "name" property causes us to
153            # require every instance of the job from its matrix expansion.  So
154            # this requirement is enough to require every Windows Server 2022
155            # job.
156            - "windows-server-2022"
157
158  images:
159    <<: *IMAGES
160
161    # Build as part of the workflow but only if requested.
162    when: "<< pipeline.parameters.build-images >>"
163
164jobs:
165  finish-coverage-report:
166    docker:
167      - <<: *DOCKERHUB_AUTH
168        image: "python:3-slim"
169
170    steps:
171      - run:
172          name: "Indicate completion to coveralls.io"
173          environment:
174            <<: *COVERALLS_ENVIRONMENT
175          command: |
176            pip install coveralls==3.3.1
177            python -m coveralls --finish
178
179  codechecks:
180    docker:
181      - <<: *DOCKERHUB_AUTH
182        image: "cimg/python:3.9"
183
184    steps:
185      - "checkout"
186
187      - run: &INSTALL_TOX
188          name: "Install tox"
189          command: |
190            pip install --user 'tox~=3.0'
191
192      - run:
193          name: "Static-ish code checks"
194          command: |
195            ~/.local/bin/tox -e codechecks
196
197  windows-server-2022:
198    parameters:
199      pythonVersion:
200        description: >-
201          An argument to pass to the `py` launcher to choose a Python version.
202        type: "string"
203        default: ""
204
205    executor: "windows"
206    environment:
207      # Tweak Hypothesis to make its behavior more suitable for the CI
208      # environment.  This should improve reproducibility and lessen the
209      # effects of variable compute resources.
210      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
211
212      # Tell pip where its download cache lives.  This must agree with the
213      # "save_cache" step below or caching won't really work right.
214      PIP_CACHE_DIR: "pip-cache"
215
216      # And tell pip where it can find our cached wheelhouse for fast wheel
217      # installation, even for projects that don't distribute wheels.  This
218      # must also agree with the "save_cache" step below.
219      PIP_FIND_LINKS: "wheelhouse"
220
221    steps:
222      - "checkout"
223
224      # If possible, restore a pip download cache to save us from having to
225      # download all our Python dependencies from PyPI.
226      - "restore_cache":
227          keys:
228            # The download cache and/or the wheelhouse may contain Python
229            # version-specific binary packages so include the Python version
230            # in this key, as well as the canonical source of our
231            # dependencies.
232            - &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"setup.py\" }}"
233
234      - "run":
235          name: "Fix $env:PATH"
236          command: |
237            # The Python this job is parameterized on is not necessarily the one
238            # at the front of $env:PATH.  Modify $env:PATH so that it is so we
239            # can just say "python" in the rest of the steps.  Also get the
240            # related Scripts directory so tools from packages we install are
241            # also available.
242            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
243            $q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"
244
245            New-Item $Profile.CurrentUserAllHosts -Force
246            # $p gets "python" on PATH and $q gets tools from packages we
247            # install.  Note we carefully construct the string so that
248            # $env:PATH is not substituted now but $p and $q are.  ` is the
249            # PowerShell string escape character.
250            Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""
251
252      - "run":
253          # It's faster to install a wheel than a source package.  If we don't
254          # have a cached wheelhouse then build all of the wheels and dump
255          # them into a directory where they can become a cached wheelhouse.
256          # We would have built these wheels during installation anyway so it
257          # doesn't cost us anything extra and saves us effort next time.
258          name: "(Maybe) Build Wheels"
259          command: |
260            python -m pip install setuptools # Some Pythons for Windows do not come with setuptools
261            python setup.py update_version # Cheat to win a race about writing _version.py
262
263            if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) {
264              echo "Found populated wheelhouse, skipping wheel building."
265            } else {
266              python -m pip install wheel
267              python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test]
268            }
269
270      - "save_cache":
271          paths:
272            # Make sure this agrees with PIP_CACHE_DIR in the environment.
273            - "pip-cache"
274            - "wheelhouse"
275          key: *CACHE_KEY
276
277      - "run":
278          name: "Install Dependencies"
279          environment:
280            # By this point we should no longer need an index.
281            PIP_NO_INDEX: "1"
282          command: |
283            python -m pip install .[testenv] .[test]
284
285      - "run":
286          name: "Display tool versions"
287          command: |
288            python misc/build_helpers/show-tool-versions.py
289
290      - "run":
291          name: "Run Unit Tests"
292          environment:
293            # Configure the results location for the subunitv2-file reporter
294            # from subunitreporter
295            SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2"
296
297            # Try to get prompt output from the reporter to avoid no-output
298            # timeouts.
299            PYTHONUNBUFFERED: "1"
300
301          command: |
302            # Run the test suite under coverage measurement using the
303            # parameterized version of Python, writing subunitv2-format
304            # results to the file given in the environment.
305            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
306
307      - "run":
308          name: "Upload Coverage"
309          environment:
310            <<: *COVERALLS_ENVIRONMENT
311            # Mark the data as just one piece of many because we have more
312            # than one instance of this job (two on Windows now, some on other
313            # platforms later) which collects and reports coverage.  This is
314            # necessary to cause Coveralls to merge multiple coverage results
315            # into a single report.  Note the merge only happens when we
316            # "finish" a particular build, as identified by its "build_num"
317            # (aka "service_number").
318            COVERALLS_PARALLEL: "true"
319          command: |
320            python -m pip install coveralls==3.3.1
321
322            # .coveragerc sets parallel = True so we don't have a `.coverage`
323            # file but a `.coverage.<unique stuff>` file (or maybe more than
324            # one, but probably not).  coveralls can't work with these so
325            # merge them before invoking it.
326            python -m coverage combine
327
328            # Now coveralls will be able to find the data, so have it do the
329            # upload.  Also, have it strip the system config-specific prefix
330            # from all of the source paths.
331            $prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))"
332            python -m coveralls --basedir $prefix
333
334      - "run":
335          name: "Convert Result Log"
336          command: |
337            # subunit2junitxml exits with error if the result stream it is
338            # converting has test failures in it!  So this step might fail.
339            # Since the step in which we actually _ran_ the tests won't fail
340            # even if there are test failures, this is a good thing for now.
341            subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2
342
343      - "store_test_results":
344          path: "test-results.xml"
345
346      - "store_artifacts":
347          path: "_trial_temp/test.log"
348
349      - "store_artifacts":
350          path: "eliot.log"
351
352      - "store_artifacts":
353          path: ".coverage"
354
355  pyinstaller:
356    docker:
357      - <<: *DOCKERHUB_AUTH
358        image: "cimg/python:3.9"
359
360    steps:
361      - "checkout"
362
363      - run:
364          <<: *INSTALL_TOX
365
366      - run:
367          name: "Make PyInstaller executable"
368          command: |
369            ~/.local/bin/tox -e pyinstaller
370
371      - run:
372          # To verify that the resultant PyInstaller-generated binary executes
373          # cleanly (i.e., that it terminates with an exit code of 0 and isn't
374          # failing due to import/packaging-related errors, etc.).
375          name: "Test PyInstaller executable"
376          command: |
377            dist/Tahoe-LAFS/tahoe --version
378
379  debian-11: &DEBIAN
380    environment: &UTF_8_ENVIRONMENT
381      # In general, the test suite is not allowed to fail while the job
382      # succeeds.  But you can set this to "yes" if you want it to be
383      # otherwise.
384      ALLOWED_FAILURE: "no"
385      # Tell Hypothesis which configuration we want it to use.
386      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
387      # Tell the C runtime things about character encoding (mainly to do with
388      # filenames and argv).
389      LANG: "en_US.UTF-8"
390      # Select a tox environment to run for this job.
391      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
392      # Additional arguments to pass to tox.
393      TAHOE_LAFS_TOX_ARGS: ""
394      # The path in which test artifacts will be placed.
395      ARTIFACTS_OUTPUT_PATH: "/tmp/artifacts"
396      # Convince all of our pip invocations to look at the cached wheelhouse
397      # we maintain.
398      WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
399      PIP_FIND_LINKS: "file:///tmp/wheelhouse"
400      # Upload the coverage report.
401      UPLOAD_COVERAGE: ""
402
403    # pip cannot install packages if the working directory is not readable.
404    # We want to run a lot of steps as nobody instead of as root.
405    working_directory: "/tmp/project"
406
407    steps:
408      - "checkout"
409      - run: &SETUP_VIRTUALENV
410          name: "Setup virtualenv"
411          command: |
412            /tmp/project/.circleci/setup-virtualenv.sh \
413                "/tmp/venv" \
414                "/tmp/project" \
415                "${WHEELHOUSE_PATH}" \
416                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
417                "${TAHOE_LAFS_TOX_ARGS}"
418
419      - run: &RUN_TESTS
420          name: "Run test suite"
421          command: |
422            /tmp/project/.circleci/run-tests.sh \
423                "/tmp/venv" \
424                "/tmp/project" \
425                "${ALLOWED_FAILURE}" \
426                "${ARTIFACTS_OUTPUT_PATH}" \
427                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
428                "${TAHOE_LAFS_TOX_ARGS}"
429          # trial output gets directed straight to a log.  avoid the circleci
430          # timeout while the test suite runs.
431          no_output_timeout: "20m"
432
433      - store_test_results: &STORE_TEST_RESULTS
434          path: "/tmp/artifacts/junit"
435
436      - store_artifacts: &STORE_TEST_LOG
437          # Despite passing --workdir /tmp to tox above, it still runs trial
438          # in the project source checkout.
439          path: "/tmp/project/_trial_temp/test.log"
440
441      - store_artifacts: &STORE_ELIOT_LOG
442          # Despite passing --workdir /tmp to tox above, it still runs trial
443          # in the project source checkout.
444          path: "/tmp/project/eliot.log"
445
446      - store_artifacts: &STORE_OTHER_ARTIFACTS
447          # Store any other artifacts, too.  This is handy to allow other jobs
448          # sharing most of the definition of this one to be able to
449          # contribute artifacts easily.
450          path: "/tmp/artifacts"
451
452      - run: &SUBMIT_COVERAGE
453          name: "Submit coverage results"
454          command: |
455            if [ -n "${UPLOAD_COVERAGE}" ]; then
456              echo "TODO: Need a new coverage solution, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4011"
457            fi
458
459    docker:
460      - <<: *DOCKERHUB_AUTH
461        image: "tahoelafsci/debian:11-py3.9"
462        user: "nobody"
463
464
465  # Restore later using PyPy3.8
466  # pypy27-buster:
467  #   <<: *DEBIAN
468  #   docker:
469  #     - <<: *DOCKERHUB_AUTH
470  #       image: "tahoelafsci/pypy:buster-py2"
471  #       user: "nobody"
472  #   environment:
473  #     <<: *UTF_8_ENVIRONMENT
474  #     # We don't do coverage since it makes PyPy far too slow:
475  #     TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
476  #     # Since we didn't collect it, don't upload it.
477  #     UPLOAD_COVERAGE: ""
478
479  c-locale:
480    <<: *DEBIAN
481
482    environment:
483      <<: *UTF_8_ENVIRONMENT
484      LANG: "C"
485
486
487  another-locale:
488    <<: *DEBIAN
489
490    environment:
491      <<: *UTF_8_ENVIRONMENT
492      # aka "Latin 1"
493      LANG: "en_US.ISO-8859-1"
494
495  integration:
496    <<: *DEBIAN
497
498    parameters:
499      tox-args:
500        description: >-
501          Additional arguments to pass to the tox command.
502        type: "string"
503        default: ""
504
505    docker:
506      - <<: *DOCKERHUB_AUTH
507        image: "tahoelafsci/debian:11-py3.9"
508        user: "nobody"
509
510    environment:
511      <<: *UTF_8_ENVIRONMENT
512      # Select the integration tests tox environments.
513      TAHOE_LAFS_TOX_ENVIRONMENT: "integration"
514      # Disable artifact collection because py.test can't produce any.
515      ARTIFACTS_OUTPUT_PATH: ""
516
517      # Pass on anything we got in our parameters.
518      TAHOE_LAFS_TOX_ARGS: "<< parameters.tox-args >>"
519
520    steps:
521      - "checkout"
522      # DRY, YAML-style.  See the debian-11 steps.
523      - run: *SETUP_VIRTUALENV
524      - run: *RUN_TESTS
525
526  ubuntu-20-04:
527    <<: *DEBIAN
528    docker:
529      - <<: *DOCKERHUB_AUTH
530        image: "tahoelafsci/ubuntu:20.04-py3.9"
531        user: "nobody"
532    environment:
533      <<: *UTF_8_ENVIRONMENT
534      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"
535
536  ubuntu-22-04:
537    <<: *DEBIAN
538    docker:
539      - <<: *DOCKERHUB_AUTH
540        image: "tahoelafsci/ubuntu:22.04-py3.10"
541        user: "nobody"
542    environment:
543      <<: *UTF_8_ENVIRONMENT
544      TAHOE_LAFS_TOX_ENVIRONMENT: "py310"
545
546  oraclelinux-8: &RHEL_DERIV
547    docker:
548      - <<: *DOCKERHUB_AUTH
549        image: "tahoelafsci/oraclelinux:8-py3.8"
550        user: "nobody"
551
552    environment:
553      <<: *UTF_8_ENVIRONMENT
554      TAHOE_LAFS_TOX_ENVIRONMENT: "py38"
555
556    # pip cannot install packages if the working directory is not readable.
557    # We want to run a lot of steps as nobody instead of as root.
558    working_directory: "/tmp/project"
559
560    steps:
561      - "checkout"
562      - run: *SETUP_VIRTUALENV
563      - run: *RUN_TESTS
564      - store_test_results: *STORE_TEST_RESULTS
565      - store_artifacts: *STORE_TEST_LOG
566      - store_artifacts: *STORE_ELIOT_LOG
567      - store_artifacts: *STORE_OTHER_ARTIFACTS
568      - run: *SUBMIT_COVERAGE
569
570  fedora-35:
571    <<: *RHEL_DERIV
572    docker:
573      - <<: *DOCKERHUB_AUTH
574        image: "tahoelafsci/fedora:35-py3"
575        user: "nobody"
576
577  nixos:
578    parameters:
579      nixpkgs:
580        description: >-
581          Reference the name of a flake-managed nixpkgs input (see `nix flake
582          metadata` and flake.nix)
583        type: "string"
584      pythonVersion:
585        description: >-
586          Reference the name of a Python package in nixpkgs to use.
587        type: "string"
588
589    executor: "nix"
590
591    steps:
592      - "nix-build":
593          nixpkgs: "<<parameters.nixpkgs>>"
594          pythonVersion: "<<parameters.pythonVersion>>"
595          buildSteps:
596            - "run":
597                name: "Unit Test"
598                command: |
599                  source .circleci/lib.sh
600
601                  # Translate the nixpkgs selection into a flake reference we
602                  # can use to override the default nixpkgs input.
603                  NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
604
605                  cache_if_able nix run \
606                    --override-input nixpkgs "$NIXPKGS" \
607                    .#<<parameters.pythonVersion>>-unittest -- \
608                    --jobs $UNITTEST_CORES \
609                    allmydata
610
611  typechecks:
612    docker:
613      - <<: *DOCKERHUB_AUTH
614        image: "tahoelafsci/ubuntu:20.04-py3.9"
615
616    steps:
617      - "checkout"
618      - run:
619          name: "Validate Types"
620          command: |
621            /tmp/venv/bin/tox -e typechecks
622
623  docs:
624    docker:
625      - <<: *DOCKERHUB_AUTH
626        image: "tahoelafsci/ubuntu:20.04-py3.9"
627
628    steps:
629      - "checkout"
630      - run:
631          name: "Build documentation"
632          command: |
633            /tmp/venv/bin/tox -e docs
634
635  build-image: &BUILD_IMAGE
636    # This is a template for a job to build a Docker image that has as much of
637    # the setup as we can manage already done and baked in.  This cuts down on
638    # the per-job setup time the actual testing jobs have to perform - by
639    # perhaps 10% - 20%.
640    #
641    # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
642    docker:
643      - <<: *DOCKERHUB_AUTH
644        # CircleCI build images; https://github.com/CircleCI-Public/cimg-base
645        # for details.
646        image: "cimg/base:2022.01"
647
648    environment:
      # These are placeholder values only; every concrete build-image-* job
      # below overrides all three keys with a real distro name, image tag,
      # and Python version.  The <...> markers just document the expected
      # shape of each value.
649      DISTRO: "tahoelafsci/<DISTRO>:foo-py3.9"
650      TAG: "tahoelafsci/distro:<TAG>-py3.9"
651      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION>"
652
653    steps:
654      - "checkout"
655      - setup_remote_docker:
656          version: "20.10.11"
657      - run:
658          name: "Log in to Dockerhub"
659          command: |
660            docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
661      - run:
662          name: "Build image"
663          command: |
664            docker \
665                build \
666                --build-arg TAG=${TAG} \
667                --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
668                -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
669                -f ~/project/.circleci/Dockerfile.${DISTRO} \
670                ~/project/
671      - run:
672          name: "Push image"
673          command: |
674            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
675
676
677  build-image-debian-11:
678    <<: *BUILD_IMAGE
679
680    environment:
681      DISTRO: "debian"
682      TAG: "11"
683      PYTHON_VERSION: "3.9"
684
685
686  build-image-ubuntu-20-04:
687    <<: *BUILD_IMAGE
688
689    environment:
690      DISTRO: "ubuntu"
691      TAG: "20.04"
692      PYTHON_VERSION: "3.9"
693
694
695  build-image-ubuntu-22-04:
696    <<: *BUILD_IMAGE
697
698    environment:
699      DISTRO: "ubuntu"
700      TAG: "22.04"
701      PYTHON_VERSION: "3.10"
702
703
704  build-image-oraclelinux-8:
705    <<: *BUILD_IMAGE
706
707    environment:
708      DISTRO: "oraclelinux"
709      TAG: "8"
710      PYTHON_VERSION: "3.8"
711
712  build-image-fedora-35:
713    <<: *BUILD_IMAGE
714
715    environment:
716      DISTRO: "fedora"
717      TAG: "35"
718      PYTHON_VERSION: "3"
719
720  # build-image-pypy27-buster:
721  #   <<: *BUILD_IMAGE
722  #   environment:
723  #     DISTRO: "pypy"
724  #     TAG: "buster"
725  #     # We only have Python 2 for PyPy right now so there's no support for
726  #     # setting up PyPy 3 in the image building toolchain.  This value is just
727  #     # for constructing the right Docker image tag.
728  #     PYTHON_VERSION: "2"
729
730executors:
731  windows:
732    # Choose a Windows environment that closest matches our testing
733    # requirements and goals.
734    # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
735    machine:
736      image: "windows-server-2022-gui:current"
737      shell: "powershell.exe -ExecutionPolicy Bypass"
738    resource_class: "windows.large"
739
740  nix:
741    docker:
742      # Run in a highly Nix-capable environment.
743      - <<: *DOCKERHUB_AUTH
744        image: "nixos/nix:2.16.1"
745    environment:
746      # CACHIX_AUTH_TOKEN is manually set in the CircleCI web UI and allows us
747      # to push to CACHIX_NAME.  CACHIX_NAME tells cachix which cache to push
748      # to.
749      CACHIX_NAME: "tahoe-lafs-opensource"
750      # Let us use features marked "experimental".  For example, most/all of
751      # the `nix <subcommand>` forms.
752      NIX_CONFIG: "experimental-features = nix-command flakes"
753
754commands:
755  nix-build:
756    parameters:
757      nixpkgs:
758        description: >-
759          Reference the name of a flake-managed nixpkgs input (see `nix flake
760          metadata` and flake.nix)
761        type: "string"
762      pythonVersion:
763        description: >-
764          Reference the name of a Python package in nixpkgs to use.
765        type: "string"
766      buildSteps:
767        description: >-
768          The build steps to execute after setting up the build environment.
769        type: "steps"
770
771    steps:
772      - "run":
773          # Get cachix for Nix-friendly caching.
774          name: "Install Basic Dependencies"
775          command: |
776            # Get some build environment dependencies and let them float on a
777            # certain release branch.  These aren't involved in the actual
778            # package build (only in CI environment setup) so the fact that
779            # they float shouldn't hurt reproducibility.
780            NIXPKGS="nixpkgs/nixos-23.05"
781            nix profile install $NIXPKGS#cachix $NIXPKGS#bash $NIXPKGS#jp
782
783            # Activate our cachix cache for "binary substitution".  This sets
784            # up configuration that lets Nix download something from the cache
785            # instead of building it locally, if possible.
786            cachix use "${CACHIX_NAME}"
787
788      - "checkout"
789
790      - "run":
791          # The Nix package doesn't know how to do this part, unfortunately.
792          name: "Generate version"
793          command: |
794            nix-shell \
795              -p 'python3.withPackages (ps: [ ps.setuptools ])' \
796              --run 'python setup.py update_version'
797
798      - "run":
799          name: "Build Package"
800          command: |
801            source .circleci/lib.sh
802            NIXPKGS=$(nixpkgs_flake_reference <<parameters.nixpkgs>>)
803            cache_if_able nix build \
804              --verbose \
805              --print-build-logs \
806              --cores "$DEPENDENCY_CORES" \
807              --override-input nixpkgs "$NIXPKGS" \
808              .#<<parameters.pythonVersion>>-tahoe-lafs
809
810      - steps: "<<parameters.buildSteps>>"
Note: See TracBrowser for help on using the repository browser.