source: trunk/.circleci/config.yml

Last change on this file was 50ecf30, checked in by Florian Sesser <florian@…>, at 2025-01-24T15:11:20Z

Cleanup / Less defensive programming.

If these assumptions fail the build *should* fail.

  • Property mode set to 100644
File size: 26.8 KB
Line 
1# https://circleci.com/docs/2.0/
2
3# We use version 2.1 of CircleCI's configuration format (the docs are still at
4# the 2.0 link) in order to have access to Windows executors. This means we
5# can't use dots in job names anymore. They have a new "parameters" feature
6# that is supposed to remove the need to have version numbers in job names (the
7# source of our dots), but switching to that is going to be a bigger refactor:
8#
9#   https://discuss.circleci.com/t/v2-1-job-name-validation/31123
10#   https://circleci.com/docs/2.0/reusing-config/
11#
version: 2.1

# Every job that pushes a Docker image from Docker Hub must authenticate to
# it.  Define a couple yaml anchors that can be used to supply the necessary
# credentials.

# First is a CircleCI job context which makes Docker Hub credentials available
# in the environment.
#
# Contexts are managed in the CircleCI web interface:
#
#  https://app.circleci.com/settings/organization/github/tahoe-lafs/contexts
dockerhub-context-template: &DOCKERHUB_CONTEXT
  context: "dockerhub-auth"

# Required environment for using the coveralls tool to upload partial coverage
# reports and then finish the process.
#
# NOTE(review): this repo token is committed to version control in plain
# text, so anyone with read access to the repository can upload coverage
# results as this project.  Consider moving it into a CircleCI context or
# project environment variable.
coveralls-environment: &COVERALLS_ENVIRONMENT
  COVERALLS_REPO_TOKEN: "JPf16rLB7T2yjgATIxFzTsEgMdN1UNq6o"

# Next is a Docker executor template that gets the credentials from the
# environment and supplies them to the executor.
dockerhub-auth-template: &DOCKERHUB_AUTH
  - auth:
      username: $DOCKERHUB_USERNAME
      password: $DOCKERHUB_PASSWORD
# A template that can be shared between the two different image-building
# workflows.
.images: &IMAGES
  jobs:
    # Every image-build job pushes to Docker Hub, so each needs the
    # credentials context defined above.
    - "build-image-debian-12":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-ubuntu-20-04":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-ubuntu-22-04":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-ubuntu-24-04":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-oraclelinux-8":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-fedora-35":
        <<: *DOCKERHUB_CONTEXT
    - "build-image-fedora-40":
        <<: *DOCKERHUB_CONTEXT
    # Restore later as PyPy38
    #- "build-image-pypy27-buster":
    #    <<: *DOCKERHUB_CONTEXT
60
# Pipeline parameters.  These can be set per-pipeline when triggering via the
# CircleCI API or web UI; the workflows below key their "when" conditions off
# of them.
parameters:
  # Control whether the image-building workflow runs as part of this pipeline.
  # Generally we do not want this to run because we don't need our
  # dependencies to move around all the time and because building the image
  # takes a couple minutes.
  #
  # An easy way to trigger a pipeline with this set to true is with the
  # rebuild-images.sh tool in this directory.  You can also do so via the
  # CircleCI web UI.
  build-images:
    default: false
    type: "boolean"

  # Control whether the test-running workflow runs as part of this pipeline.
  # Generally we do want this to run because running the tests is the primary
  # purpose of this pipeline.
  run-tests:
    default: true
    type: "boolean"
80
workflows:
  ci:
    # Run the test jobs unless the run-tests pipeline parameter was set false.
    when: "<< pipeline.parameters.run-tests >>"
    jobs:
      # Start with jobs testing various platforms.
      - "debian-12":
          {}

      - "ubuntu-20-04":
          {}

      - "ubuntu-22-04":
          {}

      - "nixos":
          name: "<<matrix.nixpkgs>>-<<matrix.pythonVersion>>"
          matrix:
            parameters:
              nixpkgs:
                - "nixpkgs-24_11"
              pythonVersion:
                - "python311"
                - "python312"

      # Eventually, test against PyPy 3.8
      #- "pypy27-buster":
      #    {}

      # Other assorted tasks and configurations
      - "codechecks":
          {}
      - "pyinstaller":
          {}
      - "c-locale":
          {}
      # Any locale other than C or UTF-8.
      - "another-locale":
          {}

      # Test our sources with the packaging for Debian 13 (Trixie)
      # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4150
      - "debian-13-package":
          {}

      - "windows-server-2022":
          name: "Windows Server 2022, CPython <<matrix.pythonVersion>>"
          matrix:
            parameters:
              # Run the job for a number of CPython versions.  These are the
              # two versions installed on the version of the Windows VM image
              # we specify (in the executor).  This is handy since it means we
              # don't have to do any Python installation work.  We pin the
              # Windows VM image so these shouldn't shuffle around beneath us
              # but if we want to update that image or get different versions
              # of Python, we probably have to do something here.
              pythonVersion:
                - "3.9"
                - "3.12"

      - "integration":
          # attempt to ignore failures from this build, since it
          # usually does (on one of the test_tor.py integration
          # tests). This works locally with "tox 4.21.0" but not on
          # the container.
          tox-args: "-x testenv.integration.ignore_outcome=True -- integration"
          requires:
            # If the unit test suite doesn't pass, don't bother running the
            # integration tests.
            - "debian-12"

      - "typechecks":
          {}
      - "docs":
          {}

      - "finish-coverage-report":
          requires:
            # Referencing the job by "alias" (as CircleCI calls the mapping
            # key) instead of the value of its "name" property causes us to
            # require every instance of the job from its matrix expansion.  So
            # this requirement is enough to require every Windows Server 2022
            # job.
            - "windows-server-2022"

  images:
    # Pull in the shared image-building job list defined by the IMAGES anchor.
    <<: *IMAGES

    # Build as part of the workflow but only if requested.
    when: "<< pipeline.parameters.build-images >>"
170
jobs:
  finish-coverage-report:
    # Tell coveralls.io that all of this pipeline's parallel coverage uploads
    # are complete so it can merge them into a single report.
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "python:3-slim"

    steps:
      - run:
          name: "Indicate completion to coveralls.io"
          environment:
            <<: *COVERALLS_ENVIRONMENT
          command: |
            pip install coveralls==3.3.1
            python -m coveralls --finish
185
  codechecks:
    # Run the lint-style checks defined by the "codechecks" tox environment.
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "cimg/python:3.9"

    steps:
      - "checkout"

      # This step is anchored so other jobs (e.g. pyinstaller) can reuse it.
      - run: &INSTALL_TOX
          name: "Install tox"
          command: |
            pip install --user 'tox~=3.0'

      - run:
          name: "Static-ish code checks"
          command: |
            ~/.local/bin/tox -e codechecks
203
204  windows-server-2022:
205    parameters:
206      pythonVersion:
207        description: >-
208          An argument to pass to the `py` launcher to choose a Python version.
209        type: "string"
210        default: ""
211
212    executor: "windows"
213    environment:
214      # Tweak Hypothesis to make its behavior more suitable for the CI
215      # environment.  This should improve reproducibility and lessen the
216      # effects of variable compute resources.
217      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
218
219      # Tell pip where its download cache lives.  This must agree with the
220      # "save_cache" step below or caching won't really work right.
221      PIP_CACHE_DIR: "pip-cache"
222
223      # And tell pip where it can find out cached wheelhouse for fast wheel
224      # installation, even for projects that don't distribute wheels.  This
225      # must also agree with the "save_cache" step below.
226      PIP_FIND_LINKS: "wheelhouse"
227
228    steps:
229      - "checkout"
230
231      # If possible, restore a pip download cache to save us from having to
232      # download all our Python dependencies from PyPI.
233      - "restore_cache":
234          keys:
235            # The download cache and/or the wheelhouse may contain Python
236            # version-specific binary packages so include the Python version
237            # in this key, as well as the canonical source of our
238            # dependencies.
239            - &CACHE_KEY "pip-packages-v1-<< parameters.pythonVersion >>-{{ checksum \"pyproject.toml\" }}"
240
241      - "run":
242          name: "Fix $env:PATH"
243          command: |
244            # The Python this job is parameterized is not necessarily the one
245            # at the front of $env:PATH.  Modify $env:PATH so that it is so we
246            # can just say "python" in the rest of the steps.  Also get the
247            # related Scripts directory so tools from packages we install are
248            # also available.
249            $p = py -<<parameters.pythonVersion>> -c "import sys; print(sys.prefix)"
250            $q = py -<<parameters.pythonVersion>> -c "import sysconfig; print(sysconfig.get_path('scripts'))"
251
252            New-Item $Profile.CurrentUserAllHosts -Force
253            # $p gets "python" on PATH and $q gets tools from packages we
254            # install.  Note we carefully construct the string so that
255            # $env:PATH is not substituted now but $p and $q are.  ` is the
256            # PowerShell string escape character.
257            Add-Content -Path $Profile.CurrentUserAllHosts -Value "`$env:PATH = `"$p;$q;`$env:PATH`""
258
259      - "run":
260          # It's faster to install a wheel than a source package.  If we don't
261          # have a cached wheelhouse then build all of the wheels and dump
262          # them into a directory where they can become a cached wheelhouse.
263          # We would have built these wheels during installation anyway so it
264          # doesn't cost us anything extra and saves us effort next time.
265          name: "(Maybe) Build Wheels"
266          command: |
267            if ((Test-Path .\wheelhouse) -and (Test-Path .\wheelhouse\*)) {
268              echo "Found populated wheelhouse, skipping wheel building."
269            } else {
270              python -m pip install wheel
271              python -m pip wheel --wheel-dir $env:PIP_FIND_LINKS .[testenv] .[test]
272            }
273
274      - "save_cache":
275          paths:
276            # Make sure this agrees with PIP_CACHE_DIR in the environment.
277            - "pip-cache"
278            - "wheelhouse"
279          key: *CACHE_KEY
280
281      - "run":
282          name: "Install Dependencies"
283          environment:
284            # By this point we should no longer need an index.
285##            PIP_NO_INDEX: "1"
286          command: |
287            python -m pip install .[testenv] .[test]
288
289      - "run":
290          name: "Display tool versions"
291          command: |
292            python misc/build_helpers/show-tool-versions.py
293
294      - "run":
295          name: "Run Unit Tests"
296          environment:
297            # Configure the results location for the subunitv2-file reporter
298            # from subunitreporter
299            SUBUNITREPORTER_OUTPUT_PATH: "test-results.subunit2"
300
301            # Try to get prompt output from the reporter to avoid no-output
302            # timeouts.
303            PYTHONUNBUFFERED: "1"
304
305          command: |
306            # Run the test suite under coverage measurement using the
307            # parameterized version of Python, writing subunitv2-format
308            # results to the file given in the environment.
309            python -b -m coverage run -m twisted.trial --reporter=subunitv2-file --rterrors allmydata
310
311      - "run":
312          name: "Upload Coverage"
313          environment:
314            <<: *COVERALLS_ENVIRONMENT
315            # Mark the data as just one piece of many because we have more
316            # than one instance of this job (two on Windows now, some on other
317            # platforms later) which collects and reports coverage.  This is
318            # necessary to cause Coveralls to merge multiple coverage results
319            # into a single report.  Note the merge only happens when we
320            # "finish" a particular build, as identified by its "build_num"
321            # (aka "service_number").
322            COVERALLS_PARALLEL: "true"
323          command: |
324            python -m pip install coveralls==3.3.1
325
326            # .coveragerc sets parallel = True so we don't have a `.coverage`
327            # file but a `.coverage.<unique stuff>` file (or maybe more than
328            # one, but probably not).  coveralls can't work with these so
329            # merge them before invoking it.
330            python -m coverage combine
331
332            # Now coveralls will be able to find the data, so have it do the
333            # upload.  Also, have it strip the system config-specific prefix
334            # from all of the source paths.
335            $prefix = python -c "import sysconfig; print(sysconfig.get_path('purelib'))"
336            python -m coveralls --basedir $prefix
337
338      - "run":
339          name: "Convert Result Log"
340          command: |
341            # subunit2junitxml exits with error if the result stream it is
342            # converting has test failures in it!  So this step might fail.
343            # Since the step in which we actually _ran_ the tests won't fail
344            # even if there are test failures, this is a good thing for now.
345            subunit2junitxml.exe --output-to=test-results.xml test-results.subunit2
346
347      - "store_test_results":
348          path: "test-results.xml"
349
350      - "store_artifacts":
351          path: "_trial_temp/test.log"
352
353      - "store_artifacts":
354          path: "eliot.log"
355
356      - "store_artifacts":
357          path: ".coverage"
358
  pyinstaller:
    # Build a standalone Tahoe-LAFS binary with PyInstaller and smoke-test it.
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "cimg/python:3.9"

    steps:
      - "checkout"

      # Reuse the "Install tox" step anchored in the codechecks job.
      - run:
          <<: *INSTALL_TOX

      - run:
          name: "Make PyInstaller executable"
          command: |
            ~/.local/bin/tox -e pyinstaller

      - run:
          # To verify that the resultant PyInstaller-generated binary executes
          # cleanly (i.e., that it terminates with an exit code of 0 and isn't
          # failing due to import/packaging-related errors, etc.).
          name: "Test PyInstaller executable"
          command: |
            dist/Tahoe-LAFS/tahoe --version
382
  debian-12: &DEBIAN
    # Run the unit test suite on Debian 12.  This job also serves, via the
    # DEBIAN anchor, as the template for most of the other Linux test jobs;
    # several of its steps and its environment are anchored for reuse below.
    environment: &UTF_8_ENVIRONMENT
      # In general, the test suite is not allowed to fail while the job
      # succeeds.  But you can set this to "yes" if you want it to be
      # otherwise.
      ALLOWED_FAILURE: "no"
      # Tell Hypothesis which configuration we want it to use.
      TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
      # Tell the C runtime things about character encoding (mainly to do with
      # filenames and argv).
      LANG: "en_US.UTF-8"
      # Select a tox environment to run for this job.
      TAHOE_LAFS_TOX_ENVIRONMENT: "py311"
      # Additional arguments to pass to tox.
      TAHOE_LAFS_TOX_ARGS: ""
      # The path in which test artifacts will be placed.
      ARTIFACTS_OUTPUT_PATH: "/tmp/artifacts"
      # Convince all of our pip invocations to look at the cached wheelhouse
      # we maintain.
      WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
      PIP_FIND_LINKS: "file:///tmp/wheelhouse"
      # Upload the coverage report.
      UPLOAD_COVERAGE: ""

    # pip cannot install packages if the working directory is not readable.
    # We want to run a lot of steps as nobody instead of as root.
    working_directory: "/tmp/project"

    steps:
      - "checkout"
      - run: &SETUP_VIRTUALENV
          name: "Setup virtualenv"
          command: |
            /tmp/project/.circleci/setup-virtualenv.sh \
                "/tmp/venv" \
                "/tmp/project" \
                "${WHEELHOUSE_PATH}" \
                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
                "${TAHOE_LAFS_TOX_ARGS}"

      - run: &RUN_TESTS
          name: "Run test suite"
          command: |
            /tmp/project/.circleci/run-tests.sh \
                "/tmp/venv" \
                "/tmp/project" \
                "${ALLOWED_FAILURE}" \
                "${ARTIFACTS_OUTPUT_PATH}" \
                "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
                "${TAHOE_LAFS_TOX_ARGS}"
          # trial output gets directed straight to a log.  avoid the circleci
          # timeout while the test suite runs.
          no_output_timeout: "20m"

      - store_test_results: &STORE_TEST_RESULTS
          path: "/tmp/artifacts/junit"

      - store_artifacts: &STORE_TEST_LOG
          # Despite passing --workdir /tmp to tox above, it still runs trial
          # in the project source checkout.
          path: "/tmp/project/_trial_temp/test.log"

      - store_artifacts: &STORE_ELIOT_LOG
          # Despite passing --workdir /tmp to tox above, it still runs trial
          # in the project source checkout.
          path: "/tmp/project/eliot.log"

      - store_artifacts: &STORE_OTHER_ARTIFACTS
          # Store any other artifacts, too.  This is handy to allow other jobs
          # sharing most of the definition of this one to be able to
          # contribute artifacts easily.
          path: "/tmp/artifacts"

      - run: &SUBMIT_COVERAGE
          name: "Submit coverage results"
          command: |
            if [ -n "${UPLOAD_COVERAGE}" ]; then
              echo "TODO: Need a new coverage solution, see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/4011"
            fi

    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/debian:12-py3.11"
        user: "nobody"
467
468
469  # Restore later using PyPy3.8
470  # pypy27-buster:
471  #   <<: *DEBIAN
472  #   docker:
473  #     - <<: *DOCKERHUB_AUTH
474  #       image: "tahoelafsci/pypy:buster-py2"
475  #       user: "nobody"
476  #   environment:
477  #     <<: *UTF_8_ENVIRONMENT
478  #     # We don't do coverage since it makes PyPy far too slow:
479  #     TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
480  #     # Since we didn't collect it, don't upload it.
481  #     UPLOAD_COVERAGE: ""
482
  c-locale:
    # Same as debian-12 but with the C locale, to exercise handling of
    # non-UTF-8 character encodings.
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      LANG: "C"


  another-locale:
    # Same as debian-12 but with a locale that is neither C nor UTF-8.
    <<: *DEBIAN

    environment:
      <<: *UTF_8_ENVIRONMENT
      # aka "Latin 1"
      LANG: "en_US.ISO-8859-1"
498
  integration:
    # Run the integration test suite, reusing the debian-12 template.
    <<: *DEBIAN

    parameters:
      tox-args:
        description: >-
          Additional arguments to pass to the tox command.
        type: "string"
        default: ""

    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/debian:12-py3.11"
        user: "nobody"

    environment:
      <<: *UTF_8_ENVIRONMENT
      # Select the integration tests tox environments.
      TAHOE_LAFS_TOX_ENVIRONMENT: "integration"
      # Disable artifact collection because py.test can't produce any.
      ARTIFACTS_OUTPUT_PATH: ""

      # Pass on anything we got in our parameters.
      TAHOE_LAFS_TOX_ARGS: "<< parameters.tox-args >>"

    steps:
      - "checkout"
      # DRY, YAML-style.  See the debian-12 steps, where these step anchors
      # are defined.
      - run: *SETUP_VIRTUALENV
      - run: *RUN_TESTS
529
  ubuntu-20-04:
    # debian-12 template, but with the Ubuntu 20.04 image and its Python 3.9.
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:20.04-py3.9"
        user: "nobody"
    environment:
      <<: *UTF_8_ENVIRONMENT
      TAHOE_LAFS_TOX_ENVIRONMENT: "py39"

  ubuntu-22-04:
    # debian-12 template, but with the Ubuntu 22.04 image and its Python 3.10.
    <<: *DEBIAN
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:22.04-py3.10"
        user: "nobody"
    environment:
      <<: *UTF_8_ENVIRONMENT
      TAHOE_LAFS_TOX_ENVIRONMENT: "py310"
549
550  debian-13-package:
551    docker:
552      - <<: *DOCKERHUB_AUTH
553        image: "debian:trixie"
554    steps:
555      - run:
556          name: "Get latest Debian repository index and install essentials"
557          command: |
558            apt update
559            apt-get install --no-install-recommends --yes \
560              git \
561              ca-certificates
562      - run:
563          name: "Download sources for Tahoe-LAFS and @merkys Debian packaging"
564          command: |
565            mkdir /tmp/debian-13-package
566            cd /tmp/debian-13-package
567            # Take Tahoe-LAFS source
568            git clone https://github.com/tahoe-lafs/tahoe-lafs.git
569            # Take debian/ directory from Debian packaging repository
570            git clone https://salsa.debian.org/merkys/tahoe-lafs.git merkys-tahoe-lafs
571            cd tahoe-lafs/
572            ln -s ../merkys-tahoe-lafs/debian
573      - run:
574          name: "Install Tahoe-LAFS dependencies from the Debian repositories"
575          command: |
576            # 'apt-get install' dependencies from debian/control
577            # Via https://stackoverflow.com/a/47707412
578            apt-get install --no-install-recommends --yes $(awk '
579              /^(Build-)?Depends:/ || /^ / && deps {
580                sub(/^[^ ]+: /, "")
581                deps = 1
582                dep_str = dep_str ", " $0
583                next
584              }
585              { deps=0 }
586              END {
587                split(dep_str, dep_array, /[,|] */)
588                for (d in dep_array) {
589                  dep = dep_array[d]
590                  gsub(/[^a-z0-9_.+-].*$/, "", dep)
591                  if (dep && !seen[dep]++) print dep
592                }
593              }' /tmp/debian-13-package/tahoe-lafs/debian/control)
594      - run:
595          name: "Build & run tests"
596          command: |
597            cd /tmp/debian-13-package/tahoe-lafs/
598            make -f debian/rules binary
599      - run:
600          name: "Install the built package"
601          command: |
602            # Work around CircleCI not supporting globbing in store_artifacts
603            mkdir /tmp/debian-13-package/dist
604            mv /tmp/debian-13-package/tahoe-lafs_*.deb /tmp/debian-13-package/dist/
605            dpkg -i /tmp/debian-13-package/dist/tahoe-lafs_*.deb
606      - run:
607          name: "Run our newly system-wide installed tahoe"
608          command: tahoe --version
609      - store_artifacts:
610          path: /tmp/debian-13-package/dist
611
612  nixos:
613    parameters:
614      nixpkgs:
615        description: >-
616          Reference the name of a flake-managed nixpkgs input (see `nix flake
617          metadata` and flake.nix)
618        type: "string"
619      pythonVersion:
620        description: >-
621          Reference the name of a Python package in nixpkgs to use.
622        type: "string"
623
624    executor: "nix"
625
626    steps:
627      - "nix-build":
628          nixpkgs: "<<parameters.nixpkgs>>"
629          pythonVersion: "<<parameters.pythonVersion>>"
630          buildSteps:
631            - "run":
632                name: "Unit Test"
633                environment:
634                  # Once dependencies are built, we can allow some more concurrency for our own
635                  # test suite.
636                  UNITTEST_CORES: 8
637                command: |
638                  nix run \
639                    .#<<parameters.pythonVersion>>-unittest -- \
640                    --jobs $UNITTEST_CORES \
641                    allmydata
642
  typechecks:
    # Run the "typechecks" tox environment (static type validation).
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:20.04-py3.9"

    steps:
      - "checkout"
      - run:
          name: "Validate Types"
          # NOTE(review): /tmp/venv is presumably baked into the tahoelafsci
          # image by the image-building jobs below — confirm before switching
          # this job to a different image.
          command: |
            /tmp/venv/bin/tox -e typechecks

  docs:
    # Build the project documentation via the "docs" tox environment.
    docker:
      - <<: *DOCKERHUB_AUTH
        image: "tahoelafsci/ubuntu:20.04-py3.9"

    steps:
      - "checkout"
      - run:
          name: "Build documentation"
          command: |
            /tmp/venv/bin/tox -e docs
666
667  build-image: &BUILD_IMAGE
668    # This is a template for a job to build a Docker image that has as much of
669    # the setup as we can manage already done and baked in.  This cuts down on
670    # the per-job setup time the actual testing jobs have to perform - by
671    # perhaps 10% - 20%.
672    #
673    # https://circleci.com/blog/how-to-build-a-docker-image-on-circleci-2-0/
674    docker:
675      - <<: *DOCKERHUB_AUTH
676        # CircleCI build images; https://github.com/CircleCI-Public/cimg-base
677        # for details.
678        image: "cimg/base:2022.09"
679
680    environment:
681      DISTRO: "tahoelafsci/<DISTRO>:foo-py3.9"
682      TAG: "tahoelafsci/distro:<TAG>-py3.9"
683      PYTHON_VERSION: "tahoelafsci/distro:tag-py<PYTHON_VERSION}"
684
685    steps:
686      - "checkout"
687      - setup_remote_docker:
688          docker_layer_caching: true
689      - run:
690          name: "Log in to Dockerhub"
691          command: |
692            docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}
693      - run:
694          name: "Build image"
695          command: |
696            docker \
697                build \
698                --build-arg TAG=${TAG} \
699                --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
700                -t tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION} \
701                -f ~/project/.circleci/Dockerfile.${DISTRO} \
702                ~/project/
703      - run:
704          name: "Push image"
705          command: |
706            docker push tahoelafsci/${DISTRO}:${TAG}-py${PYTHON_VERSION}
707
708
  # Concrete image-building jobs.  Each one just fills in the BUILD_IMAGE
  # template's environment with the distribution, image tag, and Python
  # version to bake in.
  build-image-debian-12:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "debian"
      TAG: "12"
      PYTHON_VERSION: "3.11"

  build-image-ubuntu-20-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "20.04"
      PYTHON_VERSION: "3.9"


  build-image-ubuntu-22-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "22.04"
      PYTHON_VERSION: "3.10"

  build-image-ubuntu-24-04:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "ubuntu"
      TAG: "24.04"
      PYTHON_VERSION: "3.12"

  build-image-oraclelinux-8:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "oraclelinux"
      TAG: "8"
      PYTHON_VERSION: "3.9"

  build-image-fedora-35:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "fedora"
      TAG: "35"
      PYTHON_VERSION: "3"

  build-image-fedora-40:
    <<: *BUILD_IMAGE

    environment:
      DISTRO: "fedora"
      TAG: "40"
      PYTHON_VERSION: "3"
765
766  # build-image-pypy27-buster:
767  #   <<: *BUILD_IMAGE
768  #   environment:
769  #     DISTRO: "pypy"
770  #     TAG: "buster"
771  #     # We only have Python 2 for PyPy right now so there's no support for
772  #     # setting up PyPy 3 in the image building toolchain.  This value is just
773  #     # for constructing the right Docker image tag.
774  #     PYTHON_VERSION: "2"
775
# Executor definitions shared by the jobs above.
executors:
  windows:
    # Choose a Windows environment that closest matches our testing
    # requirements and goals.
    # https://circleci.com/developer/orbs/orb/circleci/windows#executors-server-2022
    machine:
      image: "windows-server-2022-gui:current"
      shell: "powershell.exe -ExecutionPolicy Bypass"
    resource_class: "windows.large"

  nix:
    docker:
      # Run in a highly Nix-capable environment.
      - <<: *DOCKERHUB_AUTH
        image: "nixos/nix:2.25.3"
    environment:
      # Let us use features marked "experimental".  For example, most/all of
      # the `nix <subcommand>` forms.
      NIX_CONFIG: "experimental-features = nix-command flakes"
795
commands:
  nix-build:
    # Check out the source, build the Tahoe-LAFS package with Nix, and then
    # run the caller-supplied steps in the resulting environment.
    parameters:
      nixpkgs:
        description: >-
          Reference the name of a flake-managed nixpkgs input (see `nix flake
          metadata` and flake.nix)
        type: "string"
      pythonVersion:
        description: >-
          Reference the name of a Python package in nixpkgs to use.
        type: "string"
      buildSteps:
        description: >-
          The build steps to execute after setting up the build environment.
        type: "steps"

    steps:
      - "checkout"

      - "run":
          name: "Build Package"
          environment:
            # CircleCI build environment looks like it has a zillion and a half cores.
            # Don't let Nix autodetect this high core count because it blows up memory
            # usage and fails the test run.  Pick a number of cores that suits the build
            # environment we're paying for (the free one!).
            #
            # Quoted because CircleCI environment values are strings; don't
            # rely on YAML integer coercion.
            DEPENDENCY_CORES: "3"
          command: |
            nix build \
              --verbose \
              --print-build-logs \
              --cores "$DEPENDENCY_CORES" \
              .#<<parameters.pythonVersion>>-tahoe-lafs

      - steps: "<<parameters.buildSteps>>"
Note: See TracBrowser for help on using the repository browser.