Workflow file for this run

# Summary: OpenFermion continuous integration status checks.
#
# This workflow runs various tests to verify that changes to the OpenFermion
# codebase pass validation and conform to project format and style standards.
# It triggers on certain events such as pull requests and merge-queue merges,
# and can also be invoked manually via the "Run workflow" button at
# https://github.com/quantumlib/OpenFermion/actions/workflows/ci.yaml
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
name: Continuous integration checks
run-name: CI checks for ${{github.event_name}} by ${{github.actor}}
on:
pull_request:
types: [opened, synchronize]
branches:
- master
merge_group:
types:
- checks_requested
# Allow manual invocation.
workflow_dispatch:
inputs:
sha:
description: 'SHA of commit to run against:'
type: string
required: true
python_ver:
description: Normal version of Python to use
type: string
python_compat_ver:
description: Max compat version of Python
type: string
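# Note: manual runs can also be started from the command line with the GitHub
# CLI. One possible invocation (the commit SHA below is a placeholder) is:
#
#   gh workflow run ci.yaml -R quantumlib/OpenFermion -f sha=COMMIT_SHA
#
# The python_ver and python_compat_ver inputs are optional; when left blank,
# the jobs below fall back to the defaults set in the "env" block.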
# Declare default permissions as read only.
permissions: read-all
concurrency:
# Cancel any previously-started but still active runs on the same branch.
cancel-in-progress: true
group: ${{github.workflow}}-${{github.event.pull_request.number||github.ref}}
env:
# Default Python version to use. Make sure to use full x.y.z number.
python_ver: '3.12.8'
# Oldest Python version to use, for max_compat tests.
python_compat_ver: '3.10.15'
# Files listing dependencies we install using pip in the various jobs below.
# This is used by setup-python to check whether its cache needs updating.
python_dep_files: >-
dev_tools/requirements/envs/format.env.txt
dev_tools/requirements/envs/mypy.env.txt
dev_tools/requirements/envs/pylint.env.txt
dev_tools/requirements/envs/pytest-extra.env.txt
dev_tools/requirements/envs/pytest.env.txt
dev_tools/requirements/max_compat/pytest-max-compat.env.txt
jobs:
# GitHub Actions can have path filters (i.e., the use of a "paths:" keyword
# on the trigger definitions in the "on:" block earlier in this file). Path
# filters *would* be the natural way to make workflows trigger only when the
# desired files are affected by a pull request – except that the way branch
# protection rules work today is: "If a workflow is skipped due to path
# filtering [...] then checks associated with that workflow will remain in a
# Pending state. A pull request that requires those checks to be successful
# will be blocked from merging." Surprisingly, GitHub doesn't provide
# guidance on how to handle this. Discussions of the problem sometimes
# suggest hacky workarounds (cf. https://stackoverflow.com/a/78003720/743730).
# The approach taken here is to forgo the use of path filtering rules in the
# trigger condition, and instead, do our own filtering using a combination
# of testing specific file patterns (in the changes job below) and "if:"
# conditions on individual jobs in the rest of this workflow.
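# For illustration only, a trigger-level path filter (which this workflow
# deliberately does not use, for the reasons above) would look roughly like:
#
#   on:
#     pull_request:
#       paths:
#         - '**/*.py'
#
# Instead, the "changes" job computes the changed files at run time, and the
# other jobs gate themselves on its outputs via their "if:" conditions.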
changes:
name: (Find changed files)
runs-on: ubuntu-24.04
timeout-minutes: 5
outputs:
gha: ${{steps.filter.outputs.gha}}
gha_files: ${{steps.filter.outputs.gha_files}}
# The following all test both the relevant file condition & the CI config
# because a change in the CI workflows can affect the CI check results.
python: |-
${{steps.filter.outputs.python}} || ${{steps.filter.outputs.ci}}
python_files: ${{steps.filter.outputs.python_files}}
yaml: |-
${{steps.filter.outputs.yaml}} || ${{steps.filter.outputs.ci}}
yaml_files: ${{steps.filter.outputs.yaml_files}}
cff: |-
${{steps.filter.outputs.cff}} || ${{steps.filter.outputs.ci}}
cff_files: ${{steps.filter.outputs.cff_files}}
json: |-
${{steps.filter.outputs.json}} || ${{steps.filter.outputs.ci}}
json_files: ${{steps.filter.outputs.json_files}}
docker: |-
${{steps.filter.outputs.docker}} || ${{steps.filter.outputs.ci}}
docker_files: ${{steps.filter.outputs.docker_files}}
shell: |-
${{steps.filter.outputs.shell}} || ${{steps.filter.outputs.ci}}
shell_files: ${{steps.filter.outputs.shell_files}}
steps:
# When invoked manually, use the given SHA to figure out the change list.
- if: github.event_name == 'workflow_dispatch'
name: Use the user-provided SHA as the basis for comparison
env:
GH_TOKEN: ${{github.token}}
run: |
set -x +e
url="repos/${{github.repository}}/commits/${{inputs.sha}}"
if full_sha="$(gh api $url -q '.sha')"; then
echo "base=$full_sha" >> "$GITHUB_ENV"
else
{
echo "### :x: Workflow error"
echo "The SHA provided to _Run Workflow_ does not exist:"
echo "<code>${{inputs.sha}}</code>"
} >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
- if: github.event_name != 'workflow_dispatch'
name: Use ref ${{github.ref_name}} as the basis for comparison
run: |
echo base=${{github.ref_name}} >> "$GITHUB_ENV"
- name: Check out a copy of the OpenFermion git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Determine files changed by this ${{github.event_name}} event
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
id: filter
with:
base: ${{env.base}}
list-files: 'shell'
# The outputs will be variables named "foo_files" for a filter "foo".
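# For example, with "list-files: 'shell'", the "python" filter defined below
# should yield both steps.filter.outputs.python ('true'/'false') and
# steps.filter.outputs.python_files (a space-separated, shell-quoted list of
# the matching changed files).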
filters: |
ci:
- './.github/workflows/ci.yaml'
- './.github/workflows/codeql.yaml'
- './.github/workflows/osv-scanner.yaml'
cff:
- added|modified:
- '**/CITATION.cff'
python:
- '**/*.py'
gha:
- added|modified:
- './.github/workflows/*.yaml'
- './.github/workflows/*.yml'
yaml:
- added|modified:
- '**/*.yaml'
- '**/*.yml'
json:
- added|modified:
- '**/*.json'
docker:
- '**/dockerfile'
- '**/Dockerfile'
shell:
- '**/*.sh'
- 'check/*'
setup:
if: needs.changes.outputs.python == 'true'
name: (Set up Python)
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python with caching of pip dependencies
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: 'x64'
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install Python requirements
run: |
set -x
for file in ${{env.python_dep_files}}; do
pip install -r $file
done
set +x
echo "::group::List of installed pip packages and their versions"
pip list
echo "::endgroup::"
python-format:
if: needs.changes.outputs.python == 'true'
name: Python format checks
needs: [changes, setup]
runs-on: ubuntu-24.04
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: 'x64'
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/format.env.txt
- name: Set up problem matcher for Black output
run: echo '::add-matcher::.github/problem-matchers/black.json'
- name: Run format checks
run: check/format-incremental ${{inputs.sha}}
python-mypy:
if: needs.changes.outputs.python == 'true'
name: Python type checks
needs: [changes, setup]
runs-on: ubuntu-24.04
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: 'x64'
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/mypy.env.txt
- name: Set up Mypy output problem matcher
run: echo '::add-matcher::.github/problem-matchers/mypy.json'
- name: Type check
run: check/mypy
python-lint:
if: needs.changes.outputs.python == 'true'
name: Python lint checks
needs: [changes, setup]
runs-on: ubuntu-24.04
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: 'x64'
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/pylint.env.txt
- name: Set up Pylint output problem matcher
run: echo '::add-matcher::.github/problem-matchers/pylint.json'
- name: Run pylint
run: check/pylint
# Each of the next sets of matrix tests consists of 2 job definitions. The job
# named "Thing-matrix" defines a matrix of runs for different platforms. It's
# set with "fail-fast: false" so that a failure in one of the matrix jobs
# doesn't cause this entire CI workflow to abort. Then, the job named "Thing"
# is the one that actually reports the results, and is the one used in the
# list of required status checks in the repository branch protection rules.
# It needs to be an independent job because it has to test the results of all
# the matrix runs.
pytest-matrix:
if: needs.changes.outputs.python == 'true'
name: (Python pytest matrix)
needs: [changes, setup]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ ubuntu-24.04, macos-14, windows-2022 ]
cirq-version: [ 1.4.1 ]
fail-fast: false
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: |
pip install -r dev_tools/requirements/envs/pytest.env.txt
pip install cirq-core==${{matrix.cirq-version}}
- name: Set up Pytest output problem matcher
run: echo '::add-matcher::.github/problem-matchers/pytest.json'
- name: Run pytest
run: check/pytest
pytest:
if: needs.changes.outputs.python == 'true' && (success() || failure())
name: Python pytest checks
needs: [changes, pytest-matrix]
runs-on: ubuntu-24.04
steps:
- run: |
result="${{needs.pytest-matrix.result}}"
if [[ $result == "success" || $result == "skipped" ]]; then
exit 0
else
exit 1
fi
pytest-extra-matrix:
if: needs.changes.outputs.python == 'true'
name: (Python extra pytest matrix)
needs: [changes, setup]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ubuntu-24.04, macos-14]
cirq-version: [ 1.4.1 ]
fail-fast: false
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: |
pip install -r dev_tools/requirements/envs/pytest-extra.env.txt
pip install cirq-core==${{matrix.cirq-version}}
- name: Set up Pytest output problem matcher
run: echo '::add-matcher::.github/problem-matchers/pytest.json'
- name: Run pytest
run: check/pytest -m "not slow" src/openfermion/resource_estimates
pytest-extra:
if: needs.changes.outputs.python == 'true' && (success() || failure())
name: Python extra pytest checks
needs: [changes, pytest-extra-matrix]
runs-on: ubuntu-24.04
steps:
- run: |
result="${{needs.pytest-extra-matrix.result}}"
if [[ $result == "success" || $result == "skipped" ]]; then
exit 0
else
exit 1
fi
python-compat:
if: needs.changes.outputs.python == 'true'
name: Python compatibility checks
needs: [changes, setup]
runs-on: ubuntu-20.04
timeout-minutes: 15
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# Note: we deliberately don't use our Python dependency cache here because
# this job runs a different version of Python.
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{env.python_compat_ver}}
- name: Install requirements
run: pip install -r dev_tools/requirements/max_compat/pytest-max-compat.env.txt
- name: Set up Pytest output problem matcher
run: echo '::add-matcher::.github/problem-matchers/pytest.json'
- name: Run pytest
run: check/pytest
coverage:
if: needs.changes.outputs.python == 'true'
name: Python code coverage checks
needs: [changes, setup]
runs-on: ubuntu-24.04
timeout-minutes: 15
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
- name: Set up Python and restore cache
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/pytest.env.txt
- name: Set up Pytest output problem matcher
run: echo '::add-matcher::.github/problem-matchers/pytest.json'
- name: Run code coverage tests
run: check/pytest-and-incremental-coverage
yaml-lint:
if: needs.changes.outputs.yaml == 'true'
name: YAML lint checks
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.yaml_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up yamllint output problem matcher
run: echo "::add-matcher::.github/problem-matchers/yamllint.json"
- name: Run yamllint
run: |
set -x
# shellcheck disable=SC2086
yamllint $changed_files
json-lint:
if: needs.changes.outputs.json == 'true'
name: JSON lint checks
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.json_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Install jsonlint
run: npm install -g @prantlf/jsonlint
- name: Set up jsonlint output problem matcher
run: echo '::add-matcher::.github/problem-matchers/jsonlint.json'
- name: Run jsonlint on JSON files
run: jsonlint --continue ${{env.changed_files}}
cff-validation:
if: needs.changes.outputs.cff == 'true'
name: CITATION.cff validation
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.cff_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Install cffconvert
run: pip install cffconvert
- name: Run cffconvert in validation mode
run: cffconvert --validate
docker-lint:
if: needs.changes.outputs.docker == 'true'
name: Dockerfile lint checks
needs: changes
# This uses a Mac runner because hadolint isn't available via Linux apt.
runs-on: macos-14
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.docker_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# Note: a hadolint GitHub Action is available, but it only accepts one
# Dockerfile to check. We have more than one file to check, so we use the CLI.
- name: Install hadolint
run: brew install hadolint
- name: Set up hadolint output problem matcher
run: echo '::add-matcher::.github/problem-matchers/hadolint.json'
- name: Run hadolint on Dockerfiles that have been changed
run: hadolint ${{env.changed_files}}
workflow-validation:
if: needs.changes.outputs.gha == 'true'
name: GitHub Actions validation
needs: [changes, yaml-lint]
# This uses a Mac runner because actionlint isn't available via Linux apt.
runs-on: macos-14
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.gha_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# The next action simply fails if there are any unpinned actions.
- name: Verify that all workflow actions have pinned versions
uses: zgosalvez/github-actions-ensure-sha-pinned-actions@25ed13d0628a1601b4b44048e63cc4328ed03633
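# For reference, "pinned" means referring to an action by a full commit SHA
# rather than a mutable tag, as the steps in this file do, e.g.:
#   uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# An unpinned reference would instead look like:
#   uses: actions/checkout@v4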
# If we didn't fail the previous check, go on to more time-consuming ones.
- name: Install actionlint
run: HOMEBREW_NO_AUTO_UPDATE=1 brew install actionlint
- name: Set up actionlint output problem matcher
run: echo "::add-matcher::.github/problem-matchers/actionlint.json"
- name: Verify that all GitHub Actions workflows are valid
run: /opt/homebrew/bin/actionlint -color
shell-script-lint:
if: needs.changes.outputs.shell == 'true'
name: Shell script checks
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.shell_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up shellcheck output problem matcher
run: echo "::add-matcher::.github/problem-matchers/shellcheck.json"
- name: Run shellcheck on shell scripts that have been changed
run: shellcheck ${{env.changed_files}}