
Fix part of #5343: Introduce new CI workflow for Code Coverage #3

Workflow file for this run

# Contains jobs corresponding to code coverage.
name: Code Coverage

# Controls when the action will run. Triggers the workflow on pull request
# events or on push events to the develop branch.
on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      # Push events on the develop branch.
      - develop
jobs:
  compute_changed_files:
    name: Compute changed files
    runs-on: ubuntu-20.04
    outputs:
      matrix: ${{ steps.compute-file-matrix.outputs.matrix }}
      can_skip_tests: ${{ steps.compute-file-matrix.outputs.can_skip_files }}
    env:
      CACHE_DIRECTORY: ~/.bazel_cache
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Set up Bazel
        uses: abhinavsingh/setup-bazel@v3
        with:
          version: 6.5.0
      - uses: actions/cache@v2
        id: scripts_cache
        with:
          path: ${{ env.CACHE_DIRECTORY }}
          key: ${{ runner.os }}-${{ env.CACHE_DIRECTORY }}-bazel-scripts-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-${{ env.CACHE_DIRECTORY }}-bazel-scripts-
            ${{ runner.os }}-${{ env.CACHE_DIRECTORY }}-bazel-
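      # Since the key above embeds the commit SHA, each run saves a fresh cache entry;
      # restore-keys lets later runs fall back to the newest prefix-matching entry.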
      # This check is needed to ensure that Bazel's unbounded cache growth doesn't result in a
      # situation where the cache never updates (e.g. due to exceeding GitHub's cache size
      # limit), thereby only ever using the last successful cache version. This solution will
      # result in a few slower CI actions around the time the cache is detected to be too
      # large, but it should incrementally improve thereafter.
      - name: Ensure cache size
        env:
          BAZEL_CACHE_DIR: ${{ env.CACHE_DIRECTORY }}
        run: |
          # See https://stackoverflow.com/a/27485157 for reference.
          EXPANDED_BAZEL_CACHE_PATH="${BAZEL_CACHE_DIR/#\~/$HOME}"
          CACHE_SIZE_MB=$(du -smc $EXPANDED_BAZEL_CACHE_PATH | grep total | cut -f1)
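          # Illustrative note: with -s (summarize), -m (MB) and -c (grand total), du prints a
          # final tab-separated "total" line (e.g. "2113  total"); grep selects that line and
          # cut -f1 keeps just the number.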
echo "Total size of Bazel cache (rounded up to MBs): $CACHE_SIZE_MB"
# Use a 4.5GB threshold since actions/cache compresses the results, and Bazel caches seem
# to only increase by a few hundred megabytes across changes for unrelated branches. This
# is also a reasonable upper-bound (local tests as of 2021-03-31 suggest that a full build
# of the codebase (e.g. //...) from scratch only requires a ~2.1GB uncompressed/~900MB
# compressed cache).
if [[ "$CACHE_SIZE_MB" -gt 4500 ]]; then
echo "Cache exceeds cut-off; resetting it (will result in a slow build)"
rm -rf $EXPANDED_BAZEL_CACHE_PATH
fi
      - name: Configure Bazel to use a local cache
        env:
          BAZEL_CACHE_DIR: ${{ env.CACHE_DIRECTORY }}
        run: |
          EXPANDED_BAZEL_CACHE_PATH="${BAZEL_CACHE_DIR/#\~/$HOME}"
          echo "Using $EXPANDED_BAZEL_CACHE_PATH as Bazel's cache path"
          echo "build --disk_cache=$EXPANDED_BAZEL_CACHE_PATH" >> $HOME/.bazelrc
        shell: bash
      - name: Compute file matrix
        id: compute-file-matrix
        env:
          compute_all_files: ${{ contains(github.event.pull_request.title, '[RunAllTests]') }}
          # See https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request.
          # Defer to origin/develop outside a PR (such as in develop's main CI runs).
          base_commit_hash: ${{ github.event.pull_request.base.sha || 'origin/develop' }}
        # See https://unix.stackexchange.com/a/338124 for reference on creating a JSON-friendly
        # comma-separated list of test targets for the matrix.
        run: |
          bazel run //scripts:compute_changed_files -- $(pwd) $(pwd)/changed_files.log $base_commit_hash compute_all_files=$compute_all_files
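          # Assumption (based on the matrix key below): the script writes one base64-encoded
          # bucket of changed files per line to changed_files.log.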
          FILE_BUCKET_LIST=$(cat ./changed_files.log | sed 's/^\|$/"/g' | paste -sd, -)
          echo "Changed files (note that this might be all files if configured to run all or on the develop branch): $FILE_BUCKET_LIST"
          echo "::set-output name=matrix::{\"changed-files-bucket-base64-encoded-shard\":[$FILE_BUCKET_LIST]}"
          if [[ ! -z "$FILE_BUCKET_LIST" ]]; then
            echo "::set-output name=can_skip_files::false"
          else
            echo "::set-output name=can_skip_files::true"
            echo "No files are affected by this change. If this is wrong, you can add '[RunAllTests]' to the PR title to force a run."
          fi
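
For reference, a minimal local sketch of the quoting step used above to build the matrix JSON (this assumes GNU sed, as on the ubuntu-20.04 runner; the three bucket values are made-up stand-ins for whatever //scripts:compute_changed_files writes to changed_files.log):

  #!/usr/bin/env bash
  # Hypothetical input: one encoded bucket per line, standing in for changed_files.log.
  printf 'YWJj\nZGVm\nZ2hp\n' > changed_files.log
  # Wrap each line in double quotes (GNU sed's \| alternation matches the start or the
  # end of each line), then join all lines with commas.
  FILE_BUCKET_LIST=$(cat ./changed_files.log | sed 's/^\|$/"/g' | paste -sd, -)
  echo "$FILE_BUCKET_LIST"
  # -> "YWJj","ZGVm","Z2hp"
  # Spliced into the matrix output, this yields valid JSON:
  echo "{\"changed-files-bucket-base64-encoded-shard\":[$FILE_BUCKET_LIST]}"
  # -> {"changed-files-bucket-base64-encoded-shard":["YWJj","ZGVm","Z2hp"]}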