Bump github/codeql-action from 3.26.0 to 3.26.2 #4231
Workflow file for this run
name: Conda package
on:
  push:
    branches:
      - main
      - release*
  pull_request:
    branches:
      - main
      - release*
permissions: read-all
env:
  PACKAGE_NAME: numba-dpex
  MODULE_NAME: numba_dpex
  # There is a separate action that removes defaults.
  CHANNELS: 'dppy/label/dev,conda-forge,numba,nodefaults'
  VER_JSON_NAME: 'version.json'
  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
  VER_SCRIPT2: "d = j['numba-dpex'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
  PYTHONIOENCODING: 'utf-8'
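  # Illustrative example (hypothetical values): VER_SCRIPT1 and VER_SCRIPT2 are
  # concatenated and passed to `python -c` in the test job. If version.json
  # contains {"numba-dpex": [{"version": "0.21.4", "build": "py310_0"}]},
  # the combined one-liner prints "0.21.4=py310_0", which is later used to pin
  # the exact version and build during `conda install`.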
jobs:
  build:
    name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']
    strategy:
      fail-fast: false
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-latest, windows-2019]
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash -l {0}
    continue-on-error: false
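    # Note on the default shell: `-l` runs bash as a login shell so the conda
    # environment activated by setup-miniconda is available in every step, and
    # `{0}` is the placeholder GitHub Actions replaces with the path to the
    # generated step script.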
    steps:
      - name: Cancel Previous Runs
        uses: styfle/cancel-workflow-action@0.12.1
        with:
          access_token: ${{ github.token }}
      - name: Checkout ${{ env.PACKAGE_NAME }} repo
        uses: actions/checkout@v4
        with:
          # We need tags to build the proper version
          fetch-depth: 0
      - name: Setup miniconda
        uses: ./.github/actions/setup-miniconda
        with:
          python-version: ${{ matrix.python }}
          activate-environment: "build"
          channels: ${{ env.CHANNELS }}
          channel-priority: "disabled"
          dependencies: "conda-build"
          environment-file: environment/conda-package-build.yml
      - name: Store conda paths as envs
        run: |
          echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" '/' >> $GITHUB_ENV
          echo "WHEELS_OUTPUT_FOLDER=$GITHUB_WORKSPACE${{ runner.os == 'Linux' && '/' || '\\' }}" >> $GITHUB_ENV
          echo "WHEELS_NAME=$PACKAGE_NAME" | tr "-" "_" >> $GITHUB_ENV
      - name: Build conda package
        run: conda build --no-test --python ${{ matrix.python }} conda-recipe
      - name: Upload artifact
        uses: actions/upload-artifact@v4.3.6
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2
      - name: Upload wheels artifact
        uses: actions/upload-artifact@v4.3.6
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }}
          path: ${{ env.WHEELS_OUTPUT_FOLDER }}${{ env.WHEELS_NAME }}-*.whl
  test:
    name: Test ['${{ matrix.os }}', '${{ matrix.scope }}', python='${{ matrix.python }}']
    needs: build
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}
    strategy:
      fail-fast: false
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-20.04, ubuntu-latest, windows-latest]
        experimental: [false]
        scope: ['tests', 'examples', 'gdb']
        exclude:
          - os: windows-latest
            scope: 'gdb'
    continue-on-error: ${{ matrix.experimental }}
    steps:
      - name: Checkout setup config
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .github/actions
            environment
            pyproject.toml
          sparse-checkout-cone-mode: false
      - name: Setup miniconda
        uses: ./.github/actions/setup-miniconda
        with:
          python-version: ${{ matrix.python }}
          activate-environment: "test"
          channels: ${{ env.CHANNELS }}
          channel-priority: "disabled"
          dependencies: "conda-index;pytest-cov;pexpect;conda-tree"
          environment-file: environment/conda-package-test.yml
      - name: Store conda paths as envs
        shell: bash -l {0}
        run: |
          echo "CHANNEL_PATH=${{ github.workspace }}/channel/" | tr "\\" "/" >> $GITHUB_ENV
          echo "EXTRACTED_PKG_PATH=${{ github.workspace }}/pkg/" | tr "\\" "/" >> $GITHUB_ENV
          echo "VER_JSON_PATH=${{ github.workspace }}/version.json" | tr "\\" "/" >> $GITHUB_ENV
          echo "PKG_PATH_IN_CHANNEL=${{ github.workspace }}/channel/${{ runner.os == 'Linux' && 'linux' || 'win' }}-64/" | tr "\\" "/" >> $GITHUB_ENV
          echo "EXAMPLES_PATH=$CONDA_PREFIX/${{ runner.os == 'Linux' && format('lib/python{0}',matrix.python) || 'Lib' }}/site-packages/numba_dpex/examples/" | tr "\\" "/" >> $GITHUB_ENV
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.PKG_PATH_IN_CHANNEL }}
      # We need --force-local because on Windows the path looks like C:/foo/bar
      # and tar interprets the colon as a remote device specifier.
      - name: Extract package archive
        shell: bash -l {0}
        run: |
          mkdir -p ${EXTRACTED_PKG_PATH}
          tar -xvf ${PKG_PATH_IN_CHANNEL}${PACKAGE_NAME}-*.tar.bz2 -C ${EXTRACTED_PKG_PATH} --force-local
      - name: Create conda channel
        # Note: plain `conda index` (instead of `python -m conda_index`) may
        # pick up the legacy conda-build index.
        run: |
          python -m conda_index ${{ env.CHANNEL_PATH }}
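      # Sketch of what indexing produces (assuming the default conda_index
      # layout): CHANNEL_PATH gains repodata.json files under linux-64/ (or
      # win-64/) and noarch/, which is what allows the following steps to pass
      # the local directory to `conda search`/`conda install` via -c.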
      - name: Test conda channel
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.CHANNEL_PATH }} --override-channels --info --json > ${{ env.VER_JSON_PATH }}
          cat ${{ env.VER_JSON_PATH }}
      - name: Print dependencies
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.CHANNEL_PATH }} --override-channels --info
      - name: Collect dependencies
        shell: bash -l {0}
        run: |
          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
          echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV
      # We want to make sure that all dependencies install automatically.
      # intel-opencl-rt is needed for set-intel-ocl-icd-registry.ps1
      - name: Install built package
        run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} intel-opencl-rt -c ${{ env.CHANNEL_PATH }}
      - name: List installed packages
        run: conda list
      - name: Check dpcpp-llvm-spirv
        run: |
          python -c "import dpcpp_llvm_spirv as p; print(p.get_llvm_spirv_path())"
      - name: Check dependency tree
        run: conda-tree depends -t numba-dpex
      - name: Smoke test
        run: python -c "import dpnp, dpctl, numba_dpex; dpctl.lsplatform(verbosity=2)"
      - name: Smoke test - test import with no default device
        env:
          ONEAPI_DEVICE_SELECTOR: unknown:unknown
        run: python -c "import numba_dpex"
      - name: Run tests
        if: ${{ matrix.scope == 'tests' }}
        run: |
          pytest -q -ra --disable-warnings --pyargs ${{ env.MODULE_NAME }} -vv -k "not test_1d_strided_dpnp_array_in_kernel[2]"
      - name: Run backendless optimization tests
        # Running tests that have been found to fail on AMD CPUs with
        # -cl-opt-disable. The test failures do not happen on other platforms
        # and are possibly due to some driver/opencl compiler bug.
        if: ${{ matrix.scope == 'tests' }}
        env:
          # Disable device driver optimizations to avoid hitting bugs in the
          # driver compiler.
ONEAPI_DEVICE_SELECTOR: "opencl:cpu" | |
NUMBA_DPEX_BUILD_KERNEL_OPTIONS: "-cl-opt-disable" | |
run: | | |
pytest -q -ra --disable-warnings --pyargs ${{ env.MODULE_NAME }} -vv -k "test_1d_strided_dpnp_array_in_kernel[2]" | |
- name: Run examples | |
if: ${{ matrix.scope == 'examples' }} | |
shell: bash -l {0} | |
run: | | |
cd ${{ env.EXAMPLES_PATH }} | |
for script in $(find . \( -not -name "_*" -not -name "side-by-side*" -not -name "scan.py" -and -name "*.py" \)) | |
do | |
echo "Executing ${script}" | |
python ${script} || exit 1 | |
done | |
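      # Note: because this step sets an explicit `bash -l {0}` shell, GitHub
      # Actions does not add its default fail-fast flags, so the `|| exit 1`
      # above is what stops the loop on the first failing example script.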
      - name: Run gdb tests
        if: ${{ matrix.scope == 'gdb' }}
        env:
          GDB_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/fc87666c-d626-47bc-a861-a1578d2ecbd3/l_dpcpp_dbg_p_2024.1.0.439_offline.sh
          GDB_INSTALLER: l_dpcpp_dbg_p_2024.1.0.439_offline.sh
          # Log gdb communication so that, if a test fails, we can determine
          # what the issue is.
          NUMBA_DPEX_TESTING_LOG_DEBUGGING: 1
        run: |
          wget -nc -q ${{ env.GDB_URL }}
          chmod +x ${{ env.GDB_INSTALLER }}
          mkdir /tmp/gdb
          export ONEAPI_ROOT=/tmp/gdb
          ./${{ env.GDB_INSTALLER }} -a -s --eula accept --install-dir $ONEAPI_ROOT
          source $ONEAPI_ROOT/debugger/latest/env/vars.sh
          # We match only the major version because the latest gdb is not often
          # available.
          gdb_version=$(echo "$GDB_INSTALLER" | grep -oP '\d+' | head -n 1)
          icpx_version=$(conda list dpcpp-cpp-rt | tail -n 1 | awk '{print $2}' | grep -oP '\d+' | head -n 1)
          if [ "$gdb_version" != "$icpx_version" ]; then
            echo "Error: GDB version ($gdb_version) does not match icpx version ($icpx_version)"
            exit 1
          fi
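          # Illustrative check (hypothetical runtime version): the installer
          # name l_dpcpp_dbg_p_2024.1.0.439_offline.sh yields gdb_version=2024;
          # if `conda list dpcpp-cpp-rt` reports e.g. 2024.2.1, icpx_version is
          # also 2024 and the check passes.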
          pytest -q -ra --disable-warnings --pyargs ${{ env.MODULE_NAME }}.tests.debugging -vv
  upload_anaconda:
    name: Upload dppy/label/dev ['${{ matrix.os }}', python='${{ matrix.python }}']
    if: ${{github.ref == 'refs/heads/main' || (startsWith(github.ref, 'refs/heads/release') == true)}}
    needs: [test]
    strategy:
      matrix:
        python: ['3.9', '3.10', '3.11']
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash -l {0}
    continue-on-error: false
    steps:
      - name: Checkout setup config
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .github/actions
            environment
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Download wheels artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Wheels Python ${{ matrix.python }}
      - name: Setup miniconda
        uses: ./.github/actions/setup-miniconda
        with:
          python-version: ${{ matrix.python }}
          activate-environment: "anaconda"
          channel-priority: "disabled"
          dependencies: "anaconda-client"
          environment-file: environment/conda-package-anaconda.yml
      - name: Upload
        run: anaconda --token ${{ secrets.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
      - name: Package version
        run: echo "PACKAGE_VERSION=$(basename ${{ env.PACKAGE_NAME }}-*.tar.bz2 | sed 's/^${{ env.PACKAGE_NAME }}-\([^-]*\).*/\1/')" >> $GITHUB_ENV
      - name: Store wheels name
        run: |
          echo "WHEELS_NAME=$PACKAGE_NAME" | tr "-" "_" >> $GITHUB_ENV
      - name: Upload Wheels
        run: anaconda --token ${{ secrets.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.WHEELS_NAME }}-*.whl --version ${{ env.PACKAGE_VERSION }}
  cleanup_packages:
    name: Clean up anaconda packages
    needs: [upload_anaconda]
    runs-on: 'ubuntu-latest'
    defaults:
      run:
        shell: bash -el {0}
    steps:
      - name: Checkout setup config
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            .github/actions
            environment
      - name: Setup miniconda
        uses: ./.github/actions/setup-miniconda
        with:
          python-version: '3.11'
          activate-environment: "anaconda"
          channel-priority: "disabled"
          dependencies: "anaconda-client"
          environment-file: environment/conda-package-anaconda.yml
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          repository: IntelPython/devops-tools
      - name: Cleanup old packages
        run: |
          python scripts/cleanup-old-packages.py \
            --verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \
            --package dppy/${{ env.PACKAGE_NAME }} --label dev