
fix(libcudf): add make (CMAKE_BUILD_PROGRAM not set) #20928

Workflow file for this run

name: pr
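# Note: this workflow triggers on pushes to pull-request/<number> branches rather than
# on pull_request events (RAPIDS PRs are mirrored to such branches by CI automation).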
on:
push:
branches:
- "pull-request/[0-9]+"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
# Please keep pr-builder as the top job here
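# pr-builder gates on every job listed under `needs` and reports a single aggregate status
# (it runs even when upstream jobs are skipped, via `if: always()`).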
pr-builder:
needs:
- check-nightly-ci
- changed-files
- checks
- conda-cpp-build
# - cpp-linters
- conda-cpp-checks
- conda-cpp-tests
- conda-python-build
- conda-python-cudf-tests
- conda-python-other-tests
- conda-java-tests
# - static-configure
# - conda-notebook-tests
# - docs-build
# - wheel-build-libcudf
# - wheel-build-pylibcudf
# - wheel-build-cudf
# - wheel-tests-cudf
# - wheel-build-cudf-polars
# - wheel-tests-cudf-polars
# - cudf-polars-polars-tests
# - wheel-build-dask-cudf
# - wheel-tests-dask-cudf
# - devcontainer
# - unit-tests-cudf-pandas
# - pandas-tests
# - pandas-tests-diff
- telemetry-setup
# - third-party-integration-tests-cudf-pandas
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
if: always()
with:
needs: ${{ toJSON(needs) }}
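# telemetry-setup stashes base OpenTelemetry environment variables for later jobs;
# it is only active when the TELEMETRY_ENABLED repository variable is 'true'.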
telemetry-setup:
continue-on-error: true
runs-on: ubuntu-latest
env:
OTEL_SERVICE_NAME: 'pr-cudf'
steps:
- name: Telemetry setup
if: ${{ vars.TELEMETRY_ENABLED == 'true' }}
uses: rapidsai/shared-actions/telemetry-dispatch-stash-base-env-vars@main
check-nightly-ci:
# Switch to ubuntu-latest once it defaults to a version of Ubuntu that
# provides at least Python 3.11 (see
# https://docs.python.org/3/library/datetime.html#datetime.date.fromisoformat)
runs-on: ubuntu-24.04
env:
RAPIDS_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Check if nightly CI is passing
uses: rapidsai/shared-actions/check_nightly_success/dispatch@main
with:
repo: cudf
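# changed-files produces changed_file_groups outputs (test_cpp, test_java, test_python, ...)
# that downstream jobs check in their `if:` conditions to skip unaffected test suites.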
changed-files:
secrets: inherit
needs: telemetry-setup
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
with:
files_yaml: |
test_cpp:
- '**'
- '!.devcontainer/**'
- '!CONTRIBUTING.md'
- '!README.md'
- '!ci/cudf_pandas_scripts/**'
- '!docs/**'
- '!img/**'
- '!java/**'
- '!notebooks/**'
- '!python/**'
test_cudf_pandas:
- '**'
- '!.devcontainer/**'
- '!CONTRIBUTING.md'
- '!README.md'
- '!docs/**'
- '!img/**'
- '!java/**'
- '!notebooks/**'
test_java:
- '**'
- '!.devcontainer/**'
- '!CONTRIBUTING.md'
- '!README.md'
- '!ci/cudf_pandas_scripts/**'
- '!docs/**'
- '!img/**'
- '!notebooks/**'
- '!python/**'
test_notebooks:
- '**'
- '!.devcontainer/**'
- '!CONTRIBUTING.md'
- '!README.md'
- '!ci/cudf_pandas_scripts/**'
- '!java/**'
test_python:
- '**'
- '!.devcontainer/**'
- '!CONTRIBUTING.md'
- '!README.md'
- '!ci/cudf_pandas_scripts/**'
- '!docs/**'
- '!img/**'
- '!java/**'
- '!notebooks/**'
checks:
secrets: inherit
needs: telemetry-setup
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
with:
enable_check_generated_files: false
ignored_pr_jobs: "telemetry-summarize spark-rapids-jni"
conda-cpp-build:
needs: checks
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
with:
build_type: pull-request
node_type: "cpu16"
# cpp-linters:
# secrets: inherit
# needs: checks
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# run_script: "ci/cpp_linters.sh"
# node_type: "cpu16"
conda-cpp-checks:
needs: conda-cpp-build
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
with:
build_type: pull-request
enable_check_symbols: true
conda-cpp-tests:
needs: [conda-cpp-build, changed-files]
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_cpp
with:
build_type: pull-request
conda-python-build:
needs: conda-cpp-build
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
with:
build_type: pull-request
conda-python-cudf-tests:
needs: [conda-python-build, changed-files]
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python
with:
build_type: pull-request
script: "ci/test_python_cudf.sh"
conda-python-other-tests:
# Tests for dask_cudf, custreamz, cudf_kafka are separated for CI parallelism
needs: [conda-python-build, changed-files]
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python
with:
build_type: pull-request
script: "ci/test_python_other.sh"
conda-java-tests:
needs: [conda-cpp-build, changed-files]
secrets: inherit
uses: rapidsai/shared-workflows/.github/workflows/[email protected]
if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_java
with:
build_type: pull-request
node_type: "gpu-l4-latest-1"
arch: "amd64"
container_image: "rapidsai/ci-conda:latest"
run_script: "ci/test_java.sh"
# static-configure:
# needs: checks
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# # Use the wheel container so we can skip conda solves and since our
# # primary static consumers (Spark) are not in conda anyway.
# container_image: "rapidsai/ci-wheel:latest"
# run_script: "ci/configure_cpp_static.sh"
# conda-notebook-tests:
# needs: [conda-python-build, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_notebooks
# with:
# build_type: pull-request
# node_type: "gpu-l4-latest-1"
# arch: "amd64"
# container_image: "rapidsai/ci-conda:latest"
# run_script: "ci/test_notebooks.sh"
# docs-build:
# needs: conda-python-build
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# node_type: "gpu-l4-latest-1"
# arch: "amd64"
# container_image: "rapidsai/ci-conda:latest"
# run_script: "ci/build_docs.sh"
# wheel-build-libcudf:
# needs: checks
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# # build for every combination of arch and CUDA version, but only for the latest Python
# matrix_filter: group_by([.ARCH, (.CUDA_VER|split(".")|map(tonumber)|.[0])]) | map(max_by(.PY_VER|split(".")|map(tonumber)))
# build_type: pull-request
# node_type: "cpu16"
# script: "ci/build_wheel_libcudf.sh"
# wheel-build-pylibcudf:
# needs: [checks, wheel-build-libcudf]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# script: "ci/build_wheel_pylibcudf.sh"
# wheel-build-cudf:
# needs: wheel-build-pylibcudf
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# script: "ci/build_wheel_cudf.sh"
# wheel-tests-cudf:
# needs: [wheel-build-cudf, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python
# with:
# build_type: pull-request
# script: ci/test_wheel_cudf.sh
# wheel-build-cudf-polars:
# needs: wheel-build-pylibcudf
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: "ci/build_wheel_cudf_polars.sh"
# wheel-tests-cudf-polars:
# needs: [wheel-build-cudf-polars, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: "ci/test_wheel_cudf_polars.sh"
# cudf-polars-polars-tests:
# needs: wheel-build-cudf-polars
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: "ci/test_cudf_polars_polars_tests.sh"
# wheel-build-dask-cudf:
# needs: wheel-build-cudf
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: "ci/build_wheel_dask_cudf.sh"
# wheel-tests-dask-cudf:
# needs: [wheel-build-dask-cudf, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: ci/test_wheel_dask_cudf.sh
# devcontainer:
# secrets: inherit
# needs: telemetry-setup
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# node_type: "cpu32"
# arch: '["amd64"]'
# cuda: '["12.8"]'
# build_command: |
# sccache -z;
# build-all -DBUILD_BENCHMARKS=ON --verbose;
# sccache -s;
# unit-tests-cudf-pandas:
# needs: [wheel-build-cudf, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python || fromJSON(needs.changed-files.outputs.changed_file_groups).test_cudf_pandas
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: ci/cudf_pandas_scripts/run_tests.sh
# third-party-integration-tests-cudf-pandas:
# needs: conda-python-build
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# build_type: pull-request
# branch: ${{ inputs.branch }}
# date: ${{ inputs.date }}
# sha: ${{ inputs.sha }}
# node_type: "gpu-l4-latest-1"
# continue-on-error: true
# container_image: "rapidsai/ci-conda:latest"
# run_script: |
# ci/cudf_pandas_scripts/third-party-integration/test.sh python/cudf/cudf_pandas_tests/third_party_integration_tests/dependencies.yaml
# pandas-tests:
# # run the Pandas unit tests using PR branch
# needs: [wheel-build-cudf, changed-files]
# secrets: inherit
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_python || fromJSON(needs.changed-files.outputs.changed_file_groups).test_cudf_pandas
# with:
# # This selects "ARCH=amd64 + the latest supported Python + CUDA".
# matrix_filter: map(select(.ARCH == "amd64")) | group_by(.CUDA_VER|split(".")|map(tonumber)|.[0]) | map(max_by([(.PY_VER|split(".")|map(tonumber)), (.CUDA_VER|split(".")|map(tonumber))]))
# build_type: pull-request
# script: ci/cudf_pandas_scripts/pandas-tests/run.sh pr
# # Hide test failures because they exceed the GITHUB_STEP_SUMMARY output limit.
# test_summary_show: "none"
# pandas-tests-diff:
# # diff the results of running the Pandas unit tests and publish a job summary
# needs: pandas-tests
# uses: rapidsai/shared-workflows/.github/workflows/[email protected]
# with:
# node_type: "cpu4"
# build_type: pull-request
# run_script: "ci/cudf_pandas_scripts/pandas-tests/diff.sh"
# spark-rapids-jni:
# needs: changed-files
# uses: ./.github/workflows/spark-rapids-jni.yaml
# if: fromJSON(needs.changed-files.outputs.changed_file_groups).test_java
telemetry-summarize:
# This job must use a self-hosted runner to record telemetry traces.
runs-on: linux-amd64-cpu4
needs: pr-builder
if: ${{ vars.TELEMETRY_ENABLED == 'true' && !cancelled() }}
continue-on-error: true
steps:
- name: Telemetry summarize
uses: rapidsai/shared-actions/telemetry-dispatch-summarize@main