# Workflow file for run "31 ci benchmarking" (#138).

---
name: GridTools Benchmark CI Pipeline

on:
  push:
    branches:
      - main
    tags: ['*']
  pull_request:

# Cancel superseded runs on PRs only; pushes to main always run to completion.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}

jobs:
  benchmark:
    # Fix: the matrix key is `julia_version`, not `version` — the old
    # `${{ matrix.version }}` expression expanded to an empty string.
    name: Julia ${{ matrix.julia_version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        julia_version:
          - '1.8'
        python_version:
          - '3.10'
        os:
          - ubuntu-latest
        arch:
          - x64
    steps:
      - uses: actions/checkout@v3
        with:
          # Full history is required: later steps resolve HEAD^1 and compare
          # against origin/main, which fails on the default shallow clone.
          fetch-depth: 0

      - name: Set up Python ${{ matrix.python_version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}

      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y libboost-all-dev
          python -m pip install --upgrade pip

      - name: Install GT4Py and atlas4py
        run: |
          git clone --branch fix_python_interp_path_in_cmake https://github.com/tehrengruber/gt4py.git
          cd gt4py
          pip install -r requirements-dev.txt
          pip install .
          pip install -i https://test.pypi.org/simple/ atlas4py

      - uses: julia-actions/setup-julia@v1
        with:
          version: ${{ matrix.julia_version }}
          arch: ${{ matrix.arch }}

      - uses: julia-actions/cache@v1

      - name: Install and Build Benchmarking Tools
        # AirspeedVelocity installs the benchpkg* CLI tools into ~/.julia/bin;
        # append that directory to PATH for subsequent steps.
        run: |
          julia -e 'using Pkg; Pkg.add("AirspeedVelocity"); Pkg.build("AirspeedVelocity")'
          echo "PATH=$PATH:$HOME/.julia/bin" >> $GITHUB_ENV
          ls $HOME/.julia/bin

      - name: Run Benchmarks
        # Benchmark the current commit against its first parent.
        run: |
          echo $PATH
          mkdir results
          benchpkg --rev="${{ github.sha }},$(git rev-parse HEAD^1)" --bench-on="${{ github.sha }}" --output-dir=results/ --tune

      - name: Generate and Upload Benchmark Plots
        if: always()
        run: |
          mkdir -p plots
          git fetch origin main:refs/remotes/origin/main
          LAST_MAIN_COMMIT=$(git rev-parse origin/main)
          CURRENT_COMMIT=$(git rev-parse HEAD)
          echo "Comparing CURRENT_COMMIT ($CURRENT_COMMIT) with LAST_MAIN_COMMIT ($LAST_MAIN_COMMIT)"
          benchpkgplot --rev="$LAST_MAIN_COMMIT,$CURRENT_COMMIT" --npart=10 --format=png --input-dir=results/ --output-dir=plots/

      - name: Upload Plots as Artifacts
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-plots
          path: plots

      - name: Create and Display Benchmark Table
        if: always()
        run: |
          git fetch origin main:refs/remotes/origin/main # Ensures you have the latest main branch
          if [ $(git rev-list --count HEAD) -gt 1 ]; then
          PREV_COMMIT=$(git rev-parse HEAD^1)
          else
          PREV_COMMIT=$(git rev-parse HEAD) # Fallback to the current commit if there's no previous commit
          fi
          LAST_MAIN_COMMIT=$(git rev-parse origin/main) # Ensures comparison against the latest commit in main
          echo "Using revisions: $LAST_MAIN_COMMIT and $PREV_COMMIT for benchmark comparison."
          benchpkgtable --rev="$LAST_MAIN_COMMIT,$PREV_COMMIT" --input-dir=results/ > table.md
          echo '### Benchmark Results' > body.md
          cat table.md >> body.md
          echo '### Benchmark Plots' >> body.md
          echo 'A plot of the benchmark results has been uploaded as an artifact to this workflow run.' >> body.md
          cat body.md # Print the markdown table to the log for review.

      - name: Upload Benchmark Results Table
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-table
          path: body.md

      # The two steps below read github.event.pull_request.number, which is
      # empty on push events — gate them so pushes to main don't fail.
      - name: Find and Comment Benchmark Results
        if: github.event_name == 'pull_request'
        uses: peter-evans/find-comment@v2
        id: fcbenchmark
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: 'github-actions[bot]'
          body-includes: Benchmark Results

      - name: Comment on PR with Benchmark Results
        if: github.event_name == 'pull_request'
        uses: peter-evans/create-or-update-comment@v3
        with:
          comment-id: ${{ steps.fcbenchmark.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          body-path: body.md
          edit-mode: replace