
Commit

Merge remote-tracking branch 'origin/master' into 2483_support_derived_types_in_driver
hiker committed Nov 18, 2024
2 parents 7370377 + a33a99b commit 5afbf84
Showing 424 changed files with 13,352 additions and 7,833 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/compilation.yml
@@ -51,14 +51,14 @@ on:
push

env:
CUDA_VERSION: 12.6.0
CUDA_VERSION: 12.6.2
GFORTRAN_VERSION: 14.2.0
HDF5_VERSION: 1.14.4.3
HDF5_VERSION: 1.14.5
NETCDF_C_VERSION: 4.9.2
NETCDF_FORTRAN_VERSION: 4.6.1
NVFORTRAN_VERSION: 24.7
NVFORTRAN_VERSION: 24.9
OPENMPI_VERSION: 5.0.5
PYTHON_VERSION: 3.12.5
PYTHON_VERSION: 3.13.0

jobs:
run_if_on_mirror:
@@ -130,5 +130,5 @@ jobs:
module load nvidia-hpcsdk/${NVFORTRAN_VERSION}
module load hdf5/${HDF5_VERSION} netcdf_c/${NETCDF_C_VERSION} netcdf_fortran/${NETCDF_FORTRAN_VERSION}
F90=nvfortran F90FLAGS="-acc -Minfo=all" make -C tutorial/practicals/LFRic compile
F90=nvfortran F90FLAGS="-acc -Minfo=all -Mnofma -O2" make -C tutorial/practicals/nemo run
make -C tutorial/practicals/nemo/4_nemo_openacc acc_test
F90=nvfortran F90FLAGS="-acc -Minfo=all -Mnofma -O2" make -C tutorial/practicals/generic run
make -C tutorial/practicals/generic/4_openacc acc_test
151 changes: 120 additions & 31 deletions .github/workflows/lfric_test.yml
@@ -46,8 +46,8 @@ jobs:
if: ${{ github.repository == 'stfc/PSyclone-mirror' }}
runs-on: self-hosted
env:
LFRIC_APPS_REV: 1192
PYTHON_VERSION: 3.12.5
LFRIC_APPS_REV: 3269
PYTHON_VERSION: 3.13.0

steps:
- uses: actions/checkout@v3
@@ -73,27 +73,130 @@ jobs:
# than the latest release from pypi.
# pip install external/fparser
pip install .[test]
pip install jinja2
# PSyclone, compile and run MetOffice gungho_model on GPU
- name: LFRic GungHo with OpenMP offload
run: |
# Set up environment
source /apps/spack/psyclone-spack/spack-repo/share/spack/setup-env.sh
spack load lfric-build-environment%nvhpc
source .runner_venv/bin/activate
export PSYCLONE_LFRIC_DIR=${GITHUB_WORKSPACE}/examples/lfric/scripts
export PSYCLONE_CONFIG_FILE=${PSYCLONE_LFRIC_DIR}/KGOs/lfric_psyclone.cfg
# The LFRic source must be patched to workaround bugs in the NVIDIA
# compiler's namelist handling.
rm -rf ${HOME}/LFRic/gpu_build
mkdir -p ${HOME}/LFRic/gpu_build
cp -r ${HOME}/LFRic/lfric_apps_${LFRIC_APPS_REV} ${HOME}/LFRic/gpu_build/lfric_apps
cp -r ${HOME}/LFRic/lfric_core_50869 ${HOME}/LFRic/gpu_build/lfric
cd ${HOME}/LFRic/gpu_build
patch -p1 < ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_${LFRIC_APPS_REV}_nvidia.patch
# Update the compiler definitions to build for GPU
cp ${PSYCLONE_LFRIC_DIR}/KGOs/nvfortran_acc.mk lfric/infrastructure/build/fortran/nvfortran.mk
cp ${PSYCLONE_LFRIC_DIR}/KGOs/nvc++.mk lfric/infrastructure/build/cxx/.
# Update the PSyclone commands to ensure transformed kernels are written
# to working directory.
cp ${PSYCLONE_LFRIC_DIR}/KGOs/psyclone.mk lfric/infrastructure/build/psyclone/.
# Update dependencies.sh to point to our patched lfric core.
sed -i -e 's/export lfric_core_sources=.*$/export lfric_core_sources\=\/home\/gh_runner\/LFRic\/gpu_build\/lfric/' lfric_apps/dependencies.sh
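# For illustration only (the original right-hand side below is a placeholder,
# not taken from the real dependencies.sh): the sed above rewrites a line such as
#   export lfric_core_sources=/path/to/default/lfric_core
# into
#   export lfric_core_sources=/home/gh_runner/LFRic/gpu_build/lfric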
export LFRIC_DIR=${HOME}/LFRic/gpu_build/lfric_apps
export OPT_DIR=${LFRIC_DIR}/applications/gungho_model/optimisation/psyclone-test
cd ${LFRIC_DIR}
# PSyclone scripts must now be under 'optimisation' and be called 'global.py'
mkdir -p ${OPT_DIR}
cp ${PSYCLONE_LFRIC_DIR}/gpu_offloading.py ${OPT_DIR}/global.py
# Clean previous version and compile again
rm -rf applications/gungho_model/working
LFRIC_OFFLOAD_DIRECTIVES=omp ./build/local_build.py -a gungho_model -p psyclone-test
cd applications/gungho_model/example
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its.nml configuration.nml
mpirun -n 1 ../bin/gungho_model configuration.nml |& tee output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its_checksums.txt gungho_model-checksums.txt
cat timer.txt
export VAR_TIME=$(grep "gungho_model" timer.txt | cut -d'|' -f5)
export VAR_HALOS=$(grep "gungho_model" halo_calls_counter.txt | cut -d'|' -f5)
echo $GITHUB_REF_NAME $GITHUB_SHA $VAR_TIME $VAR_HALOS >> ${HOME}/store_results/lfric_omp_performance_history
${HOME}/mongosh-2.1.1-linux-x64/bin/mongosh \
"mongodb+srv://cluster0.x8ncpxi.mongodb.net/PerformanceMonitoring" \
--quiet --apiVersion 1 --username ${{ secrets.MONGODB_USERNAME }} \
--password ${{ secrets.MONGODB_PASSWORD }} \
--eval 'db.GitHub_CI.insertOne({branch_name: "'"$GITHUB_REF_NAME"'", commit: "'"$GITHUB_SHA"'",
github_job: "'"$GITHUB_RUN_ID"'"-"'"$GITHUB_RUN_ATTEMPT"'",
ci_test: "LFRic OpenMP offloading", lfric_apps_version: '"$LFRIC_APPS_REV"', system: "GlaDos",
compiler:"spack-nvhpc-24.5", date: new Date(), elapsed_time: '"$VAR_TIME"',
num_of_halo_exchanges: '"$VAR_HALOS"'})'
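The VAR_TIME extraction above relies on the '||'-delimited layout of LFRic's timer.txt, which makes the cut field numbering easy to misread; the following is a minimal sketch with an invented layout (the real LFRic column set is not reproduced here):
cat > timer.txt <<'EOF'
||     Routine     || time(s) || calls ||
||  gungho_model   ||  120.01 ||     1 ||
EOF
# Splitting on '|' leaves an empty field between the two bars of each '||',
# so field 3 is the routine name and field 5 is the first value column
# ('120.01' in this invented layout).
grep "gungho_model" timer.txt | cut -d'|' -f5
VAR_HALOS is pulled from halo_calls_counter.txt with the same grep/cut pattern.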
# PSyclone, compile and run MetOffice gungho_model on GPU
- name: LFRic GungHo with OpenACC offload
run: |
# Set up environment
source /apps/spack/psyclone-spack/spack-repo/share/spack/setup-env.sh
spack load lfric-build-environment%nvhpc
source .runner_venv/bin/activate
export PSYCLONE_LFRIC_DIR=${GITHUB_WORKSPACE}/examples/lfric/scripts
export PSYCLONE_CONFIG_FILE=${PSYCLONE_LFRIC_DIR}/KGOs/lfric_psyclone.cfg
# The LFRic source must be patched to workaround bugs in the NVIDIA
# compiler's namelist handling.
rm -rf ${HOME}/LFRic/gpu_build
mkdir -p ${HOME}/LFRic/gpu_build
cp -r ${HOME}/LFRic/lfric_apps_${LFRIC_APPS_REV} ${HOME}/LFRic/gpu_build/lfric_apps
cp -r ${HOME}/LFRic/lfric_core_50869 ${HOME}/LFRic/gpu_build/lfric
cd ${HOME}/LFRic/gpu_build
patch -p1 < ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_${LFRIC_APPS_REV}_nvidia.patch
# Update the compiler definitions to build for GPU
cp ${PSYCLONE_LFRIC_DIR}/KGOs/nvfortran_acc.mk lfric/infrastructure/build/fortran/nvfortran.mk
cp ${PSYCLONE_LFRIC_DIR}/KGOs/nvc++.mk lfric/infrastructure/build/cxx/.
# Update the PSyclone commands to ensure transformed kernels are written
# to working directory.
cp ${PSYCLONE_LFRIC_DIR}/KGOs/psyclone.mk lfric/infrastructure/build/psyclone/.
# Update dependencies.sh to point to our patched lfric core.
sed -i -e 's/export lfric_core_sources=.*$/export lfric_core_sources\=\/home\/gh_runner\/LFRic\/gpu_build\/lfric/' lfric_apps/dependencies.sh
export LFRIC_DIR=${HOME}/LFRic/gpu_build/lfric_apps
export OPT_DIR=${LFRIC_DIR}/applications/gungho_model/optimisation/psyclone-test
cd ${LFRIC_DIR}
# PSyclone scripts must now be under 'optimisation' and be called 'global.py'
mkdir -p ${OPT_DIR}
cp ${PSYCLONE_LFRIC_DIR}/gpu_offloading.py ${OPT_DIR}/global.py
# Clean previous version and compile again
rm -rf applications/gungho_model/working
LFRIC_OFFLOAD_DIRECTIVES=acc ./build/local_build.py -a gungho_model -p psyclone-test
cd applications/gungho_model/example
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its.nml configuration.nml
mpirun -n 1 ../bin/gungho_model configuration.nml |& tee output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its_checksums.txt gungho_model-checksums.txt
cat timer.txt
export VAR_TIME=$(grep "gungho_model" timer.txt | cut -d'|' -f5)
export VAR_HALOS=$(grep "gungho_model" halo_calls_counter.txt | cut -d'|' -f5)
echo $GITHUB_REF_NAME $GITHUB_SHA $VAR_TIME $VAR_HALOS >> ${HOME}/store_results/lfric_acc_performance_history
${HOME}/mongosh-2.1.1-linux-x64/bin/mongosh \
"mongodb+srv://cluster0.x8ncpxi.mongodb.net/PerformanceMonitoring" \
--quiet --apiVersion 1 --username ${{ secrets.MONGODB_USERNAME }} \
--password ${{ secrets.MONGODB_PASSWORD }} \
--eval 'db.GitHub_CI.insertOne({branch_name: "'"$GITHUB_REF_NAME"'", commit: "'"$GITHUB_SHA"'",
github_job: "'"$GITHUB_RUN_ID"'"-"'"$GITHUB_RUN_ATTEMPT"'",
ci_test: "LFRic OpenACC", lfric_apps_version: '"$LFRIC_APPS_REV"', system: "GlaDos",
compiler:"spack-nvhpc-24.5", date: new Date(), elapsed_time: '"$VAR_TIME"',
num_of_halo_exchanges: '"$VAR_HALOS"'})'
# PSyclone, compile and run MetOffice LFRic with 6 MPI ranks
- name: LFRic passthrough (with DistributedMemory)
run: |
# Set up environment
source /apps/spack/spack-upstream/share/spack/setup-env.sh
spack load lfric-buildenv%gcc
source /apps/spack/psyclone-spack/spack-repo/share/spack/setup-env.sh
spack load lfric-build-environment%gcc@14
source .runner_venv/bin/activate
export PSYCLONE_LFRIC_DIR=${GITHUB_WORKSPACE}/examples/lfric/scripts
export PSYCLONE_CONFIG_FILE=${PSYCLONE_LFRIC_DIR}/KGOs/lfric_psyclone.cfg
export LFRIC_DIR=${HOME}/LFRic/lfric_apps
export LFRIC_DIR=${HOME}/LFRic/lfric_apps_${LFRIC_APPS_REV}
cd ${LFRIC_DIR}
# Clean previous version and compile again
rm -rf applications/gungho_model/working
./build/local_build.py -a gungho_model -v
# Run
cd applications/gungho_model/example
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gunho_configuration_4its.nml configuration.nml
mpirun -n 1 ../bin/gungho_model configuration.nml |& tee output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gunho_configuration_4its_output.txt output.txt
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its.nml configuration.nml
mpirun -n 6 ../bin/gungho_model configuration.nml |& tee output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its_checksums.txt gungho_model-checksums.txt
cat timer.txt
export VAR_TIME=$(grep "gungho_model" timer.txt | cut -d'|' -f5)
export VAR_HALOS=$(grep "gungho_model" halo_calls_counter.txt | cut -d'|' -f5)
@@ -105,40 +105,33 @@ jobs:
--eval 'db.GitHub_CI.insertOne({branch_name: "'"$GITHUB_REF_NAME"'", commit: "'"$GITHUB_SHA"'",
github_job: "'"$GITHUB_RUN_ID"'"-"'"$GITHUB_RUN_ATTEMPT"'",
ci_test: "LFRic Passthrough with DM", lfric_apps_version: '"$LFRIC_APPS_REV"', system: "GlaDos",
compiler:"spack-gfortran-11", date: new Date(), elapsed_time: '"$VAR_TIME"',
compiler:"spack-gfortran-14", date: new Date(), elapsed_time: '"$VAR_TIME"',
num_of_halo_exchanges: '"$VAR_HALOS"'})'
- name: Upload LFRic passthrough results
uses: exuanbo/actions-deploy-gist@v1
with:
token: ${{ secrets.GIST_TOKEN }}
gist_id: a4049a0fc0a0a11651a5ce6a04d76160
file_path: ../../../../store_results/lfric_passthrough_performance_history

# PSyclone, compile and run MetOffice LFRic with all optimisations and 6 OpenMP threads
- name: LFRic with all transformations
run: |
# Set up environment
source /apps/spack/spack-upstream/share/spack/setup-env.sh
spack load lfric-buildenv%gcc
source /apps/spack/psyclone-spack/spack-repo/share/spack/setup-env.sh
spack load lfric-build-environment%gcc@14
source .runner_venv/bin/activate
export PSYCLONE_LFRIC_DIR=${GITHUB_WORKSPACE}/examples/lfric/scripts
export PSYCLONE_CONFIG_FILE=${PSYCLONE_LFRIC_DIR}/KGOs/lfric_psyclone.cfg
export LFRIC_DIR=${HOME}/LFRic/lfric_apps
export LFRIC_DIR=${HOME}/LFRic/lfric_apps_${LFRIC_APPS_REV}
export OPT_DIR=${LFRIC_DIR}/applications/gungho_model/optimisation/psyclone-test
cd ${LFRIC_DIR}
# Psyclone scripts must now be under 'optimisation' and be called 'global.py'
# PSyclone scripts must now be under 'optimisation' and be called 'global.py'
mkdir -p applications/gungho_model/optimisation/psyclone-test
cp ${PSYCLONE_LFRIC_DIR}/everything_everywhere_all_at_once.py ${OPT_DIR}/global.py
# Clean previous version and compile again
rm -rf applications/gungho_model/working
./build/local_build.py -a gungho_model -p psyclone-test -v
# Run
cd applications/gungho_model/example
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gunho_configuration_4its.nml configuration.nml
cp ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its.nml configuration.nml
export OMP_NUM_THREADS=6
mpirun -n 1 ../bin/gungho_model configuration.nml |& tee output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gunho_configuration_4its_output.txt output.txt
python ${PSYCLONE_LFRIC_DIR}/compare_ouput.py ${PSYCLONE_LFRIC_DIR}/KGOs/lfric_gungho_configuration_4its_checksums.txt gungho_model-checksums.txt
cat timer.txt
export VAR_TIME=$(grep "gungho_model" timer.txt | cut -d'|' -f5)
export VAR_HALOS=$(grep "gungho_model" halo_calls_counter.txt | cut -d'|' -f5)
@@ -150,12 +150,5 @@ jobs:
--eval 'db.GitHub_CI.insertOne({branch_name: "'"$GITHUB_REF_NAME"'", commit: "'"$GITHUB_SHA"'",
github_job: "'"$GITHUB_RUN_ID"'"-"'"$GITHUB_RUN_ATTEMPT"'",
ci_test: "LFRic all transformations", lfric_version: '"$LFRIC_APPS_REV"', omp_threads: '"$OMP_NUM_THREADS"',
system: "GlaDos", compiler:"spack-gfortran-11", date: new Date(), elapsed_time: '"$VAR_TIME"',
system: "GlaDos", compiler:"spack-gfortran-14", date: new Date(), elapsed_time: '"$VAR_TIME"',
num_of_halo_exchanges: '"$VAR_HALOS"'})'
- name: Upload LFRic optimised results
uses: exuanbo/actions-deploy-gist@v1
with:
token: ${{ secrets.GIST_TOKEN }}
gist_id: a4049a0fc0a0a11651a5ce6a04d76160
file_path: ../../../../store_results/lfric_optimised_performance_history
28 changes: 3 additions & 25 deletions .github/workflows/nemo_tests.yml
@@ -36,7 +36,7 @@
# This workflow will use a self-hosted runner to perform the more expensive
# integrations tests that are not run on GHA systems.

name: NEMO Integration Tests
name: NEMOv4 Integration Tests

on:
push
@@ -46,13 +46,13 @@ jobs:
if: ${{ github.repository == 'stfc/PSyclone-mirror' }}
runs-on: self-hosted
env:
HDF5_VERSION: 1.14.4.3
HDF5_VERSION: 1.14.5
NETCDF_C_VERSION: 4.9.2
NETCDF_FORTRAN_VERSION: 4.6.1
NVFORTRAN_VERSION: 23.7
ONEAPI_VERSION: 2024.2.1
PERL_VERSION: 5.40.0
PYTHON_VERSION: 3.12.5
PYTHON_VERSION: 3.13.0

steps:
- uses: actions/checkout@v3
@@ -82,28 +82,6 @@ jobs:
cd lib/profiling/nvidia/
F90=nvfortran make
# PSyclone passthrough for 5.0-beta of NEMO.
- name: NEMO 5.0 beta passthrough without optimisation
run: |
. .runner_venv/bin/activate
export PSYCLONE_NEMO_DIR=${GITHUB_WORKSPACE}/examples/nemo/scripts
# PSYCLONE_HOME has `/bin` appended to it by the build system.
export PSYCLONE_HOME=${PWD}/.runner_venv
export NEMO_DIR=${HOME}/NEMOv5
cd $NEMO_DIR
module load nvidia-hpcsdk/${NVFORTRAN_VERSION}
module load hdf5/${HDF5_VERSION} netcdf_c/${NETCDF_C_VERSION} netcdf_fortran/${NETCDF_FORTRAN_VERSION}
module load perl/${PERL_VERSION}
# We compile at -O1 to permit comparison of the results. (N.B. this test
# passes at -O3 with the Intel ifx compiler.)
./makenemo -r BENCH -m linux_nvidia_O1 -n BENCH_PASSTHROUGH -p passthrough del_key "key_xios key_iomput key_top" add_key "key_nosignedzero" clean -y
./makenemo -r BENCH -m linux_nvidia_O1 -n BENCH_PASSTHROUGH -p passthrough del_key "key_xios key_iomput key_top" add_key "key_nosignedzero" -j 4
cd $NEMO_DIR/tests/BENCH_PASSTHROUGH/EXP00
mpirun -np 4 ./nemo
diff run.stat ${PSYCLONE_NEMO_DIR}/KGOs/run.stat.bench.orca2.4mpi.10steps
export VAR_TIME=$(awk '/ step /{print $3}' timing.output | head -n 1 | sed -e 's/s//')
echo "Time-stepping duration = " $VAR_TIME
# PSyclone passthrough for MetOffice NEMO
- name: NEMO MetOffice Passthrough
run: |
