Skip to content

[WIP] Migrate tests to github actions #7

[WIP] Migrate tests to github actions

[WIP] Migrate tests to github actions #7

Workflow file for this run

name: Tests
# Run on pushes / PRs targeting main, plus a nightly scheduled run.
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  schedule:
    # Nightly at 00:00 UTC
    - cron: "0 0 * * *"
jobs:
  # Validates the Sphinx documentation (currently: external link checking).
  check-docs:
    runs-on: ubuntu-latest
    defaults:
      run:
        # -l (login) so the conda profile hook is sourced; -e to fail fast
        shell: bash -el {0}
    steps:
      - name: Checkout
        # v4 for consistency with the other jobs; v2 runs on deprecated node12
        uses: actions/checkout@v4
      - name: Install Conda
        uses: conda-incubator/setup-miniconda@v3
        with:
          conda-solver: libmamba
          activate-environment: glow-docs
      - name: Cache Conda env
        uses: actions/cache@v3
        with:
          path: ${{ env.CONDA }}/envs
          key:
            conda-${{ hashFiles('docs/source/environment.yml') }}-${{ env.CACHE_NUMBER }}
        env:
          # Increase this value to reset the cache when docs/source/environment.yml
          # has not changed but a rebuild of the env is needed anyway
          CACHE_NUMBER: 0
        id: cache
      - name: Update environment
        # Only rebuild the env on a cache miss
        run:
          conda env update -n glow-docs -f docs/source/environment.yml
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Install Certs
        run: sudo apt-get install -y ca-certificates
      - name: Check docs links
        run: (cd docs && make linkcheck)
      # - name: Configure Databricks CLI
      #   run: |
      #     printf "[docs-ci]\nhost = https://adb-984752964297111.11.azuredatabricks.net\ntoken = ${{ secrets.DATABRICKS_API_TOKEN }}\njobs-api-version = 2.1\n" > ~/.databrickscfg
      # - name: Generate notebook source files
      #   run: |
      #     export PATH=$HOME/conda/envs/glow/bin:$PATH
      #     for f in $(find docs/source/_static/notebooks -type f -name '*.html'); do
      #       python docs/dev/gen-nb-src.py --html "${f}" --cli-profile docs-ci
      #     done
spark-tests:
runs-on: ubuntu-latest
defaults:
run:
shell: bash -el {0}
strategy:
matrix:
spark_version: [3.4.1, 3.5.0]
scala_version: [2.12.18]
env:
SPARK_VERSION: ${{ matrix.spark_version }}
SCALA_VERSION: ${{ matrix.scala_version }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'adopt'
java-version: '8'
cache: 'sbt'
cache-dependency-path: 'build.sbt'
- name: Install Conda
uses: conda-incubator/setup-miniconda@v3
with:
conda-solver: libmamba
activate-environment: glow
- name: Cache Conda env
uses: actions/cache@v3
with:
path: ${{ env.CONDA }}/envs
key:
conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
id: cache
- name: Update environment
run:
conda env update -n glow -f python/environment.yml
if: steps.cache.outputs.cache-hit != 'true'
- name: Install correct PySpark version
run: pip install pyspark==${{ matrix.spark_version }}
- name: Scala tests
run: sbt core/test exit
- name: Python tests
run: sbt python/test exit
- name: Docs tests
run: sbt docs/test exit
- name: Upload results
uses: actions/upload-artifact@v4
with:
name: test-results
path: |
~/glow/core/target/scala-2.13/test-reports
~/glow/unit-tests.log
spark-4-tests:
runs-on: ubuntu-latest
defaults:
run:
shell: bash -el {0}
env:
SPARK_VERSION: 4.0.0-SNAPSHOT
SCALA_VERSION: 2.13.12
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'adopt'
java-version: '17'
cache: 'sbt'
cache-dependency-path: 'build.sbt'
- name: Install Conda
uses: conda-incubator/setup-miniconda@v3
with:
conda-solver: libmamba
activate-environment: glow-spark4
- name: Cache Conda env
uses: actions/cache@v3
with:
path: ${{ env.CONDA }}/envs
key:
conda-${{ hashFiles('python/spark-4-environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
id: cache
- name: Update environment
run:
conda env update -n glow-spark4 -f python/spark-4-environment.yml
if: steps.cache.outputs.cache-hit != 'true'
- name: Clone Spark (for PySpark source)
run: (cd $HOME && git clone https://github.com/apache/spark.git)
- name: Scala tests
run: sbt core/test exit
- name: Uninstall PySpark
run: pip uninstall -y pyspark
- name: Python tests
run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt python/test exit
- name: Docs tests
run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt docs/test exit
- name: Upload results
uses: actions/upload-artifact@v4
if: success() || failure()
with:
name: test-results
path: |
~/glow/core/target/scala-2.13/test-reports
~/glow/unit-tests.log