Release v0.3.7 #65

Workflow file for this run

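# Renders the example notebooks (notebooks/*.py) to .ipynb using a cached
# conda environment, then publishes the results to S3 and as a build artifact.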
name: Render Example Notebooks
on:
  workflow_dispatch:
  push:
    paths:
      - "notebooks/*.py"
      - ".github/workflows/render.yml"
jobs:
  build-binder-env:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/cache@v3
        id: binder_cache
        with:
          path: /tmp/binder_env
          key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}
      - uses: conda-incubator/setup-miniconda@v2
        if: steps.binder_cache.outputs.cache-hit != 'true'
        with:
          channels: conda-forge
          channel-priority: true
          activate-environment: ""
          mamba-version: "*"
          use-mamba: true
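      # All remaining steps are skipped when the environment was restored
      # from cache; miniconda/mamba are only needed for a fresh build.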
      - name: Dump Conda Environment Info
        shell: bash -l {0}
        if: steps.binder_cache.outputs.cache-hit != 'true'
        run: |
          conda info
          conda list
          mamba -V
          conda config --show-sources
          conda config --show
          printenv | sort
      - name: Build Python Environment for Notebooks
        shell: bash -l {0}
        if: steps.binder_cache.outputs.cache-hit != 'true'
        run: |
          cd binder
          mamba env create -f environment.yml -p /tmp/binder_env
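      # binder/environment.yml is not shown in this run; it is a standard
      # conda spec. An illustrative sketch only -- the package names below are
      # assumptions (e.g. jupytext for the .py -> .ipynb rendering):
      #
      #   name: odc-stac-binder
      #   channels:
      #     - conda-forge
      #   dependencies:
      #     - python=3.10
      #     - jupytext
      #     - nbconvert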
      - name: Check Python Env
        shell: bash -l {0}
        if: steps.binder_cache.outputs.cache-hit != 'true'
        run: |
          mamba env export -p /tmp/binder_env
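  # Renders notebooks/*.py to .ipynb, re-using the environment cached by
  # build-binder-env.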
  render:
    runs-on: ubuntu-latest
    needs:
      - build-binder-env
    steps:
      - uses: actions/checkout@v3
      - name: Config
        id: cfg
        run: |
          find notebooks/ -maxdepth 1 -name '*.py' -type f | sort -f -d
          nb_dir="docs/notebooks"
          nb_hash=$(python scripts/notebook_hash.py)
          echo "Notebooks hash: ${nb_hash}"
          echo "nb-dir=${nb_dir}" >> $GITHUB_OUTPUT
          echo "nb-hash=${nb_hash}" >> $GITHUB_OUTPUT
          echo "nb-archive=odc-stac-notebooks-${nb_hash}.tar.gz" >> $GITHUB_OUTPUT
      - uses: actions/cache@v3
        id: nb_cache
        with:
          path: ${{ steps.cfg.outputs.nb-dir }}
          key: docs-notebooks-${{ hashFiles('notebooks/*.py') }}
      - name: Get Conda Environment from Cache
        if: steps.nb_cache.outputs.cache-hit != 'true'
        uses: actions/cache@v3
        id: conda_cache
        with:
          path: /tmp/binder_env
          key: ${{ runner.os }}-binder-env-${{ hashFiles('binder/environment.yml') }}
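      # Same path and key as the cache written by build-binder-env, so this
      # restores the pre-built environment rather than creating a new one.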
      - name: Update PATH
        if: steps.nb_cache.outputs.cache-hit != 'true'
        shell: bash
        run: |
          echo "/tmp/binder_env/bin" >> $GITHUB_PATH
      - name: Run Notebooks
        if: steps.nb_cache.outputs.cache-hit != 'true'
        run: |
          nb_dir="${{ steps.cfg.outputs.nb-dir }}"
          mkdir -p $nb_dir
          for src in $(find notebooks -maxdepth 1 -type f -name '*.py'); do
            dst="${nb_dir}/$(basename ${src%%.py}.ipynb)"
            echo "$src -> $dst"
            ./binder/render-nb-pipe.sh <$src >$dst
          done
          ls -lh ${nb_dir}/
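      # binder/render-nb-pipe.sh is not shown here. From its usage above it
      # is assumed to read a script-format notebook on stdin and write an
      # executed .ipynb to stdout, along the lines of:
      #
      #   jupytext --to ipynb - | jupyter nbconvert --to notebook --execute --stdin --stdout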
      - name: Package Notebooks
        run: |
          nb_dir="${{ steps.cfg.outputs.nb-dir }}"
          nb_hash="${{ steps.cfg.outputs.nb-hash }}"
          nb_archive="${{ steps.cfg.outputs.nb-archive }}"
          echo "DIR: ${nb_dir}"
          echo "NB hash: $nb_hash"
          echo "Archive: $nb_archive"
          (cd $nb_dir && tar cvz .) > "${nb_archive}"
          ls -lh "${nb_archive}"
          tar tzf "${nb_archive}"
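      # The archive is created from inside ${nb_dir}, so its entries are
      # relative ("./<name>.ipynb") and extract straight into a target
      # directory.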
      - name: Upload to S3
        run: |
          nb_archive="${{ steps.cfg.outputs.nb-archive }}"
          echo "Using Keys: ...${AWS_ACCESS_KEY_ID:(-4)}/...${AWS_SECRET_ACCESS_KEY:(-4)}"
          echo "Testing permissions"
          aws s3 ls "${S3_DST}/" || true
          aws s3 cp "${nb_archive}" "${S3_DST}/${nb_archive}"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_DEFAULT_REGION: "ap-southeast-2"
          AWS_REGION: "ap-southeast-2"
          S3_DST: "s3://datacube-core-deployment/odc-stac/nb"
      - name: Upload results (artifact)
        uses: actions/upload-artifact@v3
        with:
          name: rendered-notebooks
          path: docs/notebooks
          if-no-files-found: error