Dataset Lifecycle - Backlog and Post-Compute #4771

---
# lifecycle processing for all datasets currently being tracked - backlog and post-compute
name: Dataset Lifecycle - Backlog and Post-Compute
on:
  # run whenever a pull request is labeled
  # triggers when we add the `tracking` label
  pull_request:
    types: [labeled]
  # run every 8 hours
  schedule:
    - cron: "0 0,8,16 * * *"
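  # also allow manual runs from the Actions tab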
  workflow_dispatch:
defaults:
  run:
    shell: bash -l {0}
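    # login shell (-l) so the micromamba-activated environment is available in run steps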
jobs:
  exec-lifecycle:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: nschloe/action-cached-lfs-checkout@v1
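        # checks out the repository with Git LFS objects restored from the Actions cache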
      - name: ensure we only have one instance running
        uses: softprops/turnstyle@master
        env:
          GITHUB_TOKEN: ${{ secrets.GH_DANGERBOT_TOKEN_LIMITED }}
        with:
          abort-after-seconds: 60
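          # turnstyle waits for any in-progress run of this workflow; abort this run if still blocked after 60 s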
      - name: Additional info about the build
        shell: bash
        run: |
          uname -a
          df -h
          ulimit -a
      - name: Install environment
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: devtools/conda-envs/backlog.yaml
          create-args: >-
            python=3.11
          cache-environment: true
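        # builds the conda environment from backlog.yaml pinned to Python 3.11 and caches it between runs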
      - name: Environment Information
        run: |
          conda info
          conda list
      - name: Run lifecycle processing script
        env:
          GH_TOKEN: ${{ secrets.QCA_DATASET_SUBMISSION_PAT }}
          QCA_USER: ${{ secrets.QCA_USER }}
          QCA_KEY: ${{ secrets.QCA_KEY }}
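          # GH_TOKEN is presumably used by lifecycle.py for GitHub API access; QCA_USER/QCA_KEY look like QCArchive server credentials (assumption)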
        run: |
          python ./management/lifecycle.py --states "Backlog" "Requires Scientific Review" "End of Life" "Archived/Complete"
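          # (assumption) the script scans tracked dataset PRs and only acts on those currently in the listed lifecycle states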