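# GPU inference smoke test on an on-demand AWS runner.
# Triggered manually (workflow_dispatch): the start job provisions a
# g4dn.xlarge EC2 instance and registers it as a self-hosted runner,
# the test job runs the inference check on that instance, and the stop
# job always shuts the instance down afterwards.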
name: Inference test
on:
  workflow_dispatch:

jobs:
  start-aws-runner:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    outputs:
      mapping: ${{ steps.aws-start.outputs.mapping }}
      instances: ${{ steps.aws-start.outputs.instances }}
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_ROLE }}
          aws-region: us-east-1
      - name: Create cloud runner
        id: aws-start
        uses: omsf-eco-infra/gha-runner@main
        with:
          provider: "aws"
          action: "start"
          aws_region_name: us-east-1
          aws_image_id: ami-0f7c4a792e3fb63c8
          aws_instance_type: g4dn.xlarge
          aws_home_dir: /home/ubuntu
        env:
          GH_PAT: ${{ secrets.GH_PAT }}
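
  # Runs on the EC2 instance provisioned above; the runner label comes
  # from the start job's `instances` output.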
  inference-test:
    runs-on: ${{ fromJSON(needs.start-aws-runner.outputs.instances) }}
    defaults:
      run:
        shell: bash -leo pipefail {0}
    needs:
      - start-aws-runner
    steps:
      - name: Checkout scripts
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            inference-test
          repository: omsf-eco-infra/ci-scripts
          path: ci-scripts
      - name: Print disk usage
        run: "df -h"
      - name: Print Docker details
        run: "docker version || true"
      - name: Check for nvidia-smi
        run: "nvidia-smi || true"
      - uses: mamba-org/setup-micromamba@main
        with:
          environment-file: ci-scripts/inference-test/environment.yml
      - name: Test for GPU
        id: gpu_test
        run: python ci-scripts/inference-test/inference.py
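
  # Teardown runs even when the test job fails or is cancelled
  # (if: always()), so the GPU instance is not left running.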
  stop-aws-runner:
    runs-on: ubuntu-latest
    needs:
      - start-aws-runner
      - inference-test
    if: ${{ always() }}
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_ROLE }}
          aws-region: us-east-1
      - name: Stop instances
        uses: omsf-eco-infra/gha-runner@main
        with:
          provider: "aws"
          action: "stop"
          aws_region_name: us-east-1
          instance_mapping: ${{ needs.start-aws-runner.outputs.mapping }}
        env:
          GH_PAT: ${{ secrets.GH_PAT }}