---
# Copyright 2024 Advanced Micro Devices
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
name: HF top-1000 model Test Suite

# Triggers: every pull request, manual dispatch, and a nightly schedule.
on:
  pull_request:
  workflow_dispatch:
  schedule:
    # Runs daily at 09:30 UTC (2:30 AM PST / 1:30 AM PDT).
    - cron: '30 9 * * *'
jobs:
  # Runs the alt_e2eshark test runner over the HF top-1000 model list, one
  # matrix entry per HF task-category shard, on two backends:
  #   - rocm/hip on MI300 GPU runners
  #   - llvm-cpu/local-task on MI250 (CPU-only) runners
  e2eshark:
    timeout-minutes: 600
    name: "Models :: ${{ matrix.backend }} :: ${{ matrix.test-file }}"
    runs-on: ${{ matrix.runs-on }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: mi300_gpu1_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-feature-extraction-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu2_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-fill-mask-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu3_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-image-classification-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu4_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-image-segmentation-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu5_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-multiple-choice-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu6_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-object-detection-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu7_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-question-answering-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu8_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-semantic-segmentation-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu9_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-text-classification-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          # NOTE(review): the rocm text-generation shard is disabled, but the
          # push_artifacts job below still downloads and merges its artifacts
          # for backend=rocm — see the NOTE there before re-enabling/removing.
          # - name: mi300_gpu10_test
          #   runs-on: nodai-amdgpu-mi300-x86-64
          #   backend: rocm
          #   device: hip
          #   target-chip: gfx942
          #   test-file: hf-text-generation-shard
          #   cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: mi300_gpu11_test
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            device: hip
            target-chip: gfx942
            test-file: hf-token-classification-shard
            cache-dir: /data/e2eshark/shark-test-suite-models-cache
          - name: cpu_shard1_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-feature-extraction-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard2_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-fill-mask-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard3_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-image-classification-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard4_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-image-segmentation-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard5_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-multiple-choice-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard6_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-object-detection-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard7_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-question-answering-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard8_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-semantic-segmentation-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard9_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-text-classification-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard10_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-text-generation-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
          - name: cpu_shard11_test
            runs-on: nodai-amdgpu-mi250-x86-64
            backend: llvm-cpu
            device: local-task
            target-chip: x86_64-linux-gnu
            test-file: hf-token-classification-shard
            cache-dir: /groups/aig_sharks/test-suite-ci-cache
    env:
      E2E_VENV_DIR: ${{ github.workspace }}/test-suite_venv
      EP_VENV_DIR: ${{ github.workspace }}/ep_venv
      ALT_E2E_VENV_DIR: ${{ github.workspace }}/alt-test-suite_venv
      HF_TOKEN: ${{ secrets.HF_TOKEN }}
      AZ_PRIVATE_CONNECTION: ${{ secrets.ONNXPRIVATESTORAGE_AZ_PRIVATE_CONNECTION }}
      CACHE_DIR: ${{ matrix.cache-dir }}
    steps:
      - name: Checkout Test Suite
        # NOTE(review): actions/checkout@v2 runs on deprecated Node 12;
        # consider upgrading to actions/checkout@v4.
        uses: actions/checkout@v2
        with:
          repository: nod-ai/SHARK-TestSuite
          path: test-suite
      - name: "Setup alt e2eshark python venv"
        run: |
          rm -rf ${ALT_E2E_VENV_DIR}
          python3.11 -m venv ${ALT_E2E_VENV_DIR}
          source ${ALT_E2E_VENV_DIR}/bin/activate
          pip install --upgrade pip
          pip install -r ./alt_e2eshark/base_requirements.txt
          pip install -r ./alt_e2eshark/hf_requirements.txt
          pip install -r ./alt_e2eshark/iree_requirements.txt
          pip install --no-deps -r ./alt_e2eshark/torch_mlir_requirements.txt
          pip install --pre --upgrade iree-base-compiler iree-base-runtime -f https://iree.dev/pip-release-links.html
        working-directory: ./test-suite
      - name: Run HF top-1000 model
        run: |
          source ${ALT_E2E_VENV_DIR}/bin/activate
          pip freeze
          cd alt_e2eshark
          free -mh
          python3.11 ./run.py \
            -r ./test-onnx \
            --report \
            --testsfile onnx_tests/models/external_lists/hf-model-shards/${{ matrix.test-file }}.txt \
            -b ${{ matrix.backend }} \
            -d ${{ matrix.device }} \
            -c ${{ matrix.target-chip }} \
            --report-file reports/${{ matrix.test-file }}.md \
            --mode=cl-onnx-iree \
            --cleanup=3 \
            --get-metadata \
            -v
          python utils/find_duplicate_models.py -s -r ./test-onnx -o reports/duplicates.json
        working-directory: ./test-suite
      # NOTE(review): upload/download-artifact pinned to @master is mutable and
      # can break without warning; prefer a released tag (e.g. @v4).
      - uses: actions/upload-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_${{ matrix.test-file }}_onnx_md
          path: ./test-suite/alt_e2eshark/reports/${{ matrix.test-file }}.md
      - uses: actions/upload-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_${{ matrix.test-file }}_onnx_json
          path: ./test-suite/alt_e2eshark/reports/${{ matrix.test-file }}.json
      - uses: actions/upload-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_${{ matrix.test-file }}_duplicates_json
          # Fix: find_duplicate_models.py writes to reports/duplicates.json
          # (see -o flag above); the previous path pointed at the parent
          # directory and uploaded nothing.
          path: ./test-suite/alt_e2eshark/reports/duplicates.json

  # Downloads all per-shard reports for one backend, merges them into a
  # combined report, and commits the results to the e2eshark-reports repo.
  push_artifacts:
    needs: [e2eshark]
    runs-on: ${{ matrix.runs-on }}
    strategy:
      fail-fast: false
      # Serialize the two merge jobs so their git pushes don't race.
      max-parallel: 1
      matrix:
        include:
          - name: merge_rocm_reports
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: rocm
            regression-blob: rocm
          - name: merge_cpu_reports
            runs-on: nodai-amdgpu-mi300-x86-64
            backend: llvm-cpu
            regression-blob: cpu
    env:
      AZ_PUBLIC_KEY: ${{ secrets.SHARKPUBLIC_AZ_PUBLIC_KEY }}
    steps:
      - name: Checkout Test Suite
        uses: actions/checkout@v2
        with:
          repository: nod-ai/SHARK-TestSuite
          path: test-suite
      - name: Checkout repo
        uses: actions/checkout@v2
        with:
          repository: nod-ai/e2eshark-reports
          ref: main
          token: ${{ secrets.E2ESHARK_GITHUB_TOKEN }}
          path: e2eshark-reports
      - name: "Setup alt test suite venv"
        run: |
          python3.11 -m venv report_venv_alt
          source report_venv_alt/bin/activate
          pip install --upgrade pip
          pip install -r ./test-suite/alt_e2eshark/base_requirements.txt
          pip install -r ./test-suite/alt_e2eshark/iree_requirements.txt
          pip install --no-deps -r ./test-suite/alt_e2eshark/torch_mlir_requirements.txt
          pip install --pre --upgrade iree-base-compiler iree-base-runtime -f https://iree.dev/pip-release-links.html
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_json
      # NOTE(review): the rocm hf-text-generation-shard matrix entry is
      # commented out in the e2eshark job, so for backend=rocm these two
      # downloads (and the merge/cp steps that consume them) will fail —
      # re-enable that shard or make these steps backend-conditional.
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_json
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_md
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_md
      - uses: actions/download-artifact@master
        with:
          name: ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_json
          path: ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_json
      - name: Merge Reports
        run: |
          source report_venv_alt/bin/activate
          python ./test-suite/alt_e2eshark/utils/merge_dicts.py \
            --sources ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_json/hf-feature-extraction-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_json/hf-fill-mask-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_json/hf-image-classification-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_json/hf-image-segmentation-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_json/hf-multiple-choice-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_json/hf-object-detection-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_json/hf-question-answering-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_json/hf-semantic-segmentation-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_json/hf-text-classification-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_json/hf-text-generation-shard.json \
            ./e2eshark-reports/ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_json/hf-token-classification-shard.json \
            --output ./e2eshark-reports/hf_combined_reports.json \
            --report --report-file ./e2eshark-reports/hf_combined_reports.md
      - name: Push status artifacts
        run: |
          git config user.name "GitHub Actions Bot"
          git config user.email "<>"
          git pull
          date=$(date '+%Y-%m-%d')
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-feature-extraction-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-fill-mask-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-image-classification-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-image-segmentation-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-multiple-choice-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-object-detection-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-question-answering-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-semantic-segmentation-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-text-classification-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-text-generation-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-token-classification-shard
          mkdir -p ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf_combined-reports
          cp ci_reports_${{ matrix.backend }}_hf-feature-extraction-shard_onnx_md/hf-feature-extraction-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-feature-extraction-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-fill-mask-shard_onnx_md/hf-fill-mask-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-fill-mask-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-image-classification-shard_onnx_md/hf-image-classification-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-image-classification-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-image-segmentation-shard_onnx_md/hf-image-segmentation-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-image-segmentation-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-multiple-choice-shard_onnx_md/hf-multiple-choice-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-multiple-choice-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-object-detection-shard_onnx_md/hf-object-detection-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-object-detection-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-question-answering-shard_onnx_md/hf-question-answering-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-question-answering-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-semantic-segmentation-shard_onnx_md/hf-semantic-segmentation-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-semantic-segmentation-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-text-classification-shard_onnx_md/hf-text-classification-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-text-classification-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-text-generation-shard_onnx_md/hf-text-generation-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-text-generation-shard/summary.md
          cp ci_reports_${{ matrix.backend }}_hf-token-classification-shard_onnx_md/hf-token-classification-shard.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf-token-classification-shard/summary.md
          cp hf_combined_reports.md ${date}/ci_reports_onnx/hf/${{ matrix.backend }}/hf_combined-reports/summary.md
          git add $date
          git commit -m "add CI status reports for e2eshark for ${{ matrix.backend }}"
          git push origin main
        working-directory: ./e2eshark-reports