# Workflow run context: "Support for loading multiple images for phi3 vision" (#2707).
# NOTE: This file was captured from the GitHub web UI; the banner about
# bidirectional Unicode characters is page chrome, not file content.
name: "Linux CPU x64 Build"

# Triggers: manual dispatch, pushes to main / release branches, and all PRs.
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - rel-*
  pull_request:

# Cancel in-flight runs for the same branch/PR so only the latest commit builds.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

env:
  ORT_NIGHTLY_REST_API: "https://feeds.dev.azure.com/aiinfra/PublicPackages/_apis/packaging/Feeds/ORT-Nightly/packages?packageNameQuery=Microsoft.ML.OnnxRuntime&api-version=6.0-preview.1"
  ORT_PACKAGE_NAME: "Microsoft.ML.OnnxRuntime"
  ORT_NIGHTLY_SOURCE: "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json"
  DOTNET_INSTALL_DIR: "${{ github.workspace }}/dotnet"

jobs:
  linux_cpu_x64:
    runs-on: [ "self-hosted", "1ES.Pool=onnxruntime-genai-Ubuntu2204-AMD-CPU" ]
    steps:
      - name: Checkout OnnxRuntime GenAI repo
        uses: actions/checkout@v4
        with:
          submodules: true

      - name: Install jq
        run: |
          # -y keeps apt-get non-interactive; without it the step can hang in CI.
          sudo apt-get install -y jq

      - uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '8.0.x'

      - name: Get the Latest OnnxRuntime Nightly Version
        run: |
          # Query the Azure DevOps feed and take the newest normalized version.
          ORT_NIGHTLY_VERSION=$(curl -s "${{ env.ORT_NIGHTLY_REST_API }}" | jq -r '.value[0].versions[0].normalizedVersion')
          echo "$ORT_NIGHTLY_VERSION"
          echo "ORT_NIGHTLY_VERSION=$ORT_NIGHTLY_VERSION" >> $GITHUB_ENV

      - name: Download OnnxRuntime Nightly
        run: |
          # A throwaway console project is used purely to pull the NuGet package
          # into the workspace (--package-directory .).
          dotnet new console
          dotnet add package ${{ env.ORT_PACKAGE_NAME }} --version ${{ env.ORT_NIGHTLY_VERSION }} --source ${{ env.ORT_NIGHTLY_SOURCE }} --package-directory .
          dotnet build
        continue-on-error: true

      - name: list files
        shell: bash
        run: |
          ls -l
          ls -R ${{ env.ORT_PACKAGE_NAME }}
        continue-on-error: true

      # TODO: Find out why we need to have libonnxruntime.so.$ort_version
      - name: Extract OnnxRuntime library and header files
        run: |
          set -e -x
          mkdir -p ort/lib
          mv microsoft.ml.onnxruntime/${{ env.ORT_NIGHTLY_VERSION }}/build/native/include ort/
          mv microsoft.ml.onnxruntime/${{ env.ORT_NIGHTLY_VERSION }}/runtimes/linux-x64/native/* ort/lib/
          cp ort/lib/libonnxruntime.so ort/lib/libonnxruntime.so.1

      - name: Build with CMake and GCC
        run: |
          set -e -x
          rm -rf build
          cmake --preset linux_gcc_cpu_release
          cmake --build --preset linux_gcc_cpu_release

      - name: Install the python wheel and test dependencies
        run: |
          python3 -m pip install -r test/python/requirements.txt --user
          python3 -m pip install -r test/python/requirements-cpu.txt --user
          python3 -m pip install build/cpu/wheel/onnxruntime_genai*.whl --user

      - name: Get HuggingFace Token
        run: |
          az login --identity --username 63b63039-6328-442f-954b-5a64d124e5b4
          # --output tsv strips the JSON quotes the default output format adds;
          # without it HF_TOKEN would contain literal '"' characters.
          HF_TOKEN=$(az keyvault secret show --vault-name anubissvcsecret --name ANUBIS-HUGGINGFACE-TOKEN --query value --output tsv)
          echo "::add-mask::$HF_TOKEN"
          echo "HF_TOKEN=$HF_TOKEN" >> $GITHUB_ENV

      - name: Remove the ort lib and header files
        run: |
          rm -rf ort

      # This will also download all the test models to the test/test_models directory
      # These models are used by the python tests as well as C#, C++ and others.
      - name: Run the python tests
        run: |
          python3 test/python/test_onnxruntime_genai.py --cwd test/python --test_models test/test_models

      - name: Verify Build Artifacts
        if: always()
        continue-on-error: true
        run: |
          ls -l ${{ github.workspace }}/build/cpu

      - name: Run tests
        run: |
          set -e -x
          export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$GITHUB_WORKSPACE/build/cpu/
          ./build/cpu/test/unit_tests