
Commit

Merge branch 'main' into penghuic/pytest_by_script
PenghuiCheng authored Dec 20, 2024
2 parents ebc83be + 9ed0a1a commit 05da31c
Showing 22 changed files with 1,426 additions and 74 deletions.
4 changes: 2 additions & 2 deletions .github/actions/inductor-xpu-e2e-test/action.yml
@@ -51,7 +51,7 @@ runs:
shell: bash
run: |
source activate e2e_ci
-source .github/scripts/env.sh
+source .github/scripts/env.sh ${{ inputs.pytorch }}
if [[ ${{ inputs.suite }} == *"torchbench"* ]]; then
if [ "${{ inputs.pytorch }}" != "nightly_wheel" ]; then
cd ../ && rm -rf audio && git clone --single-branch -b main https://github.com/pytorch/audio.git
@@ -94,7 +94,7 @@ runs:
shell: bash
run: |
source activate e2e_ci
-source .github/scripts/env.sh
+source .github/scripts/env.sh ${{ inputs.pytorch }}
cp .github/scripts/inductor_xpu_test.sh ../pytorch
cd ../pytorch
2 changes: 1 addition & 1 deletion .github/scripts/apply_torch_pr.py
@@ -58,7 +58,7 @@ def appyly_pr(pr_info, re_apply_msg):
pr_file = pr_info["diff_url"].split("/")[-1]
urllib.request.urlretrieve(pr_info["diff_url"], pr_file)
# apply diff
apply_cmd = "git apply --3way " + pr_file + " && rm -f " + pr_file
apply_cmd = "git apply --3way " + pr_file
apply_info = subprocess.Popen(apply_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
apply_message = apply_info.communicate()[0].decode("utf-8")
apply_status = apply_info.returncode
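
As a side note on this change: dropping the "&& rm -f" suffix means the downloaded patch file is kept on disk after "git apply --3way" (for example, so a later step can inspect or re-apply it). Below is a minimal, hedged sketch of the same apply step; the file name is hypothetical, and the actual script builds a single shell string and runs it via subprocess.Popen(..., shell=True).

# Illustrative sketch only, not the script's exact invocation.
import subprocess

pr_file = "12345.diff"  # hypothetical; really taken from pr_info["diff_url"].split("/")[-1]
result = subprocess.run(
    ["git", "apply", "--3way", pr_file],
    capture_output=True,
    text=True,
)
print(result.returncode)
print(result.stdout)
# Note: after this change the patch file is intentionally left in place.
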
11 changes: 8 additions & 3 deletions .github/scripts/env.sh
@@ -1,4 +1,9 @@
#!/bin/bash
-source /opt/intel/oneapi/compiler/latest/env/vars.sh
-source /opt/intel/oneapi/umf/latest/env/vars.sh
-source /opt/intel/oneapi/pti/latest/env/vars.sh
+
+if [ "$1" != "nightly_wheel" ];then
+    source /opt/intel/oneapi/compiler/latest/env/vars.sh
+    source /opt/intel/oneapi/umf/latest/env/vars.sh
+    source /opt/intel/oneapi/pti/latest/env/vars.sh
+else
+    echo "Don't need to source DL-Essential for nightly wheel"
+fi
80 changes: 80 additions & 0 deletions .github/scripts/inductor_summary.py
@@ -3,6 +3,8 @@
import pandas as pd
from scipy.stats import gmean
from styleframe import StyleFrame, Styler, utils
import numpy as np
from openpyxl import Workbook

parser = argparse.ArgumentParser(description="Generate report")
parser.add_argument('-s', '--suite', default=["huggingface"], nargs='*', type=str, help='model suite name')
@@ -665,6 +667,73 @@ def update_summary(excel, scenario, suite):
sf.set_row_height(j, 30)
sf.to_excel(sheet_name=suite + '_' + scenario + '_Summary', excel_writer=excel)

def summary_conclusion(scenario, excel):
    excel.book.save(excel)
    df = pd.read_excel(excel, sheet_name = None, header = None)
    #df = pd.DataFrame(excel)
    if scenario == 'performance':
        sheet_names = list(df.keys())
        sheet_names = [s for s in sheet_names if 'Summary' in s and 'performance' in s]
        sheet_names.sort()
        print(f"Merge excel as below:\n{sheet_names}")
        print("\n")
        features = [[]] * 21
        for sheet_name in sheet_names:
            df_sheet = df[sheet_name]
            df_sheet = df_sheet.values
            features = np.hstack((features, df_sheet))

        if len(sheet_names) == 1:
            print("sheet not merge")
        elif len(sheet_names) == 2:
            print("2 sheets merge")
            if 'huggingface' in sheet_names[0]:
                features[:, 4:5] = features[:, 14:15]
                features[:, 6:7] = features[:, 16:17]
            else:
                features[:, 4:5] = features[:, 14:15]
        else:
            print("3 sheets merge")
            features[:, 4:5] = features[:, 24:25]
            features[:, 6:7] = features[:, 16:17]

        df_concat = StyleFrame(pd.DataFrame(features).iloc[:,:10])
        for i in range(10):
            df_concat.set_column_width(i, 22)
        for j in range(1, 23):
            df_concat.set_row_height(j, 30)
        df_concat.to_excel(sheet_name='Perf_Summary', excel_writer=excel, index=False)
    else:
        sheet_names = list(df.keys())
        sheet_names = [s for s in sheet_names if 'Summary' in s and 'accuracy' in s]
        sheet_names.sort()
        print(f"Merge excel as below:\n{sheet_names}")
        print("\n")
        features = [[]] * 11
        for sheet_name in sheet_names:
            df_sheet = df[sheet_name]
            df_sheet = df_sheet.values
            features = np.hstack((features, df_sheet))
        if len(sheet_names) == 1:
            print("sheet not merge")
        elif len(sheet_names) == 2:
            print("2 sheets merge")
            if 'huggingface' in sheet_names[0]:
                features[:, 3:4] = features[:, 12:13]
                features[:, 5:6] = features[:, 14:15]
            else:
                features[:, 3:4] = features[:, 12:13]
        else:
            print("3 sheets merge")
            features[:, 3:4] = features[:, 21:22]
            features[:, 5:6] = features[:, 14:15]

        df_concat = StyleFrame(pd.DataFrame(features).iloc[:,:9])
        for i in range(10):
            df_concat.set_column_width(i, 22)
        for j in range(1, 13):
            df_concat.set_row_height(j, 30)
        df_concat.to_excel(sheet_name='Acc_Summary', excel_writer=excel, index=False)

def generate_report(excel, scenario_list, precision_list, mode_list, suite_list):
for sc in scenario_list:
@@ -693,8 +762,19 @@ def excel_postprocess(file, scenario, precison, mode, suite):
wdt.merge_cells(start_row=1, end_row=1, start_column=13, end_column=16)
wb.save(file)

if len(scenario) == 2:
wb.move_sheet("Perf_Summary", -(len(wb.worksheets)-1))
wb.move_sheet("Acc_Summary", -(len(wb.worksheets)-1))
elif len(scenario) == 1 and sc == 'accuracy':
wb.move_sheet("Acc_Summary", -(len(wb.worksheets)-1))
else:
wb.move_sheet("Perf_Summary", -(len(wb.worksheets)-1))


if __name__ == '__main__':
    excel = StyleFrame.ExcelWriter('inductor_log/Inductor_E2E_Test_Report.xlsx')
    generate_report(excel, args.scenario, args.precision, args.mode, args.suite)
    for sc in args.scenario:
        summary_conclusion(sc, excel)
    excel_postprocess(excel, args.scenario, args.precision, args.mode, args.suite)
    excel.close()
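
A note for readers tracing the new summary_conclusion logic above: each per-suite summary sheet is read back, the sheets are stacked side by side with np.hstack, and selected columns from the right-hand block are copied into the left-hand block before the first columns are written out as the merged summary sheet. A small self-contained sketch of that column-merge idea, with made-up shapes and labels rather than the real report data:

import numpy as np

# Two hypothetical per-suite summary blocks with the same number of rows.
left = np.full((3, 10), "suite_a", dtype=object)
right = np.full((3, 10), "suite_b", dtype=object)

merged = np.hstack((left, right))   # shape (3, 20): columns 0-9 from left, 10-19 from right
merged[:, 4:5] = merged[:, 14:15]   # overwrite left's column 4 with right's column 4
merged[:, 6:7] = merged[:, 16:17]   # likewise for column 6

summary = merged[:, :10]            # keep only the merged left-hand layout
print(summary.shape)                # (3, 10)
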
7 changes: 7 additions & 0 deletions .github/scripts/spec.py
@@ -0,0 +1,7 @@
import torch

DEVICE_NAME = 'xpu'

MANUAL_SEED_FN = torch.xpu.manual_seed
EMPTY_CACHE_FN = torch.xpu.empty_cache
DEVICE_COUNT_FN = torch.xpu.device_count
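
This spec file is what the new Transformers workflow below points at through TRANSFORMERS_TEST_DEVICE_SPEC. Roughly, the test harness loads the module from that path and uses its entries for device-specific setup; the sketch below shows one way such a file can be consumed and is an illustration, not the upstream Transformers loader.

import importlib.util
import os

# Load the device spec module from the path given in the environment
# (defaulting here to "spec.py", matching the workflow's setting).
spec_path = os.environ.get("TRANSFORMERS_TEST_DEVICE_SPEC", "spec.py")
module_spec = importlib.util.spec_from_file_location("device_spec", spec_path)
device_spec = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(device_spec)

print(device_spec.DEVICE_NAME)       # 'xpu'
device_spec.MANUAL_SEED_FN(42)       # calls torch.xpu.manual_seed(42)
device_spec.EMPTY_CACHE_FN()         # calls torch.xpu.empty_cache()
print(device_spec.DEVICE_COUNT_FN())
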
135 changes: 135 additions & 0 deletions .github/workflows/_linux_transformers.yml
@@ -0,0 +1,135 @@
name: Linux Transformers Test

on:
pull_request:
branches:
- main
paths:
- '.github/scripts/spec.py'
- '.github/workflows/_linux_transformers.yml'
workflow_dispatch:
inputs:
pytorch:
required: false
type: string
default: 'nightly'
description: Pytorch branch/commit
python:
required: false
type: string
default: '3.10'
description: Python version
runner:
required: true
type: string
default: 'linux.idc.xpu'
description: Runner label
driver:
required: false
type: string
default: 'lts'
description: Driver lts/rolling
nightly_whl:
required: false
type: string
default: ''
description: Pytorch nightly wheel version
transformers:
required: false
type: string
default: 'v4.47.0'
description: Transformers version

permissions: read-all

jobs:
Torch-XPU-Transformers-Tests:
runs-on: ${{ inputs.runner != '' && inputs.runner || 'linux.idc.xpu' }}
env:
NEOReadDebugKeys: ${{ inputs.driver == 'rolling' && '1' || '0' }}
DisableScratchPages: ${{ inputs.driver == 'rolling' && '1' || '0' }}
python: ${{ inputs.python != '' && inputs.python || '3.10' }}
pytorch: ${{ inputs.pytorch != '' && inputs.pytorch || 'nightly' }}
TRANSFORMERS_TEST_DEVICE_SPEC: 'spec.py'
steps:
- name: Checkout torch-xpu-ops
uses: actions/checkout@v4
with:
path: torch-xpu-ops
- name: Checkout Transformers
uses: actions/checkout@v4
with:
repository: huggingface/transformers
ref: ${{ inputs.transformers != '' && inputs.transformers || 'v4.47.0' }}
path: transformers
- name: Prepare OS environment
run: |
sudo apt-get update
sudo apt-get install -y \
espeak-ng \
git-lfs \
pkg-config \
libavcodec-dev \
libavdevice-dev \
libavfilter-dev \
libavformat-dev \
libavutil-dev \
libswresample-dev \
libswscale-dev
git lfs install
- name: Prepare Conda ENV
run: |
which conda && conda clean -ay
conda remove --all -y -n huggingface_transformers_test || rm -rf $(dirname ${CONDA_EXE})/../envs/huggingface_transformers_test
conda create -y -n huggingface_transformers_test python=${{ env.python }}
source activate huggingface_transformers_test
- name: Prepare Stock XPU Pytorch
run: |
pwd
source activate huggingface_transformers_test
if [ -z "${{ inputs.nightly_whl }}" ]; then
pip install torch torchvision torchaudio --pre --index-url https://download.pytorch.org/whl/nightly/xpu
else
pip install torch==$(echo ${{ inputs.nightly_whl }}) torchvision torchaudio --pre --index-url https://download.pytorch.org/whl/nightly/xpu
fi
- name: Prepare Transformers
run: |
pwd
source activate huggingface_transformers_test
cd transformers
pip install -e .
pip install -e ".[dev-torch,testing,video]"
rm -rf tests_log && mkdir -p tests_log
rm -rf reports
cp ${{ github.workspace }}/torch-xpu-ops/.github/scripts/spec.py ./
- name: Report installed versions
id: installed
run: |
source activate huggingface_transformers_test
echo "TORCH_BRANCH_ID=$(python -c 'import torch; print(torch.__version__)')" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
echo "TORCH_COMMIT_ID=$(python -c 'import torch; print(torch.version.git_version)')" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
echo "pip installed packages:"
pip list | tee ${{ github.workspace }}/transformers/tests_log/pip_list.txt
echo "GPU render nodes:"
cat /sys/class/drm/render*/device/device | tee ${{ github.workspace }}/transformers/tests_log/device_IDs.txt
- name: Sanity check installed packages
run: |
source activate huggingface_transformers_test
# These checks are to exit earlier if for any reason Transformers
# reinstalled torch packages back to CUDA versions (not expected).
pip show torch | grep Version | grep xpu
pip show torchaudio | grep Version | grep xpu
pip show torchvision | grep Version | grep xpu
- name: Run XPU backbone
run: |
source activate huggingface_transformers_test
cd transformers
python3 -m pytest -rsf --make-reports=tests_benchmark -k backbone tests
- name: Upload Test log
if: ${{ ! cancelled() }}
uses: actions/upload-artifact@v4
with:
name: Torch-XPU-Transformers-Log-${{ github.event.pull_request.number || github.sha }}
path: |
${{ github.workspace }}/transformers/reports
${{ github.workspace }}/transformers/tests_log
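
For reference, the "Sanity check installed packages" step above guards against the Transformers extras silently reinstalling CUDA builds of torch, torchaudio, and torchvision. A hedged Python equivalent of that check, assuming the XPU nightly wheels carry an "xpu" tag in their version string:

import importlib.metadata

for pkg in ("torch", "torchaudio", "torchvision"):
    version = importlib.metadata.version(pkg)  # e.g. "2.6.0.dev20241215+xpu" (illustrative)
    if "xpu" not in version:
        raise SystemExit(f"{pkg} {version} is not an XPU build; was it reinstalled?")
    print(pkg, version)
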