Signed-off-by: Sun, Xuehao <[email protected]>
Showing 17 changed files with 740 additions and 4 deletions.
@@ -0,0 +1,121 @@
trigger: none

pr:
  autoCancel: true
  drafts: false
  branches:
    include:
      - main
  paths:
    include:
      - onnx_neural_compressor
      - setup.py
      - requirements.txt
      - .azure-pipelines/scripts/models
      - .azure-pipelines/model-test.yml
      - .azure-pipelines/template/model-template.yml
    exclude:
      - test

variables:
  OUT_SCRIPT_PATH: $(Build.SourcesDirectory)/.azure-pipelines/scripts/models
  SCRIPT_PATH: /neural_compressor/.azure-pipelines/scripts

parameters:
  - name: algorithms
    type: object
    default:
      - SQ
      - WOQ
  - name: models
    type: object
    default:
      - bert_base_MRPC
      - bert_base_MRPC_dynamic
      - resnet50-v1-12_qdq
      - resnet50-v1-12

stages:
  # - stage: ONNX_LLM_Models
  #   displayName: Run ONNX LLM Model
  #   pool: ICX-16C
  #   dependsOn: []
  #   jobs:
  #     - ${{ each algorithm in parameters.algorithms }}:
  #         - job:
  #           steps:
  #             - template: template/model-template.yml
  #               parameters:
  #                 modelName: "facebook/opt-125m"
  #                 algorithm: "${{ algorithm }}"
  #                 script_path: "run_onnxrt_llm_models_trigger.sh"

  - stage: ONNX_Models
    displayName: Run ONNX Model
    pool: MODEL_PERF_TEST
    dependsOn: []
    jobs:
      - ${{ each model in parameters.models }}:
          - job:
            displayName: ${{ model }}
            steps:
              - template: template/model-template.yml
                parameters:
                  modelName: "${{ model }}"
                  algorithm: "Quantize"
                  script_path: "run_onnxrt_models_trigger.sh"

  - stage: GenerateLogs
    displayName: Generate Report
    pool:
      vmImage: "ubuntu-latest"
    dependsOn: [ONNX_Models]
    jobs:
      - job: GenerateReport
        steps:
          - script: |
              echo ${BUILD_SOURCESDIRECTORY}
              rm -fr ${BUILD_SOURCESDIRECTORY} || sudo rm -fr ${BUILD_SOURCESDIRECTORY} || true
              echo y | docker system prune
            displayName: "Clean workspace"
          - checkout: self
            clean: true
            displayName: "Checkout Repo"
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact:
              patterns: "**/result.json"
              path: $(OUT_SCRIPT_PATH)
          - task: UsePythonVersion@0
            displayName: "Use Python 3.10"
            inputs:
              versionSpec: "3.10"
          - script: |
              cd ${OUT_SCRIPT_PATH}
              mkdir generated last_generated
              python -u summarize_results.py --logs_dir $(OUT_SCRIPT_PATH) --output_dir generated
            displayName: "Summarize all results"
          - task: DownloadPipelineArtifact@2
            continueOnError: true
            inputs:
              source: "specific"
              artifact: "FinalReport"
              patterns: "**.json"
              path: $(OUT_SCRIPT_PATH)/last_generated
              project: $(System.TeamProject)
              pipeline: "onc model test"
              runVersion: "specific"
              runId: $(refer_buildId)
            displayName: "Download last logs"
          - script: |
              echo "------ Generating final report.html ------"
              cd ${OUT_SCRIPT_PATH}
              pip install jinja2
              python generate_report.py --json_path generated/summary.json --last_json_path last_generated/summary.json
            displayName: "Generate report"
          - task: PublishPipelineArtifact@1
            inputs:
              targetPath: $(OUT_SCRIPT_PATH)/generated
              artifact: FinalReport
              publishLocation: "pipeline"
            displayName: "Publish report"
@@ -0,0 +1,84 @@
import argparse
import json
import os
import re

parser = argparse.ArgumentParser()
parser.add_argument("--model", required=True, type=str)
parser.add_argument("--build_id", required=True, type=str)
args = parser.parse_args()

URL = (
    "https://dev.azure.com/lpot-inc/onnx-neural-compressor/_build/results?buildId="
    + args.build_id
    + "&view=artifacts&pathAsName=false&type=publishedArtifacts"
)
REFER_SUMMARY_PATH = "/neural-compressor/.azure-pipelines/scripts/models/summary.json"


def str_to_float(value):
    try:
        return round(float(value), 4)
    except ValueError:
        return value


def get_refer_data():
    if not os.path.exists(REFER_SUMMARY_PATH):
        print(f"The file '{REFER_SUMMARY_PATH}' does not exist.")
        return {}

    with open(REFER_SUMMARY_PATH, "r") as file:
        refer = json.load(file)
    return refer


def check_status(performance, accuracy):
    refer = get_refer_data()

    refer_accuracy = refer.get(args.model, {}).get("accuracy", {}).get("value", "N/A")
    refer_performance = refer.get(args.model, {}).get("performance", {}).get("value", "N/A")
    print(f"{accuracy=}\n{refer_accuracy=}\n{performance=}\n{refer_performance=}")

    assert accuracy != "N/A" and performance != "N/A"
    if refer_accuracy != "N/A":
        assert abs(accuracy - refer_accuracy) <= 0.001
    if refer_performance != "N/A":
        assert (refer_performance - performance) / refer_performance <= 0.08


def main():
    result_dict = {
        args.model: {
            "performance": {"value": "N/A", "log_path": URL},
            "accuracy": {"value": "N/A", "log_path": URL},
        }
    }

    pattern = {
        "performance": r"Throughput: ([\d.]+)",
        "accuracy": r"Accuracy: ([\d.]+)",
    }

    for mode, _ in result_dict[args.model].items():
        log_file = f"/neural-compressor/.azure-pipelines/scripts/models/{args.model}/{mode}.log"
        if not os.path.exists(log_file):
            print(f"The file '{log_file}' does not exist.")
            continue

        with open(log_file, "r") as file:
            log_content = file.read()

        match = re.search(pattern[mode], log_content)

        if match:
            result_dict[args.model][mode]["value"] = str_to_float(match.group(1))

    with open(f"/neural-compressor/.azure-pipelines/scripts/models/{args.model}/result.json", "w") as json_file:
        json.dump(result_dict, json_file, indent=4)

    check_status(result_dict[args.model]["performance"]["value"], result_dict[args.model]["accuracy"]["value"])


if __name__ == "__main__":
    main()
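The check_status gate above encodes two tolerances: the measured accuracy must stay within 0.001 (absolute) of the reference value, and throughput may not fall more than 8% below the reference. A small illustration with made-up numbers:

# Illustrative check mirroring check_status above; the numbers are examples only.
refer_accuracy, accuracy = 0.7610, 0.7612
refer_performance, performance = 100.0, 95.0

assert abs(accuracy - refer_accuracy) <= 0.001                          # 0.0002 <= 0.001 -> passes
assert (refer_performance - performance) / refer_performance <= 0.08    # 0.05 <= 0.08 -> passes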
@@ -0,0 +1,39 @@
#!/bin/bash
set -eo pipefail
PATTERN='[-a-zA-Z0-9_]*='

for i in "$@"; do
    case $i in
        --model=*)
            model=${i//${PATTERN}/}
            ;;
        *)
            echo "Parameter $i not recognized."
            exit 1
            ;;
    esac
done

CONFIG_PATH="/neural-compressor/examples/.config/model_params_onnxrt.json"
model_src_dir=$(jq -r ".\"onnxrt\".\"$model\".\"model_src_dir\"" "$CONFIG_PATH")

log_dir="/neural-compressor/.azure-pipelines/scripts/models"

$BOLD_YELLOW && echo "======= create log_dir =========" && $RESET
if [ -d "${log_dir}/${model}" ]; then
    $BOLD_GREEN && echo "${log_dir}/${model} already exists, no need to mkdir." && $RESET
else
    $BOLD_GREEN && echo "no log dir ${log_dir}/${model}, creating it." && $RESET
    cd "${log_dir}"
    mkdir "${model}"
fi

$BOLD_YELLOW && echo "====== install ONC ======" && $RESET
cd /neural-compressor
source .azure-pipelines/scripts/change_color.sh
/bin/bash .azure-pipelines/scripts/install_nc.sh

$BOLD_YELLOW && echo "====== install requirements ======" && $RESET
cd "/neural-compressor/examples/$model_src_dir"
pip install -r requirements.txt
pip list
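The setup script resolves model_src_dir for the requested model from examples/.config/model_params_onnxrt.json via jq. An equivalent lookup in Python is sketched below; the nesting ("onnxrt" -> model name -> "model_src_dir") is inferred from the jq path above, the config may carry additional keys not shown here, and the model name is only an example:

import json

CONFIG_PATH = "/neural-compressor/examples/.config/model_params_onnxrt.json"

# Load the example config and pull the source directory for one model.
with open(CONFIG_PATH) as f:
    config = json.load(f)

model_src_dir = config["onnxrt"]["resnet50-v1-12"]["model_src_dir"]
print(model_src_dir)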
@@ -0,0 +1,103 @@
import argparse
import json
import os

from jinja2 import Environment, FileSystemLoader

parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument("--json_path", type=str, required=True)
parser.add_argument("--last_json_path", type=str, required=True)
args = parser.parse_args()


def get_data(json_path):
    """
    {
        model: {
            "performance": {"value": "N/A"|number, "log_path": string},
            "accuracy": {"value": "N/A"|number, "log_path": string},
        }
    }
    """
    if os.path.exists(json_path):
        with open(json_path, "r") as f:
            return json.load(f)
    else:
        return {}


def get_ratio(cur, last):
    if cur == "N/A" or last == "N/A":
        ratio = "N/A"
    else:
        ratio = (float(cur) - float(last)) / float(last) * 100
        ratio = round(float(ratio), 2)
    return ratio


def get_accuracy_ratio(current_json, last_accuracy_dict):
    compare_result_dict = []
    for model, item in current_json.items():
        current_accuracy = item.get("accuracy", {}).get("value", "N/A")
        last_accuracy = last_accuracy_dict.get(model, {}).get("accuracy", {}).get("value", "N/A")
        accuracy_ratio = get_ratio(current_accuracy, last_accuracy)

        current_performance = item.get("performance", {}).get("value", "N/A")
        last_performance = last_accuracy_dict.get(model, {}).get("performance", {}).get("value", "N/A")
        performance_ratio = get_ratio(current_performance, last_performance)

        if accuracy_ratio == "N/A" or performance_ratio == "N/A":
            status = "FAILURE"
        elif accuracy_ratio != 0:
            status = "FAILURE"
        elif performance_ratio > 8 or performance_ratio < -8:
            status = "FAILURE"
        else:
            status = "SUCCESS"

        format_ratio = lambda x: f"{x}%" if x != "N/A" else x

        compare_result_dict.append(
            {
                "model": model,
                "current_accuracy": current_accuracy,
                "last_accuracy": last_accuracy,
                "accuracy_ratio": format_ratio(accuracy_ratio),
                "current_performance": current_performance,
                "last_performance": last_performance,
                "performance_ratio": format_ratio(performance_ratio),
                "status": status,
            }
        )
    return compare_result_dict


def generate(rendered_template):
    with open("generated/report.html", "w") as html_file:
        html_file.write(rendered_template)


def main():
    path = "{}/templates/".format(os.path.dirname(__file__))
    BUILD_BUILDID = os.getenv("BUILD_BUILDID")

    loader = FileSystemLoader(path)
    env = Environment(loader=loader)
    template = env.get_template("model.jinja2")

    data = get_data(args.json_path)
    last_data = get_data(args.last_json_path)
    data = get_accuracy_ratio(data, last_data)
    info = {
        "url": f"https://dev.azure.com/lpot-inc/onnx-neural-compressor/_build/results?buildId={BUILD_BUILDID}",
        "branch": os.getenv("SYSTEM_PULLREQUEST_SOURCEBRANCH"),
        "commit": os.getenv("BUILD_SOURCEVERSION"),
        "build_number": BUILD_BUILDID,
    }

    rendered_template = template.render(data=data, info=info)
    generate(rendered_template)


if __name__ == "__main__":
    main()
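This report generator hands exactly two objects to templates/model.jinja2 (the template itself is not reproduced in this excerpt): the row list built by get_accuracy_ratio and an info dict of build metadata. A sketch of what one render call might receive, with illustrative values only:

# Example inputs for template.render(data=data, info=info); all values are made up.
data = [
    {
        "model": "resnet50-v1-12",
        "current_accuracy": 0.7612,
        "last_accuracy": 0.7612,
        "accuracy_ratio": "0.0%",
        "current_performance": 123.45,
        "last_performance": 120.0,
        "performance_ratio": "2.88%",
        "status": "SUCCESS",
    }
]
info = {
    "url": "https://dev.azure.com/lpot-inc/onnx-neural-compressor/_build/results?buildId=12345",
    "branch": "refs/heads/example-branch",
    "commit": "abcdef0",
    "build_number": "12345",
}
# rendered = template.render(data=data, info=info)  # written to generated/report.html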