Fix nightly build after #6538 (#7118)

Closed. Wants to merge 2 commits.

.ci/scripts/test_llama.sh (11 changes: 4 additions & 7 deletions)

@@ -51,9 +51,6 @@ UPLOAD_DIR="${UPLOAD_DIR:-}"
 # Default PT2E_QUANTIZE to empty string if not set
 PT2E_QUANTIZE="${PT2E_QUANTIZE:-}"
 
-# Default CMake Build Type to release mode
-CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
-
 if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
 echo "Expecting atleast 4 positional arguments"
 echo "Usage: [...]"
@@ -146,7 +143,7 @@ cmake_install_executorch_libraries() {
 rm -rf cmake-out
 retry cmake \
 -DCMAKE_INSTALL_PREFIX=cmake-out \
--DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
+-DCMAKE_BUILD_TYPE=Debug \
 -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
 -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
 -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
@@ -160,22 +157,22 @@ cmake_install_executorch_libraries() {
 -DQNN_SDK_ROOT="$QNN_SDK_ROOT" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -Bcmake-out .
-cmake --build cmake-out -j9 --target install --config "$CMAKE_BUILD_TYPE"
+cmake --build cmake-out -j9 --target install --config Debug
 }
 
 cmake_build_llama_runner() {
 echo "Building llama runner"
 dir="examples/models/llama"
 retry cmake \
 -DCMAKE_INSTALL_PREFIX=cmake-out \
--DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
+-DCMAKE_BUILD_TYPE=Debug \
 -DEXECUTORCH_BUILD_KERNELS_CUSTOM="$CUSTOM" \
 -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
 -DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -Bcmake-out/${dir} \
 ${dir}
-cmake --build cmake-out/${dir} -j9 --config "$CMAKE_BUILD_TYPE"
+cmake --build cmake-out/${dir} -j9 --config Debug
 
 }
 

.ci/scripts/test_llava.sh (16 changes: 8 additions & 8 deletions)

@@ -8,11 +8,11 @@
 set -exu
 # shellcheck source=/dev/null
 
+BUILD_TYPE=${1:-Debug}
 TARGET_OS=${2:-Native}
 BUILD_DIR=${3:-cmake-out}
-CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release}
 
-echo "Building with CMAKE_BUILD_TYPE: $CMAKE_BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"
+echo "Building with BUILD_TYPE: $BUILD_TYPE, TARGET_OS: $TARGET_OS, BUILD_DIR: $BUILD_DIR"
 
 if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
 PYTHON_EXECUTABLE=python3
@@ -32,7 +32,7 @@ if hash nproc &> /dev/null; then NPROC=$(nproc); fi
 
 EXECUTORCH_COMMON_CMAKE_ARGS=" \
 -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
--DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
+-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
 -DEXECUTORCH_ENABLE_LOGGING=ON \
 -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
 -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
@@ -49,7 +49,7 @@ cmake_install_executorch_libraries() {
 ${EXECUTORCH_COMMON_CMAKE_ARGS} \
 -B${BUILD_DIR} .
 
-cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
+cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
 }
 
 cmake_install_executorch_libraries_for_android() {
@@ -59,14 +59,14 @@ cmake_install_executorch_libraries_for_android() {
 ${EXECUTORCH_COMMON_CMAKE_ARGS} \
 -B${BUILD_DIR} .
 
-cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${CMAKE_BUILD_TYPE}
+cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
 }
 
 
 LLAVA_COMMON_CMAKE_ARGS=" \
 -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
 -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
--DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
+-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
 -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
 -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
 -DEXECUTORCH_BUILD_XNNPACK=ON"
@@ -81,7 +81,7 @@ cmake_build_llava_runner() {
 -B${BUILD_DIR}/${dir} \
 ${dir}
 
-cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
+cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
 }
 
 
@@ -98,7 +98,7 @@ cmake_build_llava_runner_for_android() {
 -B${BUILD_DIR}/${dir} \
 ${dir}
 
-cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${CMAKE_BUILD_TYPE}
+cmake --build ${BUILD_DIR}/${dir} -j${NPROC} --config ${BUILD_TYPE}
 }
 
 # only export the one without custom op for now since it's

.github/workflows/trunk.yml (2 changes: 1 addition & 1 deletion)

@@ -290,7 +290,7 @@ jobs:
 # ${CONDA_RUN} python -m unittest examples.models.llava.test.test_llava
 
 # # run e2e (export, tokenizer and runner)
-# PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llava.sh
+# PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_llava.sh Release
 
 test-qnn-model:
 name: test-qnn-model

backends/xnnpack/third-party/XNNPACK (2 changes: 1 addition & 1 deletion)

Submodule XNNPACK updated 2740 files

backends/xnnpack/third-party/xnnpack.buck.bzl (6 changes: 2 additions & 4 deletions)

@@ -42,7 +42,7 @@ def define_xnnpack():
 "XNNPACK/src/mutex.c",
 "XNNPACK/src/normalization.c",
 "XNNPACK/src/operator-utils.c",
-"XNNPACK/src/reference/packing.cc",
+"XNNPACK/src/packing.cc",
 ],
 headers = get_xnnpack_headers(),
 header_namespace = "",
@@ -67,7 +67,7 @@ def define_xnnpack():
 # @lint-ignore BUCKLINT: native and fb_native are explicitly forbidden in fbcode.
 native.cxx_library(
 name = "subgraph",
-srcs = SUBGRAPH_SRCS + ["XNNPACK/src/datatype.c"],
+srcs = SUBGRAPH_SRCS,
 compiler_flags = [
 "-Wno-error=missing-braces", # required since the SGX toolchain does not have this by default
 ],
@@ -1076,8 +1076,6 @@ def define_xnnpack():
 "XNNPACK/src/configs/hardware-config.c",
 "XNNPACK/src/microparams-init.c",
 "XNNPACK/src/microkernel-utils.c",
-"XNNPACK/src/reference/binary-elementwise.cc",
-"XNNPACK/src/reference/unary-elementwise.cc",
 ],
 headers = get_xnnpack_headers(),
 exported_headers = {
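
An aside, not part of this PR: hunks like the ones above only build if the source lists in xnnpack.buck.bzl stay in sync with the revision the XNNPACK submodule is pinned to. A small hypothetical Python check along these lines can flag stale entries; the file names are copied from this diff, while the script itself and its assumption of running from the executorch repo root are illustrative only.

#!/usr/bin/env python3
# Hypothetical helper (not from the PR): report Buck source entries that are
# missing from the currently pinned XNNPACK submodule checkout.
from pathlib import Path

# xnnpack.buck.bzl resolves "XNNPACK/..." paths relative to this directory.
THIRD_PARTY = Path("backends/xnnpack/third-party")

# A few entries touched by this diff; a fuller check would parse the .bzl file.
SRCS = [
    "XNNPACK/src/packing.cc",
    "XNNPACK/src/configs/hardware-config.c",
    "XNNPACK/src/microparams-init.c",
    "XNNPACK/src/microkernel-utils.c",
]

missing = [s for s in SRCS if not (THIRD_PARTY / s).is_file()]
for s in missing:
    print(f"missing from submodule checkout: {s}")
print(f"{len(SRCS) - len(missing)}/{len(SRCS)} listed sources found")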

backends/xnnpack/third-party/xnnpack_src_defs.bzl (12 changes: 11 additions & 1 deletion)

@@ -17,14 +17,24 @@ def prod_srcs_for_arch_wrapper(arch):
 return define_xnnpack_build_src(prod_srcs)
 
 def get_xnnpack_headers():
+# XNNPACK Headers in the path containing xnnpack/ or configs/
+# do not contain the src/ path. However headers not in xnnpack/ or
+# configs/ are prepend with the src/ path. This function helps us
+# to correctly parse all the header files to the correct name
 src_headers = subdir_glob([
 ("XNNPACK/src", "**/*.h"),
 ])
+fixed_headers = {}
+for k, v in src_headers.items():
+new_key = k
+if not k.startswith("xnnpack") and not k.startswith("configs"):
+new_key = "src/{}".format(k)
+fixed_headers[new_key] = v
 include_headers = subdir_glob([
 ("XNNPACK/include", "*.h"),
 ])
 
-return src_headers | include_headers
+return fixed_headers | include_headers
 
 OPERATOR_SRCS = define_xnnpack_build_src(_OPERATOR_SRCS)
 SUBGRAPH_SRCS = define_xnnpack_build_src(_SUBGRAPH_SRCS)
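
For readers who do not use Buck, here is a self-contained plain-Python sketch of the key fix-up that the new get_xnnpack_headers() performs (Starlark shares this syntax). The three example header names are made up for illustration rather than taken from the real subdir_glob() result: keys that already start with xnnpack/ or configs/ keep their name, and every other header is re-exported under src/, which is what the comment in the hunk above describes.

# Plain-Python sketch of the header-key fix-up above. The example keys are
# assumed; in Buck, subdir_glob() maps header names to paths under XNNPACK/src.
src_headers = {
    "xnnpack/common.h": "XNNPACK/src/xnnpack/common.h",                  # already namespaced
    "configs/hardware-config.h": "XNNPACK/src/configs/hardware-config.h",
    "operator-utils.h": "XNNPACK/src/operator-utils.h",                  # needs the src/ prefix
}

fixed_headers = {}
for k, v in src_headers.items():
    new_key = k
    # Keys already under xnnpack/ or configs/ keep their name; everything
    # else is re-exported as src/<name>.
    if not k.startswith("xnnpack") and not k.startswith("configs"):
        new_key = "src/{}".format(k)
    fixed_headers[new_key] = v

print(fixed_headers)
# {'xnnpack/common.h': 'XNNPACK/src/xnnpack/common.h',
#  'configs/hardware-config.h': 'XNNPACK/src/configs/hardware-config.h',
#  'src/operator-utils.h': 'XNNPACK/src/operator-utils.h'}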