From c4f475e31deb9037153dd2d169cd2157b36e895f Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 9 Oct 2024 01:54:22 -0700
Subject: [PATCH] Force xnnpack when CPU inference is enforced

Also renames the flag to MEDIAPIPE_ to abide by the new naming scheme.

PiperOrigin-RevId: 683942820
---
 mediapipe/calculators/tensor/BUILD                       | 6 +++++-
 mediapipe/calculators/tensor/inference_calculator.cc     | 5 ++---
 mediapipe/calculators/tensor/inference_calculator_cpu.cc | 2 +-
 3 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD
index 4fa4faf399..a6717c4319 100644
--- a/mediapipe/calculators/tensor/BUILD
+++ b/mediapipe/calculators/tensor/BUILD
@@ -430,7 +430,7 @@ cc_library_with_tflite(
     hdrs = ["inference_calculator.h"],
     local_defines = select({
         ":force_cpu_inference": ["MEDIAPIPE_FORCE_CPU_INFERENCE=1"],
-        "//conditions:default": [],
+        "//conditions:default": ["MEDIAPIPE_FORCE_CPU_INFERENCE=0"],
     }),
     tflite_deps = [
         ":inference_runner",
@@ -782,6 +782,10 @@ cc_library(
     srcs = [
         "inference_calculator_cpu.cc",
     ],
+    local_defines = select({
+        ":force_cpu_inference": ["MEDIAPIPE_FORCE_CPU_INFERENCE=1"],
+        "//conditions:default": ["MEDIAPIPE_FORCE_CPU_INFERENCE=0"],
+    }),
     deps = [
         ":inference_calculator_interface",
         ":inference_calculator_utils",
diff --git a/mediapipe/calculators/tensor/inference_calculator.cc b/mediapipe/calculators/tensor/inference_calculator.cc
index 0c4548b1af..bf60ea8e65 100644
--- a/mediapipe/calculators/tensor/inference_calculator.cc
+++ b/mediapipe/calculators/tensor/inference_calculator.cc
@@ -51,7 +51,7 @@ class InferenceCalculatorSelectorImpl
             subgraph_node);
     std::vector<absl::string_view> impls;
 
-#if !defined(MEDIAPIPE_FORCE_CPU_INFERENCE) || !MEDIAPIPE_FORCE_CPU_INFERENCE
+#if !MEDIAPIPE_FORCE_CPU_INFERENCE
     const bool should_use_gpu =
         !options.has_delegate() ||  // Use GPU delegate if not specified
         (options.has_delegate() && options.delegate().has_gpu());
@@ -73,8 +73,7 @@ class InferenceCalculatorSelectorImpl
         impls.emplace_back("GlAdvanced");
       }
     }
-#endif  // !defined(MEDIAPIPE_FORCE_CPU_INFERENCE) ||
-        // !MEDIAPIPE_FORCE_CPU_INFERENCE
+#endif  // !MEDIAPIPE_FORCE_CPU_INFERENCE
     impls.emplace_back("Cpu");
     impls.emplace_back("Xnnpack");
     std::vector<std::string> missing_impls;
diff --git a/mediapipe/calculators/tensor/inference_calculator_cpu.cc b/mediapipe/calculators/tensor/inference_calculator_cpu.cc
index 16ce4782f4..4c8ba21ad6 100644
--- a/mediapipe/calculators/tensor/inference_calculator_cpu.cc
+++ b/mediapipe/calculators/tensor/inference_calculator_cpu.cc
@@ -146,7 +146,7 @@ InferenceCalculatorCpuImpl::MaybeCreateDelegate(CalculatorContext* cc) {
   }
 #endif  // MEDIAPIPE_ANDROID
 
-#if defined(__EMSCRIPTEN__)
+#if defined(__EMSCRIPTEN__) || MEDIAPIPE_FORCE_CPU_INFERENCE
   const bool use_xnnpack = true;
 #else
   const bool use_xnnpack = opts_has_delegate && opts_delegate.has_xnnpack();
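
[Editor's note, not part of the upstream patch.] The BUILD changes above make
MEDIAPIPE_FORCE_CPU_INFERENCE always defined, as either 1 (when the
":force_cpu_inference" config setting is active) or 0 (default). That is what
lets the guard in inference_calculator.cc shrink from
"#if !defined(MEDIAPIPE_FORCE_CPU_INFERENCE) || !MEDIAPIPE_FORCE_CPU_INFERENCE"
to a plain "#if !MEDIAPIPE_FORCE_CPU_INFERENCE", and it is why
inference_calculator_cpu.cc must now receive the same local_defines. A minimal
C++ sketch of the pattern, using the hypothetical macro FORCE_CPU as a
stand-in for MEDIAPIPE_FORCE_CPU_INFERENCE:

    // force_cpu_sketch.cc -- compile with -DFORCE_CPU=0 or -DFORCE_CPU=1.
    // Illustrative only; FORCE_CPU stands in for MEDIAPIPE_FORCE_CPU_INFERENCE.
    #include <iostream>

    // Because the build system always supplies the macro as 0 or 1, code can
    // use a plain "#if !FORCE_CPU". A translation unit compiled without the
    // define (e.g. a target missing the local_defines stanza) fails loudly
    // instead of silently taking the "macro undefined" branch.
    #ifndef FORCE_CPU
    #error "FORCE_CPU must be defined to 0 or 1 by the build system"
    #endif

    int main() {
    #if !FORCE_CPU
      std::cout << "GPU delegates may be considered\n";
    #endif
      // Mirrors the inference_calculator_cpu.cc hunk: when CPU inference is
      // forced, XNNPACK is enabled unconditionally (simplified here; the real
      // code also enables it under __EMSCRIPTEN__ or an explicit delegate
      // option).
      const bool use_xnnpack = FORCE_CPU != 0;
      std::cout << "use_xnnpack=" << use_xnnpack << "\n";
      return 0;
    }

Defining the macro to 0 by default, rather than leaving it undefined, also
keeps builds clean under warnings such as -Wundef, since every #if sees a
defined value.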