Skip to content

Commit a83a9ed

Browse files
Remove miscellaneous nuphar configs (microsoft#13070)
Remove a handful of nuphar related configurations after deprecation. Co-authored-by: Randy Shuai <[email protected]>
1 parent 44c14e8 commit a83a9ed

File tree

23 files changed

+12
-175
lines changed

23 files changed

+12
-175
lines changed

.flake8

-2
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,6 @@ exclude =
1515
# ignore generated flatbuffers code
1616
./onnxruntime/core/flatbuffers/ort_flatbuffers_py,
1717
# TODO enable
18-
./onnxruntime/core/providers/nuphar,
19-
# TODO enable
2018
./onnxruntime/python/tools,
2119
# ignore test code for now
2220
./onnxruntime/test,

README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222
|System|CPU|GPU|EPs|
2323
|---|---|---|---|
2424
|Windows|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20CPU%20CI%20Pipeline?label=Windows+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=9)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20CI%20Pipeline?label=Windows+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=10)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Windows%20GPU%20TensorRT%20CI%20Pipeline?label=Windows+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=47)|
25-
|Linux|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20CI%20Pipeline?label=Linux+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=11)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20Minimal%20Build%20E2E%20CI%20Pipeline?label=Linux+CPU+Minimal+Build)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=64)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20x64%20NoContribops%20CI%20Pipeline?label=Linux+CPU+x64+No+Contrib+Ops)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=110)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/centos7_cpu?label=Linux+CentOS7)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=78)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-ci-pipeline?label=Linux+CPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=86)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20CI%20Pipeline?label=Linux+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=12)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20TensorRT%20CI%20Pipeline?label=Linux+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=45)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-distributed?label=Distributed+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=140)<br>[![Build 
Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-gpu-ci-pipeline?label=Linux+GPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=84)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20NUPHAR%20CI%20Pipeline?label=Linux+NUPHAR)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=110)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20OpenVINO%20CI%20Pipeline?label=Linux+OpenVINO)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=55)|
25+
|Linux|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20CI%20Pipeline?label=Linux+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=11)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20Minimal%20Build%20E2E%20CI%20Pipeline?label=Linux+CPU+Minimal+Build)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=64)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20CPU%20x64%20NoContribops%20CI%20Pipeline?label=Linux+CPU+x64+No+Contrib+Ops)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=110)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/centos7_cpu?label=Linux+CentOS7)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=78)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-ci-pipeline?label=Linux+CPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=86)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20CI%20Pipeline?label=Linux+GPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=12)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20GPU%20TensorRT%20CI%20Pipeline?label=Linux+GPU+TensorRT)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=45)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-distributed?label=Distributed+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=140)<br>[![Build 
Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/orttraining-linux-gpu-ci-pipeline?label=Linux+GPU+Training)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=84)|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Linux%20OpenVINO%20CI%20Pipeline?label=Linux+OpenVINO)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=55)|
2626
|Mac|[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/MacOS%20CI%20Pipeline?label=MacOS+CPU)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=13)<br>[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/MacOS%20NoContribops%20CI%20Pipeline?label=MacOS+NoContribops)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=65)|||
2727
|Android|||[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/Android%20CI%20Pipeline?label=Android)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=53)|
2828
|iOS|||[![Build Status](https://dev.azure.com/onnxruntime/onnxruntime/_apis/build/status/iOS%20CI%20Pipeline?label=iOS)](https://dev.azure.com/onnxruntime/onnxruntime/_build/latest?definitionId=134)|

cgmanifests/cgmanifest.json

-10
Original file line numberDiff line numberDiff line change
@@ -32,16 +32,6 @@
3232
}
3333
}
3434
},
35-
{
36-
"component": {
37-
"type": "git",
38-
"git": {
39-
"commitHash": "638d7d2407de27f98f542f61a37a33c90a2e75a9",
40-
"repositoryUrl": "https://github.com/microsoft/onnxruntime-tvm.git"
41-
},
42-
"comments": "needed for Nuphar EP"
43-
}
44-
},
4535
{
4636
"component": {
4737
"type": "git",

cmake/onnxruntime.cmake

-1
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,6 @@ set(onnxruntime_INTERNAL_LIBRARIES
179179
${PROVIDERS_DML}
180180
${PROVIDERS_NNAPI}
181181
${PROVIDERS_SNPE}
182-
${PROVIDERS_NUPHAR}
183182
${PROVIDERS_TVM}
184183
${PROVIDERS_RKNPU}
185184
${PROVIDERS_ROCM}

cmake/onnxruntime_csharp.cmake

-4
Original file line numberDiff line numberDiff line change
@@ -30,10 +30,6 @@ if (onnxruntime_USE_NNAPI_BUILTIN)
3030
STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_NNAPI;")
3131
endif()
3232

33-
if (onnxruntime_USE_NUPHAR)
34-
STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_NUPHAR;")
35-
endif()
36-
3733
if (onnxruntime_USE_TVM)
3834
STRING(APPEND CSHARP_PREPROCESSOR_DEFINES "USE_TVM,")
3935
endif()

cmake/onnxruntime_python.cmake

-20
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,6 @@ endif()
199199
target_link_libraries(onnxruntime_pybind11_state PRIVATE
200200
onnxruntime_session
201201
${onnxruntime_libs}
202-
${PROVIDERS_NUPHAR}
203202
${PROVIDERS_TVM}
204203
${PROVIDERS_VITISAI}
205204
${PROVIDERS_NNAPI}
@@ -835,25 +834,6 @@ if (onnxruntime_USE_ROCM)
835834
)
836835
endif()
837836

838-
if (onnxruntime_USE_NUPHAR)
839-
add_custom_command(
840-
TARGET onnxruntime_pybind11_state POST_BUILD
841-
COMMAND ${CMAKE_COMMAND} -E copy
842-
$<TARGET_FILE:tvm>
843-
$<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/capi/
844-
)
845-
file(GLOB onnxruntime_python_nuphar_python_srcs CONFIGURE_DEPENDS
846-
"${ONNXRUNTIME_ROOT}/core/providers/nuphar/scripts/*"
847-
)
848-
add_custom_command(
849-
TARGET onnxruntime_pybind11_state POST_BUILD
850-
COMMAND ${CMAKE_COMMAND} -E make_directory $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/nuphar
851-
COMMAND ${CMAKE_COMMAND} -E copy
852-
${onnxruntime_python_nuphar_python_srcs}
853-
$<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/nuphar/
854-
)
855-
endif()
856-
857837
if (onnxruntime_USE_TVM)
858838
file(GLOB onnxruntime_python_providers_tvm_srcs CONFIGURE_DEPENDS
859839
"${ONNXRUNTIME_ROOT}/python/providers/tvm/*.py"

cmake/onnxruntime_unittests.cmake

+6-22
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ set(TEST_INC_DIR ${ONNXRUNTIME_ROOT})
99
if (onnxruntime_ENABLE_TRAINING)
1010
list(APPEND TEST_INC_DIR ${ORTTRAINING_ROOT})
1111
endif()
12-
if (onnxruntime_USE_NUPHAR_TVM)
12+
if (onnxruntime_USE_TVM)
1313
list(APPEND TEST_INC_DIR ${TVM_INCLUDES})
1414
endif()
1515

@@ -364,7 +364,7 @@ if (onnxruntime_USE_RKNPU)
364364
list(APPEND onnxruntime_test_providers_src ${onnxruntime_test_providers_rknpu_src})
365365
endif()
366366

367-
if ((NOT onnxruntime_MINIMAL_BUILD AND NOT onnxruntime_USE_NUPHAR) OR onnxruntime_EXTENDED_MINIMAL_BUILD)
367+
if (NOT onnxruntime_MINIMAL_BUILD OR onnxruntime_EXTENDED_MINIMAL_BUILD)
368368
file(GLOB_RECURSE onnxruntime_test_providers_internal_testing_src CONFIGURE_DEPENDS
369369
"${TEST_SRC_DIR}/providers/internal_testing/*"
370370
)
@@ -483,17 +483,6 @@ if(onnxruntime_USE_COREML)
483483
endif()
484484
endif()
485485

486-
if(onnxruntime_USE_NUPHAR)
487-
# the test case under nuphar_tvm is only to verify some basic tvm show case, which is already out of date
488-
# it doesn't have relationship to nuphar directly. consider we have an official tvm execution provider now,
489-
# keep those test cases doesn't bring any value now.
490-
491-
list(APPEND onnxruntime_test_framework_src_patterns ${TEST_SRC_DIR}/framework/nuphar/*)
492-
list(APPEND onnxruntime_test_framework_libs onnxruntime_providers_nuphar)
493-
list(APPEND onnxruntime_test_providers_dependencies onnxruntime_providers_nuphar)
494-
list(APPEND onnxruntime_test_providers_libs onnxruntime_providers_nuphar)
495-
endif()
496-
497486
if(onnxruntime_USE_ACL)
498487
list(APPEND onnxruntime_test_providers_dependencies onnxruntime_providers_acl)
499488
endif()
@@ -511,7 +500,6 @@ set(ONNXRUNTIME_TEST_LIBS
511500
${ONNXRUNTIME_INTEROP_TEST_LIBS}
512501
${onnxruntime_libs}
513502
# CUDA, ROCM, TENSORRT, MIGRAPHX, DNNL, and OpenVINO are dynamically loaded at runtime
514-
${PROVIDERS_NUPHAR}
515503
${PROVIDERS_NNAPI}
516504
${PROVIDERS_SNPE}
517505
${PROVIDERS_RKNPU}
@@ -604,7 +592,7 @@ endif()
604592

605593

606594
if(WIN32)
607-
if (onnxruntime_USE_NUPHAR_TVM)
595+
if (onnxruntime_USE_TVM)
608596
list(APPEND disabled_warnings ${DISABLED_WARNINGS_FOR_TVM})
609597
endif()
610598
endif()
@@ -691,10 +679,6 @@ if (onnxruntime_ENABLE_TRAINING)
691679
endif()
692680
endif()
693681

694-
if (onnxruntime_USE_NUPHAR)
695-
list(APPEND all_tests ${onnxruntime_test_nuphar_src})
696-
endif()
697-
698682
if (onnxruntime_USE_TVM)
699683
list(APPEND all_tests ${onnxruntime_test_tvm_src})
700684
endif()
@@ -842,7 +826,7 @@ if (NOT onnxruntime_ENABLE_TRAINING_TORCH_INTEROP)
842826
)
843827
endif()
844828
if(WIN32)
845-
if (onnxruntime_USE_NUPHAR_TVM)
829+
if (onnxruntime_USE_TVM)
846830
add_custom_command(
847831
TARGET ${test_data_target} POST_BUILD
848832
COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:tvm> $<TARGET_FILE_DIR:${test_data_target}>
@@ -899,7 +883,7 @@ if (onnxruntime_ENABLE_TRAINING_TORCH_INTEROP)
899883
endif()
900884
set_target_properties(onnx_test_runner PROPERTIES FOLDER "ONNXRuntimeTest")
901885

902-
if (onnxruntime_USE_NUPHAR_TVM)
886+
if (onnxruntime_USE_TVM)
903887
if (WIN32)
904888
target_link_options(onnx_test_runner PRIVATE "/STACK:4000000")
905889
endif()
@@ -1102,7 +1086,7 @@ if (NOT onnxruntime_ENABLE_TRAINING_TORCH_INTEROP)
11021086
target_link_libraries(onnxruntime_perf_test PRIVATE onnxruntime_language_interop onnxruntime_pyop)
11031087
endif()
11041088

1105-
if (onnxruntime_USE_NUPHAR_TVM)
1089+
if (onnxruntime_USE_TVM)
11061090
if (WIN32)
11071091
target_link_options(onnxruntime_perf_test PRIVATE "/STACK:4000000")
11081092
endif()

csharp/src/Microsoft.ML.OnnxRuntime/NativeMethods.shared.cs

-3
Original file line numberDiff line numberDiff line change
@@ -887,9 +887,6 @@ IntPtr[] outputValues /* An array of output value pointers. Array must be alloca
887887
[DllImport(NativeLib.DllName, CharSet = CharSet.Ansi)]
888888
public static extern IntPtr /*(OrtStatus*)*/ OrtSessionOptionsAppendExecutionProvider_MIGraphX(IntPtr /*(OrtSessionOptions*)*/ options, int device_id);
889889

890-
[DllImport(NativeLib.DllName, CharSet = CharSet.Ansi)]
891-
public static extern IntPtr /*(OrtStatus*)*/ OrtSessionOptionsAppendExecutionProvider_Nuphar(IntPtr /*(OrtSessionOptions*) */ options, int allow_unaligned_buffers, IntPtr /*(char char*)*/ settings);
892-
893890
[DllImport(NativeLib.DllName, CharSet = CharSet.Ansi)]
894891
public static extern IntPtr /*(OrtStatus*)*/ OrtSessionOptionsAppendExecutionProvider_Tvm(IntPtr /*(OrtSessionOptions*) */ options, IntPtr /*(char char*)*/ settings);
895892
#endif

csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.shared.cs

-31
Original file line numberDiff line numberDiff line change
@@ -136,20 +136,6 @@ public static SessionOptions MakeSessionOptionWithTensorrtProvider(OrtTensorRTPr
136136
}
137137
}
138138

139-
/// <summary>
140-
/// A helper method to construct a SessionOptions object for Nuphar execution.
141-
/// Use only if you have the onnxruntime package specific to this Execution Provider.
142-
/// </summary>
143-
/// <param name="settings">settings string, comprises of comma separated key:value pairs. default is empty</param>
144-
/// <returns>A SessionsOptions() object configured for execution with Nuphar</returns>
145-
public static SessionOptions MakeSessionOptionWithNupharProvider(String settings = "")
146-
{
147-
SessionOptions options = new SessionOptions();
148-
options.AppendExecutionProvider_Nuphar(settings);
149-
150-
return options;
151-
}
152-
153139
/// <summary>
154140
/// A helper method to construct a SessionOptions object for TVM execution.
155141
/// Use only if you have the onnxruntime package specific to this Execution Provider.
@@ -352,23 +338,6 @@ public void AppendExecutionProvider_CoreML(CoreMLFlags coremlFlags = CoreMLFlags
352338
#endif
353339
}
354340

355-
/// <summary>
356-
/// Use only if you have the onnxruntime package specific to this Execution Provider.
357-
/// </summary>
358-
/// <param name="settings">string with Nuphar specific settings</param>
359-
public void AppendExecutionProvider_Nuphar(string settings = "")
360-
{
361-
#if __MOBILE__
362-
throw new NotSupportedException("The Nuphar Execution Provider is not supported in this build");
363-
#else
364-
var settingsPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(settings), GCHandleType.Pinned);
365-
using (var pinnedSettingsName = new PinnedGCHandle(settingsPinned))
366-
{
367-
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nuphar(handle, 1, pinnedSettingsName.Pointer));
368-
}
369-
#endif
370-
}
371-
372341
/// <summary>
373342
/// Use only if you have the onnxruntime package specific to this Execution Provider.
374343
/// </summary>

csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/InferenceTest.cs

-4
Original file line numberDiff line numberDiff line change
@@ -122,10 +122,6 @@ public void TestSessionOptions()
122122
opt.AppendExecutionProvider_Tvm("Vulkan -device=amd_apu");
123123
#endif
124124

125-
#if USE_NUPHAR
126-
opt.AppendExecutionProvider_Nuphar();
127-
#endif
128-
129125
#if USE_OPENVINO
130126
opt.AppendExecutionProvider_OpenVINO();
131127
#endif

dockerfiles/README.md

-22
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
- CUDA/cuDNN: [Dockerfile](Dockerfile.cuda), [Instructions](#cuda)
55
- MIGraphX: [Dockerfile](Dockerfile.migraphx), [Instructions](#migraphx)
66
- ROCm: [Dockerfile](Dockerfile.rocm), [Instructions](#rocm)
7-
- NUPHAR: [Dockerfile](Dockerfile.nuphar), [Instructions](#nuphar)
87
- OpenVINO: [Dockerfile](Dockerfile.openvino), [Instructions](#openvino)
98
- TensorRT: [Dockerfile](Dockerfile.tensorrt), [Instructions](#tensorrt)
109
- VitisAI: [Dockerfile](Dockerfile.vitisai)
@@ -279,27 +278,6 @@ Nothing else from ONNX Runtime source tree will be copied/installed to the image
279278

280279
Note: When running the container you built in Docker, please either use 'nvidia-docker' command instead of 'docker', or use Docker command-line options to make sure NVIDIA runtime will be used and appropriate files mounted from host. Otherwise, CUDA libraries won't be found. You can also [set NVIDIA runtime as default in Docker](https://github.com/dusty-nv/jetson-containers#docker-default-runtime).
281280

282-
## NUPHAR
283-
*Public Preview*
284-
285-
**Ubuntu 16.04, Python Bindings**
286-
287-
1. Update submodules
288-
```
289-
git submodule update --init
290-
```
291-
292-
2. Build the docker image from the Dockerfile in this repository.
293-
```
294-
docker build -t onnxruntime-nuphar -f Dockerfile.nuphar .
295-
```
296-
297-
3. Run the Docker image
298-
299-
```
300-
docker run -it onnxruntime-nuphar
301-
```
302-
303281
## MIGraphX
304282
**Ubuntu 18.04, rocm4.5, AMDMIGraphX v1.2**
305283

include/onnxruntime/core/framework/execution_provider.h

+1-1
Original file line numberDiff line numberDiff line change
@@ -239,7 +239,7 @@ class IExecutionProvider {
239239
};
240240

241241
virtual FusionStyle GetFusionStyle() const {
242-
// All the ORT build in EP has migrate to FilteredGraphViewer style except Nuphar.
242+
// All the ORT built-in EPs have migrated to the FilteredGraphViewer style.
243243
// For newer EPs, please avoid use Function style as it is deprecated.
244244
return FusionStyle::FilteredGraphViewer;
245245
}

include/onnxruntime/core/graph/constants.h

-1
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@ constexpr const char* kCpuExecutionProvider = "CPUExecutionProvider";
2727
constexpr const char* kCudaExecutionProvider = "CUDAExecutionProvider";
2828
constexpr const char* kDnnlExecutionProvider = "DnnlExecutionProvider";
2929
constexpr const char* kOpenVINOExecutionProvider = "OpenVINOExecutionProvider";
30-
constexpr const char* kNupharExecutionProvider = "NupharExecutionProvider";
3130
constexpr const char* kVitisAIExecutionProvider = "VitisAIExecutionProvider";
3231
constexpr const char* kTensorrtExecutionProvider = "TensorrtExecutionProvider";
3332
constexpr const char* kNnapiExecutionProvider = "NnapiExecutionProvider";

include/onnxruntime/core/providers/nuphar/nuphar_provider_factory.h

-17
This file was deleted.

java/src/main/java/ai/onnxruntime/OrtSession.java

-12
Original file line numberDiff line numberDiff line change
@@ -911,18 +911,6 @@ public void addNnapi(EnumSet<NNAPIFlags> flags) throws OrtException {
911911
addNnapi(OnnxRuntime.ortApiHandle, nativeHandle, OrtFlags.aggregateToInt(flags));
912912
}
913913

914-
/**
915-
* Adds Nuphar as an execution backend.
916-
*
917-
* @param allowUnalignedBuffers Allow unaligned memory buffers.
918-
* @param settings See the documentation for valid settings strings.
919-
* @throws OrtException If there was an error in native code.
920-
*/
921-
public void addNuphar(boolean allowUnalignedBuffers, String settings) throws OrtException {
922-
checkClosed();
923-
addNuphar(OnnxRuntime.ortApiHandle, nativeHandle, allowUnalignedBuffers ? 1 : 0, settings);
924-
}
925-
926914
/**
927915
* Adds TVM as an execution backend.
928916
*

java/src/main/native/ai_onnxruntime_OrtSession_SessionOptions.c

-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
#include "onnxruntime/core/providers/cpu/cpu_provider_factory.h"
1919
#include "onnxruntime/core/providers/dnnl/dnnl_provider_factory.h"
2020
#include "onnxruntime/core/providers/nnapi/nnapi_provider_factory.h"
21-
#include "onnxruntime/core/providers/nuphar/nuphar_provider_factory.h"
2221
#include "onnxruntime/core/providers/tvm/tvm_provider_factory.h"
2322
#include "onnxruntime/core/providers/openvino/openvino_provider_factory.h"
2423
#include "onnxruntime/core/providers/tensorrt/tensorrt_provider_factory.h"

java/src/test/java/ai/onnxruntime/InferenceTest.java

-3
Original file line numberDiff line numberDiff line change
@@ -1525,9 +1525,6 @@ private static SqueezeNetTuple openSessionSqueezeNet(EnumSet<OrtProvider> provid
15251525
case XNNPACK:
15261526
options.addXnnpack(Collections.emptyMap());
15271527
break;
1528-
case NUPHAR:
1529-
options.addNuphar(true, "");
1530-
break;
15311528
case VITIS_AI:
15321529
case RK_NPU:
15331530
case MI_GRAPH_X:

onnxruntime/core/providers/provider_factory_creators.h

-4
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,6 @@
4646
#include "core/providers/nnapi/nnapi_provider_factory_creator.h"
4747
#endif
4848

49-
#if defined(USE_NUPHAR)
50-
#include "core/providers/nuphar/nuphar_provider_factory_creator.h"
51-
#endif
52-
5349
#if defined(USE_OPENVINO)
5450
#include "core/providers/openvino/openvino_provider_factory_creator.h"
5551
#endif

0 commit comments

Comments
 (0)