Commit cd4996a 1 parent 148ac65 commit cd4996a Copy full SHA for cd4996a
File tree 3 files changed +9
-8
lines changed
3 files changed +9
-8
lines changed Original file line number Diff line number Diff line change @@ -65,13 +65,14 @@ LABEL djl-serving-version=$djl_serving_version
65
65
FROM base AS cpu-full
66
66
67
67
ARG torch_version=2.5.1
68
+ ARG onnx_version=1.20.0
68
69
69
70
COPY scripts scripts/
70
71
RUN scripts/install_python.sh && \
71
72
scripts/install_djl_serving.sh $djl_version $djl_serving_version $torch_version && \
72
73
djl-serving -i ai.djl.pytorch:pytorch-native-cpu:$torch_version:linux-x86_64 && \
73
74
djl-serving -i ai.djl.onnxruntime:onnxruntime-engine:$djl_version && \
74
- djl-serving -i com.microsoft.onnxruntime:onnxruntime:1.19.0 && \
75
+ djl-serving -i com.microsoft.onnxruntime:onnxruntime:$onnx_version && \
75
76
scripts/patch_oss_dlc.sh python && \
76
77
echo "${djl_serving_version} cpufull" > /opt/djl/bin/telemetry && \
77
78
rm -rf /opt/djl/logs && \
Original file line number Diff line number Diff line change @@ -19,7 +19,7 @@ ARG djl_torch_version=2.5.1
19
19
ARG djl_onnx_version=1.20.0
20
20
21
21
# djl converter wheel for text-embedding use case
22
- ARG djl_converter_wheel="https://publish.djl.ai/djl_converter/djl_converter-0.31.0-py3-none-any.whl"
22
+ ARG djl_converter_wheel="https://publish.djl.ai/djl_converter/djl_converter-${djl_version//-*/}-py3-none-any.whl"
23
23
24
24
EXPOSE 8080
25
25
@@ -34,7 +34,7 @@ ENV MODEL_LOADING_TIMEOUT=1200
34
34
ENV PREDICT_TIMEOUT=240
35
35
ENV DJL_CACHE_DIR=/tmp/.djl.ai
36
36
# set cudnn9 library path
37
- ENV LD_LIBRARY_PATH=/usr/local/lib/python${python_version}/dist-packages/nvidia/cudnn/lib/
37
+ ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python${python_version}/dist-packages/nvidia/cudnn/lib/"
38
38
ENV PYTORCH_LIBRARY_PATH=/usr/local/lib/python${python_version}/dist-packages/torch/lib
39
39
ENV PYTORCH_PRECXX11=true
40
40
ENV PYTORCH_VERSION=${djl_torch_version}
Original file line number Diff line number Diff line change @@ -18,11 +18,11 @@ ARG djl_serving_version
18
18
ARG cuda_version=cu124
19
19
ARG torch_version=2.5.1
20
20
ARG torch_vision_version=0.20.1
21
- ARG onnx_version=1.19.0
22
- ARG python_version=3.10
21
+ ARG onnx_version=1.20.0
22
+ ARG python_version=3.11
23
23
ARG numpy_version=1.26.4
24
24
ARG pydantic_version=2.8.2
25
- ARG djl_converter_wheel="https://publish.djl.ai/djl_converter/djl_converter-0.31.0-py3-none-any.whl"
25
+ ARG djl_converter_wheel="https://publish.djl.ai/djl_converter/djl_converter-${djl_version//-*/}-py3-none-any.whl"
26
26
27
27
RUN mkdir -p /opt/djl/conf && \
28
28
mkdir -p /opt/ml/model
@@ -36,8 +36,8 @@ ENV MODEL_SERVER_HOME=/opt/djl
36
36
ENV DJL_CACHE_DIR=/tmp/.djl.ai
37
37
ENV HF_HOME=/tmp/.cache/huggingface
38
38
# set cudnn9 library path
39
- ENV LD_LIBRARY_PATH=/usr/local/lib/python3.10/dist-packages/nvidia/cudnn/lib/
40
- ENV PYTORCH_LIBRARY_PATH=/usr/local/lib/python3.10/dist-packages/torch/lib
39
+ ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python${python_version}/dist-packages/nvidia/cudnn/lib/"
40
+ ENV PYTORCH_LIBRARY_PATH=/usr/local/lib/python${python_version}/dist-packages/torch/lib
41
41
ENV PYTORCH_PRECXX11=true
42
42
ENV PYTORCH_VERSION=${torch_version}
43
43
ENV PYTORCH_FLAVOR=cu124-precxx11
You can’t perform that action at this time.
0 commit comments