Commit

build dockerfile
Signed-off-by: Anupam Kumar <[email protected]>
kyteinsky committed Aug 2, 2024
1 parent 5271cfd commit 7eb25fe
Showing 5 changed files with 125 additions and 28 deletions.
3 changes: 3 additions & 0 deletions .gitmodules
@@ -0,0 +1,3 @@
[submodule "CTranslate2"]
path = CTranslate2
url = https://github.com/OpenNMT/CTranslate2
1 change: 1 addition & 0 deletions CTranslate2
Submodule CTranslate2 added at 39f48f
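
Note: the ADD CTranslate2 step in the Dockerfile below copies the submodule from the build context, so it has to be checked out before building. A minimal sketch, assuming a fresh clone of the repository (URL taken from the image label in the Dockerfile):

  git clone --recurse-submodules https://github.com/nextcloud/translate2.git
  # or, inside an existing checkout:
  git submodule update --init CTranslate2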
145 changes: 119 additions & 26 deletions Dockerfile
@@ -1,19 +1,115 @@
FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04
FROM nvidia/cuda:12.2.2-cudnn8-devel-ubuntu22.04 as builder

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
apt-get install -y --no-install-recommends \
python3-dev \
python3-pip \
wget \
&& \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

RUN apt-get update
RUN apt-get install -y software-properties-common
RUN add-apt-repository -y ppa:deadsnakes/ppa
RUN apt-get update
RUN apt-get install -y --no-install-recommends python3.11 python3.11-venv python3-pip vim git pciutils
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1
RUN apt-get -y clean
RUN rm -rf /var/lib/apt/lists/*
RUN python3 -m pip --no-cache-dir install cmake==3.22.*

ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute
ENV DEBIAN_FRONTEND=dialog
WORKDIR /root

# RUN pip install -c intel mkl-devel onednn-devel

ENV ONEAPI_VERSION=2023.0.0
RUN wget -q https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB && \
apt-key add *.PUB && \
rm *.PUB && \
echo "deb https://apt.repos.intel.com/oneapi all main" > /etc/apt/sources.list.d/oneAPI.list && \
apt-get update && \
apt-get install -y --no-install-recommends \
intel-oneapi-mkl-devel-$ONEAPI_VERSION \
&& \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

ENV ONEDNN_VERSION=3.1.1
RUN wget -q https://github.com/oneapi-src/oneDNN/archive/refs/tags/v${ONEDNN_VERSION}.tar.gz && \
tar xf *.tar.gz && \
rm *.tar.gz && \
cd oneDNN-* && \
cmake -DCMAKE_BUILD_TYPE=Release -DONEDNN_LIBRARY_TYPE=STATIC -DONEDNN_BUILD_EXAMPLES=OFF -DONEDNN_BUILD_TESTS=OFF -DONEDNN_ENABLE_WORKLOAD=INFERENCE -DONEDNN_ENABLE_PRIMITIVE="CONVOLUTION;REORDER" -DONEDNN_BUILD_GRAPH=OFF . && \
make -j$(nproc) install && \
cd .. && \
rm -r oneDNN-*

ENV OPENMPI_VERSION=4.1.6
RUN wget -q https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-${OPENMPI_VERSION}.tar.bz2 && \
tar xf *.tar.bz2 && \
rm *.tar.bz2 && \
cd openmpi-* && \
./configure && \
make -j$(nproc) install && \
cd .. && \
rm -r openmpi-*

ADD CTranslate2 CTranslate2
WORKDIR /root/CTranslate2

ARG CXX_FLAGS
ENV CXX_FLAGS=${CXX_FLAGS:-"-msse4.1"}
ARG CUDA_NVCC_FLAGS
ENV CUDA_NVCC_FLAGS=${CUDA_NVCC_FLAGS:-"-Xfatbin=-compress-all"}
ARG CUDA_ARCH_LIST
ENV CUDA_ARCH_LIST=${CUDA_ARCH_LIST:-"Common"}
ENV CTRANSLATE2_ROOT=/opt/ctranslate2
ENV LD_LIBRARY_PATH=/usr/local/lib/:${LD_LIBRARY_PATH}

RUN mkdir build_tmp && \
cd build_tmp && \
cmake -DCMAKE_INSTALL_PREFIX=${CTRANSLATE2_ROOT} \
-DWITH_CUDA=ON -DWITH_CUDNN=ON -DWITH_MKL=ON -DWITH_DNNL=ON -DOPENMP_RUNTIME=COMP \
-DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS="${CXX_FLAGS}" \
-DCUDA_NVCC_FLAGS="${CUDA_NVCC_FLAGS}" -DCUDA_ARCH_LIST="${CUDA_ARCH_LIST}" -DWITH_TENSOR_PARALLEL=ON .. && \
VERBOSE=1 make -j$(nproc) install

ENV LANG=en_US.UTF-8
COPY README.md .

RUN cd python && \
python3 -m pip --no-cache-dir install -r install_requirements.txt && \
python3 setup.py bdist_wheel --dist-dir $CTRANSLATE2_ROOT

FROM nvidia/cuda:12.2.2-base-ubuntu22.04

# We remove the cuda-compat package because it conflicts with the CUDA Enhanced Compatibility.
# See e.g. https://github.com/NVIDIA/nvidia-docker/issues/1515
RUN apt-get update && \
apt-get install -y --no-install-recommends \
libcublas-12-2 \
libcudnn8=8.9.7.29-1+cuda12.2 \
libnccl2=2.19.3-1+cuda12.2 \
libopenmpi3=4.1.2-2ubuntu1 \
openmpi-bin \
libgomp1 \
python3-pip \
&& \
apt-get purge -y cuda-compat-12-2 && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

ENV CTRANSLATE2_ROOT=/opt/ctranslate2
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CTRANSLATE2_ROOT/lib

COPY --from=builder $CTRANSLATE2_ROOT $CTRANSLATE2_ROOT
RUN python3 -m pip --no-cache-dir install $CTRANSLATE2_ROOT/*.whl && \
rm $CTRANSLATE2_ROOT/*.whl


#ENV DEBIAN_FRONTEND noninteractive
#
#RUN apt-get update && \
# apt-get install -y software-properties-common && \
# add-apt-repository -y ppa:deadsnakes/ppa && \
# apt-get update && \
# apt-get install -y --no-install-recommends python3.11 python3.11-venv python3-pip vim git && \
# update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
# apt-get -y clean && \
# rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app
@@ -22,25 +118,22 @@ WORKDIR /app
COPY requirements.txt .

# Install requirements
RUN python3 -m pip install --no-cache-dir --upgrade pip setuptools wheel
RUN python3 -m pip install --no-cache-dir https://github.com/abetlen/llama-cpp-python/releases/download/v0.2.82-cu121/llama_cpp_python-0.2.82-cp311-cp311-linux_x86_64.whl
RUN sed -i '/llama_cpp_python/d' requirements.txt
RUN sed -i '/ctranslate2/d' requirements.txt
RUN python3 -m pip install --no-cache-dir --no-deps -r requirements.txt


# CUDA 12.1 compat lib
ENV LD_LIBRARY_PATH=/usr/local/cuda/compat:$LD_LIBRARY_PATH
ENV LIBRARY_PATH=/usr/local/cuda/compat:$LIBRARY_PATH
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute
ENV DEBIAN_FRONTEND dialog

# Copy application files
ADD cs[s] /app/css
ADD im[g] /app/img
ADD j[s] /app/js
ADD cs[s] /app/css
ADD im[g] /app/img
ADD j[s] /app/js
ADD l10[n] /app/l10n
ADD li[b] /app/lib
ADD config.json /app/config.json
ADD li[b] /app/lib
ADD config.json /app/config.json
ADD languages.json /app/languages.json

WORKDIR /app/lib
ENTRYPOINT ["python3", "lib/main.py"]

LABEL org.opencontainers.image.source="https://github.com/nextcloud/translate2"
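
The builder stage above exposes CXX_FLAGS, CUDA_NVCC_FLAGS and CUDA_ARCH_LIST as build arguments, so the CTranslate2 compile can be tuned without editing the Dockerfile. A minimal sketch of building and running the image (the tag and architecture value are illustrative; running with GPUs assumes the NVIDIA Container Toolkit is installed):

  # restrict the CUDA architectures to shorten the CTranslate2 build (default is "Common")
  docker build --build-arg CUDA_ARCH_LIST="8.6" -t translate2:local .

  # run with GPU access
  docker run --rm --gpus all translate2:local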
2 changes: 1 addition & 1 deletion config.json
@@ -4,7 +4,7 @@
"__comment::loader": "CTranslate2 loader options, see https://opennmt.net/CTranslate2/python/ctranslate2.Translator.html#ctranslate2.Translator.__init__. Use 'model_path' key for local paths or 'model_name' key for models hosted on Hugging Face. Both can't be used at the same time.",
"__comment::inference": "CTranslate2 inference options, see the kwargs in https://opennmt.net/CTranslate2/python/ctranslate2.Translator.html#ctranslate2.Translator.translate_batch",
"__comment::changes_to_the_config": "the program needs to be restarted if you change this file since it is stored in memory on startup",
"log_level": 20,
"log_level": 10,
"tokenizer_file": "spiece.model",
"loader": {
"model_name": "Nextcloud-AI/madlad400-3b-mt-ct2-int8_float32",
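
As the __comment::changes_to_the_config entry notes, config.json is only read at startup, so a change like the log_level switch above (10 corresponds to Python's logging.DEBUG, 20 to INFO) only takes effect after a restart. A minimal sketch, assuming the app runs in a container named translate2 (the name is illustrative):

  docker restart translate2    # reload config.json
  docker logs -f translate2    # expect debug-level output after the log_level change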
2 changes: 1 addition & 1 deletion lib/Service.py
@@ -71,7 +71,7 @@ def load_config(self, config: dict):
self.config = config_copy

def translate(self, to_language: str, text: str) -> str:
logger.debug(f"translating text to: {to_language}")
logger.debug(f"translating text to: {self.languages[to_language]}")

with translate_context(self.config) as (tokenizer, translator):
input_tokens = tokenizer.Encode(f"<2{to_language}> {clean_text(text)}", out_type=str)
