diff --git a/engines/pyfunc-ensembler-service/Dockerfile b/engines/pyfunc-ensembler-service/Dockerfile
index d55f9990b..2a6b4d646 100644
--- a/engines/pyfunc-ensembler-service/Dockerfile
+++ b/engines/pyfunc-ensembler-service/Dockerfile
@@ -1,5 +1,7 @@
 FROM condaforge/miniforge3:23.3.1-1 AS builder
 
+RUN apt-get update && apt-get install -y unzip
+
 ARG APP_NAME
 ARG CONDA_ENV_NAME
 ARG PYTHON_VERSION
@@ -7,8 +9,11 @@ ARG PYTHON_VERSION
 ENV APP_NAME=$APP_NAME
 ENV CONDA_ENV_NAME=$CONDA_ENV_NAME
 
+# Install gcloud SDK
 RUN wget -qO- https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-367.0.0-linux-x86_64.tar.gz | tar xzf -
 ENV PATH=$PATH:/google-cloud-sdk/bin
+# Install AWS CLI
+RUN wget -q https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip && unzip awscli-exe-linux-x86_64.zip && ./aws/install
 
 COPY . .
 COPY ./temp-deps/sdk ./../../sdk
diff --git a/engines/pyfunc-ensembler-service/app.Dockerfile b/engines/pyfunc-ensembler-service/app.Dockerfile
index 5182175c4..39bec2a53 100644
--- a/engines/pyfunc-ensembler-service/app.Dockerfile
+++ b/engines/pyfunc-ensembler-service/app.Dockerfile
@@ -2,13 +2,34 @@ ARG BASE_IMAGE
 
 FROM ${BASE_IMAGE} as builder
 
+ARG MLFLOW_ARTIFACT_STORAGE_TYPE
+
 ARG MODEL_URL
 ARG FOLDER_NAME
 ARG GOOGLE_APPLICATION_CREDENTIALS
 
-# Run docker build using the credentials if provided
-RUN if [[-z "$GOOGLE_APPLICATION_CREDENTIALS"]]; then gcloud auth activate-service-account --key-file=${GOOGLE_APPLICATION_CREDENTIALS}; fi
-RUN gsutil -m cp -r ${MODEL_URL} .
+ARG AWS_ACCESS_KEY_ID
+ARG AWS_SECRET_ACCESS_KEY
+ARG AWS_DEFAULT_REGION
+ARG AWS_ENDPOINT_URL
+
+# Activate credentials for the configured artifact store, if any
+RUN if [ "${MLFLOW_ARTIFACT_STORAGE_TYPE}" = "gcs" ]; then \
+        if [ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then \
+            gcloud auth activate-service-account --key-file=${GOOGLE_APPLICATION_CREDENTIALS}; \
+        fi; \
+    elif [ "${MLFLOW_ARTIFACT_STORAGE_TYPE}" = "s3" ]; then \
+        echo "S3 credentials used"; \
+    else \
+        echo "No credentials are used"; \
+    fi
+
+# Download the model artifacts from the configured artifact store
+RUN if [ "${MLFLOW_ARTIFACT_STORAGE_TYPE}" = "gcs" ]; then \
+        gsutil -m cp -r ${MODEL_URL} .; \
+    elif [ "${MLFLOW_ARTIFACT_STORAGE_TYPE}" = "s3" ]; then \
+        aws s3 cp ${MODEL_URL} ensembler --recursive; \
+    else \
+        echo "Unknown artifact storage type; no model artifacts copied"; \
+    fi
 
 RUN /bin/bash -c "conda env update --name ${CONDA_ENV_NAME} --file ./${FOLDER_NAME}/conda.yaml"
 
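
For context, a rough sketch of how the new build arguments might be supplied when building the ensembler image against an S3-compatible artifact store. The image tags, bucket URI, region, and endpoint below are illustrative placeholders, not values taken from this change; the AWS_* args are picked up by the `aws s3 cp` step because ARG values are exposed as environment variables to RUN instructions in the same stage.

# Sketch only: assumes the base image was already built from Dockerfile above.
docker build \
  -f engines/pyfunc-ensembler-service/app.Dockerfile \
  --build-arg BASE_IMAGE=pyfunc-ensembler-service:latest \
  --build-arg MLFLOW_ARTIFACT_STORAGE_TYPE=s3 \
  --build-arg MODEL_URL=s3://example-bucket/mlflow/1/example-run/artifacts/ensembler \
  --build-arg FOLDER_NAME=ensembler \
  --build-arg AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID}" \
  --build-arg AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY}" \
  --build-arg AWS_DEFAULT_REGION=us-east-1 \
  --build-arg AWS_ENDPOINT_URL=https://s3.example.com \
  -t example-pyfunc-ensembler:dev \
  engines/pyfunc-ensembler-service

For a GCS-backed store, MLFLOW_ARTIFACT_STORAGE_TYPE=gcs would be passed instead, along with GOOGLE_APPLICATION_CREDENTIALS pointing at a service-account key file, and the AWS_* args would be omitted.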