Add Autogluon v1.2 (#4456)
* Add autogluon v1.2

* pip check

* fix scan and sanity

* fix test

* py scan

* add comments

* fix formatting

* revert toml

* remove packages

* address comments

* revert toml

* rebuild

* revert toml

---------

Co-authored-by: Ubuntu <[email protected]>
prateekdesai04 and Ubuntu authored Dec 17, 2024
1 parent 87802ac commit 04b4f9c
Showing 20 changed files with 1,823 additions and 12 deletions.
53 changes: 53 additions & 0 deletions autogluon/inference/buildspec-1-1-1.yml
@@ -0,0 +1,53 @@
account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
region: &REGION <set-$REGION-in-environment>
framework: &FRAMEWORK autogluon
version: &VERSION 1.1.1
short_version: &SHORT_VERSION 1.1
arch_type: x86

repository_info:
  inference_repository: &INFERENCE_REPOSITORY
    image_type: &INFERENCE_IMAGE_TYPE inference
    root: !join [ *FRAMEWORK, "/", *INFERENCE_IMAGE_TYPE ]
    repository_name: &REPOSITORY_NAME !join [pr, "-", *FRAMEWORK, "-", *INFERENCE_IMAGE_TYPE]
    repository: &REPOSITORY !join [ *ACCOUNT_ID, .dkr.ecr., *REGION, .amazonaws.com/, *REPOSITORY_NAME ]

context:
  inference_context: &INFERENCE_CONTEXT
    torchserve-entrypoint:
      source: ../build_artifacts/inference/torchserve-entrypoint.py
      target: torchserve-entrypoint.py
    config:
      source: ../build_artifacts/inference/config.properties
      target: config.properties
    deep_learning_container:
      source: ../../src/deep_learning_container.py
      target: deep_learning_container.py

images:
  BuildAutogluonCPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_CPU_INFERENCE_PY3 false
    image_size_baseline: 6399
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT

  BuildAutogluonGPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_GPU_INFERENCE_PY3 false
    image_size_baseline: 19456
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    cuda_version: &CUDA_VERSION cu121
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT
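
The buildspec above leans on YAML anchors plus a custom !join tag to assemble the image tag, repository URI, and Dockerfile path. As a rough illustration only (not the DLC build tooling itself), the Python sketch below assumes !join simply concatenates the items of its sequence, which is consistent with how the values above are composed; the loader registration and file path are illustrative.

import yaml

def join_constructor(loader, node):
    # Concatenate the sequence items into one string, e.g.
    # [ "1.1.1", "-", "cpu", "-", "py311", "-", "ubuntu20.04" ]
    # becomes "1.1.1-cpu-py311-ubuntu20.04".
    return "".join(str(item) for item in loader.construct_sequence(node, deep=True))

# Assumption: the custom !join tag resolves via simple string concatenation.
yaml.SafeLoader.add_constructor("!join", join_constructor)

with open("autogluon/inference/buildspec-1-1-1.yml") as f:
    spec = yaml.safe_load(f)

cpu_image = spec["images"]["BuildAutogluonCPUInferencePy3DockerImage"]
print(cpu_image["tag"])          # 1.1.1-cpu-py311-ubuntu20.04
print(cpu_image["docker_file"])  # docker/1.1/py3/Dockerfile.cpu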
10 changes: 5 additions & 5 deletions autogluon/inference/buildspec.yml
@@ -1,8 +1,8 @@
 account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
 region: &REGION <set-$REGION-in-environment>
 framework: &FRAMEWORK autogluon
-version: &VERSION 1.1.1
-short_version: &SHORT_VERSION 1.1
+version: &VERSION 1.2.0
+short_version: &SHORT_VERSION 1.2
 arch_type: x86

repository_info:
@@ -32,7 +32,7 @@ images:
     device_type: &DEVICE_TYPE cpu
     python_version: &DOCKER_PYTHON_VERSION py3
     tag_python_version: &TAG_PYTHON_VERSION py311
-    os_version: &OS_VERSION ubuntu20.04
+    os_version: &OS_VERSION ubuntu22.04
     tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
     docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
     context:
@@ -45,8 +45,8 @@ images:
     device_type: &DEVICE_TYPE gpu
     python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
-    cuda_version: &CUDA_VERSION cu121
-    os_version: &OS_VERSION ubuntu20.04
+    cuda_version: &CUDA_VERSION cu124
+    os_version: &OS_VERSION ubuntu22.04
     tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
     docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
     context:
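
The practical effect of this hunk is easiest to read off the resolved GPU tag. The helper below is hypothetical (the real tag comes from the buildspec's !join), but it reproduces the before/after strings implied by the diff.

def gpu_inference_tag(version, python_tag, cuda, os_version):
    # Hypothetical helper mirroring the !join concatenation used for the GPU image tag.
    return "-".join([version, "gpu", python_tag, cuda, os_version])

# Before this commit: AutoGluon 1.1.1 on CUDA 12.1 / Ubuntu 20.04
print(gpu_inference_tag("1.1.1", "py311", "cu121", "ubuntu20.04"))
# -> 1.1.1-gpu-py311-cu121-ubuntu20.04

# After this commit: AutoGluon 1.2.0 on CUDA 12.4 / Ubuntu 22.04
print(gpu_inference_tag("1.2.0", "py311", "cu124", "ubuntu22.04"))
# -> 1.2.0-gpu-py311-cu124-ubuntu22.04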
61 changes: 61 additions & 0 deletions autogluon/inference/docker/1.2/py3/Dockerfile.cpu
@@ -0,0 +1,61 @@
ARG PYTHON_VERSION=3.11.9

FROM 763104351884.dkr.ecr.us-west-2.amazonaws.com/pytorch-inference:2.5.1-cpu-py311-ubuntu22.04-sagemaker

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

RUN apt-get update \
 && apt-get -y upgrade \
 && apt-get autoremove -y \
 && apt-get install tesseract-ocr -y \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

ARG AUTOGLUON_VERSION=1.2.0

# Upgrading pip and installing/updating Python dependencies
# Comments are added to explain the reason behind each update
RUN pip install --no-cache-dir -U --trusted-host pypi.org --trusted-host files.pythonhosted.org pip \
 && pip install --no-cache-dir -U wheel \
 && pip uninstall -y dataclasses \
 && pip install --no-cache-dir -U numpy numba \
 # Install AutoGluon, ensuring no vulnerable dependencies are left behind
 && pip install --no-cache-dir -U autogluon==${AUTOGLUON_VERSION} \
 # Capping setuptools to 69.5.1 to fix AutoMM tests
 && pip install --no-cache-dir setuptools==69.5.1 \
 # Update urllib3 to fix vulnerability id 71608
 && pip install --no-cache-dir -U urllib3 \
 # Cap pillow & ninja to fix sanity test
 && pip install --no-cache-dir "pillow<11.0.0" \
 && pip install --no-cache-dir "ninja<1.11.1.1"



# add TS entrypoint
COPY config.properties /home/model-server

COPY torchserve-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip -o ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
 && rm -rf ${HOME_DIR}/oss_compliance*

RUN curl -o /licenses-autogluon.txt https://autogluon.s3.us-west-2.amazonaws.com/licenses/THIRD-PARTY-LICENSES.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]