53 changes: 53 additions & 0 deletions autogluon/inference/buildspec-1-4-0.yml
@@ -0,0 +1,53 @@
account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
region: &REGION <set-$REGION-in-environment>
framework: &FRAMEWORK autogluon
version: &VERSION 1.4.0
short_version: &SHORT_VERSION 1.4
arch_type: x86

repository_info:
  inference_repository: &INFERENCE_REPOSITORY
    image_type: &INFERENCE_IMAGE_TYPE inference
    root: !join [ *FRAMEWORK, "/", *INFERENCE_IMAGE_TYPE ]
    repository_name: &REPOSITORY_NAME !join [pr, "-", *FRAMEWORK, "-", *INFERENCE_IMAGE_TYPE]
    repository: &REPOSITORY !join [ *ACCOUNT_ID, .dkr.ecr., *REGION, .amazonaws.com/, *REPOSITORY_NAME ]

context:
  inference_context: &INFERENCE_CONTEXT
    torchserve-entrypoint:
      source: ../build_artifacts/inference/torchserve-entrypoint.py
      target: torchserve-entrypoint.py
    config:
      source: ../build_artifacts/inference/config.properties
      target: config.properties
    deep_learning_container:
      source: ../../src/deep_learning_container.py
      target: deep_learning_container.py

images:
  BuildAutogluonCPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_CPU_INFERENCE_PY3 false
    image_size_baseline: 6399
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT

  BuildAutogluonGPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_GPU_INFERENCE_PY3 false
    image_size_baseline: 19456
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    cuda_version: &CUDA_VERSION cu124
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT
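For orientation (not part of the diff), this is roughly how the anchors and !join tags above resolve for the two images, with ACCOUNT_ID and REGION supplied by the build environment; the variable names below are only illustrative:

# Illustrative resolution of the !join values in this buildspec (hypothetical variable names).
REPOSITORY=${ACCOUNT_ID}.dkr.ecr.${REGION}.amazonaws.com/pr-autogluon-inference
CPU_TAG=1.4.0-cpu-py311-ubuntu22.04                 # VERSION-DEVICE_TYPE-TAG_PYTHON_VERSION-OS_VERSION
CPU_DOCKERFILE=docker/1.4/py3/Dockerfile.cpu
GPU_TAG=1.4.0-gpu-py311-cu124-ubuntu22.04           # CUDA_VERSION inserted before OS_VERSION
GPU_DOCKERFILE=docker/1.4/py3/cu124/Dockerfile.gpu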
8 changes: 4 additions & 4 deletions autogluon/inference/buildspec.yml
@@ -1,8 +1,8 @@
account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
region: &REGION <set-$REGION-in-environment>
framework: &FRAMEWORK autogluon
-version: &VERSION 1.4.0
-short_version: &SHORT_VERSION 1.4
+version: &VERSION 1.5.0
+short_version: &SHORT_VERSION 1.5
arch_type: x86

repository_info:
@@ -31,7 +31,7 @@ images:
    image_size_baseline: 6399
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
-    tag_python_version: &TAG_PYTHON_VERSION py311
+    tag_python_version: &TAG_PYTHON_VERSION py312
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
@@ -44,7 +44,7 @@ images:
    image_size_baseline: 19456
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
-    tag_python_version: &TAG_PYTHON_VERSION py311
+    tag_python_version: &TAG_PYTHON_VERSION py312
    cuda_version: &CUDA_VERSION cu124
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
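With the version bumped to 1.5.0 and the tag Python version to py312, the same joins now produce the following (a sketch; the CPU Dockerfile path matches the new file added below, while the GPU path simply follows the unchanged join pattern):

# Illustrative joined values after this change (hypothetical variable names).
CPU_TAG=1.5.0-cpu-py312-ubuntu22.04
CPU_DOCKERFILE=docker/1.5/py3/Dockerfile.cpu
GPU_TAG=1.5.0-gpu-py312-cu124-ubuntu22.04
GPU_DOCKERFILE=docker/1.5/py3/cu124/Dockerfile.gpu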
54 changes: 54 additions & 0 deletions autogluon/inference/docker/1.5/py3/Dockerfile.cpu
@@ -0,0 +1,54 @@
ARG PYTHON_VERSION=3.12.12

FROM 763104351884.dkr.ecr.us-west-2.amazonaws.com/pytorch-inference:2.6.0-cpu-py312-ubuntu22.04-sagemaker

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

RUN apt-get update \
&& apt-get -y upgrade \
&& apt-get autoremove -y \
&& apt-get install tesseract-ocr -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

ARG AUTOGLUON_VERSION=1.5.0

# Upgrading pip and installing/updating Python dependencies
# Comments are added to explain the reason behind each update
RUN pip install --no-cache-dir -U --trusted-host pypi.org --trusted-host files.pythonhosted.org pip \
&& pip install --no-cache-dir -U wheel \
&& pip uninstall -y dataclasses \
# Install AutoGluon, ensuring no vulnerable dependencies are left behind
&& pip install --no-cache-dir -U autogluon==${AUTOGLUON_VERSION} \
&& pip install --no-cache-dir "ninja<1.11.1.1"

# Add TorchServe config and entrypoint
COPY config.properties /home/model-server

COPY torchserve-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

RUN HOME_DIR=/root \
&& curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
&& unzip -o ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
&& cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
&& chmod +x /usr/local/bin/testOSSCompliance \
&& chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
&& ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
&& rm -rf ${HOME_DIR}/oss_compliance*

RUN curl -o /licenses-autogluon.txt https://autogluon.s3.us-west-2.amazonaws.com/licenses/THIRD-PARTY-LICENSES.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]