Skip to content

Commit

Permalink
update build process
Browse files Browse the repository at this point in the history
  • Loading branch information
robballantyne committed Apr 18, 2024
1 parent 8c13348 commit cd0fa3e
Show file tree
Hide file tree
Showing 12 changed files with 156 additions and 144 deletions.
127 changes: 63 additions & 64 deletions .github/workflows/docker-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,22 @@ name: Docker Build

on:
workflow_dispatch:
push:
branches: [ "main" ]
#push:
# branches: [ "main" ]

env:
UBUNTU_VERSION: 22.04
BUILDX_NO_DEFAULT_ATTESTATIONS: 1
LATEST_CUDA: "pytorch-2.2.1-py3.10-cuda-11.8.0-runtime-22.04"
LATEST_ROCM: "pytorch-2.2.1-py3.10-rocm-5.7-runtime-22.04"
LATEST_CPU: "pytorch-2.2.1-py3.10-cpu-22.04"


jobs:
cpu-base:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
base:
- "jupyter-pytorch"
python:
- "3.10"
pytorch:
- "2.2.1"
build:
#- {latest: "true", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2"}
- {latest: "false", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2"}
steps:
-
name: Free Space
Expand Down Expand Up @@ -59,22 +53,28 @@ jobs:
-
name: Set tags
run: |
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
ver_tag="pytorch-${{ matrix.pytorch }}-py${{ matrix.python }}-cpu-${{ env.UBUNTU_VERSION }}"
if [[ $ver_tag == ${{ env.LATEST_CPU }} ]]; then
TAGS="${img_path}:latest-cpu, ${img_path}:$ver_tag"
else
TAGS="${img_path}:$ver_tag"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
# Image path on GHCR; PACKAGE_NAME is set by an earlier step (outside this hunk).
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
# InvokeAI version comes from the build matrix; fail fast if the matrix entry is malformed.
INVOKEAI_VERSION=${{ matrix.build.invokeai }}
[ -z "$INVOKEAI_VERSION" ] && { echo "Error: INVOKEAI_VERSION is empty. Exiting script." >&2; exit 1; }
# Export for later steps (used as a build-arg in "Build and push").
echo "INVOKEAI_VERSION=${INVOKEAI_VERSION}" >> ${GITHUB_ENV}
# Variant tag for the CPU build, e.g. "cpu-22.04".
base_tag="cpu-${{ env.UBUNTU_VERSION }}"
if [[ ${{ matrix.build.latest }} == "true" ]]; then
echo "Marking latest"
# NOTE(review): "latest-cpu-jupyter" alias — presumably kept for backward
# compatibility with the old jupyter-pytorch base images; confirm consumers.
TAGS="${img_path}:${base_tag}, ${img_path}:latest-cpu, ${img_path}:latest-cpu-jupyter"
else
# Non-latest builds get only the fully-qualified version tag.
TAGS="${img_path}:${base_tag}-v${INVOKEAI_VERSION}"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-
name: Build and push
uses: docker/build-push-action@v4
with:
context: build
build-args: |
IMAGE_BASE=ghcr.io/ai-dock/${{ matrix.base }}:${{ matrix.pytorch }}-py${{ matrix.python }}-cpu-${{ env.UBUNTU_VERSION }}
IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-cpu-${{ env.UBUNTU_VERSION }}
push: true
# Avoids unknown/unknown architecture and extra metadata
provenance: false
Expand All @@ -85,17 +85,9 @@ jobs:
strategy:
fail-fast: false
matrix:
base:
- "jupyter-pytorch"
python:
- "3.10"
pytorch:
- "2.2.1"
cuda:
- "11.8.0"
- "12.1.0"
level:
- "runtime"
build:
#- {latest: "true", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2", cuda: "11.8.0-runtime"}
- {latest: "false", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2", cuda: "12.1.1-runtime"}

steps:
-
Expand Down Expand Up @@ -132,23 +124,31 @@ jobs:
-
name: Set tags
run: |
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
ver_tag="pytorch-${{ matrix.pytorch }}-py${{ matrix.python }}-cuda-${{ matrix.cuda }}-${{ matrix.level }}-${{ env.UBUNTU_VERSION }}"
if [[ $ver_tag == ${{ env.LATEST_CUDA }} ]]; then
TAGS="${img_path}:latest, ${img_path}:latest-cuda, ${img_path}:$ver_tag"
else
TAGS="${img_path}:$ver_tag"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
# Image path on GHCR; PACKAGE_NAME is set by an earlier step (outside this hunk).
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
# InvokeAI version comes from the build matrix; fail fast if the matrix entry is malformed.
INVOKEAI_VERSION=${{ matrix.build.invokeai }}
[ -z "$INVOKEAI_VERSION" ] && { echo "Error: INVOKEAI_VERSION is empty. Exiting script." >&2; exit 1; }
# Export for later steps (used as a build-arg in "Build and push").
echo "INVOKEAI_VERSION=${INVOKEAI_VERSION}" >> ${GITHUB_ENV}
# Variant tag must encode the CUDA flavour (e.g. "cuda-12.1.1-runtime-22.04"),
# mirroring the IMAGE_BASE suffix. Was "cpu-${UBUNTU_VERSION}" (copy-paste from
# the cpu job), which made this job push tags identical to the CPU build and
# overwrite its images.
base_tag="cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}"
if [[ ${{ matrix.build.latest }} == "true" ]]; then
echo "Marking latest"
TAGS="${img_path}:${base_tag}, ${img_path}:latest, ${img_path}:latest-cuda"
else
# Non-latest builds get only the fully-qualified version tag.
TAGS="${img_path}:${base_tag}-v${INVOKEAI_VERSION}"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-
name: Build and push
uses: docker/build-push-action@v4
with:
context: build
build-args: |
IMAGE_BASE=ghcr.io/ai-dock/${{ matrix.base }}:${{ matrix.pytorch }}-py${{ matrix.python }}-cuda-${{ matrix.cuda }}-${{ matrix.level }}-${{ env.UBUNTU_VERSION }}
PYTORCH_VERSION=${{ matrix.pytorch }}
IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}
PYTHON_VERSION=${{ matrix.build.python }}
PYTORCH_VERSION=${{ matrix.build.pytorch }}
INVOKEAI_VERSION=${{ env.INVOKEAI_VERSION }}
push: true
provenance: false
tags: ${{ env.TAGS }}
Expand All @@ -158,16 +158,9 @@ jobs:
strategy:
fail-fast: false
matrix:
base:
- "jupyter-pytorch"
python:
- "3.10"
pytorch:
- "2.2.1"
rocm:
- "5.7"
level:
- "runtime"
build:
#- {latest: "true", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2", rocm: "5.7-runtime"}
- {latest: "false", invokeai: "4.1.0", python: "3.10", pytorch: "2.2.2", rocm: "5.7-runtime"}
steps:
-
name: Free Space
Expand Down Expand Up @@ -203,22 +196,28 @@ jobs:
-
name: Set tags
run: |
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
ver_tag="pytorch-${{ matrix.pytorch }}-py${{ matrix.python }}-rocm-${{ matrix.rocm }}-${{ matrix.level }}-${{ env.UBUNTU_VERSION }}"
if [[ $ver_tag == ${{ env.LATEST_ROCM }} ]]; then
TAGS="${img_path}:latest-rocm, ${img_path}:$ver_tag"
else
TAGS="${img_path}:$ver_tag"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
# Image path on GHCR; PACKAGE_NAME is set by an earlier step (outside this hunk).
img_path="ghcr.io/${{ env.PACKAGE_NAME }}"
# Was "matrix.build.tag", which does not exist in this job's matrix (entries
# define "invokeai", as in the cpu and nvidia jobs) — the emptiness guard below
# would therefore always abort the job.
INVOKEAI_VERSION=${{ matrix.build.invokeai }}
[ -z "$INVOKEAI_VERSION" ] && { echo "Error: INVOKEAI_VERSION is empty. Exiting script." >&2; exit 1; }
# Export for later steps (used as a build-arg in "Build and push").
echo "INVOKEAI_VERSION=${INVOKEAI_VERSION}" >> ${GITHUB_ENV}
# Variant tag must encode the ROCm flavour (e.g. "rocm-5.7-runtime-22.04"),
# mirroring the IMAGE_BASE suffix. Was "cpu-${UBUNTU_VERSION}" (copy-paste from
# the cpu job), which made this job push tags identical to the CPU build and
# overwrite its images.
base_tag="rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}"
if [[ ${{ matrix.build.latest }} == "true" ]]; then
echo "Marking latest"
TAGS="${img_path}:${base_tag}, ${img_path}:latest-rocm"
else
# Non-latest builds get only the fully-qualified version tag.
TAGS="${img_path}:${base_tag}-v${INVOKEAI_VERSION}"
fi
echo "TAGS=${TAGS}" >> ${GITHUB_ENV}
-
name: Build and push
uses: docker/build-push-action@v4
with:
context: build
build-args: |
IMAGE_BASE=ghcr.io/ai-dock/${{ matrix.base }}:${{ matrix.pytorch }}-py${{ matrix.python }}-rocm-${{ matrix.rocm }}-${{ matrix.level }}-${{ env.UBUNTU_VERSION }}
IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}
push: true
provenance: false
tags: ${{ env.TAGS }}
12 changes: 5 additions & 7 deletions build/COPY_ROOT/opt/ai-dock/bin/build/layer0/amd.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,14 @@

build_amd_main() {
build_amd_install_invokeai
build_common_run_tests
}

build_amd_install_invokeai() {
# Mamba export does not include pip packages.
# We need to get torch again - todo find a better way?
micromamba -n invokeai run pip install \
--no-cache-dir \
--index-url https://download.pytorch.org/whl/rocm${ROCM_VERSION} \
torch==${PYTORCH_VERSION} torchvision torchaudio
/opt/ai-dock/bin/update-invokeai.sh
micromamba run -n invokeai ${PIP_INSTALL} \
torch=="${PYTORCH_VERSION}" \
onnxruntime-gpu
build_common_install_invokeai
}

build_amd_main "$@"
74 changes: 48 additions & 26 deletions build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,37 +9,59 @@ build_common_main() {

build_common_create_env() {
apt-get update
$APT_INSTALL libgl1 libgoogle-perftools4 python3-opencv libopencv-dev
ln -sf $(ldconfig -p | grep -Po "libtcmalloc.so.\d" | head -n 1) \
$APT_INSTALL \
libgl1-mesa-glx \
libtcmalloc-minimal4

ln -sf $(ldconfig -p | grep -Po "libtcmalloc_minimal.so.\d" | head -n 1) \
/lib/x86_64-linux-gnu/libtcmalloc.so
# A new pytorch env costs ~ 300Mb
exported_env=/tmp/${MAMBA_DEFAULT_ENV}.yaml
micromamba env export -n ${MAMBA_DEFAULT_ENV} > "${exported_env}"
$MAMBA_CREATE -n invokeai --file "${exported_env}"

micromamba create -n invokeai
micromamba run -n invokeai mamba-skel

mkdir -p $INVOKEAI_ROOT

micromamba install -n invokeai -y \
python="${PYTHON_VERSION}" \
ipykernel \
ipywidgets \
nano
micromamba run -n invokeai install-pytorch -v "$PYTORCH_VERSION"
}

build_common_install_jupyter_kernels() {
if [[ $IMAGE_BASE =~ "jupyter-pytorch" ]]; then
$MAMBA_INSTALL -n invokeai \
ipykernel \
ipywidgets

kernel_path=/usr/local/share/jupyter/kernels

# Add the often-present "Python3 (ipykernel) as a comfyui alias"
rm -rf ${kernel_path}/python3
dir="${kernel_path}/python3"
file="${dir}/kernel.json"
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"Python3 (ipykernel)"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"invokeai"'/g' ${file}

dir="${kernel_path}/invokeai"
file="${dir}/kernel.json"
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"Invoke AI"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"invokeai"'/g' ${file}
micromamba install -n invokeai -y \
ipykernel \
ipywidgets

kernel_path=/usr/local/share/jupyter/kernels

# Add the often-present "Python3 (ipykernel) as an InvokeAI alias"
rm -rf ${kernel_path}/python3
dir="${kernel_path}/python3"
file="${dir}/kernel.json"
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"Python3 (ipykernel)"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"invokeai"'/g' ${file}

dir="${kernel_path}/invokeai"
file="${dir}/kernel.json"
cp -rf ${kernel_path}/../_template ${dir}
sed -i 's/DISPLAY_NAME/'"Invoke AI"'/g' ${file}
sed -i 's/PYTHON_MAMBA_NAME/'"invokeai"'/g' ${file}
}

# Install InvokeAI (and pin torch) into the "invokeai" micromamba env.
# Relies on PIP_INSTALL, PYTORCH_VERSION and INVOKEAI_VERSION being exported
# by the surrounding build (set as Docker build-args in the workflow).
# --use-pep517: build sdists via PEP 517 backends rather than legacy setup.py.
build_common_install_invokeai() {
micromamba run -n invokeai ${PIP_INSTALL} --use-pep517 \
torch==${PYTORCH_VERSION} \
InvokeAI==${INVOKEAI_VERSION}
}

# Sanity-check that the torch installed in the "invokeai" env matches the
# requested PYTORCH_VERSION (prefix match, so "2.2.2" accepts "2.2.2+cu121").
# Exits non-zero to fail the image build on mismatch.
build_common_run_tests() {
installed_pytorch_version=$(micromamba run -n invokeai python -c "import torch; print(torch.__version__)")
if [[ "$installed_pytorch_version" != "$PYTORCH_VERSION"* ]]; then
# echo without -e prints "\n" literally — stray escape removed; report on stderr.
echo "Expected PyTorch ${PYTORCH_VERSION} but found ${installed_pytorch_version}" >&2
exit 1
fi
}

Expand Down
3 changes: 2 additions & 1 deletion build/COPY_ROOT/opt/ai-dock/bin/build/layer0/cpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@

build_cpu_main() {
build_cpu_install_invokeai
build_common_run_tests
}

build_cpu_install_invokeai() {
/opt/ai-dock/bin/update-invokeai.sh
build_common_install_invokeai
}

build_cpu_main "$@"
2 changes: 0 additions & 2 deletions build/COPY_ROOT/opt/ai-dock/bin/build/layer0/init.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,4 @@ else
exit 1
fi

$MAMBA_DEFAULT_RUN python /opt/ai-dock/tests/assert-torch-version.py

source /opt/ai-dock/bin/build/layer0/clean.sh
20 changes: 17 additions & 3 deletions build/COPY_ROOT/opt/ai-dock/bin/build/layer0/nvidia.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,30 @@

build_nvidia_main() {
build_nvidia_install_invokeai
build_common_run_tests
build_nvidia_run_tests
}

build_nvidia_install_invokeai() {
micromamba run -n invokeai ${PIP_INSTALL} \
torch=="${PYTORCH_VERSION}" \
nvidia-ml-py3
nvidia-ml-py3 \
onnxruntime-gpu

micromamba install -n invokeai -c xformers xformers
micromamba install -n invokeai -c xformers -y \
xformers \
pytorch=${PYTORCH_VERSION} \
pytorch-cuda="$(cut -d '.' -f 1,2 <<< "${CUDA_VERSION}")"

/opt/ai-dock/bin/update-invokeai.sh
build_common_install_invokeai
}

# Sanity-check that torch's CUDA build matches the requested CUDA_VERSION
# (prefix match: CUDA_VERSION "12.1.1" accepts torch.version.cuda "12.1").
# Exits non-zero to fail the image build on mismatch.
build_nvidia_run_tests() {
installed_pytorch_cuda_version=$(micromamba run -n invokeai python -c "import torch; print(torch.version.cuda)")
# Bug fix: the comparison/message previously referenced the unset variable
# "$installed_pytorch_cuda", making the pattern ""* match everything, so this
# check could never fail.
if [[ "$CUDA_VERSION" != "$installed_pytorch_cuda_version"* ]]; then
# echo without -e prints "\n" literally — stray escape removed; report on stderr.
echo "Expected PyTorch CUDA ${CUDA_VERSION} but found ${installed_pytorch_cuda_version}" >&2
exit 1
fi
}

build_nvidia_main "$@"
28 changes: 0 additions & 28 deletions build/COPY_ROOT/opt/ai-dock/bin/get-cfqt-invokeai.sh

This file was deleted.

Loading

0 comments on commit cd0fa3e

Please sign in to comment.