fix: naming image and ollama-cpu (zylon-ai#2056)
jaluma authored Aug 12, 2024
1 parent 22904ca commit 89477ea
Showing 1 changed file with 3 additions and 4 deletions.
docker-compose.yaml (7 changes: 3 additions & 4 deletions)
@@ -7,7 +7,7 @@ services:
   # Private-GPT service for the Ollama CPU and GPU modes
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.ollama
@@ -31,7 +31,7 @@ services:
   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.llamacpp-cpu
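For context: both image lines were missing the colon that separates the repository from the tag, so Compose's ${VAR:-default} interpolation fused the version into the repository name. A minimal sketch of how the first reference resolves when PGPT_IMAGE and PGPT_TAG are left unset (illustrative only, not part of the diff):

# Resolved with the defaults (PGPT_IMAGE and PGPT_TAG unset), for illustration:
#   before: image: zylonai/private-gpt0.6.2-ollama    # version fused into the repository name, wrong image reference
#   after:  image: zylonai/private-gpt:0.6.2-ollama   # intended repository:tag reference

The private-gpt-llamacpp-cpu image above gets the same one-character fix.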
@@ -57,7 +57,6 @@ services:
   ollama:
     image: traefik:v2.10
     ports:
-      - "11435:11434"
       - "8081:8080"
     command:
       - "--providers.file.filename=/etc/router.yml"
@@ -84,7 +83,7 @@ services:
       - ./models:/root/.ollama
     profiles:
       - ""
-      - ollama
+      - ollama-cpu
 
   # Ollama service for the CUDA mode
   ollama-cuda:
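For context: the last hunk renames the profile on the CPU Ollama service from ollama to ollama-cpu, matching the naming of the ollama-cuda service defined just below it. A minimal usage sketch with the corrected name, assuming the standard Compose profile mechanism and default compose file discovery:

# Illustrative only, not part of the commit:
#   docker compose --profile ollama-cpu up -d    # bring up the stack with the CPU Ollama backend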

0 comments on commit 89477ea
