forked from TransformerOptimus/SuperAGI
-
Notifications
You must be signed in to change notification settings - Fork 0
/
docker-compose-gpu.yml
114 lines (105 loc) · 2.35 KB
/
docker-compose-gpu.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
# GPU-enabled Docker Compose stack for SuperAGI: backend API, Celery worker,
# Next.js GUI, Redis, Postgres, an nginx reverse proxy in front of the
# backend + GUI, and a local Ollama + LiteLLM model-serving pair.
# GPU access is requested via Compose's
# deploy.resources.reservations.devices NVIDIA device reservation.
#
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification; Compose v2 ignores it (kept here for older docker-compose).
version: '3.8'

services:
  backend:
    volumes:
      # Bind-mount the repo so code changes are picked up without a rebuild.
      - "./:/app"
    build:
      context: .
      dockerfile: Dockerfile-gpu
    depends_on:
      - super__redis
      - super__postgres
    networks:
      - super_network
    # Block (up to 60 s) until Postgres accepts connections, then start the app.
    command: ["/app/wait-for-it.sh", "super__postgres:5432", "-t", "60", "--", "/app/entrypoint.sh"]
    deploy:
      resources:
        reservations:
          devices:
            # Reserve every available NVIDIA GPU for this container.
            - driver: nvidia
              count: all
              capabilities: [gpu]

  celery:
    volumes:
      - "./:/app"
      # External workspace dir; falls back to ./workspace when the env var is unset.
      - "${EXTERNAL_RESOURCE_DIR:-./workspace}:/app/ext"
    build:
      context: .
      dockerfile: Dockerfile-gpu
    depends_on:
      - super__redis
      - super__postgres
    networks:
      - super_network
    command: ["/app/entrypoint_celery.sh"]
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]

  gui:
    build:
      context: ./gui
      args:
        # The GUI reaches the backend through the nginx proxy under /api.
        NEXT_PUBLIC_API_BASE_URL: "/api"
    networks:
      - super_network

  super__redis:
    image: "redis/redis-stack-server:latest"
    networks:
      - super_network
    volumes:
      - redis_data:/data

  super__postgres:
    image: "docker.io/library/postgres:15"
    environment:
      # NOTE(review): hard-coded credentials are acceptable for local
      # development only — do not reuse this file for a shared deployment.
      - POSTGRES_USER=superagi
      - POSTGRES_PASSWORD=password
      - POSTGRES_DB=super_agi_main
    volumes:
      - superagi_postgres_data:/var/lib/postgresql/data/
    networks:
      - super_network

  proxy:
    image: nginx:stable-alpine
    ports:
      # Quoted port mapping avoids YAML's sexagesimal-number pitfall.
      - "3000:80"
    networks:
      - super_network
    depends_on:
      - backend
      - gui
    volumes:
      - ./nginx/default.conf:/etc/nginx/conf.d/default.conf

  ollama:
    image: ollama/ollama
    # NOTE(review): an empty list overrides the image's default CMD, so the
    # container runs the bare entrypoint with no arguments — confirm this is
    # intended; ["serve"] is likely what is wanted to start the Ollama server.
    command: []
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      # Persist downloaded models across container restarts.
      - ollama:/root/.ollama
    ports:
      - "11434:11434"
    networks:
      - super_network

  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    # Expose the Ollama model behind an OpenAI-compatible API on port 8002.
    command: ["--port", "8002", "--num_workers", "8", "--model", "ollama/nous-hermes2-mixtral", "--api_base", "http://ollama:11434"]
    depends_on:
      - ollama
    networks:
      - super_network

networks:
  super_network:
    driver: bridge

volumes:
  superagi_postgres_data:
  redis_data:
  ollama: