update compose

Run the stock Ollama entrypoint instead of bootstrap.sh: each ollama service now publishes port 11434 itself and stores models in a named ollama_data volume rather than the ./ollama bind mount, and Open WebUI reaches it at http://ollama:11434 over the compose network instead of sharing a network namespace. Open WebUI no longer publishes 11434 or mounts the litellm config, auth is disabled with WEBUI_AUTH: "False", the ROCm environment block is commented out (dropping GPU_MAX_HW_QUEUES), and the now-unused bootstrap script and its .gitignore are deleted.
parent 136fc43f23
commit 5b8ec8fc3d
@@ -6,11 +6,9 @@ services:
   ollama:
     image: ollama/ollama:rocm
     restart: unless-stopped
-    entrypoint: /bootstrap.sh
-    command: mistral
-    network_mode: service:open-webui
-    environment:
-      OLLAMA_HOST: http://localhost:11434
+    ports:
+      - 11434:11434
     # begin for AMD GPU support
     devices:
       - /dev/kfd
@@ -22,12 +20,13 @@ services:
       - SYS_PTRACE
     security_opt:
       - seccomp=unconfined
-    environment:
-      # https://github.com/ROCm/ROCm/issues/2625
-      GPU_MAX_HW_QUEUES: 1
-      # https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
-      # HSA_OVERRIDE_GFX_VERSION: 11.0.0
+    # environment:
+    #   # https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
+    #   HSA_OVERRIDE_GFX_VERSION: 11.0.0
     # end of section for AMD GPU support
 
     volumes:
-      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
-      - ./ollama:/root/.ollama
+      - ollama_data:/root/.ollama
+
+volumes:
+  ollama_data:
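The same switch from the ./ollama bind mount to the ollama_data named volume repeats in the other compose variants further down. A rough migration sketch, assuming the service keeps the name ollama and models from the old bind mount should be preserved (the copy step and its ordering are assumptions, not part of this commit):

  # bring the service up once so compose creates the ollama_data volume
  docker compose up -d ollama
  # copy the old model store into /root/.ollama, which the named volume now backs
  docker compose cp ./ollama/. ollama:/root/.ollama
  docker compose restart ollama

Once the volume holds the models, the old ./ollama directory can be removed from the working tree.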
@@ -18,13 +18,12 @@ services:
     image: ghcr.io/open-webui/open-webui:main
     ports:
       - 8080:8080
-      - 11434:11434
     environment:
-      OLLAMA_BASE_URL: http://localhost:11434
+      OLLAMA_BASE_URL: http://ollama:11434
+      WEBUI_AUTH: "False"
     extra_hosts:
       - host.docker.internal:host-gateway
     volumes:
-      - ./litellm/config.yaml:/app/backend/data/litellm/config.yaml
       - open-webui_data:/app/backend/data
 
 volumes:
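With the shared network namespace gone, each container publishes its own port and Open WebUI addresses Ollama by service name. A quick sanity check from the host, assuming both services come up on the default compose network (the endpoint paths are standard Ollama and Open WebUI routes, not part of this diff):

  docker compose up -d
  # Ollama now answers on its own published port, not via the open-webui container
  curl http://localhost:11434/api/version
  # Open WebUI on its usual port
  curl -I http://localhost:8080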
@@ -6,11 +6,10 @@ services:
   ollama:
     image: ollama/ollama:latest
     restart: unless-stopped
-    entrypoint: /bootstrap.sh
-    command: mistral
-    network_mode: service:open-webui
-    environment:
-      OLLAMA_HOST: http://localhost:11434
+    ports:
+      - 11434:11434
     volumes:
-      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
-      - ./ollama:/root/.ollama
+      - ollama_data:/root/.ollama
+
+volumes:
+  ollama_data:

@@ -6,11 +6,9 @@ services:
   ollama:
     image: ollama/ollama:latest
     restart: unless-stopped
-    entrypoint: /bootstrap.sh
-    command: mistral
-    network_mode: service:open-webui
-    environment:
-      OLLAMA_HOST: http://localhost:11434
+    ports:
+      - 11434:11434
     # begin for NVIDIA GPU support
     deploy:
       resources:
@@ -20,6 +18,9 @@ services:
             count: 1
             capabilities: [gpu]
     # end of section for NVIDIA GPU support
 
     volumes:
-      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
-      - ./ollama:/root/.ollama
+      - ollama_data:/root/.ollama
+
+volumes:
+  ollama_data:
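The GPU sections themselves (ROCm devices earlier, the NVIDIA device reservation here) are untouched apart from the entrypoint cleanup. A hedged way to confirm a model still lands on the GPU after the change, using only standard ollama CLI commands (the model name mirrors the old bootstrap default and is an assumption):

  # load a model once, then check which processor it is running on
  docker compose exec ollama ollama run mistral "hello"
  docker compose exec ollama ollama ps

ollama ps should report whether the loaded model sits on the GPU or the CPU.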
@@ -1,3 +0,0 @@
-*
-!.gitignore
-!bootstrap.sh

@@ -1,11 +0,0 @@
-#!/bin/bash -x
-
-ollama serve &
-
-sleep 1
-
-for model in ${@:-mistral}; do
-  ollama pull "$model"
-done
-
-wait
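The deleted script started the server and pre-pulled a model list (defaulting to mistral). With it gone, nothing downloads models at startup; a minimal manual replacement, assuming the same mistral default is still wanted:

  # pull the model by hand; it persists in the ollama_data volume afterwards
  docker compose exec ollama ollama pull mistral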