1
0
Fork 0
local-llm/docker-compose.amd.yml

33 lines
826 B
YAML

include:
  - docker-compose.base.yml

services:
  # Begin Ollama service
  ollama:
    image: ollama/ollama:rocm
    restart: unless-stopped
    entrypoint: /bootstrap.sh
    # Argument passed to bootstrap.sh: the model to pull/serve.
    command: mistral
    # Share the network namespace with the open-webui container so the
    # two reach each other over localhost (see OLLAMA_HOST below).
    network_mode: service:open-webui
    # NOTE(review): the original file declared `environment:` twice on
    # this service. Duplicate mapping keys are invalid YAML (1.2) and
    # most parsers silently keep only the last one, which would drop
    # OLLAMA_HOST. Merged into a single mapping here.
    environment:
      OLLAMA_HOST: "http://localhost:11434"
      # Workaround for ROCm GPU hang/perf issue:
      # https://github.com/ROCm/ROCm/issues/2625
      GPU_MAX_HW_QUEUES: "1"
      # Uncomment for unsupported RDNA3 cards, per:
      # https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
      # HSA_OVERRIDE_GFX_VERSION: "11.0.0"
    # begin for AMD GPU support
    devices:
      - /dev/kfd
      - /dev/dri
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp=unconfined
    # end of section for AMD GPU support
    volumes:
      # Read-only entrypoint script mounted from the repo.
      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
      # Persist pulled models and config on the host.
      - ./ollama:/root/.ollama