# local-llm/docker-compose.amd.yml
# Compose overlay that runs Ollama with AMD GPU (ROCm) support.
# Extends the shared base definition via Compose `include`.
include:
  - docker-compose.base.yml

services:
  # Begin Ollama service
  ollama:
    image: ollama/ollama:rocm
    ports:
      # Quoted: digits-and-colons scalars can be mis-typed by YAML 1.1 parsers.
      - "11434:11434"
    # begin for AMD GPU support
    devices:
      - /dev/kfd  # ROCm kernel fusion driver
      - /dev/dri  # direct rendering interface (GPU access)
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp=unconfined
    # environment:
    #   # https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
    #   HSA_OVERRIDE_GFX_VERSION: "11.0.0"
    # end of section for AMD GPU support
    volumes:
      - ollama_data:/root/.ollama

volumes:
  ollama_data: