local-llm/docker-compose.amd.yml
include:
  - docker-compose.base.yml

services:
  # Begin Ollama service
  ollama:
    image: ollama/ollama:rocm
    restart: unless-stopped
    entrypoint: /bootstrap.sh
    command: mistral
    env_file:
      - .env
    ports:
      - 11434:11434
    # begin for AMD GPU support
    devices:
      - /dev/kfd
      - /dev/dri
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp=unconfined
    environment:
      # https://github.com/ROCm/ROCm/issues/2625
      GPU_MAX_HW_QUEUES: 1
      # https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
      # HSA_OVERRIDE_GFX_VERSION: 11.0.0
    # end of section for AMD GPU support
    volumes:
      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
      - ./ollama:/root/.ollama
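
# Note: the compose file above runs /bootstrap.sh as the container entrypoint and
# passes the model name ("mistral") to it via `command:`. The script itself is not
# shown in this listing; the lines below are only a hypothetical sketch, assuming
# it starts the Ollama server, pulls the requested model, and stays in the foreground.

#!/bin/sh
# Hypothetical ./ollama/bootstrap.sh (assumed, not part of this file)
set -e

MODEL="${1:-mistral}"        # model name supplied by the compose `command:`

ollama serve &               # start the Ollama API server in the background
SERVER_PID=$!

# Wait until the server responds, then pull the model into /root/.ollama
# (bind-mounted from ./ollama so downloads survive container restarts).
until ollama list >/dev/null 2>&1; do
  sleep 1
done
ollama pull "$MODEL"

wait "$SERVER_PID"           # keep the server process in the foreground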