local-llm/docker-compose.nvidia.yml

include:
  - docker-compose.base.yml

services:
  # Begin Ollama service
  ollama:
    image: ollama/ollama:latest
    restart: unless-stopped
    entrypoint: /bootstrap.sh
    command: mistral
    network_mode: service:open-webui
    # Begin NVIDIA GPU support
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    # End NVIDIA GPU support
    volumes:
      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
      - ./ollama:/root/.ollama
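
The compose file overrides the image's entrypoint with ./ollama/bootstrap.sh and passes "mistral" as the command, so the script presumably receives the model name as its first argument. The repository's actual script is not reproduced here; a minimal sketch under that assumption might look like:

#!/bin/sh
# Hypothetical bootstrap.sh sketch (not the repository's actual script).
# Assumes the model name, e.g. "mistral", arrives as the first argument
# via the compose "command" field.
ollama serve &        # start the Ollama API server in the background
sleep 5               # crude wait for the server to accept connections
ollama pull "$1"      # download the requested model if it is not cached
wait                  # keep the container alive on the server process

With the NVIDIA Container Toolkit installed on the host, the stack can then be brought up with something like: docker compose -f local-llm/docker-compose.nvidia.yml up -d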