local-llm/docker-compose.nvidia.yml

include:
  - docker-compose.base.yml

services:
  # Begin Ollama service
  ollama:
    image: ollama/ollama:latest
    restart: unless-stopped
    entrypoint: /bootstrap.sh
    command: mistral
    env_file:
      - .env
    ports:
      - "11434:11434"
    # Begin NVIDIA GPU support
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    # End NVIDIA GPU support
    volumes:
      - ./ollama/bootstrap.sh:/bootstrap.sh:ro
      - ./ollama:/root/.ollama
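
The compose file mounts ./ollama/bootstrap.sh as the container entrypoint and passes the model name (here, mistral) as its argument via command. That script is not shown above; the following is only a minimal sketch of what such a script might look like, assuming it pulls the requested model and then keeps the Ollama server in the foreground so the container stays alive:

    #!/bin/sh
    # /bootstrap.sh -- hypothetical sketch; the real script is not shown in this gist.
    # $1 is the model name passed through the compose `command:` (e.g. "mistral").
    set -e

    MODEL="${1:-mistral}"

    # Start the Ollama API server in the background.
    ollama serve &
    SERVER_PID=$!

    # Give the server a moment to start, then pull the requested model.
    sleep 5
    ollama pull "$MODEL"

    # Keep the server process in the foreground so the container does not exit.
    wait "$SERVER_PID"

With the GPU reservation in place, starting the stack with `docker compose -f docker-compose.nvidia.yml up -d` should expose the Ollama API on port 11434; this assumes the NVIDIA Container Toolkit is installed on the host so Docker can pass the GPU through to the container.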