local-llm/docker-compose.base.yml

services:
  # Begin LiteLLM service
  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    command: --config /config.yaml
    ports:
      - 8000:8000
    env_file:
      - .env
    environment:
      - HOST=0.0.0.0
      - PORT=8000
    volumes:
      - ./litellm/config.yaml:/config.yaml:ro
  # Begin open-webui
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    ports:
      - 8080:8080
    env_file:
      - .env
    environment:
      OLLAMA_BASE_URL: http://ollama:11434
    volumes:
      - open-webui_data:/app/backend/data

volumes:
  open-webui_data:
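
The litellm service mounts ./litellm/config.yaml into the container, so that file must exist alongside the compose file. A minimal sketch of what it might contain, assuming LiteLLM proxies requests to the same Ollama backend that Open WebUI uses (the llama3 model name and the api_base host are assumptions, not taken from the file above):

model_list:
  # Exposed model name clients will request through the proxy (illustrative)
  - model_name: llama3
    litellm_params:
      # Route to an Ollama-served model; the ollama/ prefix tells LiteLLM which provider to use
      model: ollama/llama3
      # Reach Ollama over the compose network (assumed service name and default port)
      api_base: http://ollama:11434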
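
Note that OLLAMA_BASE_URL points at an ollama service that this base file does not define, so it is presumably supplied by a second compose file layered on top (e.g. docker compose -f docker-compose.base.yml -f docker-compose.ollama.yml up -d, where the override filename is hypothetical). One possible sketch of such an override, with the image tag and data path as assumptions:

services:
  ollama:
    # Official Ollama image; pin a specific tag in practice
    image: ollama/ollama:latest
    volumes:
      # Persist downloaded models across container restarts
      - ollama_data:/root/.ollama

volumes:
  ollama_data:

No host port mapping is needed here, since litellm and open-webui reach Ollama at ollama:11434 over the compose network.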