local-llm/docker-compose.base.yml

services:
  # Begin LiteLLM service
  # litellm:
  #   image: ghcr.io/berriai/litellm:main-latest
  #   command: --config /config.yaml
  #   ports:
  #     - 8000:8000
  #   env_file:
  #     - .env
  #   environment:
  #     - HOST=0.0.0.0
  #     - PORT=8000
  #   volumes:
  #     - ./litellm/config.yaml:/config.yaml
  # Begin open-webui
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    ports:
      - 8080:8080
      - 11434:11434
    environment:
      OLLAMA_BASE_URL: http://localhost:11434
    extra_hosts:
      - host.docker.internal:host-gateway
    volumes:
      - ./litellm/config.yaml:/app/backend/data/litellm/config.yaml
      - open-webui_data:/app/backend/data

volumes:
  open-webui_data:
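
Since the file is named docker-compose.base.yml, it is presumably meant to be combined with override files. A minimal sketch of an override that re-enables the commented-out LiteLLM service is below; the filename docker-compose.litellm.yml is an assumption, not part of the original, and it would be started with `docker compose -f docker-compose.base.yml -f docker-compose.litellm.yml up -d`.

# docker-compose.litellm.yml (hypothetical override file; contents mirror the
# commented-out service definition in the base file above)
services:
  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    command: --config /config.yaml
    ports:
      - 8000:8000
    env_file:
      - .env
    environment:
      - HOST=0.0.0.0
      - PORT=8000
    volumes:
      - ./litellm/config.yaml:/config.yaml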