1
0
Fork 0

promote use of open-webui embedded litellm

This commit is contained in:
Massaki Archambault 2024-04-04 10:16:41 -04:00
parent 8753ea6b3a
commit 66888709a6
2 changed files with 16 additions and 18 deletions

View File

@@ -1,17 +1,17 @@
services:
# Begin LiteLLM service
litellm:
image: ghcr.io/berriai/litellm:main-latest
command: --config /config.yaml
ports:
- 8000:8000
env_file:
- .env
environment:
- HOST=0.0.0.0
- PORT=8000
volumes:
- ./litellm/config.yaml:/config.yaml:ro
# litellm:
# image: ghcr.io/berriai/litellm:main-latest
# command: --config /config.yaml
# ports:
# - 8000:8000
# env_file:
# - .env
# environment:
# - HOST=0.0.0.0
# - PORT=8000
# volumes:
# - ./litellm/config.yaml:/config.yaml
# Begin open-webui
open-webui:
@@ -22,7 +22,10 @@ services:
- .env
environment:
OLLAMA_BASE_URL: http://ollama:11434
extra_hosts:
- host.docker.internal:host-gateway
volumes:
- ./litellm/config.yaml:/app/backend/data/litellm/config.yaml
- open-webui_data:/app/backend/data
volumes:

View File

@@ -1,6 +1 @@
model_list:
- model_name: mistral
litellm_params:
model: ollama/mistral
api_base: http://ollama:11434
# stream: True
model_list: {}