1
0
Fork 0

promote use of open-webui embedded litellm

This commit is contained in:
Massaki Archambault 2024-04-04 10:16:41 -04:00
parent 8753ea6b3a
commit 66888709a6
2 changed files with 16 additions and 18 deletions

View File

@@ -1,17 +1,17 @@
 services:
   # Begin LiteLLM service
-  litellm:
-    image: ghcr.io/berriai/litellm:main-latest
-    command: --config /config.yaml
-    ports:
-      - 8000:8000
-    env_file:
-      - .env
-    environment:
-      - HOST=0.0.0.0
-      - PORT=8000
-    volumes:
-      - ./litellm/config.yaml:/config.yaml:ro
+  # litellm:
+  #   image: ghcr.io/berriai/litellm:main-latest
+  #   command: --config /config.yaml
+  #   ports:
+  #     - 8000:8000
+  #   env_file:
+  #     - .env
+  #   environment:
+  #     - HOST=0.0.0.0
+  #     - PORT=8000
+  #   volumes:
+  #     - ./litellm/config.yaml:/config.yaml

   # Begin open-webui
   open-webui:
@@ -22,7 +22,10 @@ services:
       - .env
     environment:
       OLLAMA_BASE_URL: http://ollama:11434
+    extra_hosts:
+      - host.docker.internal:host-gateway
     volumes:
+      - ./litellm/config.yaml:/app/backend/data/litellm/config.yaml
       - open-webui_data:/app/backend/data
 volumes:

View File

@@ -1,6 +1 @@
-model_list:
-  - model_name: mistral
-    litellm_params:
-      model: ollama/mistral
-      api_base: http://ollama:11434
-      # stream: True
+model_list: {}