home-stack-kustomize/kustomize/env/prod/configurations/librechat/librechat.yaml

version: 1.0.1
cache: true
endpoints:
  custom:
    # Custom endpoint routed through the in-cluster LiteLLM proxy
    - name: "LiteLLM"
      iconURL: https://ollama.ai/public/icon.png
      apiKey: "${LITELLM_MASTER_KEY}"
      baseURL: "http://librechat-litellm.llm.svc:8000"
      models:
        default: ["openhermes", "solar"]
        fetch: true  # also pull the live model list from the endpoint
      titleConvo: true
      titleModel: "solar"
      summarize: false
      summaryModel: "solar"
      forcePrompt: false
      modelDisplayLabel: "LiteLLM"
      # Omit these parameters from requests sent to the endpoint
      dropParams: ["stop", "frequency_penalty", "presence_penalty"]
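
For context, a minimal sketch of how a file like this could be wired into the deployment with a kustomize configMapGenerator. The generator name, namespace, and relative path below are assumptions for illustration, not values taken from this repository:

# kustomization.yaml (hypothetical sketch)
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: llm  # assumed namespace, matching the "llm.svc" segment of the baseURL

configMapGenerator:
  - name: librechat-config  # assumed ConfigMap name
    files:
      - configurations/librechat/librechat.yaml

The generated ConfigMap would then typically be mounted into the LibreChat pod (for example at /app/librechat.yaml, or referenced via the CONFIG_PATH environment variable) so the application picks up this configuration at startup.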