version: 1.0.1
cache: true
endpoints:
  custom:
    # Example: Mistral-family models served through a LiteLLM proxy
    - name: "Mistral"
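      # The apiKey and baseURL below point this endpoint at a LiteLLM proxy,
      # presumably the in-cluster Service at librechat-litellm.llm.svc:8000;
      # ${LITELLM_MASTER_KEY} is substituted from the environment at startup.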
      apiKey: "${LITELLM_MASTER_KEY}"
      baseURL: "http://librechat-litellm.llm.svc:8000"
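      # "default" lists the models offered in the model picker; these names are
      # assumed to match the model names the LiteLLM proxy actually exposes.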
      models:
        default: ["mistral-7b", "mistral-openorca", "dolphin-mistral"]
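      # titleConvo/titleModel auto-generate conversation titles with mistral-7b;
      # summarize/summaryModel do the same for conversation summaries.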
      titleConvo: true
      titleModel: "mistral-7b"
      summarize: true
      summaryModel: "mistral-7b"
      forcePrompt: false
      modelDisplayLabel: "Mistral"
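      # addParams (commented out below) would inject extra fields into every
      # request; safe_prompt is Mistral's guardrail option that prepends a
      # safety prompt to conversations.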
      # addParams:
      #   safe_prompt: true
      # NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]
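# For reference only (not part of this file): a minimal sketch of what the
# LiteLLM proxy's own config.yaml might look like for the models above,
# assuming they are served by Ollama at a hypothetical in-cluster address.
#
# model_list:
#   - model_name: mistral-7b
#     litellm_params:
#       model: ollama/mistral
#       api_base: http://ollama.llm.svc:11434
#   - model_name: mistral-openorca
#     litellm_params:
#       model: ollama/mistral-openorca
#       api_base: http://ollama.llm.svc:11434
#   - model_name: dolphin-mistral
#     litellm_params:
#       model: ollama/dolphin-mistral
#       api_base: http://ollama.llm.svc:11434
# general_settings:
#   master_key: os.environ/LITELLM_MASTER_KEY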