# local-llm/librechat/librechat.yaml
version: 1.0.1
cache: true
endpoints:
  custom:
    - name: "Ollama"
      iconURL: https://ollama.ai/public/icon.png
      apiKey: "noUse"
      baseURL: "http://litellm:8000"
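      # baseURL assumes a LiteLLM proxy reachable as "litellm" on port 8000
      # (for example, a service of that name on the same Docker network as
      # LibreChat); apiKey is a required field but only a placeholder here.
      # fetch: true asks that endpoint for its model list; "default" names the
      # model(s) to fall back on.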
      models:
        default: ["mistral"]
        fetch: true
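      # titleConvo/summarize enable automatic conversation titling and
      # summarization; titleModel/summaryModel pick the model used for each
      # (here the same local "mistral").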
      titleConvo: true
      titleModel: "mistral"
      summarize: true
      summaryModel: "mistral"
      forcePrompt: false
      modelDisplayLabel: "Ollama"
      # addParams:
      #   safe_prompt: true
      # NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]