
tweak librechat config

Massaki Archambault 2024-02-17 02:01:24 -05:00
parent f07eab2efe
commit a44482b7cd
5 changed files with 48 additions and 21 deletions

View File

@@ -48,6 +48,14 @@ spec:
value: mongodb://librechat-mongodb.$(NAMESPACE).svc:27017/LibreChat
- name: SEARCH
value: 'false' # TODO
- name: DEBUG_LOGGING
value: 'true'
# Models
- name: OPENAI_API_KEY
value: user_provided
# Secrets
- name: CREDS_KEY
valueFrom:
secretKeyRef:
@@ -68,6 +76,8 @@ spec:
secretKeyRef:
name: server-tokens
key: jwt_refresh_secret
# Login configuration
- name: ALLOW_EMAIL_LOGIN
value: 'false'
- name: ALLOW_REGISTRATION
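
The CREDS_KEY and JWT variables are injected from Kubernetes Secrets via secretKeyRef rather than hard-coded in the manifest. A minimal sketch of the server-tokens Secret that the jwt_refresh_secret reference above expects; the value is a placeholder, and the Secret likely carries additional keys that are outside this hunk:

apiVersion: v1
kind: Secret
metadata:
  name: server-tokens
type: Opaque
stringData:
  # placeholder only; generate a long random value before applying
  jwt_refresh_secret: "<random-string>"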

View File

@@ -2,19 +2,17 @@ version: 1.0.1
cache: true
endpoints:
custom:
# Example using Mistral AI API
- name: "Mistral"
- name: "LiteLLM"
iconURL: https://ollama.ai/public/icon.png
apiKey: "${LITELLM_MASTER_KEY}"
baseURL: "http://librechat-litellm.llm.svc:8000"
models:
default: ["mistral-7b", "mistral-openorca", "dolphin-mistral"]
models:
default: ["openhermes", "solar"]
fetch: true
titleConvo: true
titleModel: "mistral-7b"
summarize: true
summaryModel: "mistral-7b"
titleModel: "solar"
summarize: false
summaryModel: "solar"
forcePrompt: false
modelDisplayLabel: "Mistral"
# addParams:
# safe_prompt: true
# NOTE: For Mistral, it is necessary to drop the following parameters or you will encounter a 422 Error:
dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]
modelDisplayLabel: "LiteLLM"
dropParams: ["stop", "frequency_penalty", "presence_penalty"]
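
Reassembled from the hunk above, the post-change custom endpoint block in librechat.yaml should read roughly as follows (a sketch reconstructed from the diff, not copied from the repository):

endpoints:
  custom:
    - name: "LiteLLM"
      iconURL: https://ollama.ai/public/icon.png
      apiKey: "${LITELLM_MASTER_KEY}"
      baseURL: "http://librechat-litellm.llm.svc:8000"
      models:
        default: ["openhermes", "solar"]
        fetch: true
      titleConvo: true
      titleModel: "solar"
      summarize: false
      summaryModel: "solar"
      forcePrompt: false
      modelDisplayLabel: "LiteLLM"
      dropParams: ["stop", "frequency_penalty", "presence_penalty"]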

View File

@@ -1,19 +1,32 @@
model_list:
- model_name: mistral-7b
- model_name: mistral
litellm_params:
model: ollama/mistral
model: ollama/mistral:7b-instruct-v0.2-q5_K_M
api_base: http://192.168.30.20:11434
stream: True
- model_name: openhermes
litellm_params:
model: ollama/openhermes:7b-mistral-v2.5-q5_K_M
api_base: http://192.168.30.20:11434
# stream: True
- model_name: mistral-openorca
# - model_name: dolphin-mistral
# litellm_params:
# model: ollama/dolphin-mistral
# api_base: http://192.168.30.20:11434
# # stream: True
- model_name: solar
litellm_params:
model: ollama/mistral-openorca
model: ollama/solar:10.7b-instruct-v1-q5_K_M
api_base: http://192.168.30.20:11434
# stream: True
- model_name: dolphin-mistral
stream: True
- model_name: deepseek-coder-6.7b
litellm_params:
model: ollama/dolphin-mistral
model: ollama/deepseek-coder:6.7b-instruct-q8_0
api_base: http://192.168.30.20:11434
# stream: True
stream: True
litellm_settings:
drop_params: True
general_settings:
master_key: "os.environ/MASTER_KEY"
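
The model_name entries here (mistral, openhermes, solar, deepseek-coder-6.7b) are the names that LibreChat's models.default list points at, and master_key: "os.environ/MASTER_KEY" tells LiteLLM to read the proxy key from the MASTER_KEY environment variable, which must match the ${LITELLM_MASTER_KEY} value LibreChat sends as its apiKey. A sketch of how that variable could be wired into the LiteLLM container; the Secret name below is hypothetical and not part of this commit:

env:
  - name: MASTER_KEY
    valueFrom:
      secretKeyRef:
        name: litellm-master-key   # hypothetical Secret, not from this repo
        key: master_key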

View File

@@ -54,6 +54,12 @@ images:
newTag: 6.2.10
- name: bitnami/kubectl
newTag: "1.26"
- name: bitnami/kubectl
newTag: "1.26"
- name: ghcr.io/danny-avila/librechat
newTag: v0.6.10
- name: ghcr.io/berriai/litellm
newTag: main-v1.24.6
configMapGenerator:
- name: librechat-server-config
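
The images: entries are standard Kustomize image overrides: any container image whose name matches name is rewritten to use newTag at build time. Assuming a base deployment in this overlay references ghcr.io/danny-avila/librechat, the effect is roughly:

# before kustomize build (assumed base manifest)
containers:
  - name: librechat
    image: ghcr.io/danny-avila/librechat
# after kustomize build with the override above
containers:
  - name: librechat
    image: ghcr.io/danny-avila/librechat:v0.6.10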

View File

@@ -4,7 +4,7 @@
- op: add
path: /spec/rules/0/http/paths/-
value:
path: /backend
path: /api2
pathType: Prefix
backend:
service:
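
This is a JSON6902-style patch that appends an entry to the first rule's HTTP paths on the Ingress; after this change the patch routes /api2 instead of /backend. The resulting entry should look roughly like the sketch below; the backend service details are cut off in this hunk, so they are shown as placeholders:

# appended to spec.rules[0].http.paths
- path: /api2
  pathType: Prefix
  backend:
    service:
      name: <librechat-service>   # not visible in this hunk
      port:
        number: 80                # placeholder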