
expose litellm and add api key authentication

Massaki Archambault 2024-02-05 21:03:35 -05:00
parent 21563a4cc6
commit fa95b03900
10 changed files with 91 additions and 3 deletions

View File

@@ -25,6 +25,7 @@ spec:
       - name: litellm
         image: ghcr.io/berriai/litellm:main-latest
         args: ['--config', '/config/config.yml']
+        env: []
         resources:
           requests:
             memory: 200Mi

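Note: the env: [] entry is added so that the JSON patch later in this commit can append to /spec/template/spec/containers/0/env/-; a JSON Patch add on env/- fails when the env array does not exist yet. As a rough sketch (surrounding Deployment fields and indentation assumed, names taken from the hunks in this commit), the patched container ends up as:

# Sketch only: litellm container after litellm-deployment-patch.yaml is applied.
containers:
  - name: litellm
    image: ghcr.io/berriai/litellm:main-latest
    args: ['--config', '/config/config.yml']
    env:
      - name: MASTER_KEY              # appended by the deployment patch
        valueFrom:
          secretKeyRef:
            name: litellm-config      # Secret produced by the ExternalSecret below
            key: master_key
    resources:
      requests:
        memory: 200Mi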
View File

@@ -4,10 +4,10 @@ endpoints:
   custom:
     # Example using Mistral AI API
     - name: "Mistral"
-      apiKey: "noUse"
+      apiKey: "${LITELLM_MASTER_KEY}"
       baseURL: "http://librechat-litellm.llm.svc:8000"
       models:
-        default: ["mistral-7b", "mistral-openorca"]
+        default: ["mistral-7b", "mistral-openorca", "dolphin-mistral"]
       titleConvo: true
       titleModel: "mistral-7b"
       summarize: true

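Note: LibreChat resolves ${LITELLM_MASTER_KEY} in librechat.yaml from the container environment, so the librechat-server Deployment needs a matching variable; that is what the librechat deployment patch later in this commit supplies. A minimal sketch of the required entry (its placement in the container spec is assumed):

# Sketch only: env entry that lets ${LITELLM_MASTER_KEY} resolve in librechat.yaml.
env:
  - name: LITELLM_MASTER_KEY
    valueFrom:
      secretKeyRef:
        name: litellm-config
        key: master_key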
View File

@@ -9,3 +9,11 @@ model_list:
       model: ollama/mistral-openorca
       api_base: http://192.168.30.20:11434
       # stream: True
+  - model_name: dolphin-mistral
+    litellm_params:
+      model: ollama/dolphin-mistral
+      api_base: http://192.168.30.20:11434
+      # stream: True
+
+general_settings:
+  master_key: "os.environ/MASTER_KEY"

View File

@@ -13,6 +13,8 @@ resources:
   - ../../overlays/actual
   - ../../overlays/llm
   - probes/snmp-exporter.yaml
+  - resources/litellm-externalsecret.yaml
+  - resources/litellm-stripprefix.yaml
 # resources:
 #   - probes/external-services-bobcat-miner.yaml
@@ -103,6 +105,21 @@
       kind: Deployment
       name: deluge-server
     path: patches/deluge-deployment-patch.yaml
+  - target:
+      version: v1
+      kind: Deployment
+      name: librechat-server
+    path: patches/librechat-deployment-patch.yaml
+  - target:
+      version: v1
+      kind: Ingress
+      name: librechat-server
+    path: patches/librechat-ingress-patch.yaml
+  - target:
+      version: v1
+      kind: Deployment
+      name: librechat-litellm
+    path: patches/litellm-deployment-patch.yaml
 # - target:
 #     version: v1
 #     kind: Prometheus

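Note: the diff view omits file names, so the mapping between the new kustomization entries and the hunks that follow is inferred from the paths rather than stated by the diff:

# Inferred mapping (best guess, not confirmed by the diff view):
#   patches/librechat-deployment-patch.yaml -> adds LITELLM_MASTER_KEY to librechat-server
#   patches/librechat-ingress-patch.yaml    -> adds the /backend path and middleware annotation
#   patches/litellm-deployment-patch.yaml   -> adds MASTER_KEY to librechat-litellm
#   resources/litellm-externalsecret.yaml   -> ExternalSecret backing the litellm-config Secret
#   resources/litellm-stripprefix.yaml      -> Traefik Middleware that strips /backend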
View File

@@ -0,0 +1,8 @@
+- op: add
+  path: /spec/template/spec/containers/0/env/-
+  value:
+    name: LITELLM_MASTER_KEY
+    valueFrom:
+      secretKeyRef:
+        name: litellm-config
+        key: master_key

View File

@@ -0,0 +1,13 @@
+- op: add
+  path: /metadata/annotations/traefik.ingress.kubernetes.io~1router.middlewares
+  value: llm-litellm-stripprefix@kubernetescrd
+- op: add
+  path: /spec/rules/0/http/paths/-
+  value:
+    path: /backend
+    pathType: Prefix
+    backend:
+      service:
+        name: librechat-litellm
+        port:
+          name: http

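Note: rendered against the existing librechat-server Ingress, the patch above yields roughly the following; the host and the pre-existing path are placeholders, and only the annotation and the appended /backend path come from this commit:

# Sketch only: patched Ingress; <librechat-host>, the first path and the namespace are assumed.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: librechat-server
  namespace: llm
  annotations:
    traefik.ingress.kubernetes.io/router.middlewares: llm-litellm-stripprefix@kubernetescrd
spec:
  rules:
    - host: <librechat-host>          # placeholder
      http:
        paths:
          - path: /                   # pre-existing LibreChat route (assumed)
            pathType: Prefix
            backend:
              service:
                name: librechat-server
                port:
                  name: http          # assumed
          - path: /backend            # appended by the patch above
            pathType: Prefix
            backend:
              service:
                name: librechat-litellm
                port:
                  name: http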
View File

@@ -0,0 +1,8 @@
+- op: add
+  path: /spec/template/spec/containers/0/env/-
+  value:
+    name: MASTER_KEY
+    valueFrom:
+      secretKeyRef:
+        name: litellm-config
+        key: master_key

View File

@@ -0,0 +1,4 @@
+- job_name: additional/bastion-haproxy
+  static_configs:
+    - targets: ["192.168.20.10:8080"]

View File

@@ -0,0 +1,20 @@
+apiVersion: external-secrets.io/v1beta1
+kind: ExternalSecret
+metadata:
+  name: litellm-config
+  namespace: llm
+spec:
+  secretStoreRef:
+    name: aws-parameters-store
+    kind: ClusterSecretStore
+  target:
+    name: litellm-config
+    template:
+      metadata:
+        labels:
+          app.kubernetes.io/managed-by: external-secret
+        annotations: {}
+  data:
+    - secretKey: master_key
+      remoteRef:
+        key: /k3s/prod/llm/litellm/master_key

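Note: once external-secrets syncs the /k3s/prod/llm/litellm/master_key parameter, it materializes an ordinary Secret that both deployment patches in this commit reference. A sketch of the result, with a placeholder value:

# Sketch only: Secret created by the ExternalSecret above; the value is a placeholder.
apiVersion: v1
kind: Secret
metadata:
  name: litellm-config
  namespace: llm
  labels:
    app.kubernetes.io/managed-by: external-secret
type: Opaque
stringData:
  master_key: sk-REPLACE-ME           # placeholder for the real master key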
View File

@@ -0,0 +1,9 @@
+apiVersion: traefik.io/v1alpha1
+kind: Middleware
+metadata:
+  name: litellm-stripprefix
+  namespace: llm
+spec:
+  stripPrefix:
+    prefixes:
+      - /backend
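Note: the Ingress annotation added earlier references this middleware as llm-litellm-stripprefix@kubernetescrd, which is Traefik's <namespace>-<name>@kubernetescrd naming for Ingress middlewares, so requests under /backend reach the LiteLLM proxy with the prefix stripped and are then validated against the master key. An illustrative flow, not part of the commit:

# Illustrative request flow through the pieces added in this commit:
#   client:        https://<librechat-host>/backend/v1/models    (placeholder host)
#   stripPrefix:   /backend removed -> forwarded as /v1/models
#   service:       librechat-litellm, port http
#   litellm proxy: checks the request key against MASTER_KEY (assumed bearer-token auth)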