1
0
Fork 0

Compare commits

..

3 Commits

Author SHA1 Message Date
Massaki Archambault 5792a1d269 bump longhorn 2024-07-06 17:05:30 -04:00
Massaki Archambault 712002c6d2 add prune job to makefile 2024-07-06 17:05:18 -04:00
Massaki Archambault 598dd7da2b tweak llm 2024-07-06 17:05:05 -04:00
5 changed files with 24 additions and 5 deletions

View File

@@ -6,6 +6,7 @@ KUBECTLFLAGS =
KUBECTLDIFFFLAGS =
KUBECTLDIFFPRINTER = highlight --out-format xterm256 --syntax diff
KUBECTLAPPLYFLAGS = --server-side --force-conflicts
KUBECTLAPPLYPRUNEFLAGS = --server-side --force-conflicts --prune
KUBECTLAPPLYPRINTER = sed -E -e 's/(.+configured)/\o033[32m\1\o033[0m/g' -e 's/(.+pruned)/\o033[31m\1\o033[0m/g'
SRC := $(shell find kustomize/ -type f)
@@ -25,6 +26,10 @@ diff: $(PRODOUT)
apply: $(PRODOUT)
$(KUBECTL) $(KUBECTLFLAGS) apply $(KUBECTLAPPLYFLAGS) -l app.kubernetes.io/managed-by=kustomize -f $(PRODOUT) | $(KUBECTLAPPLYPRINTER)
.PHONY: apply-prune
apply-prune: $(PRODOUT)
$(KUBECTL) $(KUBECTLFLAGS) apply $(KUBECTLAPPLYPRUNEFLAGS) -l app.kubernetes.io/managed-by=kustomize -f $(PRODOUT) | $(KUBECTLAPPLYPRINTER)
$(PRODOUT): $(SRC)
mkdir -p $(OUT)
$(KUSTOMIZE) build $(KUSTOMIZEFLAGS) $(PRODSRC) >$(PRODOUT) || (rm $(PRODOUT); exit 1)

View File

@@ -13,6 +13,16 @@ spec:
labels:
app.kubernetes.io/component: proxy
spec:
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchExpressions:
# Image only supports amd64
- key: kubernetes.io/arch
operator: In
values:
- amd64
containers:
- name: litellm
image: ghcr.io/berriai/litellm:main-latest

View File

@@ -6,7 +6,7 @@ metadata:
spec:
repo: https://charts.longhorn.io
chart: longhorn
version: 1.5.5
version: 1.6.2
targetNamespace: longhorn-system
set:
backupTargetCredentialSecret: s3-backupstore-credentials

View File

@@ -26,10 +26,10 @@ spec:
value: http://ollama-server.$(NAMESPACE).svc:11434
resources:
requests:
cpu: 500m
cpu: 250m
memory: 1Gi
limits:
cpu: 1000m
cpu: 500m
memory: 1Gi
ports:
- containerPort: 8080

View File

@@ -1,9 +1,13 @@
model_list:
- model_name: llama3
- model_name: llama3-8b
litellm_params:
model: ollama_chat/llama3
api_base: http://ollama-server:11434
- model_name: gemma2
- model_name: gemma2-9b
litellm_params:
model: ollama_chat/gemma2
api_base: http://ollama-server:11434
- model_name: phi3-3b
litellm_params:
model: ollama_chat/phi3:mini
api_base: http://ollama-server:11434