local-llm/ollama/bootstrap.sh
#!/bin/bash -x
# Ollama has trouble handling HTTP_PROXY
# https://github.com/ollama/ollama/issues/2168
unset HTTP_PROXY
unset http_proxy
# Start the Ollama server in the background and give it a moment to come up.
ollama serve &
sleep 1

# Pull the requested models, defaulting to mistral if none are given.
for model in "${@:-mistral}"; do
    ollama pull "$model"
done

# Stay attached to the background server process.
wait
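
A rough usage sketch (hypothetical; how the script is actually invoked, e.g. as a container entrypoint, is not shown in this file): run it directly and pass the names of any models you want pre-pulled, falling back to mistral when none are given.

# pull only the default mistral model while serving
./bootstrap.sh

# pre-pull several models before handling requests
./bootstrap.sh mistral codellama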