1
0
Fork 0
local-llm/ollama/bootstrap.sh

16 lines
230 B
Bash
Executable File

#!/usr/bin/env bash
# Bootstrap an Ollama instance: start the server, pre-pull the requested
# models, then block on the server so the container/process stays alive.
#
# Usage: bootstrap.sh [model ...]   # defaults to "mistral" when no args given
#
# Trace commands for debuggability. NOTE: put this in `set`, not the shebang —
# shebang options are lost when the script is run as `bash bootstrap.sh`.
set -x

# Ollama has trouble handling HTTP_PROXY
# https://github.com/ollama/ollama/issues/2168
unset HTTP_PROXY
unset http_proxy

ollama serve &
server_pid=$!

# Give the server a moment to start listening before issuing pulls.
# NOTE(review): a readiness poll would be more robust than a fixed sleep.
sleep 1

# Default to "mistral" when no models were passed on the command line.
(( $# )) || set -- mistral
for model in "$@"; do
  ollama pull "$model"
done

# Wait on the server and propagate its exit status as the script's own.
wait "$server_pid"