local-llm/.env

# If set, HTTP_PROXY interferes with inter-container communication in this deployment.
# Ollama downloads models over HTTPS anyway, so it should be safe to unset it.
HTTP_PROXY=
http_proxy=
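
A quick sanity check, assuming the stack runs under Docker Compose with an Ollama service named ollama (a hypothetical name; adjust to your deployment): print the proxy-related environment inside the running container and confirm the variables come through empty.

# Service name "ollama" is an assumption; lists any *PROXY* variables the container sees
docker compose exec ollama env | grep -i proxy

With the override in place, HTTP_PROXY and http_proxy should appear with empty values (or not at all), so container-to-container requests bypass the proxy while HTTPS model downloads continue to work.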