diff --git a/.env b/.env
index 2710a0f..276ae97 100644
--- a/.env
+++ b/.env
@@ -1,6 +1,8 @@
-# HF_TOKEN=
-PYTORCH_HIP_ALLOC_CONF=garbage_collection_threshold:0.9,max_split_size_mb:512
+# https://github.com/ROCm/ROCm/issues/2625
+GPU_MAX_HW_QUEUES=1
+# https://github.com/ROCm/ROCm/issues/2788#issuecomment-1915765846
 HSA_OVERRIDE_GFX_VERSION=11.0.0 # GFX1101/Navi32 (RX 7800XT)
+# PYTORCH_HIP_ALLOC_CONF=garbage_collection_threshold:0.9,max_split_size_mb:512
 
 #=============================================================#
 #                   LibreChat Configuration                   #
diff --git a/docker-compose.yml b/docker-compose.yml
index 957b241..5ba292f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,5 +1,6 @@
 version: "3"
 services:
+  # Begin Ollama service
   ollama:
     image: ollama/ollama:0.1.22-rocm
     restart: unless-stopped
@@ -9,6 +10,7 @@ services:
       - .env
     ports:
       - 11434:11434
+    # Begin AMD GPU support section
     devices:
       - /dev/kfd
       - /dev/dri
@@ -19,10 +21,12 @@ services:
       - SYS_PTRACE
     security_opt:
       - seccomp=unconfined
+    # End AMD GPU support section
     volumes:
       - ./ollama/bootstrap.sh:/bootstrap.sh:ro
       - ollama:/root/.ollama
 
+  # Begin LiteLLM service
   litellm:
     image: ghcr.io/berriai/litellm:main-latest
     command: --config /config.yaml
@@ -31,6 +35,7 @@ services:
     volumes:
       - ./litellm/config.yaml:/config.yaml:ro
 
+  # Begin LibreChat
   librechat:
     image: ghcr.io/danny-avila/librechat
     ports: