From 0f90332e61bbc48b712bf35060f148328da463db Mon Sep 17 00:00:00 2001
From: Self Denial
Date: Thu, 4 Apr 2024 20:08:14 -0600
Subject: [PATCH] Set cudnn LD_LIBRARY_PATH to fix whisper inference

---
 backend/start.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/start.sh b/backend/start.sh
index 674a126cf..06adf1ff8 100755
--- a/backend/start.sh
+++ b/backend/start.sh
@@ -26,7 +26,7 @@ fi

 if [ "$USE_CUDA_DOCKER" = "true" ]; then
   echo "CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries."
-  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cublas/lib"
+  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi

 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'