diff --git a/.github/workflows/docker-build.yaml b/.github/workflows/docker-build.yaml
index b5dd72192..86d27f4dc 100644
--- a/.github/workflows/docker-build.yaml
+++ b/.github/workflows/docker-build.yaml
@@ -84,6 +84,8 @@ jobs:
           outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
           cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
           cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
+          build-args: |
+            BUILD_HASH=${{ github.sha }}
 
       - name: Export digest
         run: |
@@ -170,7 +172,9 @@ jobs:
           outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
           cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
           cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
-          build-args: USE_CUDA=true
+          build-args: |
+            BUILD_HASH=${{ github.sha }}
+            USE_CUDA=true
 
       - name: Export digest
         run: |
@@ -257,7 +261,9 @@ jobs:
           outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
           cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
           cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
-          build-args: USE_OLLAMA=true
+          build-args: |
+            BUILD_HASH=${{ github.sha }}
+            USE_OLLAMA=true
 
       - name: Export digest
         run: |
diff --git a/Dockerfile b/Dockerfile
index c2c42aa17..896c06960 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,12 +11,14 @@ ARG USE_CUDA_VER=cu121
 # IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! You need to re-embed them.
 ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
 ARG USE_RERANKING_MODEL=""
+ARG BUILD_HASH=dev-build
 # Override at your own risk - non-root configurations are untested
 ARG UID=0
 ARG GID=0
 
 ######## WebUI frontend ########
 FROM --platform=$BUILDPLATFORM node:21-alpine3.19 as build
+ARG BUILD_HASH
 
 WORKDIR /app
 
@@ -24,6 +26,7 @@ COPY package.json package-lock.json ./
 RUN npm ci
 
 COPY . .
+ENV WEBUI_VERSION=${BUILD_HASH}
 RUN npm run build
 
 ######## WebUI backend ########
@@ -35,6 +38,7 @@ ARG USE_OLLAMA
 ARG USE_CUDA_VER
 ARG USE_EMBEDDING_MODEL
 ARG USE_RERANKING_MODEL
+ARG BUILD_HASH
 ARG UID
 ARG GID
 
@@ -155,4 +159,6 @@ HEALTHCHECK CMD curl --silent --fail http://localhost:8080/health | jq -e '.stat
 
 USER $UID:$GID
 
+ENV WEBUI_VERSION=${BUILD_HASH}
+
 CMD [ "bash", "start.sh"]
diff --git a/backend/main.py b/backend/main.py
index df79a3106..d244e18df 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -61,6 +61,7 @@ from config import (
     WEBHOOK_URL,
     ENABLE_ADMIN_EXPORT,
     AppConfig,
+    WEBUI_VERSION,
 )
 
 from constants import ERROR_MESSAGES
@@ -90,7 +91,8 @@ print(
       |_|
 
 
-v{VERSION} - building the best open-source AI user interface.
+v{VERSION} - building the best open-source AI user interface.
+{f"Commit: {WEBUI_VERSION}" if WEBUI_VERSION != "v1.0.0-alpha.100" else ""}
 https://github.com/open-webui/open-webui
 """
 )
diff --git a/hatch_build.py b/hatch_build.py
index 2fa9e4805..8728dc5c0 100644
--- a/hatch_build.py
+++ b/hatch_build.py
@@ -1,4 +1,5 @@
 # noqa: INP001
+import os
 import shutil
 import subprocess
 from sys import stderr
@@ -18,4 +19,5 @@ class CustomBuildHook(BuildHookInterface):
         stderr.write("### npm install\n")
         subprocess.run([npm, "install"], check=True)  # noqa: S603
         stderr.write("\n### npm run build\n")
+        os.environ["WEBUI_VERSION"] = version
         subprocess.run([npm, "run", "build"], check=True)  # noqa: S603
diff --git a/src/lib/components/chat/Settings/About.svelte b/src/lib/components/chat/Settings/About.svelte
index dad1f0ae6..51ad94a3c 100644
--- a/src/lib/components/chat/Settings/About.svelte
+++ b/src/lib/components/chat/Settings/About.svelte
@@ -1,7 +1,7 @@