mirror of https://github.com/vllm-project/vllm
[Misc] Upgrade to `torch==2.3.0` (#4454)
This commit is contained in:
parent
f4f921b7f1
commit
d627a3d837
@@ -49,7 +49,7 @@ jobs:
       matrix:
           os: ['ubuntu-20.04']
           python-version: ['3.8', '3.9', '3.10', '3.11']
-          pytorch-version: ['2.2.1'] # Must be the most recent version that meets requirements-cuda.txt.
+          pytorch-version: ['2.3.0'] # Must be the most recent version that meets requirements-cuda.txt.
           cuda-version: ['11.8', '12.1']

     steps:
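The comment on pytorch-version states the invariant this change preserves: the CI matrix pin must track the torch pin in requirements-cuda.txt. A minimal sketch of that check, not part of vLLM, with the script and its working directory as assumptions:

# Illustrative consistency check (not vLLM code): confirm the CI matrix
# pin matches the `torch` pin in requirements-cuda.txt.
import re

MATRIX_PIN = "2.3.0"  # pytorch-version value from the matrix above

with open("requirements-cuda.txt") as f:
    m = re.search(r"^torch\s*==\s*([\w.]+)", f.read(), re.MULTILINE)

assert m is not None, "no torch pin found in requirements-cuda.txt"
assert m.group(1) == MATRIX_PIN, f"CI pins {MATRIX_PIN}, requirements pin {m.group(1)}"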
@@ -31,7 +31,7 @@ set(HIP_SUPPORTED_ARCHS "gfx906;gfx908;gfx90a;gfx940;gfx941;gfx942;gfx1030;gfx11
 # requirements.txt files and should be kept consistent. The ROCm torch
 # versions are derived from Dockerfile.rocm
 #
-set(TORCH_SUPPORTED_VERSION_CUDA "2.2.1")
+set(TORCH_SUPPORTED_VERSION_CUDA "2.3.0")
 set(TORCH_SUPPORTED_VERSION_ROCM_5X "2.0.1")
 set(TORCH_SUPPORTED_VERSION_ROCM_6X "2.1.1")

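TORCH_SUPPORTED_VERSION_CUDA is the torch version the CMake build expects for CUDA builds, and per the comment it must stay consistent with the requirements files. A rough Python rendering of the version gate it implies; the real check lives in vLLM's CMake code and may differ in detail:

# Rough sketch of the gate implied by TORCH_SUPPORTED_VERSION_CUDA;
# treat this as an analogy, not the actual build logic.
from packaging.version import Version
import torch

SUPPORTED_CUDA = Version("2.3.0")
installed = Version(torch.__version__.split("+")[0])  # strip local tag, e.g. +cu121

if installed != SUPPORTED_CUDA:
    raise RuntimeError(
        f"torch {installed} is installed, but the build expects {SUPPORTED_CUDA}")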
@@ -85,7 +85,7 @@ FROM dev as flash-attn-builder
 ARG max_jobs=2
 ENV MAX_JOBS=${max_jobs}
 # flash attention version
-ARG flash_attn_version=v2.5.6
+ARG flash_attn_version=v2.5.8
 ENV FLASH_ATTN_VERSION=${flash_attn_version}

 WORKDIR /usr/src/flash-attention-v2
@@ -5,7 +5,7 @@ requires = [
     "ninja",
     "packaging",
     "setuptools >= 49.4.0",
-    "torch == 2.2.1",
+    "torch == 2.3.0",
     "wheel",
 ]
 build-backend = "setuptools.build_meta"
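The pin in build-system.requires matters because PEP 517 builds run in an isolated environment: the torch listed here is what the C++/CUDA extensions are compiled against, so it has to match the runtime pin. A small sketch that reads the pin back, not vLLM code, and requiring Python 3.11+ for the stdlib tomllib module:

# Sketch (not vLLM code): read the build-time torch pin from pyproject.toml.
import tomllib

with open("pyproject.toml", "rb") as f:
    requires = tomllib.load(f)["build-system"]["requires"]

torch_pin = next(r for r in requires if r.startswith("torch"))
print(torch_pin)  # expected: torch == 2.3.0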
@@ -3,5 +3,5 @@ cmake>=3.21
 ninja
 packaging
 setuptools>=49.4.0
-torch==2.2.1
+torch==2.3.0
 wheel
@@ -2,5 +2,5 @@
 -r requirements-common.txt

 # Dependencies for x86_64 CPUs
-torch == 2.2.1+cpu
+torch == 2.3.0+cpu
 triton >= 2.2.0 # FIXME(woosuk): This is a hack to avoid import error.
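Note that 2.3.0+cpu is a local version label published on PyTorch's CPU wheel index rather than on PyPI, so installs from this file typically need an extra index URL such as https://download.pytorch.org/whl/cpu; how this file is actually consumed is an assumption here, not something this diff shows.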
@@ -5,5 +5,5 @@
 ray >= 2.9
 nvidia-ml-py # for pynvml package
 vllm-nccl-cu12>=2.18,<2.19 # for downloading nccl library
-torch == 2.2.1
-xformers == 0.0.25 # Requires PyTorch 2.2.1
+torch == 2.3.0
+xformers == 0.0.26.post1 # Requires PyTorch 2.3.0
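xformers wheels are compiled against a specific torch release, which is why the two pins move together here: 0.0.25 pairs with 2.2.1, 0.0.26.post1 with 2.3.0. A quick post-install sanity check as a sketch; the version strings come from this diff, the check itself is illustrative:

# Illustrative post-install check that the torch / xformers pairing holds.
import torch
import xformers

assert torch.__version__.split("+")[0] == "2.3.0", torch.__version__
assert xformers.__version__ == "0.0.26.post1", xformers.__version__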