feat: change minimal supported CUDA version to 11.7 (#1206)

Authored by juejuezi on 2024-09-06 01:34:35 +08:00; committed by GitHub
parent 3cea2fb6ee
commit e371bea04f
2 changed files with 3 additions and 3 deletions

README.md

@@ -98,7 +98,7 @@ MAX_JOBS=4 pip install flash-attn --no-build-isolation
 ### NVIDIA CUDA Support
 **Requirements:**
-- CUDA 11.6 and above.
+- CUDA 11.7 and above.
 We recommend the
 [Pytorch](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/pytorch)
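The README change raises the documented floor to CUDA 11.7. A quick way to sanity-check an environment before a source build is sketched below; it assumes PyTorch is installed and uses torch.version.cuda, which reports the CUDA version PyTorch was built against (the build itself probes the nvcc toolkit instead, as the setup.py hunk below shows).

```python
# Hedged sanity check, not part of this commit: confirm the environment
# meets the new CUDA >= 11.7 floor before building flash-attn from source.
import torch
from packaging.version import Version

cuda = torch.version.cuda  # CUDA version PyTorch was built with; None on CPU-only builds
if cuda is None or Version(cuda) < Version("11.7"):
    raise SystemExit(f"flash-attn now requires CUDA >= 11.7, found: {cuda}")
print(f"CUDA {cuda} satisfies the >= 11.7 requirement")
```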

setup.py

@@ -156,9 +156,9 @@ if not SKIP_CUDA_BUILD and not IS_ROCM:
     cc_flag = []
     if CUDA_HOME is not None:
         _, bare_metal_version = get_cuda_bare_metal_version(CUDA_HOME)
-        if bare_metal_version < Version("11.6"):
+        if bare_metal_version < Version("11.7"):
             raise RuntimeError(
-                "FlashAttention is only supported on CUDA 11.6 and above. "
+                "FlashAttention is only supported on CUDA 11.7 and above. "
                 "Note: make sure nvcc has a supported version by running nvcc -V."
             )
     # cc_flag.append("-gencode")
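For context, the guard above compares the bare-metal toolkit version that get_cuda_bare_metal_version extracts from CUDA_HOME. The helper itself is not part of this diff; the sketch below is an assumption of how such a probe typically works, parsing the `nvcc -V` output that the error message points users to.

```python
# Plausible sketch of the version probe used by the guard above; the real
# helper is not shown in this diff. Assumes `nvcc -V` prints a line like
# "Cuda compilation tools, release 11.7, V11.7.99".
import os
import subprocess
from packaging.version import Version, parse

def get_cuda_bare_metal_version(cuda_dir):
    raw_output = subprocess.check_output(
        [os.path.join(cuda_dir, "bin", "nvcc"), "-V"], universal_newlines=True
    )
    tokens = raw_output.split()
    release_idx = tokens.index("release") + 1  # token after "release", e.g. "11.7,"
    bare_metal_version = parse(tokens[release_idx].split(",")[0])
    return raw_output, bare_metal_version

# Mirrors the updated check: reject toolkits older than 11.7.
_, version = get_cuda_bare_metal_version("/usr/local/cuda")  # assumed CUDA_HOME
if version < Version("11.7"):
    raise RuntimeError("FlashAttention is only supported on CUDA 11.7 and above.")
```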