From 68f178aa4b093837d0a59688dfc8521a187745fe Mon Sep 17 00:00:00 2001
From: Tri Dao
Date: Fri, 22 Dec 2023 10:10:02 -0800
Subject: [PATCH] [CI] Don't compile for python 3.7 pytorch 2.2

---
 .github/workflows/publish.yml | 2 +-
 flash_attn/__init__.py        | 2 +-
 training/Dockerfile           | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 21279f7..e1f0d22 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -60,7 +60,7 @@ jobs:
             python-version: '3.7'
           - torch-version: '2.1.1'
             python-version: '3.7'
-          - torch-version: '2.2.0.dev20231127'
+          - torch-version: '2.2.0.dev20231106'
            python-version: '3.7'
           # Pytorch <= 2.0 only supports CUDA <= 11.8
           - torch-version: '1.12.1'
diff --git a/flash_attn/__init__.py b/flash_attn/__init__.py
index d6e37f8..5d40e99 100644
--- a/flash_attn/__init__.py
+++ b/flash_attn/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "2.4.0"
+__version__ = "2.4.0.post1"
 
 from flash_attn.flash_attn_interface import (
     flash_attn_func,
diff --git a/training/Dockerfile b/training/Dockerfile
index a890a57..a96fc0e 100644
--- a/training/Dockerfile
+++ b/training/Dockerfile
@@ -85,11 +85,11 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
 RUN pip install git+https://github.com/mlcommons/logging.git@2.1.0
 
 # Install FlashAttention
-RUN pip install flash-attn==2.4.0
+RUN pip install flash-attn==2.4.0.post1
 
 # Install CUDA extensions for fused dense, layer norm
 RUN git clone https://github.com/HazyResearch/flash-attention \
-    && cd flash-attention && git checkout v2.4.0 \
+    && cd flash-attention && git checkout v2.4.0.post1 \
     && cd csrc/layer_norm && pip install . && cd ../../ \
     && cd csrc/fused_dense_lib && pip install . && cd ../../ \
     && cd .. && rm -rf flash-attention