
Update Cutlass to v3.2.0

Tri Dao 1 year ago
parent
commit
757058d4d3
3 changed files with 4 additions and 4 deletions
  1. csrc/cutlass  (+1 -1)
  2. flash_attn/__init__.py  (+1 -1)
  3. training/Dockerfile  (+2 -2)

+ 1 - 1
csrc/cutlass

@@ -1 +1 @@
-Subproject commit 6f47420213f757831fae65c686aa471749fa8d60
+Subproject commit 3a8f57a3c89cfff7aa686e95f13d9ad850f61898
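
The diff records only the new submodule pointer. A minimal sketch of how a bump like this is typically produced (assuming, per the commit message, that hash 3a8f57a3c89cfff7aa686e95f13d9ad850f61898 corresponds to the v3.2.0 tag of NVIDIA/cutlass):

    cd csrc/cutlass
    git fetch origin
    git checkout 3a8f57a3c89cfff7aa686e95f13d9ad850f61898   # assumed to be the v3.2.0 tag commit
    cd ../..
    git add csrc/cutlass                                     # stage the updated submodule pointer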

+ 1 - 1
flash_attn/__init__.py

@@ -1,4 +1,4 @@
-__version__ = "2.1.0"
+__version__ = "2.1.1"
 
 from flash_attn.flash_attn_interface import (
     flash_attn_func,

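With the version string bumped, a quick way to confirm which release is installed (an illustrative command, not part of the commit):

    python -c "import flash_attn; print(flash_attn.__version__)"   # prints 2.1.1 after this commit
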
+ 2 - 2
training/Dockerfile

@@ -85,11 +85,11 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
 RUN pip install git+https://github.com/mlcommons/logging.git@2.1.0
 
 # Install FlashAttention
-RUN pip install flash-attn==2.0.9
+RUN pip install flash-attn==2.1.1
 
 # Install CUDA extensions for cross-entropy, fused dense, layer norm
 RUN git clone https://github.com/HazyResearch/flash-attention \
-    && cd flash-attention && git checkout v2.1.0 \
+    && cd flash-attention && git checkout v2.1.1 \
     && cd csrc/fused_softmax && pip install . && cd ../../ \
     && cd csrc/rotary && pip install . && cd ../../ \
     && cd csrc/xentropy && pip install . && cd ../../ \
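
Both Dockerfile pins now reference the same release: the flash-attn 2.1.1 wheel and the v2.1.1 tag of the source checkout used to build the CUDA extensions. A minimal consistency check after rebuilding (the image tag and build context are assumptions, not part of the commit):

    docker build -f training/Dockerfile -t flash-attn-train .
    docker run --rm flash-attn-train \
        python -c "import flash_attn; assert flash_attn.__version__ == '2.1.1'"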