upgrade flash attention to 2.3.2

This commit is contained in:
mulin.lyh
2023-10-16 16:13:28 +08:00
parent e75f5b4bc4
commit 087cb4e463

View File

@@ -1,6 +1,4 @@
git clone -b v1.0.8 https://github.com/Dao-AILab/flash-attention && \
cd flash-attention && pip install . && \
pip install csrc/layer_norm && \
pip install csrc/rotary && \
# Build and install FlashAttention v2.3.2 from source, then delete the checkout.
# --depth 1: the clone is built once and removed, so full history is not needed.
# NOTE(review): use `pip install .` rather than the deprecated
# `python setup.py install` so the package is tracked by pip (uninstallable,
# proper metadata) — this also matches how v1.x was installed here.
git clone --depth 1 -b v2.3.2 https://github.com/Dao-AILab/flash-attention && \
cd flash-attention && pip install . && \
cd .. && \
rm -rf flash-attention