From 087cb4e463dc5dc022954796df484f3bfe65c9ba Mon Sep 17 00:00:00 2001 From: "mulin.lyh" Date: Mon, 16 Oct 2023 16:13:28 +0800 Subject: [PATCH] upgrade flash attention to 2.3.2 --- docker/scripts/install_flash_attension.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docker/scripts/install_flash_attension.sh b/docker/scripts/install_flash_attension.sh index 6a3301c2..f37e567d 100644 --- a/docker/scripts/install_flash_attension.sh +++ b/docker/scripts/install_flash_attension.sh @@ -1,6 +1,4 @@ - git clone -b v1.0.8 https://github.com/Dao-AILab/flash-attention && \ - cd flash-attention && pip install . && \ - pip install csrc/layer_norm && \ - pip install csrc/rotary && \ + git clone -b v2.3.2 https://github.com/Dao-AILab/flash-attention && \ + cd flash-attention && python setup.py install && \ cd .. && \ rm -rf flash-attention