git clone -b v2.3.2 --recurse-submodules https://github.com/Dao-AILab/flash-attention && \
cd flash-attention && python setup.py install && \
cd .. && \
rm -rf flash-attention
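# Optional sanity check (an addition, not part of the original recipe): confirm that the
# freshly built flash_attn package imports cleanly and reports the expected version.
# Assumes a CUDA-enabled PyTorch environment is already active.
python -c "import flash_attn; print(flash_attn.__version__)"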