
Commit 54ac1b2

yefubao committed: add flash_attn_version

1 parent 6091a19 · commit 54ac1b2

File tree: 1 file changed (+4, -3)


install/install-requirements.sh

Lines changed: 4 additions & 3 deletions
@@ -87,9 +87,10 @@ cu=$(nvcc --version | grep "Cuda compilation tools" | awk '{print $5}' | cut -d
 torch=$(pip show torch | grep Version | awk '{print $2}' | cut -d '+' -f 1 | cut -d '.' -f 1,2)
 cp=$(python3 --version | awk '{print $2}' | awk -F. '{print $1$2}')
 cxx=$(g++ --version | grep 'g++' | awk '{print $3}' | cut -d '.' -f 1)
-wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-pip install --no-cache-dir flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-rm flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+flash_attn_version="2.8.0.post2"
+wget https://github.com/Dao-AILab/flash-attention/releases/download/v${flash_attn_version}/flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+pip install --no-cache-dir flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+rm flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
 
 # From Megatron-LM log
 pip install "git+https://github.com/Dao-AILab/flash-attention.git@v2.7.2#egg=flashattn-hopper&subdirectory=hopper"
