1 file changed: +4 -3 lines changed
@@ -87,9 +87,10 @@ cu=$(nvcc --version | grep "Cuda compilation tools" | awk '{print $5}' | cut -d
torch=$(pip show torch | grep Version | awk '{print $2}' | cut -d '+' -f 1 | cut -d '.' -f 1,2)
cp=$(python3 --version | awk '{print $2}' | awk -F. '{print $1$2}')
cxx=$(g++ --version | grep 'g++' | awk '{print $3}' | cut -d '.' -f 1)
-wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.8.0.post2/flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-pip install --no-cache-dir flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
-rm flash_attn-2.8.0.post2+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+flash_attn_version="2.8.0.post2"
+wget https://github.com/Dao-AILab/flash-attention/releases/download/v${flash_attn_version}/flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+pip install --no-cache-dir flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
+rm flash_attn-${flash_attn_version}+cu${cu}torch${torch}cxx${cxx}abiFALSE-cp${cp}-cp${cp}-linux_x86_64.whl
# From Megatron-LM log
pip install " git+https://github.com/Dao-AILab/flash-attention.git@v2.7.2#egg=flashattn-hopper&subdirectory=hopper"