   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.1.83'
-        default: 'v0.1.83'
+        description: 'Version tag of llama-cpp-python to build: v0.2.2'
+        default: 'v0.2.2'
         required: true
         type: string
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.1.83'
-        default: 'v0.1.83'
+        description: 'Version tag of llama-cpp-python to build: v0.2.2'
+        default: 'v0.2.2'
         required: true
         type: string

 permissions:
   contents: write

 jobs:
-  build_lib_lin:
-    name: Build ROCm Lib Linux
-    runs-on: ubuntu-20.04
+  build_wheels:
+    name: Build ${{ matrix.os }} ROCm ${{ matrix.rocm }} Wheel ${{ matrix.pyver }}
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        rocm: ['5.4.2','5.5','5.6.1']
+        os: [ubuntu-20.04, windows-latest]
+        pyver: ["3.8", "3.9", "3.10", "3.11"]
+        rocm: ['5.4.2','5.5','5.5.1','5.6.1']
+      exclude:
+        - os: windows-latest
+          rocm: '5.4.2'
+        - os: windows-latest
+          rocm: '5.5'
+        - os: windows-latest
+          rocm: '5.6.1'
+        - os: ubuntu-20.04
+          rocm: '5.5.1'
     defaults:
       run:
         shell: pwsh
     env:
+      PCKGVER: ${{ inputs.version }}
       ROCM_VERSION: ${{ matrix.rocm }}

     steps:
-      - uses: actions/checkout@v4
-        with:
-          repository: 'abetlen/llama-cpp-python'
-          ref: ${{ inputs.version }}
-          submodules: 'recursive'
-
       - name: Free Disk Space
+        if: runner.os == 'Linux'
         uses: jlumbroso/free-disk-space@v1.2.0
         with:
           tool-cache: false
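
The separate Linux and Windows library jobs are folded into a single `build_wheels` matrix job here (their old definitions are deleted in the next hunk). As a reference point, and not part of the workflow itself, the PowerShell sketch below expands the new matrix and applies the same exclude rules: Windows runners only pair with ROCm 5.5.1, which matches the ROCm 5.5.x HIP SDK installer used in the Windows install step, while Linux covers 5.4.2, 5.5 and 5.6.1, giving 16 wheel builds across the four Python versions.

```powershell
# Illustrative sketch only (not part of the workflow): expand the matrix above
# and apply its exclude rules to see which wheel builds actually run.
$os    = @('ubuntu-20.04', 'windows-latest')
$pyver = @('3.8', '3.9', '3.10', '3.11')
$rocm  = @('5.4.2', '5.5', '5.5.1', '5.6.1')

# Same exclusions as the workflow: Windows builds only against ROCm 5.5.1,
# Linux against everything except 5.5.1.
$excluded = @(
    @{ os = 'windows-latest'; rocm = '5.4.2' },
    @{ os = 'windows-latest'; rocm = '5.5' },
    @{ os = 'windows-latest'; rocm = '5.6.1' },
    @{ os = 'ubuntu-20.04';   rocm = '5.5.1' }
)

$combos = foreach ($o in $os) {
    foreach ($p in $pyver) {
        foreach ($r in $rocm) {
            # Keep the combination only if no exclude rule matches it.
            if (-not $excluded.Where({ $_.os -eq $o -and $_.rocm -eq $r })) {
                [pscustomobject]@{ os = $o; pyver = $p; rocm = $r }
            }
        }
    }
}
$combos | Format-Table   # 16 jobs: 12 on ubuntu-20.04, 4 on windows-latest
```
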
@@ -49,132 +56,31 @@ jobs:
           large-packages: false
           swap-storage: false

-      - name: Install ROCm SDK
+      - uses: actions/checkout@v4
+        with:
+          repository: 'abetlen/llama-cpp-python'
+          ref: ${{ inputs.version }}
+          submodules: 'recursive'
+
+      - name: Install Linux ROCm SDK
+        if: runner.os == 'Linux'
+        shell: bash
         run: |
           [ ! -d /etc/apt/keyrings ] && sudo mkdir --parents --mode=0755 /etc/apt/keyrings
           wget https://repo.radeon.com/rocm/rocm.gpg.key -O - | gpg --dearmor | sudo tee /etc/apt/keyrings/rocm.gpg > /dev/null
           echo "deb [arch=amd64 signed-by=/etc/apt/keyrings/rocm.gpg] https://repo.radeon.com/rocm/apt/$ROCM_VERSION focal main" | sudo tee --append /etc/apt/sources.list.d/rocm.list
           echo -e 'Package: *\nPin: release o=repo.radeon.com\nPin-Priority: 600' | sudo tee /etc/apt/preferences.d/rocm-pin-600
           sudo apt update
           sudo apt install rocm-dev rocblas-dev hipblas-dev -y
-          echo "/opt/rocm/bin" >> $GITHUB_PATH
-          echo "ROCM_PATH=/opt/rocm" >> $GITHUB_ENV
-          echo "HIP_PATH=/opt/rocm" >> $env:GITHUB_ENV
-        shell: bash
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-
-      - name: Install Dependencies
-        run: |
-          python -m pip install cmake ninja
-
-      - name: Build Lib
-        run: |
-          $env:CC = '/opt/rocm/llvm/bin/clang'
-          $env:CXX = '/opt/rocm/llvm/bin/clang++'
-          $env:CFLAGS = '-fPIC'
-          $env:CXXFLAGS = '-fPIC'
-          $env:CMAKE_PREFIX_PATH = '/opt/rocm'
-          $env:VERBOSE = '1'
-          mkdir 'build'
-          Set-Location './vendor/llama.cpp'
-          $gputargets = 'gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102'
-          if ([version]$env:ROCM_VERSION -lt [version]'5.5') {$gputargets = 'gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx1010;gfx1012;gfx1030'}
-          cmake -B build -DLLAMA_HIPBLAS=ON -DBUILD_SHARED_LIBS=ON "-DGPU_TARGETS=$gputargets"
-          cmake --build build --config Release --target llama
-          $llamalib = (dir './build' -file -recurse).where({$_.name -eq 'libllama.so'})[0].fullname
-          Copy-Item $llamalib '../../build'
-
-      - uses: actions/upload-artifact@v3
-        with:
-          name: ${{ format('{0}-rocm-lib-{1}-{2}', runner.os, inputs.version, matrix.rocm) }}
-          path: ./build/libllama.so
-
-  build_lib_win:
-    name: Build ROCm Lib Windows
-    runs-on: windows-latest
-    defaults:
-      run:
-        shell: pwsh
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          repository: 'abetlen/llama-cpp-python'
-          ref: ${{ inputs.version }}
-          submodules: 'recursive'

-      - name: Install ROCm SDK
+      - name: Install Windows ROCm SDK
+        if: runner.os == 'Windows'
         run: |
           curl -LO https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-23.Q3-Win10-Win11-For-HIP.exe
           Start-Process 'AMD-Software-PRO-Edition-23.Q3-Win10-Win11-For-HIP.exe' -ArgumentList '-install' -NoNewWindow -Wait
           echo "C:\Program Files\AMD\ROCm\5.5\bin" >> $env:GITHUB_PATH
           echo 'ROCM_PATH=C:\Program Files\AMD\ROCm\5.5' >> $env:GITHUB_ENV
           echo 'HIP_PATH=C:\Program Files\AMD\ROCm\5.5' >> $env:GITHUB_ENV
-          echo "ROCM_VERSION=5.5.1" >> $env:GITHUB_ENV
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-
-      - name: Install Dependencies
-        run: |
-          python -m pip install cmake ninja
-
-      - name: Build Lib
-        run: |
-          $env:CC = 'C:\Program Files\AMD\ROCm\5.5\bin\clang.exe'
-          $env:CXX = 'C:\Program Files\AMD\ROCm\5.5\bin\clang++.exe'
-          $env:CMAKE_PREFIX_PATH = 'C:\Program Files\AMD\ROCm\5.5'
-          $env:VERBOSE = '1'
-          mkdir 'build'
-          Set-Location '.\vendor\llama.cpp'
-          cmake -B build -G "Ninja" -DLLAMA_HIPBLAS=ON -DBUILD_SHARED_LIBS=ON '-DGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102'
-          cmake --build build --config Release --target llama
-          Copy-Item '.\build\bin\llama.dll' '..\..\build'
-
-      - uses: actions/upload-artifact@v3
-        with:
-          name: ${{ format('{0}-rocm-lib-{1}-{2}', runner.os, inputs.version, '5.5.1') }}
-          path: ./build/llama.dll
-
-  build_wheels:
-    name: Build ${{ matrix.os }} ROCm ${{ matrix.rocm }} Wheel ${{ matrix.pyver }}
-    runs-on: ${{ matrix.os }}
-    needs: ['build_lib_lin','build_lib_win']
-    strategy:
-      matrix:
-        os: [ubuntu-20.04, windows-latest]
-        pyver: ["3.8", "3.9", "3.10", "3.11"]
-        rocm: ['5.4.2','5.5','5.5.1','5.6.1']
-      exclude:
-        - os: windows-latest
-          rocm: '5.4.2'
-        - os: windows-latest
-          rocm: '5.5'
-        - os: windows-latest
-          rocm: '5.6.1'
-        - os: ubuntu-20.04
-          rocm: '5.5.1'
-    defaults:
-      run:
-        shell: pwsh
-    env:
-      PCKGVER: ${{ inputs.version }}
-      ROCM_VERSION: ${{ matrix.rocm }}
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          repository: 'abetlen/llama-cpp-python'
-          ref: ${{ inputs.version }}
-
-      - uses: actions/download-artifact@v3
-        with:
-          name: ${{ format('{0}-rocm-lib-{1}-{2}', runner.os, inputs.version, matrix.rocm) }}
-          path: ./llama_cpp

       - uses: actions/setup-python@v4
         with:
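
With the jobs merged, both SDK installs now live in one step list and are gated by `if: runner.os == ...`. The `>> $env:GITHUB_PATH` and `>> $env:GITHUB_ENV` redirections in the Windows step append to special files that the runner only re-reads between steps, so the exported path and variables become visible to later steps, not to the step that writes them. A minimal sketch of that mechanism, assuming the default `C:\Program Files\AMD\ROCm\5.5` location written by the HIP SDK installer above:

```powershell
# Sketch of the GITHUB_PATH / GITHUB_ENV mechanism used in the step above.
# Both environment variables point at files; appended lines take effect in
# subsequent steps only, never in the step that writes them.
$rocmRoot = 'C:\Program Files\AMD\ROCm\5.5'   # assumed HIP SDK install location

# Later steps get this directory prepended to PATH...
Add-Content -Path $env:GITHUB_PATH -Value (Join-Path $rocmRoot 'bin')

# ...and these variables in their environment.
Add-Content -Path $env:GITHUB_ENV -Value "ROCM_PATH=$rocmRoot"
Add-Content -Path $env:GITHUB_ENV -Value "HIP_PATH=$rocmRoot"

# Within the current step nothing has changed yet:
"ROCM_PATH is still: '$env:ROCM_PATH'"
```

This also appears to be why the corresponding exports were dropped from the Linux install step: the Build Wheel step in the next hunk sets `ROCM_PATH`, `HIP_PATH`, `CC` and `CXX` directly in its own process instead.
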
@@ -186,12 +92,38 @@ jobs:

       - name: Build Wheel
         run: |
+          if ($IsLinux) {
+            $env:CC = '/opt/rocm/llvm/bin/clang'
+            $env:CXX = '/opt/rocm/llvm/bin/clang++'
+            $env:CFLAGS = '-fPIC'
+            $env:CXXFLAGS = '-fPIC'
+            $env:CMAKE_PREFIX_PATH = '/opt/rocm'
+            $env:GITHUB_PATH = "/opt/rocm/bin:$env:GITHUB_PATH"
+            $env:ROCM_PATH = "/opt/rocm"
+            $env:HIP_PATH = "/opt/rocm"
+          } else {
+            $env:CC = 'C:\Program Files\AMD\ROCm\5.5\bin\clang.exe'
+            $env:CXX = 'C:\Program Files\AMD\ROCm\5.5\bin\clang++.exe'
+            $env:CMAKE_PREFIX_PATH = 'C:\Program Files\AMD\ROCm\5.5'
+          }
+          $env:VERBOSE = '1'
           $packageVersion = [version]$env:PCKGVER.TrimStart('v')
-          $setup = Get-Content 'setup.py' -raw
-          $llamalib = if ($IsLinux) {'libllama.so'} else {'llama.dll'}
-          $newsetup = $setup.Replace('package_data={"llama_cpp": ["py.typed"]},',"package_data={'llama_cpp': ['py.typed', '$llamalib']},")
-          New-Item 'setup.py' -itemType File -value $newsetup -force
-          python setup.py --skip-cmake bdist_wheel egg_info "--tag-build=+rocm$env:ROCM_VERSION"
+          $gputargets = 'gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102'
+          if ([version]$env:ROCM_VERSION -lt [version]'5.5') {$gputargets = 'gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx1010;gfx1012;gfx1030'}
+          $env:CMAKE_ARGS = "-GNinja -DLLAMA_HIPBLAS=ON -DGPU_TARGETS=$gputargets"
+          $buildtag = "+rocm$env:ROCM_VERSION"
+          if ($packageVersion -lt [version]'0.2.0') {
+            python -m build --wheel -C--build-option=egg_info "-C--build-option=--tag-build=$buildtag"
+          } else {
+            $initpath = Join-Path '.' 'llama_cpp' '__init__.py' -resolve
+            $initcontent = Get-Content $initpath -raw
+            $regexstr = '(?s)(?<=__version__ \= ")\d+(?:\.\d+)*(?=")'
+            $regexmatch = [Regex]::Matches($initcontent,$regexstr)
+            if (!($regexmatch[0].Success)) {throw '__init__.py parsing failed'}
+            $newinit = $regexmatch[0].Result(('$`' + '$&' + $buildtag + '$'''))
+            New-Item $initpath -itemType File -value $newinit -force
+            python -m build --wheel
+          }

       - name: Upload files to a GitHub release
         id: upload-release
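
The rebuilt Build Wheel step now compiles the hipBLAS backend during the wheel build itself: `CMAKE_ARGS` carries `-DLLAMA_HIPBLAS=ON` plus a `GPU_TARGETS` list that drops the gfx1100-series targets when `[version]$env:ROCM_VERSION` is older than 5.5, and the local version tag `+rocm<version>` is attached in one of two ways. Tags before 0.2.0 still go through the `egg_info --tag-build` options passed via `python -m build`; newer tags instead rewrite the version string inside `llama_cpp/__init__.py`, presumably because the 0.2.x packaging no longer exposes the old `setup.py` options. The densest line is the `Match.Result(...)` call; the standalone sketch below shows what it does, run against a throwaway string rather than the real `__init__.py`:

```powershell
# Standalone sketch of the __init__.py version-tagging trick above, applied to
# a throwaway string instead of the real llama_cpp/__init__.py.
$buildtag    = '+rocm5.6.1'
$initcontent = '__version__ = "0.2.2"'

# Same lookaround pattern as the workflow: match only the bare version number
# between the quotes of __version__ = "...".
$regexstr   = '(?s)(?<=__version__ \= ")\d+(?:\.\d+)*(?=")'
$regexmatch = [Regex]::Matches($initcontent, $regexstr)
if (!($regexmatch[0].Success)) { throw 'version string not found' }

# Match.Result() expands substitution tokens against the original input:
#   $` is the text before the match, $& the match itself, $' the text after it,
# so concatenating them with $buildtag splices the build tag in after the version.
$newinit = $regexmatch[0].Result(('$`' + '$&' + $buildtag + '$'''))
$newinit   # __version__ = "0.2.2+rocm5.6.1"
```

The `[version]` accelerator also makes the branch condition a real semantic comparison rather than a string compare: `[version]'0.1.83' -lt [version]'0.2.0'` evaluates to `$true`, which is what routes older tags to the egg_info path.
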
0 commit comments