Commit c180f07

Add 0.2.14 builds to package index

Parent: b6b39fe

86 files changed: +650 −28 lines

Note: large commits have some content hidden by default, so only a subset of the 86 changed files is shown below.

.github/workflows/build-wheels-cpu.yml

Lines changed: 4 additions & 4 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:
@@ -21,8 +21,8 @@ on:
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:

.github/workflows/build-wheels-macos.yml

Lines changed: 4 additions & 4 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:
@@ -21,8 +21,8 @@ on:
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:

.github/workflows/build-wheels-oobabooga.yml

Lines changed: 4 additions & 4 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:
@@ -21,8 +21,8 @@ on:
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:

.github/workflows/build-wheels-prioritized-release.yml

Lines changed: 6 additions & 6 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.11'
-        default: 'v0.2.11'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string

@@ -18,15 +18,15 @@ jobs:
     uses: ./.github/workflows/build-wheels-oobabooga.yml
     with:
       version: ${{ inputs.version }}
-      config: 'pyver:3.10;cuda:11.7.1'
+      config: 'pyver:3.10;cuda:12.1.1'

   build_wheels_main_prio:
     name: Main Prioritized
     uses: ./.github/workflows/build-wheels.yml
     with:
       version: ${{ inputs.version }}
       cpu: '0'
-      config: 'pyver:3.10;cuda:11.7.1'
+      config: 'pyver:3.10;cuda:12.1.1'

   build_wheels_cpu_prio:
     name: CPU-only Prioritized
@@ -57,15 +57,15 @@ jobs:
     with:
       version: ${{ inputs.version }}
       cpu: '0'
-      exclude: 'pyver:3.10,cuda:11.7.1'
+      exclude: 'pyver:3.10,cuda:12.1.1'

   build_textgen_wheels:
     name: Textgen Wheels
     needs: build_wheels_main
     uses: ./.github/workflows/build-wheels-oobabooga.yml
     with:
       version: ${{ inputs.version }}
-      exclude: 'pyver:3.10,cuda:11.7.1'
+      exclude: 'pyver:3.10,cuda:12.1.1'

   build_wheels_cpu:
     name: CPU-only Wheels
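
Usage note (an assumption, not part of the commit): since version is a required workflow_dispatch input, a build for the new default tag could be started from a PowerShell prompt with the GitHub CLI along these lines:

# Hypothetical dispatch of the prioritized-release workflow for the new default tag,
# assuming the GitHub CLI is installed and authenticated against this repository.
gh workflow run build-wheels-prioritized-release.yml -f version=v0.2.14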

.github/workflows/build-wheels-rocm-full.yml

Lines changed: 4 additions & 4 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.11'
-        default: 'v0.2.11'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:
@@ -21,8 +21,8 @@ on:
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.11'
-        default: 'v0.2.11'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       config:

.github/workflows/build-wheels.yml

Lines changed: 4 additions & 4 deletions

@@ -4,8 +4,8 @@ on:
   workflow_dispatch:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       cpu:
@@ -26,8 +26,8 @@ on:
   workflow_call:
     inputs:
       version:
-        description: 'Version tag of llama-cpp-python to build: v0.2.9'
-        default: 'v0.2.9'
+        description: 'Version tag of llama-cpp-python to build: v0.2.14'
+        default: 'v0.2.14'
         required: true
         type: string
       cpu:

generate-html.ps1

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@ Set-Location $PSScriptRoot
 $destinationDir = if (Test-Path $(Join-Path $(Resolve-Path '.') 'index')) {Join-Path '.' 'index' -resolve} else {(New-Item 'index' -ItemType 'Directory').fullname}
 $avxVersions = "AVX","AVX2","AVX512","basic"
 $cudaVersions = "11.6","11.7","11.8","12.0","12.1","12.2","rocm5.4.2","rocm5.5","rocm5.5.1","rocm5.6.1","cpu"
-$packageVersions = (@(62)+66..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11).foreach({"$_".Insert(0,'0.2.')})
+$packageVersions = (@(62)+66..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+@(14)).foreach({"$_".Insert(0,'0.2.')})
 $pythonVersions = "3.7","3.8","3.9","3.10","3.11"
 $supportedSystems = 'linux_x86_64','win_amd64','macosx_11_0_x86_64','macosx_12_0_x86_64','macosx_13_0_x86_64','macosx_14_0_x86_64','macosx_11_0_arm64','macosx_12_0_arm64','macosx_13_0_arm64','macosx_14_0_arm64'
 $wheelSource = 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download'
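
For reference, a minimal sketch (not part of the commit) of what the updated $packageVersions expression produces when pasted into a PowerShell session; 0.2.12 and 0.2.13 remain deliberately absent from the 0.2.x series:

# Reproduces the version list built by the updated line in generate-html.ps1.
# 0..11 + @(14) appends 14 to the 0-11 range, so 0.2.12 and 0.2.13 are skipped.
$packageVersions = (@(62)+66..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+@(14)).foreach({"$_".Insert(0,'0.2.')})
$packageVersions[-3..-1]   # -> 0.2.10, 0.2.11, 0.2.14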

generate-textgen-html.ps1

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@ $destinationDir = if (Test-Path $(Join-Path $(Resolve-Path '.') 'index')) {Join-
 $destinationDir = if (Test-Path $(Join-Path $destinationDir 'textgen')) {Join-Path $destinationDir 'textgen'} else {(New-Item $(Join-Path $destinationDir 'textgen') -ItemType 'Directory').fullname}
 $avxVersions = "AVX","AVX2","basic"
 $cudaVersions = "11.7","11.8","12.0","12.1","12.2","rocm5.4.2","rocm5.5","rocm5.5.1","rocm5.6.1"
-$packageVersions = (73..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11).foreach({"$_".Insert(0,'0.2.')})
+$packageVersions = (73..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+@(14)).foreach({"$_".Insert(0,'0.2.')})
 $pythonVersions = "3.8","3.9","3.10","3.11"
 $supportedSystems = 'linux_x86_64','win_amd64'
 $wheelSource = 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download'

index/AVX/cpu/llama-cpp-python/index.html

Lines changed: 9 additions & 0 deletions

@@ -273,5 +273,14 @@
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.11+cpuavx-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.11+cpuavx-cp310-cp310-win_amd64.whl</a><br/>
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.11+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.11+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.11+cpuavx-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.11+cpuavx-cp311-cp311-win_amd64.whl</a><br/>
+
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cpuavx-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.14+cpuavx-cp38-cp38-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cpuavx-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.14+cpuavx-cp39-cp39-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.14+cpuavx-cp310-cp310-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.14+cpuavx-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.14+cpuavx-cp311-cp311-win_amd64.whl</a><br/>
 </body>
 </html>

index/AVX/cu116/llama-cpp-python/index.html

Lines changed: 9 additions & 0 deletions

@@ -328,5 +328,14 @@
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.11+cu116-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.11+cu116-cp310-cp310-win_amd64.whl</a><br/>
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.11+cu116-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.11+cu116-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
 <a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.11+cu116-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.11+cu116-cp311-cp311-win_amd64.whl</a><br/>
+
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cu116-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.14+cu116-cp38-cp38-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cu116-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.14+cu116-cp39-cp39-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cu116-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.14+cu116-cp310-cp310-win_amd64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.14+cu116-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
+<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.14+cu116-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.14+cu116-cp311-cp311-win_amd64.whl</a><br/>
 </body>
 </html>
