@@ -1,7 +1,7 @@
[package]
name = "llama-cpp-2"
description = "llama.cpp bindings for Rust"
-version = "0.1.61"
+version = "0.1.62"
edition = "2021"
license = "MIT OR Apache-2.0"
repository = "https://github.com/utilityai/llama-cpp-rs"
@@ -10,7 +10,7 @@ repository = "https://github.com/utilityai/llama-cpp-rs"

[dependencies]
enumflags2 = "0.7.10"
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61" }
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62" }
thiserror = { workspace = true }
tracing = { workspace = true }

@@ -23,36 +23,36 @@ native = ["llama-cpp-sys-2/native"]
sampler = []

[target.'cfg(target_feature = "avx")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "avx",
] }
[target.'cfg(target_feature = "avx2")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "avx2",
] }
[target.'cfg(target_feature = "avx512f")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "avx512",
] }
[target.'cfg(target_feature = "avx512vbmi")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "avx512_vmbi",
] }
[target.'cfg(target_feature = "avx512vnni")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "avx512_vnni",
] }
[target.'cfg(target_feature = "f16c")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "f16c",
] }
[target.'cfg(target_feature = "fma")'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "fma",
] }

[target.'cfg(all(target_os = "macos", any(target_arch = "aarch64", target_arch = "arm64")))'.dependencies]
-llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.61", features = [
+llama-cpp-sys-2 = { path = "../llama-cpp-sys-2", version = "0.1.62", features = [
    "metal",
] }