Commit da0d159

1.7b and 4b configs
1 parent 507dbc0 commit da0d159

File tree: 2 files changed (+32, -0)

examples/models/qwen3/1_7b_config.json

Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
{
  "dim": 2048,
  "ffn_dim_multiplier": 1,
  "hidden_dim": 6144,
  "n_heads": 16,
  "head_dim": 128,
  "n_kv_heads": 8,
  "n_layers": 28,
  "norm_eps": 1e-06,
  "rope_theta": 1000000.0,
  "use_scaled_rope": false,
  "vocab_size": 151936,
  "use_hf_rope": true,
  "attention_qkv_bias": false,
  "use_qk_norm": true
}
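For context, a minimal sketch (not part of this commit) of reading this config with Python's standard json module and deriving the attention projection widths; the file path is taken from this commit's file list. With 16 query heads sharing 8 key/value heads, the config describes grouped-query attention, so the K/V projections are half the width of the Q projection.

import json

# Path taken from this commit's file list.
with open("examples/models/qwen3/1_7b_config.json") as f:
    cfg = json.load(f)

# Grouped-query attention: n_heads query heads share n_kv_heads KV heads.
q_dim = cfg["n_heads"] * cfg["head_dim"]      # 16 * 128 = 2048
kv_dim = cfg["n_kv_heads"] * cfg["head_dim"]  # 8 * 128 = 1024
groups = cfg["n_heads"] // cfg["n_kv_heads"]  # 2 query heads per KV head

print(f"q_dim={q_dim}, kv_dim={kv_dim}, groups={groups}")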

examples/models/qwen3/4b_config.json

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
{
  "dim": 2560,
  "ffn_dim_multiplier": 1,
  "hidden_dim": 9728,
  "n_heads": 32,
  "head_dim": 128,
  "n_kv_heads": 8,
  "n_layers": 36,
  "norm_eps": 1e-06,
  "rope_theta": 1000000.0,
  "use_scaled_rope": false,
  "vocab_size": 151936,
  "use_hf_rope": true,
  "attention_qkv_bias": false,
  "use_qk_norm": true
}
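Relative to the 1.7B config above, the 4B variant widens the model (dim 2048 → 2560), deepens it (n_layers 28 → 36), and enlarges the feed-forward layer (hidden_dim 6144 → 9728), while head_dim, n_kv_heads, vocab_size, and the RoPE/norm settings stay the same. As a rough sanity check (again not part of the commit), a parameter estimate can be derived from these fields; the sketch below assumes a Llama-style SwiGLU feed-forward block with three weight matrices and input/output embeddings tied into one matrix, and it ignores norm weights and biases.

import json

with open("examples/models/qwen3/4b_config.json") as f:
    cfg = json.load(f)

d, h = cfg["dim"], cfg["hidden_dim"]
q = cfg["n_heads"] * cfg["head_dim"]      # 32 * 128 = 4096
kv = cfg["n_kv_heads"] * cfg["head_dim"]  # 8 * 128 = 1024

attn = d * q + 2 * d * kv + q * d  # Q, K, V, O projection weights
ffn = 3 * d * h                    # gate, up, down projections (SwiGLU, assumed)
embed = cfg["vocab_size"] * d      # token embedding (output head assumed tied)

total = cfg["n_layers"] * (attn + ffn) + embed
print(f"~{total / 1e9:.2f}B parameters")  # prints ~4.02B for this config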
