Skip to content

Commit 41f5d54

Browse files
committed
feat: Add c++ side constants for attention layer indices hparam
Branch: GraniteFour
1 parent e8d9499 commit 41f5d54

File tree

2 files changed

+2
-0
lines changed

2 files changed

+2
-0
lines changed

src/llama-arch.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,6 +147,7 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
     { LLM_KV_ATTENTION_SCALE,            "%s.attention.scale" },
     { LLM_KV_ATTENTION_KEY_LENGTH_MLA,   "%s.attention.key_length_mla" },
     { LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
+    { LLM_KV_ATTENTION_LAYER_INDICES,    "%s.attention.layer_indices" },

     { LLM_KV_ROPE_DIMENSION_COUNT,       "%s.rope.dimension_count" },
     { LLM_KV_ROPE_DIMENSION_SECTIONS,    "%s.rope.dimension_sections" },

src/llama-arch.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,7 @@ enum llm_kv {
     LLM_KV_ATTENTION_SCALE,
     LLM_KV_ATTENTION_KEY_LENGTH_MLA,
     LLM_KV_ATTENTION_VALUE_LENGTH_MLA,
+    LLM_KV_ATTENTION_LAYER_INDICES,

     LLM_KV_ROPE_DIMENSION_COUNT,
     LLM_KV_ROPE_DIMENSION_SECTIONS,

0 commit comments

Comments
 (0)