Commit 35c0233

refactor: recurrent_layer() -> is_recurrent()
Branch: HybridRecurrentCache
Signed-off-by: Gabe Goodhart <ghart@us.ibm.com>
1 parent d0565e8

3 files changed: 4 additions & 4 deletions

src/llama-hparams.cpp

Lines changed: 1 addition & 1 deletion
@@ -86,7 +86,7 @@ uint32_t llama_hparams::n_embd_s() const {
     return ssm_d_state * ssm_d_inner;
 }
 
-bool llama_hparams::recurrent_layer(uint32_t il) const {
+bool llama_hparams::is_recurrent(uint32_t il) const {
     return recurrent_layer_arr[il];
 }

src/llama-hparams.h

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ struct llama_hparams {
     uint32_t n_embd_s() const;
 
     // whether or not the given layer is recurrent (for hybrid models)
-    bool recurrent_layer(uint32_t il) const;
+    bool is_recurrent(uint32_t il) const;
 
     bool is_swa(uint32_t il) const;
 };

src/llama-memory-hybrid.cpp

Lines changed: 2 additions & 2 deletions
@@ -32,7 +32,7 @@ llama_memory_hybrid::llama_memory_hybrid(
     mem_attn(new llama_kv_cache_unified(
         model,
         filter_attn == nullptr ?
-            [&](int32_t il) { return !model.hparams.recurrent_layer(il); }
+            [&](int32_t il) { return !model.hparams.is_recurrent(il); }
             : filter_attn,
         type_k,
         type_v,
@@ -47,7 +47,7 @@ llama_memory_hybrid::llama_memory_hybrid(
     mem_recr(new llama_memory_recurrent(
         model,
         filter_recr == nullptr ?
-            [&](int32_t il) { return model.hparams.recurrent_layer(il); }
+            [&](int32_t il) { return model.hparams.is_recurrent(il); }
             : filter_recr,
         type_r,
         type_s,
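For context on the two default filters touched above: when no explicit filter is passed, the attention KV cache claims every layer where is_recurrent(il) is false, and the recurrent cache claims the rest. Below is a minimal standalone sketch of that partitioning; the toy_hparams type and the example layer layout are hypothetical stand-ins, not the real llama.cpp classes.

// Standalone sketch (not llama.cpp code): a per-layer is_recurrent() flag
// partitions layers between an attention cache and a recurrent cache,
// mirroring the default filter_attn / filter_recr lambdas in the diff above.
#include <cstdint>
#include <cstdio>
#include <vector>

struct toy_hparams {
    std::vector<bool> recurrent_layer_arr;
    bool is_recurrent(uint32_t il) const { return recurrent_layer_arr[il]; }
};

int main() {
    // Hypothetical hybrid model: layers 1 and 3 are recurrent, the rest attend.
    toy_hparams hp{{false, true, false, true}};

    for (uint32_t il = 0; il < hp.recurrent_layer_arr.size(); ++il) {
        // Same predicates as the default lambdas: attention takes
        // !is_recurrent(il), the recurrent cache takes is_recurrent(il).
        const char * dst = hp.is_recurrent(il) ? "recurrent" : "attention";
        std::printf("layer %u -> %s cache\n", (unsigned) il, dst);
    }
    return 0;
}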
