
Commit db5ff0c

jamba : remove redundant nullptr initializations
1 parent b0b280e commit db5ff0c

File tree

1 file changed: +0 additions, -27 deletions

src/llama-model.cpp

Lines changed: 0 additions & 27 deletions
@@ -3305,32 +3305,13 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
 
                     // out_proj
                     layer.ssm_out = create_tensor(tn(LLM_TENSOR_SSM_OUT, "weight", i), {d_inner, n_embd}, 0);
-
-                    layer.wq = nullptr;
-                    layer.wk = nullptr;
-                    layer.wv = nullptr;
-                    layer.wo = nullptr;
-
                 } else {
                     // Attention layers
 
                     layer.wq = create_tensor(tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, 0);
                     layer.wk = create_tensor(tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, 0);
                     layer.wv = create_tensor(tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, 0);
                     layer.wo = create_tensor(tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, 0);
-
-                    layer.ssm_in = nullptr;
-                    layer.ssm_conv1d = nullptr;
-                    layer.ssm_conv1d_b = nullptr;
-                    layer.ssm_x = nullptr;
-                    layer.ssm_dt_norm = nullptr;
-                    layer.ssm_dt = nullptr;
-                    layer.ssm_dt_b = nullptr;
-                    layer.ssm_b_norm = nullptr;
-                    layer.ssm_c_norm = nullptr;
-                    layer.ssm_a = nullptr;
-                    layer.ssm_d = nullptr;
-                    layer.ssm_out = nullptr;
                 }
 
                 layer.ffn_norm = create_tensor(tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, 0);
@@ -3342,19 +3323,11 @@ bool llama_model::load_tensors(llama_model_loader & ml) {
                     layer.ffn_gate_exps = create_tensor(tn(LLM_TENSOR_FFN_GATE_EXPS, "weight", i), {n_embd, n_ff, n_expert}, 0);
                     layer.ffn_down_exps = create_tensor(tn(LLM_TENSOR_FFN_DOWN_EXPS, "weight", i), {n_ff, n_embd, n_expert}, 0);
                     layer.ffn_up_exps = create_tensor(tn(LLM_TENSOR_FFN_UP_EXPS, "weight", i), {n_embd, n_ff, n_expert}, 0);
-
-                    layer.ffn_gate = nullptr;
-                    layer.ffn_down = nullptr;
-                    layer.ffn_up = nullptr;
                 } else {
                     // FFN (no MoE)
                     layer.ffn_gate = create_tensor(tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, 0);
                     layer.ffn_down = create_tensor(tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, 0);
                     layer.ffn_up = create_tensor(tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, 0);
-
-                    layer.ffn_gate_exps = nullptr;
-                    layer.ffn_down_exps = nullptr;
-                    layer.ffn_up_exps = nullptr;
                 }
             }
         } break;
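
Why the deleted assignments were redundant: the tensor pointers in the layer struct are already null before load_tensors touches them, because they are declared with in-class nullptr initializers. A minimal standalone sketch of that pattern follows; layer_sketch and its member list are hypothetical stand-ins, not the real llama_layer definition.

    #include <cstdio>

    struct ggml_tensor; // opaque, as in ggml; only pointers are used here

    // Hypothetical stand-in for the real layer struct: in-class
    // initializers mean every tensor pointer is already nullptr when a
    // layer is constructed, so re-assigning nullptr in the branch that
    // does not use a tensor is a no-op.
    struct layer_sketch {
        // attention tensors (created only for attention layers)
        ggml_tensor * wq = nullptr;
        ggml_tensor * wk = nullptr;
        ggml_tensor * wv = nullptr;
        ggml_tensor * wo = nullptr;

        // SSM tensors (created only for Mamba layers)
        ggml_tensor * ssm_in  = nullptr;
        ggml_tensor * ssm_out = nullptr;
    };

    int main() {
        layer_sketch layer; // default-constructed: all pointers start as nullptr
        std::printf("wq before any assignment: %s\n",
                    layer.wq == nullptr ? "nullptr" : "set");
        return 0;
    }

With the defaults living in the struct declaration, each branch of the if/else only needs to create the tensors it actually uses; the other branch's pointers stay nullptr without explicit resets, which is exactly what this diff removes.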
