Skip to content

Commit 0777cd3

Browse files
committed
rm redundant settings
1 parent f8facb3 commit 0777cd3

File tree

1 file changed

+0
-4
lines changed

1 file changed

+0
-4
lines changed

src/llama-model.cpp

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -824,12 +824,8 @@ void llama_model::load_hparams(llama_model_loader & ml) {
824824
} break;
825825
case LLM_ARCH_QWEN3:
826826
{
827-
// default for embeddings, will be overwritten if model is rerank
828-
hparams.pooling_type = LLAMA_POOLING_TYPE_LAST;
829-
830827
ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps);
831828
ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type);
832-
ml.get_arr_n(LLM_KV_CLASSIFIER_OUTPUT_LABELS, hparams.n_cls_out, false);
833829

834830
switch (hparams.n_layer) {
835831
case 28: type = hparams.n_embd == 1024 ? LLM_TYPE_0_6B : LLM_TYPE_1_7B; break;

0 commit comments

Comments (0)