From 88dafb65d1b599917a39e5c2ba7448fc27b58d6d Mon Sep 17 00:00:00 2001
From: sstamenk
Date: Thu, 1 May 2025 11:17:05 +0200
Subject: [PATCH] Remap FP8 kv-scale name for Deepseek

---
 vllm/model_executor/models/deepseek.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/vllm/model_executor/models/deepseek.py b/vllm/model_executor/models/deepseek.py
index c6421143dd68..bcde432280a8 100644
--- a/vllm/model_executor/models/deepseek.py
+++ b/vllm/model_executor/models/deepseek.py
@@ -45,7 +45,8 @@
 from vllm.model_executor.layers.rotary_embedding import get_rope
 from vllm.model_executor.layers.vocab_parallel_embedding import (
     ParallelLMHead, VocabParallelEmbedding)
-from vllm.model_executor.model_loader.weight_utils import default_weight_loader
+from vllm.model_executor.model_loader.weight_utils import (
+    default_weight_loader, maybe_remap_kv_scale_name)
 from vllm.model_executor.sampling_metadata import SamplingMetadata
 from vllm.sequence import IntermediateTensors
 
@@ -414,6 +415,11 @@ def load_weights(self, weights: Iterable[Tuple[str,
                     continue
                 if is_pp_missing_parameter(name, self):
                     continue
+                if name.endswith("scale"):
+                    # Remapping the name of FP8 kv-scale.
+                    name = maybe_remap_kv_scale_name(name, params_dict)
+                    if name is None:
+                        continue
                 param = params_dict[name]
                 weight_loader = param.weight_loader
                 weight_loader(param, loaded_weight, shard_id)
@@ -428,6 +434,10 @@ def load_weights(self, weights: Iterable[Tuple[str,
                     continue
                 if is_pp_missing_parameter(name, self):
                     continue
+                # Remapping the name of FP8 kv-scale.
+                name = maybe_remap_kv_scale_name(name, params_dict)
+                if name is None:
+                    continue
                 param = params_dict[name]
                 weight_loader = getattr(param, "weight_loader",
                                         default_weight_loader)
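
Background on the helper used above: maybe_remap_kv_scale_name translates
kv-scale parameter names found in FP8-quantized checkpoints into the names
the runtime model actually registers. As the diff shows, it returns the
remapped name when a matching parameter exists in params_dict, and None when
the scale has no destination, which is why both branches of load_weights
skip the weight in that case. Below is a minimal sketch of that contract
only; remap_kv_scale_name_sketch is a hypothetical stand-in, and vLLM's real
helper also handles additional name patterns and logs a warning before
skipping.

    from typing import Dict, Optional

    import torch


    def remap_kv_scale_name_sketch(
            name: str,
            params_dict: Dict[str, torch.Tensor]) -> Optional[str]:
        """Sketch of the remapping contract relied on by load_weights."""
        # Assumption for this sketch: FP8 checkpoints may store the KV-cache
        # scale under a legacy name such as "...self_attn.kv_scale", while
        # the runtime parameter is registered on the attention submodule,
        # e.g. "...self_attn.attn.kv_scale".
        if name.endswith(".kv_scale") and not name.endswith(".attn.kv_scale"):
            remapped = name.replace(".kv_scale", ".attn.kv_scale")
            if remapped in params_dict:
                return remapped
            # No destination parameter exists; the caller skips the weight.
            return None
        # Names that need no remapping are returned unchanged.
        return name

Usage mirrors the pattern this patch adds inside the weight-loading loop:

    name = maybe_remap_kv_scale_name(name, params_dict)
    if name is None:
        continue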