Skip to content

Commit 1cdea6b

Browse files
committed
update torch config
Signed-off-by: David9857 <985700846@qq.com>
1 parent ad36420 commit 1cdea6b

File tree

2 files changed

+4
-2
lines changed

2 files changed

+4
-2
lines changed

vllm_ascend/ascend_config.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,8 @@ def __init__(self, torchair_graph_config):
             "graph_batch_sizes", [])
         self.graph_batch_sizes_init = torchair_graph_config.get(
             "graph_batch_sizes_init", False)
+        self.enable_multistream_shared_expert = torchair_graph_config.get(
+            "enable_multistream_shared_expert", False)

         if not isinstance(self.graph_batch_sizes, list):
             raise TypeError("graph_batch_sizes must be list[int]")

vllm_ascend/models/deepseek_v2.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -278,10 +278,10 @@ def forward(
             top_k=CustomDeepseekV2MoE.top_k,
             enable_force_load_balance=enable_force_load_balance,
             **kwargs)
-
+
         if multistream:
             hidden_states, shared_output = hidden_states
-
+
         hidden_states = hidden_states * self.routed_scaling_factor

         if self.tp_size > 1:

Comments (0)