Skip to content

Commit 19f97f7

Browse files
committed
[bugfix] fix graph_batch_sizes padding bug
Signed-off-by: zzzzwwjj <1183291235@qq.com>
1 parent 3ea2410 commit 19f97f7

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

vllm_ascend/worker/model_runner_v1.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2101,7 +2101,7 @@ def check_torchair_graph_batch_sizes(self):
2101 2101          if self.parallel_config.enable_expert_parallel:
2102 2102              new_graph_batch_sizes = []
2103 2103              for graph_batch_size in self.torchair_graph_batch_sizes:
2104      -                cur_graph_batch_size = graph_batch_size + tp_size - graph_batch_size % tp_size
     2104 +                cur_graph_batch_size = math.ceil(graph_batch_size / tp_size) * tp_size
2105 2105                  if cur_graph_batch_size not in new_graph_batch_sizes:
2106 2106                      new_graph_batch_sizes.append(cur_graph_batch_size)
2107 2107              self.torchair_graph_batch_sizes = new_graph_batch_sizes

0 commit comments

Comments (0)