From fd18ae649453fa4c31b58b04ba75d3fd9ed0b3d4 Mon Sep 17 00:00:00 2001
From: Mengqing Cao
Date: Thu, 27 Feb 2025 14:21:08 +0800
Subject: [PATCH] [MOE] fix #176 (#179)

Fix #176

We need to set `topk_group` and `num_expert_group` to `0` if they are
`None`.

Signed-off-by: MengqingCao
---
 vllm_ascend/ops/fused_moe.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/vllm_ascend/ops/fused_moe.py b/vllm_ascend/ops/fused_moe.py
index db03509..c2d4146 100644
--- a/vllm_ascend/ops/fused_moe.py
+++ b/vllm_ascend/ops/fused_moe.py
@@ -48,6 +48,8 @@ def group_topk(hidden_states: torch.Tensor,
     original_scores = scores
     scores = scores + e_score_correction_bias.unsqueeze(0)
+    topk_group = 0 if topk_group is None else topk_group
+    num_expert_group = 0 if num_expert_group is None else num_expert_group
     torch_npu.npu_group_topk(input=scores,
                              out=scores,
                              group_num=num_expert_group,
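
For context, the failure in #176 stems from passing `None` where the grouped top-k kernel expects integers. Below is a minimal sketch of the guard pattern this patch applies, pulled out into a standalone function so it can be tested without an NPU. The helper name `normalize_group_args` is hypothetical (the real patch inlines the two lines inside `group_topk`), and the reading that `0` means "grouping disabled" is an assumption, not something the patch itself states:

```python
from typing import Optional, Tuple

def normalize_group_args(topk_group: Optional[int],
                         num_expert_group: Optional[int]) -> Tuple[int, int]:
    """Coerce optional expert-grouping parameters to 0 before handing
    them to the NPU kernel, which cannot accept None.

    Assumption: 0 is treated as "no expert grouping" downstream.
    """
    # Mirrors the two lines added by this patch.
    topk_group = 0 if topk_group is None else topk_group
    num_expert_group = 0 if num_expert_group is None else num_expert_group
    return topk_group, num_expert_group

# Models without grouped expert routing leave both arguments unset:
assert normalize_group_args(None, None) == (0, 0)
# Grouped routing (e.g. DeepSeek-style MoE) passes real values through:
assert normalize_group_args(4, 8) == (4, 8)
```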