Fix #176
We need to set `topk_group` and `num_expert_group` to `0` if they are
`None`, since the NPU kernel expects integer arguments.

Signed-off-by: MengqingCao <cmq0113@163.com>
This commit is contained in:
Mengqing Cao
2025-02-27 14:21:08 +08:00
committed by GitHub
parent ee43179767
commit fd18ae6494

View File

@@ -48,6 +48,8 @@ def group_topk(hidden_states: torch.Tensor,
original_scores = scores
scores = scores + e_score_correction_bias.unsqueeze(0)
topk_group = 0 if topk_group is None else topk_group
num_expert_group = 0 if num_expert_group is None else num_expert_group
torch_npu.npu_group_topk(input=scores,
out=scores,
group_num=num_expert_group,