[Pangu][MoE] Remove PanguProMoEV1 related code (#5088)
### What this PR does / why we need it?
PanguProMoEV1 is no longer supported in vllm-ascend; this change removes the
related code.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
e2e & ut
- vLLM version: v0.12.0
- vLLM main:
ad32e3e19c
Signed-off-by: weichen <calvin_zhu0210@outlook.com>
This commit is contained in:
@@ -240,7 +240,6 @@ def select_moe_comm_method(num_tokens: int,
     quant_type = getattr(
         vllm_config.model_config.hf_config, 'moe_quantize',
         getattr(vllm_config.model_config.hf_config, 'quantize', None))
-    model_type = vllm_config.model_config.hf_config.model_type

     if not vllm_config.parallel_config.enable_expert_parallel:
         moe_comm_type = MoECommType.ALLGATHER
@@ -267,7 +266,4 @@ def select_moe_comm_method(num_tokens: int,
                          if fused_all2all_enable else MoECommType.ALLTOALL)
     else:
         raise ValueError(f"Unsupported soc_version: {soc_version}")
-    # PanguProMoE only supports allgather
-    if model_type == "PanguProMoE":
-        moe_comm_type = MoECommType.ALLGATHER

     return moe_comm_type
Reference in New Issue
Block a user