[deepep] fix: shared experts are not initialized when shared experts fusion is enabled (#5072)

This commit is contained in:
Cheng Wan
2025-04-04 18:08:30 -04:00
committed by GitHub
parent 924ca7c92c
commit 4c54f44202

View File

@@ -183,7 +183,7 @@ class ServerArgs:
enable_flashmla: bool = False
flashinfer_mla_disable_ragged: bool = False
warmups: Optional[str] = None
n_share_experts_fusion: Optional[int] = None
n_share_experts_fusion: int = 0
disable_shared_experts_fusion: bool = False
# Debug tensor dumps
@@ -1110,7 +1110,7 @@ class ServerArgs:
parser.add_argument(
"--n-share-experts-fusion",
type=int,
default=None,
default=0,
help="The number of shared experts to replicate and fuse with the normal experts in DeepSeek V3/R1. "
"We use tp_size by default.",
)