support Llama4 with non-uniform intermediate size across layers (#10047)

This commit is contained in:
gongwei-130
2025-09-05 17:28:15 -07:00
committed by GitHub
parent 273b28344b
commit ab62b135c1
7 changed files with 123 additions and 13 deletions

View File

@@ -136,6 +136,7 @@ suites = {
"per-commit-8-gpu": [
# Disabled because it hangs on the CI.
# TestFile("ep/test_moe_ep.py", 181),
TestFile("lora/test_lora_llama4.py", 600),
TestFile("test_disaggregation.py", 499),
TestFile("test_disaggregation_different_tp.py", 155),
TestFile("test_full_deepseek_v3.py", 333),