Support Llama4 with non-uniform intermediate size across layers for… (#10047)
This commit is contained in:
@@ -136,6 +136,7 @@ suites = {
     "per-commit-8-gpu": [
         # Disabled because it hangs on the CI.
         # TestFile("ep/test_moe_ep.py", 181),
+        TestFile("lora/test_lora_llama4.py", 600),
         TestFile("test_disaggregation.py", 499),
         TestFile("test_disaggregation_different_tp.py", 155),
         TestFile("test_full_deepseek_v3.py", 333),
Reference in New Issue
Block a user