Remove fused_moe_grok (#2223)
3rdparty/amd/tuning/benchmark_moe_rocm.py (vendored)
@@ -10,7 +10,7 @@ import triton.language as tl
 from tqdm import tqdm
 from transformers import AutoConfig
 
-from sglang.srt.layers.fused_moe_grok.fused_moe import fused_moe, get_config_file_name
+from sglang.srt.layers.fused_moe_triton.fused_moe import fused_moe, get_config_file_name
 
 padding_size = 128 if bool(int(os.getenv("MOE_PADDING", "0"))) else 0
 
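For context on the retained line: `padding_size` follows a common environment-variable feature-flag pattern. `MOE_PADDING` defaults to "0" (disabled), and any nonzero integer enables a 128-element pad. Below is a minimal, self-contained sketch of the same pattern; the `pad_weight` helper and the example weight shape are illustrative assumptions, not part of this file.

    import os

    import torch

    # Same gating as the benchmark script: MOE_PADDING defaults to "0" (off);
    # e.g. MOE_PADDING=1 enables a 128-element pad.
    padding_size = 128 if bool(int(os.getenv("MOE_PADDING", "0"))) else 0

    def pad_weight(w: torch.Tensor) -> torch.Tensor:
        # Hypothetical helper for illustration: zero-pad the last dimension
        # by padding_size, the kind of alignment padding some ROCm MoE
        # kernels benefit from.
        if padding_size == 0:
            return w
        return torch.nn.functional.pad(w, (0, padding_size))

    weights = torch.randn(8, 14336, 4096)  # example (experts, intermediate, hidden)
    print(pad_weight(weights).shape)       # last dim is 4096 + 128 when MOE_PADDING=1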