[CI]Add EPLB CI. (#3568)
### What this PR does / why we need it?
1. Add an EPLB CI job to catch regressions in the EPLB feature.
2. Add parameter checking for the EPLB params.

### Does this PR introduce _any_ user-facing change?

### How was this patch tested?
Tested with Qwen on A3 hardware.

- vLLM version: v0.11.0rc3
- vLLM main: https://github.com/vllm-project/vllm/commit/v0.11.0

---------

Signed-off-by: offline0806 <3337230449@qq.com>
Co-authored-by: offline0806 <3337230449@qq.com>
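At a glance, the new `EPLBParamUtils` class in the diff below validates four knobs. A short sketch of the contract (the paths and values here are illustrative, not from this PR):

```python
from vllm_ascend.eplb.core.eplb_utils import EPLBParamUtils

# Iteration counts must be positive ints no larger than sys.maxsize.
EPLBParamUtils.check_iterations(400)

# dynamic_eplb=True additionally requires the env var DYNAMIC_EPLB="true".
EPLBParamUtils.check_dynamic_eplb(True)

# expert_map must be an existing, readable .json file.
EPLBParamUtils.check_expert_map_path("/path/to/expert_map.json")

# expert_map_record_path must be a writable .json path and requires
# the env var EXPERT_MAP_RECORD="true".
EPLBParamUtils.check_expert_map_record_path("/tmp/expert_map_record.json")
```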
@@ -15,7 +15,9 @@
 # This file is a part of the vllm-ascend project.
 #
 # Todo: Once https://github.com/vllm-project/vllm/issues/22246 is resolved in vLLM, remove these EPLB utils.
+import os.path
 import random
+import sys
 
 import torch
 from vllm.logger import logger
@@ -117,3 +119,72 @@ def determine_default_log2phy_map(global_expert_num, world_size, rank_id,
     log2phy_map_all = generate_log2phy_map(expert_map_all)
 
     return log2phy_map_all[rank_id]
+
+
+class EPLBParamUtils:
+    """Validation helpers for EPLB-related parameters."""
+
+    @staticmethod
+    def check_iterations(iterations):
+        if not isinstance(iterations, int):
+            raise TypeError(f"iterations must be an int, got {iterations!r}.")
+        if iterations <= 0:
+            raise ValueError(
+                f"iterations must be greater than 0, got {iterations}.")
+        if iterations > sys.maxsize:
+            raise ValueError(
+                f"iterations must not exceed {sys.maxsize}, got {iterations}.")
+
+    @staticmethod
+    def check_dynamic_eplb(dynamic_eplb):
+        if dynamic_eplb is None:
+            return
+        if not isinstance(dynamic_eplb, bool):
+            raise TypeError("dynamic_eplb must be a bool.")
+        if dynamic_eplb and os.getenv("DYNAMIC_EPLB", "false") != "true":
+            raise ValueError(
+                'dynamic_eplb can only be enabled when the environment '
+                'variable DYNAMIC_EPLB="true" is exported.')
+
+    @staticmethod
+    def check_expert_map_path(expert_map):
+        if expert_map is None:
+            return
+        if not isinstance(expert_map, str):
+            raise TypeError("expert_map must be a str.")
+        if not expert_map.strip():
+            raise ValueError("expert_map must not be empty.")
+        _, ext = os.path.splitext(expert_map)
+        if ext.lower() != ".json":
+            raise TypeError("expert_map must be a .json file.")
+        if not os.path.exists(expert_map):
+            raise ValueError(f"expert_map {expert_map} does not exist.")
+        try:
+            # Open read-only; opening with "w" would truncate the file.
+            with open(expert_map, "r", encoding="utf-8") as f:
+                f.read()
+        except Exception as e:
+            raise IOError(
+                f"Failed to read expert info from {expert_map}; please check "
+                f"the read permission of {expert_map}: {e}")
+
+    @staticmethod
+    def check_expert_map_record_path(expert_map_record_path):
+        if expert_map_record_path is None:
+            return
+        if not isinstance(expert_map_record_path, str):
+            raise TypeError("expert_map_record_path must be a str.")
+        if not expert_map_record_path.strip():
+            raise ValueError("expert_map_record_path must not be empty.")
+        _, ext = os.path.splitext(expert_map_record_path)
+        if ext.lower() != ".json":
+            raise TypeError("expert_map_record_path must be a .json file.")
+        if os.getenv("EXPERT_MAP_RECORD", "false") != "true":
+            raise ValueError(
+                'expert_map_record_path can only be set when the environment '
+                'variable EXPERT_MAP_RECORD="true" is exported.')
+        try:
+            # Probe writability; note this truncates any existing file.
+            with open(expert_map_record_path, "w", encoding="utf-8") as f:
+                f.write("")
+        except Exception as e:
+            raise IOError(
+                f"Failed to write expert info to {expert_map_record_path}; "
+                f"please check the write permission of "
+                f"{expert_map_record_path}: {e}")
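For reference, a minimal sketch of the kind of unit checks the new EPLB CI could exercise against these validators (the pytest harness and test names are illustrative, not taken from this PR):

```python
import pytest

from vllm_ascend.eplb.core.eplb_utils import EPLBParamUtils


def test_check_iterations_rejects_bad_values():
    with pytest.raises(TypeError):
        EPLBParamUtils.check_iterations("100")  # not an int
    with pytest.raises(ValueError):
        EPLBParamUtils.check_iterations(0)      # must be > 0


def test_check_dynamic_eplb_requires_env(monkeypatch):
    monkeypatch.delenv("DYNAMIC_EPLB", raising=False)
    with pytest.raises(ValueError):
        EPLBParamUtils.check_dynamic_eplb(True)
    monkeypatch.setenv("DYNAMIC_EPLB", "true")
    EPLBParamUtils.check_dynamic_eplb(True)     # passes once exported
```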
@@ -21,6 +21,7 @@ import torch.distributed as dist
 import vllm.envs as envs
 from vllm.logger import logger
 
+from vllm_ascend.eplb.core.eplb_utils import EPLBParamUtils
 from vllm_ascend.eplb.core.eplb_worker import EplbProcess
@@ -44,6 +45,7 @@ class EplbUpdator:
         self.num_expert_load_gather = 10
         self.periodic_load_gather = True
         self.num_iterations_eplb_update: torch.int64 = self.ascend_config.num_iterations_eplb_update
+        EPLBParamUtils.check_iterations(self.num_iterations_eplb_update)
         self.expert_map_path = expert_map_path
         self.expert_map_record_path = self.ascend_config.expert_map_record_path
@@ -64,6 +66,7 @@ class EplbUpdator:
         self.cur_iterations: torch.int64 = 0
 
         self.num_wait_worker_iterations: torch.int64 = self.ascend_config.num_wait_worker_iterations
+        EPLBParamUtils.check_iterations(self.num_wait_worker_iterations)
 
         self.process = process
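Both values checked here come from the Ascend config; a hedged sketch of how a deployment might supply them (the key names mirror the `ascend_config` attributes read above; the `--additional-config` plumbing is assumed from vllm-ascend's usual config path and is not part of this diff):

```python
# Illustrative --additional-config payload for an EPLB deployment.
eplb_overrides = {
    "num_iterations_eplb_update": 400,  # must pass check_iterations
    "num_wait_worker_iterations": 30,   # must pass check_iterations
    "expert_map_record_path": "/tmp/expert_map_record.json",
}

# With an invalid value, e.g. "num_iterations_eplb_update": 0, the
# EplbUpdator constructor now fails fast with a ValueError instead of
# misbehaving later during load balancing.
```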
@@ -71,7 +71,7 @@ def model_register(model, model_config):
     if config.model_type == "qwen3_moe":
         model.num_moe_layers = config.num_hidden_layers
     elif config.model_type == "deepseek_v2" or config.model_type == "deepseek_v3":
-        num_dense_layers = config.first_k_dense_replace
-        model.num_moe_layers = config.num_hidden_layers - num_dense_layers
+        model.num_dense_layers = config.first_k_dense_replace
+        model.num_moe_layers = config.num_hidden_layers - model.num_dense_layers
     else:
         raise NotImplementedError("EPLB is not supported.")
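The change above promotes `num_dense_layers` from a local variable to a model attribute so other EPLB code can read it. A small worked example with DeepSeek-V3-like config fields (the layer counts are illustrative):

```python
from types import SimpleNamespace

# DeepSeek-style config: the first k layers are dense, the rest are MoE.
config = SimpleNamespace(model_type="deepseek_v3",
                         num_hidden_layers=61,
                         first_k_dense_replace=3)

model = SimpleNamespace()
model.num_dense_layers = config.first_k_dense_replace
model.num_moe_layers = config.num_hidden_layers - model.num_dense_layers
assert (model.num_dense_layers, model.num_moe_layers) == (3, 58)
```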