Remove vllm dependency in model config (#2809)

This commit is contained in:
Yunmeng
2025-01-09 17:51:56 +08:00
committed by GitHub
parent b5fb4ef58a
commit 656aed58c6
6 changed files with 370 additions and 14 deletions

View File

@@ -30,20 +30,15 @@ from transformers import (
 )
 from transformers.models.auto.modeling_auto import MODEL_FOR_CAUSAL_LM_MAPPING_NAMES
-try:
-    from vllm.transformers_utils.configs import ChatGLMConfig, DbrxConfig
-    from sglang.srt.configs import ExaoneConfig, Qwen2VLConfig
+from sglang.srt.configs import ChatGLMConfig, DbrxConfig, ExaoneConfig, Qwen2VLConfig
 
-    _CONFIG_REGISTRY: Dict[str, Type[PretrainedConfig]] = {
-        ChatGLMConfig.model_type: ChatGLMConfig,
-        DbrxConfig.model_type: DbrxConfig,
-        ExaoneConfig.model_type: ExaoneConfig,
-        Qwen2VLConfig.model_type: Qwen2VLConfig,
-    }
-except ImportError:
-    # We want this file to run without vllm dependency
-    _CONFIG_REGISTRY: Dict[str, Type[PretrainedConfig]] = {}
+_CONFIG_REGISTRY: Dict[str, Type[PretrainedConfig]] = {
+    ChatGLMConfig.model_type: ChatGLMConfig,
+    DbrxConfig.model_type: DbrxConfig,
+    ExaoneConfig.model_type: ExaoneConfig,
+    Qwen2VLConfig.model_type: Qwen2VLConfig,
+}
 for name, cls in _CONFIG_REGISTRY.items():
     with contextlib.suppress(ValueError):