add deepseekv3 and llama4
This commit is contained in:
@@ -13,7 +13,7 @@ from transformers import GenerationConfig, PretrainedConfig
 from transformers.models.auto.image_processing_auto import (
     get_image_processor_config)
 from transformers.models.auto.modeling_auto import (
-    MODEL_FOR_CAUSAL_LM_MAPPING_NAMES)
+    MODEL_FOR_CAUSAL_LM_MAPPING_NAMES, MODEL_MAPPING_NAMES)
 from transformers.utils import CONFIG_NAME as HF_CONFIG_NAME

 from vllm.envs import VLLM_USE_MODELSCOPE
@@ -229,13 +229,16 @@ def get_config(
        model_type = MODEL_FOR_CAUSAL_LM_MAPPING_NAMES[config.model_type]
        config.update({"architectures": [model_type]})

    # Some composite config classes (e.g. Llama4Config) may not preserve
    # the 'architectures' field from config.json. Restore it from the
    # raw config_dict if needed.
    if config_format == ConfigFormat.HF:
        raw_archs = config_dict.get("architectures")
        if raw_archs and not getattr(config, "architectures", None):
            config.architectures = raw_archs
    # Architecture mapping for models without explicit architectures field
    if not getattr(config, "architectures", None):
        if config.model_type not in MODEL_MAPPING_NAMES:
            logger.warning(
                "Model config does not have a top-level 'architectures' "
                "field: expecting `hf_overrides={'architectures': "
                "['...']}` to be passed in engine args.")
        else:
            model_type = MODEL_MAPPING_NAMES[config.model_type]
            config.update({"architectures": [model_type]})

    patch_rope_scaling(config)
Reference in New Issue
Block a user