forked from EngineX-Cambricon/enginex-mlu370-vllm
fix: handle missing tie_word_embeddings attr in MPTConfig
Use getattr with default True for MPTConfig.tie_word_embeddings, as some MPT model configs lack this attribute. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -272,7 +272,7 @@ class MPTForCausalLM(nn.Module, SupportsPP):
         config = vllm_config.model_config.hf_config
         quant_config = vllm_config.quant_config
         self.config = config
-        assert config.tie_word_embeddings
+        assert getattr(config, "tie_word_embeddings", True)
         self.quant_config = quant_config

         self.transformer = MPTModel(vllm_config=vllm_config,
Reference in New Issue
Block a user