diff --git a/vllm-v0.6.2/vllm/model_executor/models/llama.py b/vllm-v0.6.2/vllm/model_executor/models/llama.py
index e53631e..27a9301 100644
--- a/vllm-v0.6.2/vllm/model_executor/models/llama.py
+++ b/vllm-v0.6.2/vllm/model_executor/models/llama.py
@@ -404,6 +404,12 @@ class LlamaModel(nn.Module):
                 if is_pp_missing_parameter(name, self):
                     continue
 
+                if name not in params_dict:
+                    logger.warning(
+                        "Skipping weight %s not present in the model",
+                        name)
+                    continue
+
                 param = params_dict[name]
                 weight_loader = getattr(param, "weight_loader",
                                         default_weight_loader)