Bug: Fix weight loader error when LM head weights are tied (#3766)

fzyzcjy
2025-02-22 09:53:12 +08:00
committed by GitHub
parent 14d90617b0
commit a3339d8cac
8 changed files with 16 additions and 0 deletions
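
Why the change is needed: with tie_word_embeddings enabled, a model's lm_head shares one tensor with the input embeddings, PyTorch's named_parameters() de-duplicates the shared parameter, and the params_dict built in load_weights() has no "lm_head.weight" entry, so a checkpoint that still stores the tied tensor made the loader fail on the lookup (a KeyError). Below is a minimal sketch of the failure mode and of the two-line guard each file adds; TinyCausalLM and its attributes are illustrative stand-ins, not the actual sglang model classes (those read the flag from self.config):

import torch
import torch.nn as nn

class TinyCausalLM(nn.Module):
    def __init__(self, vocab_size=100, hidden_size=16, tie_word_embeddings=True):
        super().__init__()
        self.tie_word_embeddings = tie_word_embeddings
        self.embed_tokens = nn.Embedding(vocab_size, hidden_size)
        self.lm_head = nn.Linear(hidden_size, vocab_size, bias=False)
        if tie_word_embeddings:
            # Tying shares one tensor between lm_head and embed_tokens, and
            # named_parameters() de-duplicates shared parameters, so the dict
            # built in load_weights() never contains "lm_head.weight".
            self.lm_head.weight = self.embed_tokens.weight

    def load_weights(self, weights):
        params_dict = dict(self.named_parameters())
        for name, loaded_weight in weights:
            # The guard this commit adds to each model: skip the tied tensor
            # instead of failing on the params_dict lookup below.
            if self.tie_word_embeddings and "lm_head.weight" in name:
                continue
            params_dict[name].data.copy_(loaded_weight)

# A checkpoint may still store the tied tensor on disk.
ckpt = {
    "embed_tokens.weight": torch.randn(100, 16),
    "lm_head.weight": torch.randn(100, 16),
}
TinyCausalLM().load_weights(ckpt.items())  # KeyError on "lm_head.weight" without the guard

Each of the eight diffs below inserts the same two-line guard ahead of that lookup.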

@@ -458,6 +458,8 @@ class LlamaForCausalLM(nn.Module):
                 continue
             if name.startswith("model.vision_tower") and name not in params_dict:
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             # Handle FP8 kv-scale remapping
             if "scale" in name:
                 name = maybe_remap_kv_scale_name(name, params_dict)

@@ -339,6 +339,8 @@ class MiniCPMForCausalLM(nn.Module):
                 # Models trained using ColossalAI may include these tensors in
                 # the checkpoint. Skip them.
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             for param_name, weight_name, shard_id in stacked_params_mapping:
                 if weight_name not in name:
                     continue

@@ -603,6 +603,8 @@ class MiniCPM3ForCausalLM(nn.Module):
                 # Models trained using ColossalAI may include these tensors in
                 # the checkpoint. Skip them.
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             for param_name, weight_name, shard_id in stacked_params_mapping:
                 if weight_name not in name:
                     continue

@@ -325,6 +325,8 @@ class OlmoForCausalLM(nn.Module):
                 # Models trained using ColossalAI may include these tensors in
                 # the checkpoint. Skip them.
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             for param_name, weight_name, shard_id in stacked_params_mapping:
                 if weight_name not in name:
                     continue

@@ -433,6 +433,8 @@ class Phi3SmallForCausalLM(nn.Module):
                 continue
             if name.endswith(".bias") and name not in params_dict:
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             param = params_dict[name]
             weight_loader = getattr(param, "weight_loader", default_weight_loader)
             weight_loader(param, loaded_weight)

@@ -377,6 +377,8 @@ class Qwen2ForCausalLM(nn.Module):
                 # Models trained using ColossalAI may include these tensors in
                 # the checkpoint. Skip them.
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             if name.startswith("model.vision_tower") and name not in params_dict:
                 continue
             for param_name, weight_name, shard_id in stacked_params_mapping:

@@ -586,6 +586,8 @@ class Qwen2VLForConditionalGeneration(nn.Module):
         for name, loaded_weight in weights:
             if "rotary_emb.inv_freq" in name:
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             for param_name, weight_name, shard_id in stacked_params_mapping:
                 if weight_name not in name:
                     continue

@@ -486,6 +486,8 @@ class TorchNativeLlamaForCausalLM(nn.Module):
                 continue
             if name.startswith("model.vision_tower") and name not in params_dict:
                 continue
+            if self.config.tie_word_embeddings and "lm_head.weight" in name:
+                continue
             for param_name, weight_name, shard_id in stacked_params_mapping:
                 if weight_name not in name:
                     continue