Add qwen2 tie word embedding (#630)
This commit is contained in:
@@ -313,6 +313,8 @@ class Qwen2ForCausalLM(nn.Module):
|
||||
param = params_dict[name]
|
||||
weight_loader = getattr(param, "weight_loader", default_weight_loader)
|
||||
weight_loader(param, loaded_weight)
|
||||
if self.config.tie_word_embeddings and name=="model.embed_tokens.weight":
|
||||
weight_loader(params_dict["lm_head.weight"], loaded_weight)
|
||||
|
||||
|
||||
# Module-level alias binding this file's model implementation.
# NOTE(review): presumably the marker a framework model-loader scans for to
# register this architecture — confirm against the loader; only a diff
# fragment of the defining file is visible here.
EntryClass = Qwen2ForCausalLM
|
||||
|
||||
Reference in New Issue
Block a user