[feat] Add small vocab table for EAGLE's draft model. (#3822)

Co-authored-by: Achazwl <323163497@qq.com>
Co-authored-by: Chayenne <zhaochen20@outlook.com>
This commit is contained in:
Zhousx
2025-03-03 10:58:45 +08:00
committed by GitHub
parent b7e274f2d9
commit 7fbab730bd
5 changed files with 129 additions and 8 deletions

View File

@@ -117,9 +117,14 @@ class LlamaForCausalLMEagle(LlamaForCausalLM):
if self.config.tie_word_embeddings:
self.lm_head = self.model.embed_tokens
else:
self.lm_head = ParallelLMHead(
config.vocab_size, config.hidden_size, quant_config=quant_config
)
if hasattr(config, "hot_vocab_size"):
self.lm_head = ParallelLMHead(
config.hot_vocab_size, config.hidden_size, quant_config=quant_config
)
else:
self.lm_head = ParallelLMHead(
config.vocab_size, config.hidden_size, quant_config=quant_config
)
self.logits_processor = LogitsProcessor(config)
def load_weights(self, weights: Iterable[Tuple[str, torch.Tensor]]):