Upgrade to vllm 0.17.0 corex v4.1 overlay
This commit is contained in:
@@ -27,6 +27,7 @@ import torch
 import torch.nn as nn

 from vllm.config import VllmConfig
+from vllm.distributed import get_pp_group
 from vllm.model_executor.layers.logits_processor import LogitsProcessor
 from vllm.model_executor.models.llama import (
     LlamaDecoderLayer,
@@ -69,7 +70,7 @@ class TeleFLMForCausalLM(LlamaForCausalLM):
         super().__init__(vllm_config=vllm_config, prefix=prefix)
         # mup
         self.use_mup = self.config.use_mup
-        if self.use_mup:
+        if self.use_mup and get_pp_group().is_last_rank:
             self.mup_scale_factor = self.config.mup_scale_factor
             self.output_mult = self.config.output_mult / self.mup_scale_factor
             logit_scale = self.output_mult
||||
Reference in New Issue
Block a user