feat: remove vllm get_rope (#2964)

This commit is contained in:
Yineng Zhang
2025-01-18 19:38:01 +08:00
committed by GitHub
parent 6f98c586bd
commit 2add697d7a
30 changed files with 1028 additions and 219 deletions

View File

@@ -47,7 +47,6 @@ import torch
from torch import nn
from torch.nn.parameter import Parameter
from transformers import LlamaConfig
-from vllm.model_executor.layers.rotary_embedding import get_rope
from sglang.srt.distributed import (
get_tensor_model_parallel_rank,
@@ -58,6 +57,7 @@ from sglang.srt.layers.layernorm import RMSNorm
from sglang.srt.layers.logits_processor import LogitsProcessor, LogitsProcessorOutput
from sglang.srt.layers.quantization.base_config import QuantizationConfig
from sglang.srt.layers.radix_attention import RadixAttention
+from sglang.srt.layers.rotary_embedding import get_rope
from sglang.srt.layers.vocab_parallel_embedding import (
ParallelLMHead,
VocabParallelEmbedding,