[Refact.]: Refactor some leftover implementations of 300I DUO in the main branch. (#6425)
### What this PR does / why we need it?
- Replace the RoPE operator implementation (a reference sketch of the RoPE math follows this list).
- Refactor some leftover implementations of 300I DUO in the main branch.
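For context on the first bullet: RoPE rotates each (even, odd) pair of query/key channels by a position-dependent angle. Below is a minimal pure-PyTorch sketch of that math, not the Ascend operator this PR touches; the `apply_rope` name, the interleaved-pair layout, and the `base=10000.0` default are our assumptions for illustration.

```python
import torch


def apply_rope(x: torch.Tensor, positions: torch.Tensor,
               base: float = 10000.0) -> torch.Tensor:
    """Rotate interleaved (even, odd) channel pairs by position-dependent angles.

    x: [num_tokens, num_heads, head_dim] with even head_dim.
    positions: [num_tokens] integer token positions.
    """
    head_dim = x.shape[-1]
    # theta_i = base^(-2i / head_dim) for each channel pair i.
    inv_freq = base ** (-torch.arange(0, head_dim, 2, dtype=torch.float32) / head_dim)
    angles = positions.to(torch.float32)[:, None] * inv_freq  # [num_tokens, head_dim // 2]
    cos = angles.cos()[:, None, :]  # add a heads axis for broadcasting
    sin = angles.sin()[:, None, :]
    x1 = x[..., 0::2].to(torch.float32)
    x2 = x[..., 1::2].to(torch.float32)
    out = torch.empty_like(x, dtype=torch.float32)
    out[..., 0::2] = x1 * cos - x2 * sin  # 2-D rotation of each pair
    out[..., 1::2] = x1 * sin + x2 * cos
    return out.to(x.dtype)


q = torch.randn(5, 8, 64)
rotated = apply_rope(q, torch.arange(5))
assert rotated.shape == q.shape
```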
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
- vLLM version: v0.14.1
- vLLM main: dc917cceb8
---------
Signed-off-by: Tflowers-0129 <2906339855@qq.com>
```diff
@@ -18,13 +18,13 @@
 import torch
 import torch.nn.functional as F
 
-from vllm_ascend.ops.activation import AscendSiluAndMul as _Base
+from vllm_ascend.ops.activation import AscendSiluAndMul
 
 
-class AscendSiluAndMul310(_Base):
+class AscendSiluAndMul310(AscendSiluAndMul):
     def forward(self, x: torch.Tensor) -> torch.Tensor:
         torch.ops.vllm.maybe_prefetch_mlp_down_proj(x)
         h = x.shape[-1] // 2
-        out = (F.silu(x[..., :h].to(torch.float32)) * x[..., h:].to(torch.float32)).to(torch.float16)
+        out = F.silu(x[..., :h]) * x[..., h:]
         torch.ops.vllm.maybe_wait_prefetch_done(out)
         return out
```
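The rewritten `forward` is the standard SiLU-and-mul (SwiGLU gating) wrapped in vLLM's MLP weight-prefetch hooks. The `torch.ops.vllm.maybe_prefetch_mlp_down_proj` / `maybe_wait_prefetch_done` ops only exist inside a running vLLM instance, so here is a standalone sketch of just the math; the `silu_and_mul` helper name is ours, not from the patch.

```python
import torch
import torch.nn.functional as F


def silu_and_mul(x: torch.Tensor) -> torch.Tensor:
    # Split the fused projection output in half along the last dim:
    # SiLU-activate the first half, then multiply it into the second half.
    h = x.shape[-1] // 2
    return F.silu(x[..., :h]) * x[..., h:]


x = torch.randn(4, 2048)       # e.g. a [tokens, 2 * intermediate] activation
out = silu_and_mul(x)
assert out.shape == (4, 1024)  # output has half the channels of the input
```

As reconstructed above, the new line also drops the float32 upcast and float16 downcast around the gating, so the op runs in the tensor's native dtype.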