[CPU] Add gelu_and_mul kernel in sgl-kernel and add ut (#9300)

blzheng
2025-09-09 14:23:13 +08:00
committed by GitHub
parent 718f25ae6e
commit d1d4074c4e
5 changed files with 100 additions and 3 deletions

@@ -110,6 +110,14 @@ class GeluAndMul(CustomOp):
         d = x.shape[-1] // 2
         return F.gelu(x[..., :d], approximate=self.approximate) * x[..., d:]
 
+    def forward_cpu(self, x: torch.Tensor) -> torch.Tensor:
+        if _is_cpu_amx_available and self.approximate == "tanh":
+            return torch.ops.sgl_kernel.gelu_tanh_and_mul_cpu(x)
+        elif _is_cpu_amx_available and self.approximate == "none":
+            return torch.ops.sgl_kernel.gelu_and_mul_cpu(x)
+        else:
+            return self.forward_native(x)
+
     def forward_cuda(self, x: torch.Tensor) -> torch.Tensor:
         return self._forward_impl(x)
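
The new forward_cpu path dispatches to the fused sgl-kernel CPU ops only when AMX is available, and otherwise falls back to forward_native, so both paths must agree numerically. Below is a minimal sketch of the kind of unit test this commit adds; it is not the actual test file from the PR. It assumes sgl-kernel is built with CPU support so that torch.ops.sgl_kernel.gelu_and_mul_cpu / gelu_tanh_and_mul_cpu are registered, and the reference function simply mirrors forward_native. The dtype, shape, and tolerances are illustrative.

import torch
import torch.nn.functional as F


def ref_gelu_and_mul(x: torch.Tensor, approximate: str) -> torch.Tensor:
    # Reference mirroring GeluAndMul.forward_native:
    # GELU over the first half of the last dim, multiplied by the second half.
    d = x.shape[-1] // 2
    return F.gelu(x[..., :d], approximate=approximate) * x[..., d:]


def check(approximate: str) -> None:
    # Hypothetical check; requires sgl-kernel with CPU ops registered.
    x = torch.randn(16, 256, dtype=torch.bfloat16)
    if approximate == "tanh":
        out = torch.ops.sgl_kernel.gelu_tanh_and_mul_cpu(x)
    else:
        out = torch.ops.sgl_kernel.gelu_and_mul_cpu(x)
    torch.testing.assert_close(
        out, ref_gelu_and_mul(x, approximate), atol=2e-2, rtol=2e-2
    )


if __name__ == "__main__":
    check("tanh")
    check("none")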