[OPS] add bmm_transpose ops (#3990)

### What this PR does / why we need it?
Add a new fused op to custom_op, which combines torch.bmm() and
transpose to achieve better performance. This op is used in mla_v1 to
replace the separate bmm and transpose operations.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?


- vLLM version: v0.11.2

---------

Signed-off-by: hust17yixuan <303660421@qq.com>
This commit is contained in:
Wang Yixuan
2025-12-01 09:09:51 +08:00
committed by GitHub
parent bc67696a02
commit c68ddc11ce
15 changed files with 1737 additions and 14 deletions

View File

@@ -19,7 +19,7 @@ locale = "en"
extend-ignore-identifiers-re = [".*Unc.*", ".*_thw",
".*UE8M0.*", ".*[UE4M3|ue4m3].*", ".*eles.*", ".*fo.*", ".*ba.*",
".*ot.*", ".*[Tt]h[rR].*"]
extend-ignore-words-re = ["CANN", "cann"]
extend-ignore-words-re = ["CANN", "cann", "ND"]
extend-ignore-re = []
[default.extend-identifiers]