### What this PR does / why we need it?
**Scope of Changes**:
| File Path |
| :--- |
| `vllm_ascend/attention/attention_mask.py` |
| `vllm_ascend/attention/attention_v1.py` |
| `vllm_ascend/attention/context_parallel/attention_cp.py` |
| `vllm_ascend/attention/context_parallel/common_cp.py` |
| `vllm_ascend/attention/context_parallel/mla_cp.py` |
| `vllm_ascend/attention/utils.py` |
| `vllm_ascend/batch_invariant.py` |
| `vllm_ascend/device/device_op.py` |
| `vllm_ascend/device_allocator/camem.py` |
| `vllm_ascend/envs.py` |
- vLLM version: v0.13.0
- vLLM main: 2c24bc6996
---------
Signed-off-by: MrZ20 <2609716663@qq.com>
This commit is contained in:
@@ -49,11 +49,9 @@ line-length = 120
# Folder to be modified
exclude = [
    "tests/**",
    "vllm_ascend/_cann_ops_custom",
    "vllm_ascend/attention",
    "vllm_ascend/attention/mla_v1.py",
    "vllm_ascend/attention/sfa_v1.py",
    "vllm_ascend/core",
    "vllm_ascend/device",
    "vllm_ascend/device_allocator",
    "vllm_ascend/distributed",
    "vllm_ascend/eplb",
    "vllm_ascend/kv_offload",
@@ -66,8 +64,6 @@ exclude = [
    "vllm_ascend/spec_decode",
    "vllm_ascend/worker",
    "vllm_ascend/xlite",
    "vllm_ascend/envs.py",
    "vllm_ascend/batch_invariant.py",
]

[tool.ruff.lint]
Reference in New Issue
Block a user