[Lint]Style: Convert root, benchmarks, tools and docs to ruff format (#5843)

### What this PR does / why we need it?
This PR fixes linting issues in the root directory, benchmarks/, tools/
and docs/ to align with the project's Ruff configuration.

This is part of a gradual effort to enable full linting coverage across
the repository. The corresponding paths have been removed from the
exclude list in pyproject.toml.

### Does this PR introduce _any_ user-facing change?

### How was this patch tested?

- vLLM version: v0.13.0
- vLLM main:
2f4e6548ef

---------

Signed-off-by: root <root@LAPTOP-VQKDDVMG.localdomain>
Co-authored-by: root <root@LAPTOP-VQKDDVMG.localdomain>
This commit is contained in:
SILONG ZENG
2026-01-13 15:29:34 +08:00
committed by GitHub
parent 4b679984de
commit 523e83016b
14 changed files with 425 additions and 531 deletions

109
setup.py
View File

@@ -23,7 +23,6 @@ import os
import subprocess
import sys
from sysconfig import get_paths
from typing import Dict, List
from setuptools import Command, Extension, find_packages, setup
from setuptools.command.build_ext import build_ext
@@ -45,15 +44,13 @@ ROOT_DIR = os.path.dirname(__file__)
logger = logging.getLogger(__name__)
def check_or_set_default_env(cmake_args,
env_name,
env_variable,
default_path=""):
def check_or_set_default_env(cmake_args, env_name, env_variable, default_path=""):
if env_variable is None:
logging.warning(
f"No {env_name} found in your environment, pleause try to set {env_name} "
"if you customize the installation path of this library, otherwise default "
"path will be adapted during build this project")
"path will be adapted during build this project"
)
logging.warning(f"Set default {env_name}: {default_path}")
env_variable = default_path
else:
@@ -65,25 +62,27 @@ def check_or_set_default_env(cmake_args,
return cmake_args
def get_value_from_lines(lines: List[str], key: str) -> str:
def get_value_from_lines(lines: list[str], key: str) -> str:
for line in lines:
line = ' '.join(line.split())
line = " ".join(line.split())
if key in line:
return line.split(':')[-1].strip()
return line.split(":")[-1].strip()
return ""
def get_chip_type() -> str:
try:
npu_info_lines = subprocess.check_output(
['npu-smi', 'info', '-l']).decode().strip().split('\n')
npu_id = int(get_value_from_lines(npu_info_lines, 'NPU ID'))
chip_info_lines = subprocess.check_output(
['npu-smi', 'info', '-t', 'board', '-i',
str(npu_id), '-c', '0']).decode().strip().split('\n')
chip_name = get_value_from_lines(chip_info_lines, 'Chip Name')
chip_type = get_value_from_lines(chip_info_lines, 'Chip Type')
npu_name = get_value_from_lines(chip_info_lines, 'NPU Name')
npu_info_lines = subprocess.check_output(["npu-smi", "info", "-l"]).decode().strip().split("\n")
npu_id = int(get_value_from_lines(npu_info_lines, "NPU ID"))
chip_info_lines = (
subprocess.check_output(["npu-smi", "info", "-t", "board", "-i", str(npu_id), "-c", "0"])
.decode()
.strip()
.split("\n")
)
chip_name = get_value_from_lines(chip_info_lines, "Chip Name")
chip_type = get_value_from_lines(chip_info_lines, "Chip Type")
npu_name = get_value_from_lines(chip_info_lines, "NPU Name")
if "310" in chip_name:
# 310P case
@@ -97,12 +96,10 @@ def get_chip_type() -> str:
else:
# A3 case
assert npu_name
return (chip_name + '_' + npu_name).lower()
return (chip_name + "_" + npu_name).lower()
else:
# TODO(zzzzwwjj): Currently, A5's chip name has not determined yet.
raise ValueError(
f"Unable to recognize chip name: {chip_name}, please manually set env SOC_VERSION"
)
raise ValueError(f"Unable to recognize chip name: {chip_name}, please manually set env SOC_VERSION")
except subprocess.CalledProcessError as e:
raise RuntimeError(f"Get chip info failed: {e}")
except FileNotFoundError:
@@ -112,8 +109,7 @@ def get_chip_type() -> str:
return ""
envs = load_module_from_path("envs",
os.path.join(ROOT_DIR, "vllm_ascend", "envs.py"))
envs = load_module_from_path("envs", os.path.join(ROOT_DIR, "vllm_ascend", "envs.py"))
soc_version = get_chip_type()
@@ -126,10 +122,8 @@ if not envs.SOC_VERSION:
)
envs.SOC_VERSION = soc_version
else:
if soc_version and envs.SOC_VERSION != soc_version:
logging.warning(
f"env SOC_VERSION: {envs.SOC_VERSION} is not equal to soc_version from npu-smi: {soc_version}"
)
if soc_version and soc_version != envs.SOC_VERSION:
logging.warning(f"env SOC_VERSION: {envs.SOC_VERSION} is not equal to soc_version from npu-smi: {soc_version}")
def gen_build_info():
@@ -167,30 +161,24 @@ def gen_build_info():
package_dir = os.path.join(ROOT_DIR, "vllm_ascend", "_build_info.py")
with open(package_dir, "w+") as f:
f.write('# Auto-generated file\n')
f.write("# Auto-generated file\n")
f.write(f"__device_type__ = '{device_type}'\n")
logging.info(f"Generated _build_info.py with SOC version: {soc_version}")
class CMakeExtension(Extension):
def __init__(self,
name: str,
cmake_lists_dir: str = ".",
**kwargs) -> None:
def __init__(self, name: str, cmake_lists_dir: str = ".", **kwargs) -> None:
super().__init__(name, sources=[], py_limited_api=False, **kwargs)
self.cmake_lists_dir = os.path.abspath(cmake_lists_dir)
class custom_develop(develop):
def run(self):
gen_build_info()
super().run()
class custom_build_info(build_py):
def run(self):
gen_build_info()
super().run()
@@ -209,8 +197,7 @@ class build_and_install_aclnn(Command):
def run(self):
try:
print("Running bash build_aclnn.sh ...")
subprocess.check_call(
["bash", "csrc/build_aclnn.sh", ROOT_DIR, envs.SOC_VERSION])
subprocess.check_call(["bash", "csrc/build_aclnn.sh", ROOT_DIR, envs.SOC_VERSION])
print("buid_aclnn.sh executed successfully!")
except subprocess.CalledProcessError as e:
print(f"Error running build_aclnn.sh: {e}")
@@ -219,7 +206,7 @@ class build_and_install_aclnn(Command):
class cmake_build_ext(build_ext):
# A dict of extension directories that have been configured.
did_config: Dict[str, bool] = {}
did_config: dict[str, bool] = {}
#
# Determine number of compilation jobs
@@ -254,9 +241,9 @@ class cmake_build_ext(build_ext):
# Default use release mode to compile the csrc code
# Turbo now support compiled with Release, Debug and RelWithDebugInfo
if envs.CMAKE_BUILD_TYPE is None or envs.CMAKE_BUILD_TYPE not in [
"Debug",
"Release",
"RelWithDebugInfo",
"Debug",
"Release",
"RelWithDebugInfo",
]:
envs.CMAKE_BUILD_TYPE = "Release"
cmake_args += [f"-DCMAKE_BUILD_TYPE={envs.CMAKE_BUILD_TYPE}"]
@@ -278,20 +265,18 @@ class cmake_build_ext(build_ext):
)
# find PYTHON_EXECUTABLE
check_or_set_default_env(cmake_args, "PYTHON_EXECUTABLE",
sys.executable)
check_or_set_default_env(cmake_args, "PYTHON_EXECUTABLE", sys.executable)
# find PYTHON_INCLUDE_PATH
check_or_set_default_env(cmake_args, "PYTHON_INCLUDE_PATH",
get_paths()["include"])
check_or_set_default_env(cmake_args, "PYTHON_INCLUDE_PATH", get_paths()["include"])
# ccache and ninja can not be applied at ascendc kernels now
try:
# if pybind11 is installed via pip
pybind11_cmake_path = (subprocess.check_output(
[python_executable, "-m", "pybind11",
"--cmakedir"]).decode().strip())
pybind11_cmake_path = (
subprocess.check_output([python_executable, "-m", "pybind11", "--cmakedir"]).decode().strip()
)
except subprocess.CalledProcessError as e:
# else specify pybind11 path installed from source code on CI container
raise RuntimeError(f"CMake configuration failed: {e}")
@@ -309,8 +294,7 @@ class cmake_build_ext(build_ext):
"910c": "ascend910_9392",
"310p": "ascend310p1",
}
CANN_SOC_VERSION = soc_version_map.get(envs.SOC_VERSION,
envs.SOC_VERSION)
CANN_SOC_VERSION = soc_version_map.get(envs.SOC_VERSION, envs.SOC_VERSION)
cmake_args += [f"-DSOC_VERSION={CANN_SOC_VERSION}"]
# Override the base directory for FetchContent downloads to $ROOT/.deps
@@ -323,8 +307,7 @@ class cmake_build_ext(build_ext):
torch_npu_command = "python3 -m pip show torch-npu | grep '^Location:' | awk '{print $2}'"
try:
torch_npu_path = subprocess.check_output(
torch_npu_command, shell=True).decode().strip()
torch_npu_path = subprocess.check_output(torch_npu_command, shell=True).decode().strip()
torch_npu_path += "/torch_npu"
except subprocess.CalledProcessError as e:
raise RuntimeError(f"Retrieve torch version version failed: {e}")
@@ -399,22 +382,21 @@ class cmake_build_ext(build_ext):
# copy back to build folder for editable build
if isinstance(self.distribution.get_command_obj("develop"), develop):
import shutil
for root, _, files in os.walk(self.build_temp):
for file in files:
if file.endswith(".so"):
src_path = os.path.join(root, file)
dst_path = os.path.join(self.build_lib, "vllm_ascend",
file)
dst_path = os.path.join(self.build_lib, "vllm_ascend", file)
shutil.copy(src_path, dst_path)
print(f"Copy: {src_path} -> {dst_path}")
# copy back _cann_ops_custom directory
src_cann_ops_custom = os.path.join(ROOT_DIR, "vllm_ascend",
"_cann_ops_custom")
dst_cann_ops_custom = os.path.join(self.build_lib, "vllm_ascend",
"_cann_ops_custom")
src_cann_ops_custom = os.path.join(ROOT_DIR, "vllm_ascend", "_cann_ops_custom")
dst_cann_ops_custom = os.path.join(self.build_lib, "vllm_ascend", "_cann_ops_custom")
if os.path.exists(src_cann_ops_custom):
import shutil
if os.path.exists(dst_cann_ops_custom):
shutil.rmtree(dst_cann_ops_custom)
shutil.copytree(src_cann_ops_custom, dst_cann_ops_custom)
@@ -428,7 +410,6 @@ class cmake_build_ext(build_ext):
class custom_install(install):
def run(self):
self.run_command("build_ext")
install.run(self)
@@ -459,10 +440,10 @@ def read_readme() -> str:
return ""
def get_requirements() -> List[str]:
def get_requirements() -> list[str]:
"""Get Python package dependencies from requirements.txt."""
def _read_requirements(filename: str) -> List[str]:
def _read_requirements(filename: str) -> list[str]:
with open(get_path(filename)) as f:
requirements = f.read().strip().split("\n")
resolved_requirements = []
@@ -487,7 +468,7 @@ cmdclass = {
"build_py": custom_build_info,
"build_aclnn": build_and_install_aclnn,
"build_ext": cmake_build_ext,
"install": custom_install
"install": custom_install,
}
setup(
@@ -526,7 +507,7 @@ setup(
"vllm.general_plugins": [
"ascend_kv_connector = vllm_ascend:register_connector",
"ascend_model_loader = vllm_ascend:register_model_loader",
"ascend_service_profiling = vllm_ascend:register_service_profiling"
"ascend_service_profiling = vllm_ascend:register_service_profiling",
],
},
)