Sync from v0.13
This commit is contained in:
243
docs/mkdocs/hooks/generate_argparse.py
Normal file
243
docs/mkdocs/hooks/generate_argparse.py
Normal file
@@ -0,0 +1,243 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
import importlib.metadata
|
||||
import importlib.util
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
from argparse import SUPPRESS, Action, HelpFormatter
|
||||
from collections.abc import Iterable
|
||||
from importlib.machinery import ModuleSpec
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Literal
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from pydantic_core import core_schema
|
||||
|
||||
logger = logging.getLogger("mkdocs")
|
||||
|
||||
ROOT_DIR = Path(__file__).parent.parent.parent.parent
|
||||
ARGPARSE_DOC_DIR = ROOT_DIR / "docs/argparse"
|
||||
|
||||
sys.path.insert(0, str(ROOT_DIR))
|
||||
|
||||
|
||||
def mock_if_no_torch(mock_module: str, mock: MagicMock):
    """Install *mock* under *mock_module* in ``sys.modules``, but only when
    torch is not available in the current environment."""
    torch_available = importlib.util.find_spec("torch") is not None
    if torch_available:
        return
    sys.modules[mock_module] = mock
|
||||
|
||||
|
||||
# Mock custom op code
|
||||
class MockCustomOp:
    """Stand-in for ``vllm.model_executor.custom_op.CustomOp``.

    Its ``register`` decorator factory returns the class unchanged, so
    decorated ops can be imported without torch being available.
    """

    @staticmethod
    def register(name):
        # The real decorator wraps the op; the mock is a no-op pass-through.
        def passthrough(cls):
            return cls

        return passthrough
|
||||
|
||||
|
||||
# Mock modules that need torch so importing vLLM works without it installed.
_TORCH_MOCKS = {
    "vllm._C": MagicMock(),
    "vllm.model_executor.custom_op": MagicMock(CustomOp=MockCustomOp),
    "vllm.utils.torch_utils": MagicMock(
        direct_register_custom_op=lambda *a, **k: None
    ),
    # Make torch.nn.Parameter safe to inherit from
    "torch.nn": MagicMock(Parameter=object),
}
for _module_name, _module_mock in _TORCH_MOCKS.items():
    mock_if_no_torch(_module_name, _module_mock)

# Mock any version checks by reading from compiled CI requirements
with open(ROOT_DIR / "requirements/test.txt") as f:
    VERSIONS = dict(line.strip().split("==") for line in f if "==" in line)
importlib.metadata.version = lambda name: VERSIONS.get(name) or "0.0.0"
|
||||
|
||||
|
||||
class PydanticMagicMock(MagicMock):
    """A ``MagicMock`` that can also generate pydantic-core schemas.

    The ``name`` keyword is consumed and used as the mocked module's
    ``__spec__`` name, so the mock passes for an importable module.
    """

    def __init__(self, *args, **kwargs):
        module_name = kwargs.pop("name", None)
        super().__init__(*args, **kwargs)
        self.__spec__ = ModuleSpec(module_name, None)

    def __get_pydantic_core_schema__(self, source_type, handler):
        # Accept anything: doc generation never validates real values.
        return core_schema.any_schema()
|
||||
|
||||
|
||||
def auto_mock(module_name: str, attr: str, max_mocks: int = 100):
    """Import ``attr`` from ``module_name``, mocking missing modules on the fly.

    Each ``ModuleNotFoundError`` is resolved by installing a
    ``PydanticMagicMock`` for the missing module and retrying, up to
    ``max_mocks`` times.

    Args:
        module_name: Dotted module path to import from.
        attr: Attribute (or submodule) of ``module_name`` to return.
        max_mocks: Maximum number of modules to mock before giving up.

    Returns:
        The requested attribute, or the ``module_name.attr`` submodule.

    Raises:
        ImportError: If the import still fails after ``max_mocks`` mocks.
    """
    logger.info("Importing %s from %s", attr, module_name)

    for _ in range(max_mocks):
        try:
            module = importlib.import_module(module_name)

            # First treat attr as an attr, then as a submodule
            if hasattr(module, attr):
                return getattr(module, attr)

            return importlib.import_module(f"{module_name}.{attr}")
        except ModuleNotFoundError as e:
            assert e.name is not None
            logger.info("Mocking %s for argparse doc generation", e.name)
            sys.modules[e.name] = PydanticMagicMock(name=e.name)
        except Exception:
            # BUG FIX: the format string had three placeholders ("%s.%s: %s")
            # but only two arguments, which made the logging call itself fail.
            # logger.exception already appends the traceback.
            logger.exception("Failed to import %s.%s", module_name, attr)

    raise ImportError(
        f"Failed to import {module_name}.{attr} after mocking {max_mocks} imports"
    )
|
||||
|
||||
|
||||
bench_latency = auto_mock("vllm.benchmarks", "latency")
|
||||
bench_serve = auto_mock("vllm.benchmarks", "serve")
|
||||
bench_sweep_plot = auto_mock("vllm.benchmarks.sweep.plot", "SweepPlotArgs")
|
||||
bench_sweep_plot_pareto = auto_mock(
|
||||
"vllm.benchmarks.sweep.plot_pareto", "SweepPlotParetoArgs"
|
||||
)
|
||||
bench_sweep_serve = auto_mock("vllm.benchmarks.sweep.serve", "SweepServeArgs")
|
||||
bench_sweep_serve_sla = auto_mock(
|
||||
"vllm.benchmarks.sweep.serve_sla", "SweepServeSLAArgs"
|
||||
)
|
||||
bench_throughput = auto_mock("vllm.benchmarks", "throughput")
|
||||
AsyncEngineArgs = auto_mock("vllm.engine.arg_utils", "AsyncEngineArgs")
|
||||
EngineArgs = auto_mock("vllm.engine.arg_utils", "EngineArgs")
|
||||
ChatCommand = auto_mock("vllm.entrypoints.cli.openai", "ChatCommand")
|
||||
CompleteCommand = auto_mock("vllm.entrypoints.cli.openai", "CompleteCommand")
|
||||
openai_cli_args = auto_mock("vllm.entrypoints.openai", "cli_args")
|
||||
openai_run_batch = auto_mock("vllm.entrypoints.openai", "run_batch")
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from vllm.utils.argparse_utils import FlexibleArgumentParser
|
||||
else:
|
||||
FlexibleArgumentParser = auto_mock(
|
||||
"vllm.utils.argparse_utils", "FlexibleArgumentParser"
|
||||
)
|
||||
|
||||
|
||||
class MarkdownFormatter(HelpFormatter):
    """Help formatter that emits Markdown instead of plain argparse text.

    Sections become headings; each argument becomes a sub-heading followed by
    its possible choices, help text, and default value.
    """

    def __init__(self, prog: str, starting_heading_level: int = 3):
        super().__init__(prog, max_help_position=sys.maxsize, width=sys.maxsize)

        self._section_heading_prefix = "#" * starting_heading_level
        self._argument_heading_prefix = "#" * (starting_heading_level + 1)
        self._markdown_output = []

    def start_section(self, heading: str):
        # The two default argparse groups are implicit in the docs layout.
        if heading in {"positional arguments", "options"}:
            return
        self._markdown_output.append(
            f"\n{self._section_heading_prefix} {heading}\n\n"
        )

    def end_section(self):
        pass

    def add_text(self, text: str):
        if text:
            self._markdown_output.append(f"{text.strip()}\n\n")

    def add_usage(self, usage, actions, groups, prefix=None):
        # Usage strings are not useful in the generated reference docs.
        pass

    def add_arguments(self, actions: Iterable[Action]):
        for action in actions:
            # Skip positionals and the auto-generated --help flag.
            if not action.option_strings or "--help" in action.option_strings:
                continue

            flags = f"`{'`, `'.join(action.option_strings)}`"
            self._markdown_output.append(
                f"{self._argument_heading_prefix} {flags}\n\n"
            )

            choices = action.choices
            metavar = action.metavar
            if choices:
                rendered = f"`{'`, `'.join(str(c) for c in choices)}`"
                self._markdown_output.append(f"Possible choices: {rendered}\n\n")
            elif metavar and isinstance(metavar, (list, tuple)):
                rendered = f"`{'`, `'.join(str(m) for m in metavar)}`"
                self._markdown_output.append(f"Possible choices: {rendered}\n\n")

            if action.help:
                self._markdown_output.append(f"{action.help}\n\n")

            default = action.default
            if default != SUPPRESS:
                # Make empty string defaults visible
                shown = '""' if default == "" else default
                self._markdown_output.append(f"Default: `{shown}`\n\n")

    def format_help(self):
        """Return the accumulated markdown."""
        return "".join(self._markdown_output)
|
||||
|
||||
|
||||
def create_parser(add_cli_args, **kwargs) -> FlexibleArgumentParser:
    """Create a parser populated by ``add_cli_args`` with markdown formatting.

    Args:
        add_cli_args: Callable that registers CLI arguments on a parser,
            e.g. ``EngineArgs.add_cli_args``.
        **kwargs: Additional keyword arguments to pass to ``add_cli_args``.

    Returns:
        FlexibleArgumentParser: A parser with markdown formatting for the class.
    """
    try:
        parser = FlexibleArgumentParser(add_json_tip=False)
        parser.formatter_class = MarkdownFormatter
        # NOTE(review): DeviceConfig.__post_init__ is patched out — presumably
        # it inspects local hardware, which a docs build machine lacks; confirm.
        with patch("vllm.config.DeviceConfig.__post_init__"):
            _parser = add_cli_args(parser, **kwargs)
    except ModuleNotFoundError as e:
        # Auto-mock runtime imports: mock the module named in the deepest
        # traceback frame, then retry recursively.
        if tb_list := traceback.extract_tb(e.__traceback__):
            path = Path(tb_list[-1].filename).relative_to(ROOT_DIR)
            auto_mock(module_name=".".join(path.parent.parts), attr=path.stem)
            return create_parser(add_cli_args, **kwargs)
        else:
            raise e
    # add_cli_args might be in-place so return parser if _parser is None
    return _parser or parser
|
||||
|
||||
|
||||
def on_startup(command: Literal["build", "gh-deploy", "serve"], dirty: bool):
    """MkDocs hook: generate argparse reference docs before the build.

    Args:
        command: The MkDocs command being run.
        dirty: Whether this is a dirty (incremental) build.
    """
    logger.info("Generating argparse documentation")
    logger.debug("Root directory: %s", ROOT_DIR.resolve())
    logger.debug("Output directory: %s", ARGPARSE_DOC_DIR.resolve())

    # Create the ARGPARSE_DOC_DIR if it doesn't exist
    if not ARGPARSE_DOC_DIR.exists():
        ARGPARSE_DOC_DIR.mkdir(parents=True)

    # Create parsers to document
    parsers = {
        # Engine args
        "engine_args": create_parser(EngineArgs.add_cli_args),
        "async_engine_args": create_parser(
            AsyncEngineArgs.add_cli_args, async_args_only=True
        ),
        # CLI
        "serve": create_parser(openai_cli_args.make_arg_parser),
        "chat": create_parser(ChatCommand.add_cli_args),
        "complete": create_parser(CompleteCommand.add_cli_args),
        "run-batch": create_parser(openai_run_batch.make_arg_parser),
        # Benchmark CLI
        "bench_latency": create_parser(bench_latency.add_cli_args),
        "bench_serve": create_parser(bench_serve.add_cli_args),
        "bench_sweep_plot": create_parser(bench_sweep_plot.add_cli_args),
        "bench_sweep_plot_pareto": create_parser(bench_sweep_plot_pareto.add_cli_args),
        "bench_sweep_serve": create_parser(bench_sweep_serve.add_cli_args),
        "bench_sweep_serve_sla": create_parser(bench_sweep_serve_sla.add_cli_args),
        "bench_throughput": create_parser(bench_throughput.add_cli_args),
    }

    # Generate documentation for each parser
    for stem, parser in parsers.items():
        doc_path = ARGPARSE_DOC_DIR / f"{stem}.inc.md"
        # Specify encoding for building on Windows
        with open(doc_path, "w", encoding="utf-8") as f:
            # Call format_help on the parser's parent class, skipping the
            # parser class's own override — NOTE(review): presumably
            # FlexibleArgumentParser customizes format_help; confirm.
            f.write(super(type(parser), parser).format_help())
        logger.info("Argparse generated: %s", doc_path.relative_to(ROOT_DIR))


if __name__ == "__main__":
    on_startup("build", False)
|
||||
233
docs/mkdocs/hooks/generate_examples.py
Normal file
233
docs/mkdocs/hooks/generate_examples.py
Normal file
@@ -0,0 +1,233 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
import itertools
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from functools import cached_property
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
import regex as re
|
||||
|
||||
logger = logging.getLogger("mkdocs")
|
||||
|
||||
ROOT_DIR = Path(__file__).parent.parent.parent.parent
|
||||
ROOT_DIR_RELATIVE = "../../../../.."
|
||||
EXAMPLE_DIR = ROOT_DIR / "examples"
|
||||
EXAMPLE_DOC_DIR = ROOT_DIR / "docs/examples"
|
||||
|
||||
|
||||
def title(text: str) -> str:
    """Convert a file/directory stem into a human-readable document title.

    Underscores become spaces, path separators become " - ", and well-known
    acronyms/brand names get their canonical capitalization.
    """
    # Default title case
    result = text.replace("_", " ").replace("/", " - ").title()

    # Custom substitutions: plain strings, or callables for regex patterns.
    substitutions = {
        "io": "IO",
        "api": "API",
        "cli": "CLI",
        "cpu": "CPU",
        "llm": "LLM",
        "mae": "MAE",
        "ner": "NER",
        "tpu": "TPU",
        "gguf": "GGUF",
        "lora": "LoRA",
        "rlhf": "RLHF",
        "vllm": "vLLM",
        "openai": "OpenAI",
        "lmcache": "LMCache",
        "multilora": "MultiLoRA",
        "mlpspeculator": "MLPSpeculator",
        r"fp\d+": lambda m: m.group(0).upper(),  # e.g. fp16, fp32
        r"int\d+": lambda m: m.group(0).upper(),  # e.g. int8, int16
    }
    for pattern, replacement in substitutions.items():
        result = re.sub(rf"\b{pattern}\b", replacement, result, flags=re.IGNORECASE)
    return result
|
||||
|
||||
|
||||
@dataclass
class Example:
    """
    Example class for generating documentation content from a given path.

    Attributes:
        path (Path): The path to the main directory or file.
        category (str): The category of the document.

    Properties:
        main_file() -> Path | None: Determines the main file in the given path.
        other_files() -> list[Path]: Determines other files in the directory excluding
            the main file.
        title() -> str: Determines the title of the document.

    Methods:
        generate() -> str: Generates the documentation content.
    """

    # Path of the example: a single script, or a directory of related files.
    path: Path
    # Name of the top-level examples/ subdirectory this example belongs to.
    category: str

    @cached_property
    def main_file(self) -> Path | None:
        """Determines the main file in the given path.

        If path is a file, it returns the path itself. If path is a directory, it
        searches for Markdown files (*.md) in the directory and returns the first one
        found. If no Markdown files are found, it returns None."""
        # Single file example
        if self.path.is_file():
            return self.path
        # Multi file example with a README
        if md_paths := list(self.path.glob("*.md")):
            return md_paths[0]
        # Multi file example without a README
        return None

    @cached_property
    def other_files(self) -> list[Path]:
        """Determine other files in the directory excluding the main file.

        If path is a file, it returns an empty list. Otherwise, it returns every file
        in the directory except the main file in a list."""
        # Single file example
        if self.path.is_file():
            return []
        # Multi file example: every file under the directory (recursively)
        # except the main file itself.
        is_other_file = lambda file: file.is_file() and file != self.main_file
        return sorted(file for file in self.path.rglob("*") if is_other_file(file))

    @cached_property
    def is_code(self) -> bool:
        # A non-markdown main file (e.g. *.py, *.sh) is rendered as code.
        return self.main_file is not None and self.main_file.suffix != ".md"

    @cached_property
    def title(self) -> str:
        """Title of the document: the README's H1, or a title-cased stem."""
        # Generate title from filename if no main md file found
        if self.main_file is None or self.is_code:
            return title(self.path.stem)
        # Specify encoding for building on Windows
        with open(self.main_file, encoding="utf-8") as f:
            first_line = f.readline().strip()
        match = re.match(r"^#\s+(?P<title>.+)$", first_line)
        if match:
            return match.group("title")
        raise ValueError(f"Title not found in {self.main_file}")

    def fix_relative_links(self, content: str) -> str:
        """
        Fix relative links in markdown content by converting them to gh-file
        format.

        Args:
            content (str): The markdown content to process

        Returns:
            str: Content with relative links converted to gh-file format
        """
        # Regex to match markdown links [text](relative_path)
        # This matches links that don't start with http, https, ftp, or #
        link_pattern = r"\[([^\]]*)\]\((?!(?:https?|ftp)://|#)([^)]+)\)"

        def replace_link(match):
            link_text = match.group(1)
            relative_path = match.group(2)

            # Make relative to repo root
            # NOTE: only reached when main_file is not None (see generate()).
            gh_file = (self.main_file.parent / relative_path).resolve()
            gh_file = gh_file.relative_to(ROOT_DIR)

            # Make GitHub URL
            url = "https://github.com/vllm-project/vllm/"
            url += "tree/main" if self.path.is_dir() else "blob/main"
            gh_url = f"{url}/{gh_file}"

            return f"[{link_text}]({gh_url})"

        return re.sub(link_pattern, replace_link, content)

    def generate(self) -> str:
        """Generates the documentation content for this example."""
        content = f"# {self.title}\n\n"
        url = "https://github.com/vllm-project/vllm/"
        url += "tree/main" if self.path.is_dir() else "blob/main"
        content += f"Source <{url}/{self.path.relative_to(ROOT_DIR)}>.\n\n"

        # Use long code fence to avoid issues with
        # included files containing code fences too
        code_fence = "``````"

        if self.main_file is not None:
            # Single file example or multi file example with a README
            if self.is_code:
                content += (
                    f"{code_fence}{self.main_file.suffix[1:]}\n"
                    f'--8<-- "{self.main_file}"\n'
                    f"{code_fence}\n"
                )
            else:
                with open(self.main_file, encoding="utf-8") as f:
                    # Skip the title from md snippets as it's been included above
                    main_content = f.readlines()[1:]
                content += self.fix_relative_links("".join(main_content))
            content += "\n"
        else:
            # Multi file example without a README: render every file inline.
            for file in self.other_files:
                file_title = title(str(file.relative_to(self.path).with_suffix("")))
                content += f"## {file_title}\n\n"
                content += (
                    f'{code_fence}{file.suffix[1:]}\n--8<-- "{file}"\n{code_fence}\n\n'
                )
            return content

        if not self.other_files:
            return content

        # Supporting files go in collapsible admonitions after the main doc.
        content += "## Example materials\n\n"
        for file in self.other_files:
            content += f'??? abstract "{file.relative_to(self.path)}"\n'
            if file.suffix != ".md":
                content += f"    {code_fence}{file.suffix[1:]}\n"
            content += f'    --8<-- "{file}"\n'
            if file.suffix != ".md":
                content += f"    {code_fence}\n"

        return content
|
||||
|
||||
|
||||
def on_startup(command: Literal["build", "gh-deploy", "serve"], dirty: bool):
    """MkDocs hook: render every example under ``examples/`` to markdown.

    Args:
        command: The MkDocs command being run.
        dirty: Whether this is a dirty (incremental) build.
    """
    logger.info("Generating example documentation")
    logger.debug("Root directory: %s", ROOT_DIR.resolve())
    logger.debug("Example directory: %s", EXAMPLE_DIR.resolve())
    logger.debug("Example document directory: %s", EXAMPLE_DOC_DIR.resolve())

    # Create the EXAMPLE_DOC_DIR if it doesn't exist
    if not EXAMPLE_DOC_DIR.exists():
        EXAMPLE_DOC_DIR.mkdir(parents=True)

    # Each top-level directory under examples/ is a category.
    categories = sorted(p for p in EXAMPLE_DIR.iterdir() if p.is_dir())

    examples = []
    glob_patterns = ["*.py", "*.md", "*.sh"]
    # Find categorised examples
    for category in categories:
        logger.info("Processing category: %s", category.stem)
        # Single-file examples directly inside the category directory.
        globs = [category.glob(pattern) for pattern in glob_patterns]
        for path in itertools.chain(*globs):
            examples.append(Example(path, category.stem))
        # Find examples in subdirectories
        # NOTE(review): a subdirectory with several matching files appears
        # once per match here; later writes share the same doc path, so the
        # last generate() wins — confirm this is intended.
        globs = [category.glob(f"*/{pattern}") for pattern in glob_patterns]
        for path in itertools.chain(*globs):
            examples.append(Example(path.parent, category.stem))

    # Generate the example documentation
    for example in sorted(examples, key=lambda e: e.path.stem):
        example_name = f"{example.path.stem}.md"
        doc_path = EXAMPLE_DOC_DIR / example.category / example_name
        if not doc_path.parent.exists():
            doc_path.parent.mkdir(parents=True)
        # Specify encoding for building on Windows
        with open(doc_path, "w+", encoding="utf-8") as f:
            f.write(example.generate())
        logger.debug("Example generated: %s", doc_path.relative_to(ROOT_DIR))
    logger.info("Total examples generated: %d", len(examples))
|
||||
149
docs/mkdocs/hooks/generate_metrics.py
Normal file
149
docs/mkdocs/hooks/generate_metrics.py
Normal file
@@ -0,0 +1,149 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
import ast
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
logger = logging.getLogger("mkdocs")
|
||||
|
||||
ROOT_DIR = Path(__file__).parent.parent.parent.parent
|
||||
DOCS_DIR = ROOT_DIR / "docs"
|
||||
GENERATED_METRICS_DIR = DOCS_DIR / "generated" / "metrics"
|
||||
|
||||
# Files to scan for metric definitions - each will generate a separate table
|
||||
METRIC_SOURCE_FILES = [
|
||||
{"path": "vllm/v1/metrics/loggers.py", "output": "general.md"},
|
||||
{
|
||||
"path": "vllm/v1/spec_decode/metrics.py",
|
||||
"output": "spec_decode.md",
|
||||
},
|
||||
{
|
||||
"path": "vllm/distributed/kv_transfer/kv_connector/v1/nixl_connector.py",
|
||||
"output": "nixl_connector.md",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
class MetricExtractor(ast.NodeVisitor):
|
||||
"""AST visitor to extract metric definitions."""
|
||||
|
||||
def __init__(self):
|
||||
self.metrics: list[dict[str, str]] = []
|
||||
|
||||
def visit_Call(self, node: ast.Call) -> None:
|
||||
"""Visit function calls to find metric class instantiations."""
|
||||
metric_type = self._get_metric_type(node)
|
||||
if metric_type:
|
||||
name = self._extract_kwarg(node, "name")
|
||||
documentation = self._extract_kwarg(node, "documentation")
|
||||
|
||||
if name:
|
||||
self.metrics.append(
|
||||
{
|
||||
"name": name,
|
||||
"type": metric_type,
|
||||
"documentation": documentation or "",
|
||||
}
|
||||
)
|
||||
|
||||
self.generic_visit(node)
|
||||
|
||||
def _get_metric_type(self, node: ast.Call) -> str | None:
|
||||
"""Determine if this call creates a metric and return its type."""
|
||||
metric_type_map = {
|
||||
"_gauge_cls": "gauge",
|
||||
"_counter_cls": "counter",
|
||||
"_histogram_cls": "histogram",
|
||||
}
|
||||
if isinstance(node.func, ast.Attribute):
|
||||
return metric_type_map.get(node.func.attr)
|
||||
return None
|
||||
|
||||
def _extract_kwarg(self, node: ast.Call, key: str) -> str | None:
|
||||
"""Extract a keyword argument value from a function call."""
|
||||
for keyword in node.keywords:
|
||||
if keyword.arg == key:
|
||||
return self._get_string_value(keyword.value)
|
||||
return None
|
||||
|
||||
def _get_string_value(self, node: ast.AST) -> str | None:
|
||||
"""Extract string value from an AST node."""
|
||||
if isinstance(node, ast.Constant):
|
||||
return str(node.value) if node.value is not None else None
|
||||
return None
|
||||
|
||||
|
||||
def extract_metrics_from_file(filepath: Path) -> list[dict[str, str]]:
    """Parse a Python file and extract all metric definitions.

    Raises:
        RuntimeError: If the file cannot be read, parsed, or visited.
    """
    try:
        source = filepath.read_text(encoding="utf-8")
        tree = ast.parse(source, filename=str(filepath))
        extractor = MetricExtractor()
        extractor.visit(tree)
    except Exception as e:
        raise RuntimeError(f"Failed to parse {filepath}: {e}") from e
    return extractor.metrics
|
||||
|
||||
|
||||
def generate_markdown_table(metrics: list[dict[str, str]]) -> str:
    """Render extracted metrics as a markdown table.

    Rows are sorted by metric type, then by metric name; embedded newlines in
    descriptions are flattened so they don't break the table.
    """
    if not metrics:
        return "No metrics found.\n"

    header = [
        "| Metric Name | Type | Description |",
        "|-------------|------|-------------|",
    ]
    rows = [
        "| `{}` | {} | {} |".format(
            m["name"],
            m["type"].capitalize(),
            m["documentation"].replace("\n", " ").strip(),
        )
        for m in sorted(metrics, key=lambda m: (m["type"], m["name"]))
    ]
    return "\n".join(header + rows) + "\n"
|
||||
|
||||
|
||||
def on_startup(command: Literal["build", "gh-deploy", "serve"], dirty: bool):
    """Generate metrics documentation tables from source files.

    Args:
        command: The MkDocs command being run.
        dirty: Whether this is a dirty (incremental) build.

    Raises:
        FileNotFoundError: If a configured metrics source file is missing.
    """
    logger.info("Generating metrics documentation")

    # Create generated directory if it doesn't exist
    GENERATED_METRICS_DIR.mkdir(parents=True, exist_ok=True)

    total_metrics = 0
    for source_config in METRIC_SOURCE_FILES:
        source_path = source_config["path"]
        output_file = source_config["output"]

        # Fail loudly: a missing source file means the config is stale.
        filepath = ROOT_DIR / source_path
        if not filepath.exists():
            raise FileNotFoundError(f"Metrics source file not found: {filepath}")

        logger.debug("Extracting metrics from: %s", source_path)
        metrics = extract_metrics_from_file(filepath)
        logger.debug("Found %d metrics in %s", len(metrics), source_path)

        # Generate and write the markdown table for this source
        table_content = generate_markdown_table(metrics)
        output_path = GENERATED_METRICS_DIR / output_file
        with open(output_path, "w", encoding="utf-8") as f:
            f.write(table_content)

        total_metrics += len(metrics)
        logger.info(
            "Generated metrics table: %s (%d metrics)",
            output_path.relative_to(ROOT_DIR),
            len(metrics),
        )

    logger.info(
        "Total metrics generated: %d across %d files",
        total_metrics,
        len(METRIC_SOURCE_FILES),
    )
|
||||
17
docs/mkdocs/hooks/remove_announcement.py
Normal file
17
docs/mkdocs/hooks/remove_announcement.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
|
||||
def on_startup(command: Literal["build", "gh-deploy", "serve"], dirty: bool):
    """MkDocs hook: drop the announcement banner on tagged-release builds.

    See https://docs.readthedocs.io/en/stable/reference/environment-variables.html
    for the READTHEDOCS_VERSION_TYPE variable.
    """
    if os.getenv("READTHEDOCS_VERSION_TYPE") != "tag":
        return

    # Remove the warning banner if the version is a tagged release.
    banner = Path(__file__).parent.parent / "overrides/main.html"
    # The file might already be gone: readthedocs triggers the build more
    # than once (HTML and PDF versions are built separately).
    if banner.exists():
        os.remove(banner)
|
||||
95
docs/mkdocs/hooks/url_schemes.py
Normal file
95
docs/mkdocs/hooks/url_schemes.py
Normal file
@@ -0,0 +1,95 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
|
||||
"""
|
||||
MkDocs hook to enable the following links to render correctly:
|
||||
|
||||
- Relative file links outside of the `docs/` directory, e.g.:
|
||||
- [Text](../some_file.py)
|
||||
- [Directory](../../some_directory/)
|
||||
- GitHub URLs for issues, pull requests, and projects, e.g.:
|
||||
- Adds GitHub icon before links
|
||||
- Replaces raw links with descriptive text,
|
||||
e.g. <...pull/123> -> [Pull Request #123](.../pull/123)
|
||||
- Works for external repos too by including the `owner/repo` in the link title
|
||||
|
||||
The goal is to simplify cross-referencing common GitHub resources
|
||||
in project docs.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import regex as re
|
||||
from mkdocs.config.defaults import MkDocsConfig
|
||||
from mkdocs.structure.files import Files
|
||||
from mkdocs.structure.pages import Page
|
||||
|
||||
# Repository root, resolved so path containment checks below are reliable.
ROOT_DIR = Path(__file__).parent.parent.parent.parent.resolve()
DOC_DIR = ROOT_DIR / "docs"


# Material-for-MkDocs icon shortcode shown before rewritten GitHub links.
gh_icon = ":octicons-mark-github-16:"

# Regex pieces
TITLE = r"(?P<title>[^\[\]<>]+?)"  # link text: no brackets or angle brackets
REPO = r"(?P<repo>.+?/.+?)"  # "owner/repo"
TYPE = r"(?P<type>issues|pull|projects)"
NUMBER = r"(?P<number>\d+)"
FRAGMENT = r"(?P<fragment>#[^\s]+)?"  # optional "#anchor" suffix
URL = f"https://github.com/{REPO}/{TYPE}/{NUMBER}{FRAGMENT}"
RELATIVE = r"(?!(https?|ftp)://|#)(?P<path>[^\s]+?)"  # not absolute, not an anchor

# Common titles to use for GitHub links when none is provided in the link.
TITLES = {"issues": "Issue ", "pull": "Pull Request ", "projects": "Project "}

# Regex to match GitHub issue, PR, and project links with optional titles:
# either "[title](URL)" or a bare autolink "<URL>".
github_link = re.compile(rf"(\[{TITLE}\]\(|<){URL}(\)|>)")
# Regex to match relative file links with optional titles.
relative_link = re.compile(rf"\[{TITLE}\]\({RELATIVE}\)")
|
||||
|
||||
|
||||
def on_page_markdown(
    markdown: str, *, page: Page, config: MkDocsConfig, files: Files
) -> str:
    """MkDocs hook: rewrite relative-file and GitHub links in page markdown.

    Args:
        markdown: The page's markdown source.
        page: The page being rendered (used to resolve relative paths).
        config: The site configuration (unused).
        files: The site's file collection (unused).

    Returns:
        The markdown with qualifying links rewritten to GitHub URLs.
    """

    def replace_relative_link(match: re.Match) -> str:
        """Replace relative file links with URLs if they point outside the docs dir."""
        title = match.group("title")
        path = match.group("path")
        path = (Path(page.file.abs_src_path).parent / path).resolve()

        # Leave the link untouched unless it exists and is outside docs/.
        if not path.exists() or path.is_relative_to(DOC_DIR):
            return match.group(0)

        # Files and directories have different URL schemes on GitHub
        slug = "tree/main" if path.is_dir() else "blob/main"

        path = path.relative_to(ROOT_DIR)
        url = f"https://github.com/vllm-project/vllm/{slug}/{path}"
        return f"[{gh_icon} {title}]({url})"

    def replace_github_link(match: re.Match) -> str:
        """Replace GitHub issue, PR, and project links with enhanced Markdown links."""
        repo = match.group("repo")
        # Renamed from `type` to avoid shadowing the builtin.
        link_type = match.group("type")
        number = match.group("number")
        # Title and fragment could be None
        title = match.group("title") or ""
        fragment = match.group("fragment") or ""

        # Use default titles for raw links
        if not title:
            title = TITLES[link_type]
            # Only external repos need the owner/repo spelled out.
            if "vllm-project" not in repo:
                title += repo
            title += f"#{number}"

        url = f"https://github.com/{repo}/{link_type}/{number}{fragment}"
        return f"[{gh_icon} {title}]({url})"

    markdown = relative_link.sub(replace_relative_link, markdown)
    markdown = github_link.sub(replace_github_link, markdown)

    # BUG FIX: removed a leftover debug statement that printed the full
    # markdown of any page whose source path contained "interface" to stdout
    # on every build.

    return markdown
|
||||
Reference in New Issue
Block a user