[CI] Add Ruff to pre-commit config to remove unused imports, applied only to the benchmark/docs/examples folders (#3969)

This commit is contained in:
Brayden Zhong
2025-03-27 22:45:02 -04:00
committed by GitHub
parent 31dfff7da7
commit b149b39353
21 changed files with 13 additions and 48 deletions

View File

@@ -22,6 +22,13 @@ repos:
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.2
hooks:
- id: ruff
args: [--select=F401, --fixable=F401]
files: ^(benchmark/|docs/|examples/)
exclude: \.ipynb$
- repo: https://github.com/psf/black
rev: 24.10.0
hooks:

View File

@@ -23,7 +23,7 @@ import warnings
from argparse import ArgumentParser
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple, Union
from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple
import aiohttp
import numpy as np

View File

@@ -4,8 +4,6 @@ import math
import cudnn
import torch
import torch.utils.benchmark as benchmark
import triton
import triton.language as tl
from flashinfer import BatchDecodeWithPagedKVCacheWrapper
from sglang.srt.layers.attention.triton_ops.decode_attention import decode_attention_fwd

View File

@@ -1,6 +1,5 @@
import itertools
import math
import os
from typing import Optional, Tuple
import torch

View File

@@ -3,7 +3,6 @@ from typing import Optional, Tuple, Union
import torch
import triton
import triton.language as tl
from flashinfer.norm import fused_add_rmsnorm, rmsnorm
from torch import nn
from vllm import _custom_ops as vllm_ops

View File

@@ -1,9 +1,6 @@
import itertools
import os
from typing import List
import numpy as np
import pytest
import torch
import triton
import triton.language as tl

View File

@@ -15,42 +15,28 @@
import argparse
import asyncio
import json
import os
import random
import resource
import sys
import time
import traceback
import warnings
from argparse import ArgumentParser
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple, Union
from typing import Any, Dict, List, Optional, Tuple
import aiohttp
import numpy as np
import requests
from launch_server import LORA_PATH, NUM_LORAS
from tqdm.asyncio import tqdm
from transformers import (
AutoTokenizer,
PreTrainedTokenizer,
PreTrainedTokenizerBase,
PreTrainedTokenizerFast,
)
from transformers import PreTrainedTokenizerBase
from sglang.bench_serving import (
AIOHTTP_TIMEOUT,
SHAREGPT_URL,
BenchmarkMetrics,
RequestFuncInput,
RequestFuncOutput,
calculate_metrics,
check_chat_template,
get_model,
get_request,
get_tokenizer,
parse_request_rate_range,
remove_prefix,
sample_random_requests,
)

View File

@@ -6,7 +6,6 @@ import time
import numpy as np
import pandas as pd
import tiktoken
from tqdm import tqdm
from sglang.test.test_utils import (
add_common_sglang_args_and_parse,

View File

@@ -1,6 +1,5 @@
import argparse
import PIL.Image
import torch
from data_utils import save_json
from eval_utils import (

View File

@@ -5,7 +5,6 @@ import os
import re
import yaml
from datasets import concatenate_datasets, load_dataset
DOMAIN_CAT2SUB_CAT = {
"Art and Design": ["Art", "Art_Theory", "Design", "Music"],

View File

@@ -1,13 +1,8 @@
import itertools
import json
import os
import random
import string
import threading
import time
from argparse import ArgumentParser
from pathlib import Path
from typing import Union
from tqdm import tqdm

View File

@@ -1,4 +1,3 @@
import os
import weakref
import nest_asyncio

View File

@@ -1,4 +1,3 @@
import os
import weakref
from sglang.utils import execute_shell_command, reserve_port

View File

@@ -4,8 +4,6 @@ export OPENAI_API_KEY=sk-******
python3 openai_example_chat.py
"""
import json
import sglang as sgl

View File

@@ -1,5 +1,4 @@
# NOTE: Currently this can only be run through HTTP requests.
import json
from concurrent.futures import ThreadPoolExecutor
from json_decode import character_regex

View File

@@ -5,11 +5,6 @@ python offline_batch_inference_vlm.py --model-path Qwen/Qwen2-VL-7B-Instruct --c
import argparse
import dataclasses
import io
import os
import requests
from PIL import Image
import sglang as sgl
from sglang.srt.conversation import chat_templates

View File

@@ -12,7 +12,7 @@ import requests
import torch
from sglang.test.test_utils import is_in_ci
from sglang.utils import print_highlight, terminate_process, wait_for_server
from sglang.utils import terminate_process, wait_for_server
if is_in_ci():
from docs.backend.patch import launch_server_cmd

View File

@@ -11,7 +11,6 @@ you should create the input.jsonl file with the following content:
"""
import json
import os
import time
import openai

View File

@@ -5,7 +5,6 @@ python openai_chat.py
"""
import openai
from openai import OpenAI
client = openai.Client(base_url="http://127.0.0.1:30000/v1", api_key="EMPTY")

View File

@@ -9,7 +9,7 @@ import requests
from sglang.srt.hf_transformers_utils import get_tokenizer
from sglang.test.test_utils import is_in_ci
from sglang.utils import print_highlight, terminate_process, wait_for_server
from sglang.utils import terminate_process, wait_for_server
if is_in_ci():
from docs.backend.patch import launch_server_cmd

View File

@@ -13,9 +13,8 @@ from PIL import Image
from transformers import AutoProcessor
from sglang.lang.chat_template import get_chat_template_by_model_path
from sglang.srt.hf_transformers_utils import get_tokenizer
from sglang.test.test_utils import DEFAULT_IMAGE_URL, is_in_ci
from sglang.utils import print_highlight, terminate_process, wait_for_server
from sglang.utils import terminate_process, wait_for_server
if is_in_ci():
from docs.backend.patch import launch_server_cmd