[CI] Fix lint CI (#5880)
Quick fix for lint CI
- vLLM version: v0.13.0
- vLLM main: bde38c11df
Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
This commit is contained in:
@@ -1,14 +1,11 @@
|
||||
import torch
|
||||
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||
|
||||
from llmcompressor import oneshot
|
||||
from llmcompressor.modifiers.quantization import QuantizationModifier
|
||||
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||
|
||||
MODEL_ID = "Qwen/Qwen3-30B-A3B-Instruct-2507"
|
||||
|
||||
model = AutoModelForCausalLM.from_pretrained(
|
||||
MODEL_ID, dtype=torch.bfloat16, trust_remote_code=True
|
||||
)
|
||||
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, dtype=torch.bfloat16, trust_remote_code=True)
|
||||
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
|
||||
|
||||
recipe = QuantizationModifier(
|
||||
|
||||
Reference in New Issue
Block a user