[CI] Upgrade transformers version (#6307)

Upgrade transformers to >=4.56.4

- vLLM version: v0.14.1
- vLLM main:
dc917cceb8

Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
This commit is contained in:
wangxiyuan
2026-01-28 14:06:39 +08:00
committed by GitHub
parent c498cea22d
commit f8e76a49fa
14 changed files with 30 additions and 42 deletions

View File

@@ -21,7 +21,6 @@ from unittest.mock import patch
import pytest
import torch
-from modelscope import snapshot_download  # type: ignore
from vllm import SamplingParams
from tests.e2e.conftest import VllmRunner
@@ -66,11 +65,11 @@ def test_aclgraph_mem_use(model: str, max_tokens: int) -> None:
sampling_params = SamplingParams(max_tokens=max_tokens,
temperature=0.0)
if model == "vllm-ascend/DeepSeek-V2-Lite-W8A8":
-        vllm_model = VllmRunner(snapshot_download(model),
+        vllm_model = VllmRunner(model,
max_model_len=1024,
quantization="ascend")
else:
-        vllm_model = VllmRunner(snapshot_download(model))
+        vllm_model = VllmRunner(model)
_ = vllm_model.generate(prompts, sampling_params)
assert capture_called.value == 1, "capture_model was not called during test"