[CI] Upgrade transformers version (#6307)
Upgrade transformers to >=4.56.4
- vLLM version: v0.14.1
- vLLM main:
dc917cceb8
Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
This commit is contained in:
@@ -21,7 +21,6 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import torch
|
||||
from modelscope import snapshot_download # type: ignore
|
||||
from vllm import SamplingParams
|
||||
|
||||
from tests.e2e.conftest import VllmRunner
|
||||
@@ -66,11 +65,11 @@ def test_aclgraph_mem_use(model: str, max_tokens: int) -> None:
|
||||
sampling_params = SamplingParams(max_tokens=max_tokens,
|
||||
temperature=0.0)
|
||||
if model == "vllm-ascend/DeepSeek-V2-Lite-W8A8":
|
||||
vllm_model = VllmRunner(snapshot_download(model),
|
||||
vllm_model = VllmRunner(model,
|
||||
max_model_len=1024,
|
||||
quantization="ascend")
|
||||
else:
|
||||
vllm_model = VllmRunner(snapshot_download(model))
|
||||
vllm_model = VllmRunner(model)
|
||||
_ = vllm_model.generate(prompts, sampling_params)
|
||||
|
||||
assert capture_called.value == 1, "capture_model was not called during test"
|
||||
|
||||
Reference in New Issue
Block a user