[CI] Add qwen2.5-vl test (#643)
### What this PR does / why we need it? Part of #499. Adds a qwen2.5-vl test on a single NPU. The v1 engine is excluded because qwen2.5-vl currently has some problems with v1. This test also makes #639 more credible. Signed-off-by: wangli <wangli858794774@gmail.com>
This commit is contained in:
@@ -18,7 +18,7 @@
|
||||
#
|
||||
|
||||
import warnings
|
||||
from typing import Dict, List, Optional, Sequence, Tuple, Union
|
||||
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
|
||||
|
||||
import torch
|
||||
from vllm.config import ModelConfig, TaskOption
|
||||
@@ -301,3 +301,16 @@ def build_model_context(model_name: str,
|
||||
limit_mm_per_prompt=limit_mm_per_prompt,
|
||||
)
|
||||
return InputContext(model_config)
|
||||
|
||||
|
||||
def qwen_prompt(questions: List[str]) -> List[str]:
    """Wrap each question in the Qwen2.5-VL chat template.

    Each prompt carries a system turn, a user turn containing a single
    image placeholder followed by the question text, and an opening
    assistant turn for the model to complete.
    """
    # Fixed pieces surrounding the question; the image placeholder token
    # sits between the vision start/end markers in the user turn.
    prefix = ("<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n"
              "<|im_start|>user\n<|vision_start|><|image_pad|><|vision_end|>")
    suffix = "<|im_end|>\n<|im_start|>assistant\n"

    prompts = []
    for question in questions:
        prompts.append(prefix + question + suffix)
    return prompts
|
||||
|
||||
|
||||
# Map of prompt templates for different models.
# Keys are model shorthand names; values build a list of fully formatted
# chat prompts from a list of raw question strings.
# Annotated with typing.Dict/Callable/List for consistency with the
# file's existing `from typing import ...` usage, and parameterized so
# type checkers verify each template's signature.
PROMPT_TEMPLATES: Dict[str, Callable[[List[str]], List[str]]] = {
    "qwen2.5vl": qwen_prompt,
}
|
||||
Reference in New Issue
Block a user