[CI] fix vllm test (#365)

fix vllm test

Signed-off-by: MengqingCao <cmq0113@163.com>
This commit is contained in:
Mengqing Cao
2025-03-24 16:09:06 +08:00
committed by GitHub
parent 89ca63a2c2
commit 8996733307
2 changed files with 11 additions and 5 deletions

View File

@@ -130,11 +130,12 @@ jobs:
       run: |
         VLLM_USE_V1=0 pytest -sv tests
-      # FIXME: make vllm test pass
-      #- name: Checkout vllm-project/vllm repo
-      #- name: Run vllm-project/vllm test
-      #  run: |
-      #    VLLM_USE_V1=0 pytest -sv
+    - name: Run vllm-project/vllm test
+      env:
+        VLLM_USE_V1: 0
+        PYTORCH_NPU_ALLOC_CONF: max_split_size_mb:256
+      run: |
+        pytest -sv
   post_cleanup:
     name: vLLM Ascend test (post-cleanup)

View File

@@ -53,6 +53,11 @@ addopts = --ignore=vllm-empty/tests/test_utils.py
           --ignore=vllm-empty/tests/multimodal/test_processing.py
           --ignore=vllm-empty/tests/multimodal/test_processor_kwargs.py
           --ignore=vllm-empty/tests/multimodal/test_utils.py
+          ; Both V1 and V0 engine will be run in detokenizer/test_stop_reason
+          ; VLLM_USE_V1=1 is not supported with device type=npu.
+          --ignore=vllm-empty/tests/detokenizer/test_stop_reason.py
+          ; oom on llama-2-7b-hf
+          --ignore=vllm-empty/tests/detokenizer/test_stop_strings.py
 testpaths =
     vllm-empty/tests