[CI] update vLLM to 0.14.1 (#6222)

Upgrade vLLM to 0.14.1
- vLLM version: v0.14.1
- vLLM main: d68209402d

Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
Author: wangxiyuan
Date: 2026-01-25 17:52:16 +08:00
Committed by: GitHub
Parent: 384d84c7ef
Commit: 99bdd7363c
14 changed files with 17 additions and 17 deletions
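To reproduce the pinned revisions locally, a minimal sketch with plain git (the clone location mirrors the Dockerfiles and is illustrative):

git clone --depth 1 --branch v0.14.1 https://github.com/vllm-project/vllm.git vllm-workspace/vllm
# Or, to test against the pinned vLLM main commit instead of the tag:
cd vllm-workspace/vllm
git fetch --depth 1 origin d68209402ddab3f54a09bc1f4de9a9495a283b60
git checkout FETCH_HEAD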

View File

@@ -32,7 +32,7 @@ on:
description: how many pods will be pulled up via lws.yaml, i.e. the number of nodes needed
vllm_version:
required: false
-default: "v0.14.0"
+default: "v0.14.1"
type: string
description: vllm version to use
vllm_ascend_remote_url:
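Assuming this reusable workflow is also dispatchable (neither a workflow_dispatch trigger nor the workflow filename is visible in this hunk, so both are assumptions), the new default could still be overridden per run from the gh CLI:

# Hypothetical manual dispatch; the filename multi_node_nightly.yaml is a placeholder
gh workflow run multi_node_nightly.yaml -f vllm_version=v0.14.0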

View File

@@ -66,7 +66,7 @@ jobs:
tests: tests/e2e/nightly/single_node/ops/multicard_ops_a2/
uses: ./.github/workflows/_e2e_nightly_single_node.yaml
with:
-vllm: v0.14.0
+vllm: v0.14.1
runner: ${{ matrix.test_config.os }}
tests: ${{ matrix.test_config.tests }}
name: ${{ matrix.test_config.name }}
@@ -137,7 +137,7 @@ jobs:
- Qwen3-Omni-30B-A3B-Instruct
uses: ./.github/workflows/_e2e_nightly_single_node_models.yaml
with:
-vllm: v0.14.0
+vllm: v0.14.1
runner: ${{ matrix.test_config.os }}
model_list: ${{ toJson(matrix.test_config.model_list) }}
image: 'swr.cn-southwest-2.myhuaweicloud.com/base_image/ascend-ci/cann:8.5.0-910b-ubuntu22.04-py3.11'

View File

@@ -161,7 +161,7 @@ jobs:
tests: tests/e2e/nightly/single_node/models/test_deepseek_v3_2_w8a8.py
uses: ./.github/workflows/_e2e_nightly_single_node.yaml
with:
-vllm: v0.14.0
+vllm: v0.14.1
runner: ${{ matrix.test_config.os }}
image: 'swr.cn-southwest-2.myhuaweicloud.com/base_image/ascend-ci/vllm-ascend:nightly-a3'
tests: ${{ matrix.test_config.tests }}
@@ -181,7 +181,7 @@ jobs:
uses: ./.github/workflows/_e2e_nightly_single_node.yaml
with:
runner: ${{ matrix.test_config.os }}
-vllm: v0.14.0
+vllm: v0.14.1
image: 'swr.cn-southwest-2.myhuaweicloud.com/base_image/ascend-ci/vllm-ascend:nightly-a3'
tests: ${{ matrix.test_config.tests }}
name: ${{ matrix.test_config.name }}

View File

@@ -75,7 +75,7 @@ jobs:
name: e2e-full
strategy:
matrix:
-vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.0]
+vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.1]
needs: [changes]
if: ${{ needs.changes.outputs.e2e_tracker == 'true' }}
uses: ./.github/workflows/_e2e_test.yaml

View File

@@ -85,7 +85,7 @@ jobs:
if: ${{ needs.lint.result == 'success' && (needs.changes.outputs.e2e_tracker == 'true' || needs.changes.outputs.ut_tracker == 'true') }}
strategy:
matrix:
-vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.0]
+vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.1]
uses: ./.github/workflows/_unit_test.yaml
with:
vllm: ${{ matrix.vllm_version }}
@@ -97,7 +97,7 @@ jobs:
name: e2e-light
strategy:
matrix:
-vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.0]
+vllm_version: [d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.1]
# Note (yikun): If CI resources are limited, we can split this job into two chained jobs
needs: [lint, changes]
# Only trigger the e2e test after lint has passed and the pull request contains e2e-related changes.

View File

@@ -51,7 +51,7 @@ jobs:
strategy:
matrix:
include:
-- vllm_branch: v0.14.0
+- vllm_branch: v0.14.1
vllm_ascend_branch: main
max-parallel: 1
container:

View File

@@ -48,7 +48,7 @@ RUN pip config set global.index-url ${PIP_INDEX_URL}
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -v -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \
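Because the vLLM revision is threaded through the VLLM_TAG build arg, an image can be pinned to a different release without editing the Dockerfile; a sketch (the image tag vllm-ascend:dev is illustrative):

docker build --build-arg VLLM_TAG=v0.14.1 -t vllm-ascend:dev .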

View File

@@ -40,7 +40,7 @@ RUN pip config set global.index-url ${PIP_INDEX_URL}
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -v -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \

View File

@@ -36,7 +36,7 @@ COPY . /vllm-workspace/vllm-ascend/
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \

View File

@@ -47,7 +47,7 @@ RUN apt-get update -y && \
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -v -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \

View File

@@ -50,7 +50,7 @@ RUN yum update -y && \
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \

View File

@@ -50,7 +50,7 @@ RUN yum update -y && \
# Install vLLM
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
-ARG VLLM_TAG=v0.14.0
+ARG VLLM_TAG=v0.14.1
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
# On x86, triton is installed by vLLM, but triton doesn't work correctly on Ascend, so we need to uninstall it.
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -e /vllm-workspace/vllm/[audio] --extra-index https://download.pytorch.org/whl/cpu/ && \

View File

@@ -54,7 +54,7 @@ For main branch of vLLM Ascend, we usually make it compatible with the latest vL
| vLLM Ascend | vLLM | Python | Stable CANN | PyTorch/torch_npu |
|-------------|--------------|------------------|-------------|--------------------|
-| main | d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.0 tag | >= 3.10, < 3.12 | 8.5.0 | 2.9.0 / 2.9.0 |
+| main | d68209402ddab3f54a09bc1f4de9a9495a283b60, v0.14.1 tag | >= 3.10, < 3.12 | 8.5.0 | 2.9.0 / 2.9.0 |
## Release cadence

View File

@@ -77,7 +77,7 @@ myst_substitutions = {
# CANN image tag
"cann_image_tag": "8.5.0-910b-ubuntu22.04-py3.11",
# vllm version in ci
"ci_vllm_version": "v0.14.0",
"ci_vllm_version": "v0.14.1",
}
# For cross-file header anchors
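Since ci_vllm_version is a MyST substitution, doc pages that reference it as {{ ci_vllm_version }} pick up the bump automatically; to list the affected pages, a quick check (the docs/ path is assumed):

grep -rn 'ci_vllm_version' docs/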