[v0.18.0][CI] Fix releases/v0.18.0 CI tests to only support vLLM v0.18.0 (#7686)

### What this PR does / why we need it?
Fix the releases/v0.18.0 CI tests so that they only support vLLM v0.18.0.

Signed-off-by: hfadzxy <starmoon_zhang@163.com>
This commit is contained in:
zhangxinyuehfad
2026-03-26 18:36:04 +08:00
committed by GitHub
parent 124bb00158
commit d781902ce9
9 changed files with 11 additions and 11 deletions

View File

@@ -37,7 +37,7 @@ jobs:
steps:
- name: Get vLLM version
run: |
VLLM_COMMIT=ed359c497a728f08b5b41456c07a688ccd510fbc
VLLM_COMMIT=v0.18.0
echo "VLLM_COMMIT=https://github.com/vllm-project/vllm/commit/$VLLM_COMMIT" >> "$GITHUB_ENV"
- name: Checkout repository

View File

@@ -27,7 +27,7 @@ RUN apt-get update -y && \
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
# For lint purposes; in practice we need to keep a main-to-main matching.
ARG VLLM_COMMIT=ed359c497a728f08b5b41456c07a688ccd510fbc
ARG VLLM_COMMIT=v0.18.0
RUN git clone $VLLM_REPO /vllm-workspace/vllm && \
cd /vllm-workspace/vllm && \
git checkout $VLLM_COMMIT

View File

@@ -15,7 +15,7 @@
# This file is a part of the vllm-ascend project.
#
FROM quay.io/ascend/vllm-ascend:main
FROM quay.io/ascend/vllm-ascend:release-0.18.0
ARG PIP_INDEX_URL="https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
ARG AIS_BENCH_TAG="v3.0-20250930-master"

View File

@@ -15,7 +15,7 @@
# This file is a part of the vllm-ascend project.
#
FROM quay.io/ascend/vllm-ascend:main-a3
FROM quay.io/ascend/vllm-ascend:releases-v0.18.0-a3
ARG PIP_INDEX_URL="https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
ARG AIS_BENCH_TAG="v3.0-20250930-master"

View File

@@ -75,7 +75,7 @@ jobs:
name: e2e-full
strategy:
matrix:
vllm_version: [ed359c497a728f08b5b41456c07a688ccd510fbc, v0.18.0]
vllm_version: [v0.18.0]
needs: [changes]
if: ${{ needs.changes.outputs.e2e_tracker == 'true' || needs.changes.outputs.e2e_tracker == true }}
uses: ./.github/workflows/_e2e_test.yaml

View File

@@ -41,7 +41,7 @@ jobs:
lint:
uses: ./.github/workflows/_pre_commit.yml
with:
vllm: ed359c497a728f08b5b41456c07a688ccd510fbc
vllm: v0.18.0
changes:
runs-on: linux-aarch64-a2b3-0
outputs:
@@ -90,7 +90,7 @@ jobs:
if: ${{ needs.lint.result == 'success' && (needs.changes.outputs.e2e_tracker == 'true' || needs.changes.outputs.ut_tracker == 'true') }}
strategy:
matrix:
vllm_version: [ed359c497a728f08b5b41456c07a688ccd510fbc, v0.18.0]
vllm_version: [v0.18.0]
uses: ./.github/workflows/_unit_test.yaml
with:
vllm: ${{ matrix.vllm_version }}
@@ -102,7 +102,7 @@ jobs:
name: e2e-light
strategy:
matrix:
vllm_version: [ed359c497a728f08b5b41456c07a688ccd510fbc, v0.18.0]
vllm_version: [v0.18.0]
# Note (yikun): If CI resources are limited, we can split this job into two chained jobs
needs: [lint, changes]
# only trigger e2e test after lint passed and the change is e2e related with pull request.

View File

@@ -33,7 +33,7 @@ jobs:
name: refresh codecov
strategy:
matrix:
vllm_version: [ed359c497a728f08b5b41456c07a688ccd510fbc]
vllm_version: [v0.18.0]
uses: ./.github/workflows/_unit_test.yaml
with:
vllm: ${{ matrix.vllm_version }}

View File

@@ -59,7 +59,7 @@ For main branch of vLLM Ascend, we usually make it compatible with the latest vL
| vLLM Ascend | vLLM | Python | Stable CANN | PyTorch/torch_npu |
|-------------|--------------|------------------|-------------|--------------------|
| main | ed359c497a728f08b5b41456c07a688ccd510fbc, v0.18.0 tag | >= 3.10, < 3.12 | 8.5.0 | 2.9.0 / 2.9.0 |
| main | v0.18.0 tag | >= 3.10, < 3.12 | 8.5.0 | 2.9.0 / 2.9.0 |
## Release cadence

View File

@@ -5,5 +5,5 @@ This directory contains the new model runner which is under active development.
please see [Model Runner V2](https://github.com/vllm-project/vllm-ascend/issues/5208)
to get specific plans.
supported vllm version: main@ed359c497a728f08b5b41456c07a688ccd510fbc
supported vllm version: v0.18.0
related PR: <https://github.com/vllm-project/vllm-ascend/pull/7598>