From 839e03cbc9b3fbf6e448d3ffa1b24edf724c689e Mon Sep 17 00:00:00 2001 From: Li Wang Date: Wed, 21 Jan 2026 10:39:12 +0800 Subject: [PATCH] [Nightly] Use Qwen repo for qwen3-next (#6064) ### What this PR does / why we need it? Switch the qwen3-next nightly test to the official Qwen model repository so the nightly CI job no longer fails; see the failing run at https://github.com/vllm-project/vllm-ascend/actions/runs/21179025996/job/60915871441 ### Does this PR introduce _any_ user-facing change? No. This only changes the model source used by a nightly e2e test. ### How was this patch tested? - vLLM version: v0.13.0 - vLLM main: https://github.com/vllm-project/vllm/commit/d68209402ddab3f54a09bc1f4de9a9495a283b60 Signed-off-by: wangli --- tests/e2e/nightly/single_node/models/test_qwen3_next.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/nightly/single_node/models/test_qwen3_next.py b/tests/e2e/nightly/single_node/models/test_qwen3_next.py index 22e869d0..5fd9d183 100644 --- a/tests/e2e/nightly/single_node/models/test_qwen3_next.py +++ b/tests/e2e/nightly/single_node/models/test_qwen3_next.py @@ -10,7 +10,7 @@ from tests.e2e.conftest import RemoteOpenAIServer from tools.aisbench import run_aisbench_cases MODELS = [ - "vllm-ascend/Qwen3-Next-80B-A3B-Instruct", + "Qwen/Qwen3-Next-80B-A3B-Instruct", ] MODES = ["aclgraph"]