From f0caeeadcb37261beebd4a6e32934fa9f460db98 Mon Sep 17 00:00:00 2001
From: Nengjun Ma
Date: Sat, 14 Feb 2026 18:54:04 +0800
Subject: [PATCH] [CI] unlock when load model (#6771)

### What this PR does / why we need it?

### Does this PR introduce _any_ user-facing change?

### How was this patch tested?

- vLLM version: v0.15.0
- vLLM main: https://github.com/vllm-project/vllm/commit/9562912cead1f11e8540fb91306c5cbda66f0007

Signed-off-by: leo-pony
---
 .github/workflows/_e2e_test.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/_e2e_test.yaml b/.github/workflows/_e2e_test.yaml
index 75849a56..da0849b5 100644
--- a/.github/workflows/_e2e_test.yaml
+++ b/.github/workflows/_e2e_test.yaml
@@ -96,6 +96,7 @@ jobs:
       VLLM_LOGGING_LEVEL: ERROR
       VLLM_USE_MODELSCOPE: True
       HF_HUB_OFFLINE: 1
+      MODELSCOPE_HUB_FILE_LOCK: False
     steps:
       - name: Checkout vllm-project/vllm-ascend repo
         uses: actions/checkout@v6