From 84d7f5a10d4eb3f683ecf62be239e611c5ee0522 Mon Sep 17 00:00:00 2001 From: zhangxinyuehfad <59153331+zhangxinyuehfad@users.noreply.github.com> Date: Wed, 26 Nov 2025 21:37:47 +0800 Subject: [PATCH] [UT] Fix ut test (#4472) ### What this PR does / why we need it? Skip the NPU-dependent MLA v1 metadata builder unit tests when `torch.npu` is unavailable, so the unit-test suite passes on hosts without Ascend NPU hardware. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? - vLLM version: v0.11.2 - vLLM main: https://github.com/vllm-project/vllm/commit/v0.11.2 Signed-off-by: hfadzxy --- tests/ut/attention/test_mla_v1.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/ut/attention/test_mla_v1.py b/tests/ut/attention/test_mla_v1.py index 5c58cf7b..57ac54c1 100644 --- a/tests/ut/attention/test_mla_v1.py +++ b/tests/ut/attention/test_mla_v1.py @@ -456,6 +456,8 @@ class TestAscendMLAMetadataBuilderBuild(TestBase): @patch("vllm_ascend.attention.mla_v1.get_ascend_config") def test_build_prefix_no_cache_metadata(self, mock_get_ascend_config, mock_dcp_world_size): + if not torch.npu.is_available(): + self.skipTest("NPU not available, skipping NPU-dependent tests") mock_dcp_world_size.return_value = 1 common_attn_metadata = AscendCommonAttentionMetadata( @@ -506,6 +508,8 @@ class TestAscendMLAMetadataBuilderBuild(TestBase): @patch("vllm_ascend.attention.mla_v1.get_ascend_config") def test_build_chunked_prefix_metadata(self, mock_get_ascend_config, mock_dcp_world_size): + if not torch.npu.is_available(): + self.skipTest("NPU not available, skipping NPU-dependent tests") mock_dcp_world_size.return_value = 1 common_attn_metadata = AscendCommonAttentionMetadata(