[CI] Drop ascend scheduler from test (#4613)

Drop ascend scheduler from test

- vLLM version: v0.11.2

Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
This commit is contained in:
wangxiyuan
2025-12-02 13:18:17 +08:00
committed by GitHub
parent 6360eb1dea
commit 400af665e6
7 changed files with 1 addition and 165 deletions

View File

@@ -56,9 +56,6 @@ class TestAscendConfig(TestBase):
self.assertTrue(torchair_graph_config.enable_frozen_parameter)
self.assertFalse(torchair_graph_config.enable_kv_nz)
ascend_scheduler_config = ascend_config.ascend_scheduler_config
self.assertFalse(ascend_scheduler_config.enabled)
@_clean_up_ascend_config
def test_init_ascend_config_with_additional_config(self):
test_vllm_config = VllmConfig()
@@ -74,9 +71,6 @@ class TestAscendConfig(TestBase):
"enable_kv_nz": True
},
"multistream_overlap_shared_expert": True,
"ascend_scheduler_config": {
"enabled": True
},
"expert_map_path": "test_expert_map_path",
"refresh": True,
}
@@ -94,9 +88,6 @@ class TestAscendConfig(TestBase):
self.assertTrue(torchair_graph_config.enable_frozen_parameter)
self.assertTrue(torchair_graph_config.enable_kv_nz)
ascend_scheduler_config = ascend_config.ascend_scheduler_config
self.assertTrue(ascend_scheduler_config.enabled)
@_clean_up_ascend_config
def test_init_ascend_config_with_refresh(self):
test_vllm_config = VllmConfig()