Fix VocabParallelEmbedding UT (#2722)

### What this PR does / why we need it?
Fix VocabParallelEmbedding UT

### How was this patch tested?
CI passed with newly added and existing tests.

- vLLM version: main
- vLLM main:
f592b3174b

---------

Signed-off-by: Icey <1790571317@qq.com>
This commit is contained in:
Icey
2025-09-18 19:54:01 +08:00
committed by GitHub
parent 01592515b8
commit acb46f303f
2 changed files with 5 additions and 1 deletion

View File

@@ -18,6 +18,7 @@ from unittest.mock import MagicMock, patch
import torch
from vllm_ascend.ascend_config import init_ascend_config
from vllm_ascend.ops.vocab_parallel_embedding import (
AscendLogitsProcessor, AscendParallelLMHead, AscendVocabParallelEmbedding)
@@ -31,6 +32,9 @@ class TestCustomVocabParallelEmbedding(unittest.TestCase):
self.embedding_dim = 10
self.org_num_embeddings = 40
self.padding_size = 8
mock_vllm_config = MagicMock()
mock_vllm_config.additional_config = {}
init_ascend_config(mock_vllm_config)
def _create_layer(self):
# Patch methods and dependencies for VocabParallelEmbedding