from contextlib import contextmanager

import torch
import torch_npu  # noqa: F401  (importing registers the torch.npu namespace)

from vllm.logger import logger


@contextmanager
def torch_cuda_wrapper():
    """Patch torch.cuda APIs with their torch.npu equivalents.

    This lets CUDA-oriented code paths run unchanged on Ascend NPUs. The
    aliases are not restored on exit (see the ``finally`` block below).
    """
    try:
        # Stream and event primitives.
        torch.cuda.Event = torch.npu.Event
        torch.cuda.Stream = torch.npu.Stream
        torch.cuda.stream = torch.npu.stream
        torch.cuda.default_stream = torch.npu.default_stream
        torch.cuda.current_stream = torch.npu.current_stream
        # Graph capture: NPUGraph stands in for CUDAGraph.
        torch.cuda.graph_pool_handle = torch.npu.graph_pool_handle
        torch.cuda.CUDAGraph = torch.npu.NPUGraph
        torch.cuda.graph = torch.npu.graph
        # Device synchronization, stream selection, and memory queries.
        torch.cuda.synchronize = torch.npu.synchronize
        torch.cuda.set_stream = torch.npu.set_stream
        torch.cuda.current_device = torch.npu.current_device
        torch.cuda.mem_get_info = torch.npu.mem_get_info
        logger.info_once("Wrapping torch.cuda with torch.npu.")
        yield
    finally:
        # Nothing is restored here: the torch.npu aliases remain in place
        # after the context manager exits.
        pass
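

# A minimal usage sketch (hypothetical call site, not part of this module).
# It assumes torch_npu is installed and an NPU device is visible. Inside the
# context, graph capture written against the torch.cuda API runs on the NPU:
#
#     with torch_cuda_wrapper():
#         graph = torch.cuda.CUDAGraph()  # resolves to torch.npu.NPUGraph
#         with torch.cuda.graph(graph):
#             ...                         # work captured on the NPU
#         graph.replay()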