[dist] fix communicator patch (#58)

### What this PR does / why we need it?
Fix the communicator patch so that parallel execution works correctly.
See issue #52.

Signed-off-by: MengqingCao <cmq0113@163.com>
This commit is contained in:
Mengqing Cao
2025-02-14 10:45:49 +08:00
committed by GitHub
parent e264987af2
commit b88443b6c6
4 changed files with 7 additions and 6 deletions

View File

@@ -19,11 +19,11 @@
# https://github.com/vllm-project/vllm/pull/11324.
import torch
from vllm.distributed.parallel_state import GroupCoordinator
import vllm
from vllm.utils import resolve_obj_by_qualname
class GroupCoordinatorPatch(GroupCoordinator):
class GroupCoordinatorPatch(vllm.distributed.parallel_state.GroupCoordinator):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -66,4 +66,4 @@ class GroupCoordinatorPatch(GroupCoordinator):
return self.communicator.all_gather(input_, dim)
GroupCoordinator = GroupCoordinatorPatch
vllm.distributed.parallel_state.GroupCoordinator = GroupCoordinatorPatch