[Bug fixed] Fixed the crash when enabling dp-attention on a single card (#3958)

This commit is contained in:
DavidChan
2025-03-10 15:50:34 +08:00
committed by GitHub
parent c553e1604c
commit 4455b26e76

View File

@@ -848,12 +848,12 @@ class DeepseekV2AttentionMLA(nn.Module):
def all_gather(
input_tensor: torch.Tensor, forward_batch: ForwardBatch, rank, world_size, group
):
if world_size == 1:
return input_tensor
all_lens = forward_batch.global_num_tokens_cpu
max_len = max(forward_batch.global_num_tokens_cpu)
if world_size == 1:
return input_tensor, 0, all_lens[0]
padded_tensor = torch.nn.functional.pad(
input_tensor, (0, 0, 0, max_len - input_tensor.shape[0])
)