[Bug] Fix an error that occurred when the request included a seed.

This commit is contained in:
chenyili0619
2025-12-18 13:03:34 +08:00
committed by GitHub
parent 6b5740ad0a
commit 2e2933d217

View File

@@ -42,7 +42,7 @@ class TopKTopPSampler(nn.Module):
"""
logits = apply_top_k_top_p(logits, k, p)
probs = logits.softmax(dim=-1, dtype=torch.float32)
-return random_sample(probs, generators)
+return random_sample(probs, generators), None
def forward_kunlun(
self,
@@ -199,4 +199,4 @@ def flashinfer_sample(
next_token_ids = xtorch_ops.top_k_top_p_sampling_from_probs(
probs, top_k=k, top_p=p, deterministic=True)
-return next_token_ids.view(-1)
+return next_token_ids.view(-1)