[Fix] use torch.cat instead of torch.concat to prevent entering the Autograd backends. (#4466)
This commit is contained in:
@@ -306,7 +306,7 @@ class SamplingBatchInfo:
         ]:
             self_val = getattr(self, item, None)
             other_val = getattr(other, item, None)
-            setattr(self, item, torch.concat([self_val, other_val]))
+            setattr(self, item, torch.cat([self_val, other_val]))

         self.is_all_greedy |= other.is_all_greedy
         self.need_min_p_sampling |= other.need_min_p_sampling
Reference in New Issue
Block a user