Fix MTP error when enabling two-batch overlap (#7569)

This commit is contained in:
fzyzcjy
2025-06-27 06:40:54 +08:00
committed by GitHub
parent 604efe07e1
commit 3d7cdb2ebd

View File

@@ -168,7 +168,7 @@ def get_batch_sizes_to_capture(model_runner: ModelRunner):
# Also capture the largest possible batch size (the full request pool).
capture_bs += [model_runner.req_to_token_pool.size]
# Two-batch overlap splits each batch into two halves, so every captured
# batch size must be even and at least 2. The `bs >= 2` filter is kept in
# addition to `bs % 2 == 0` so that a bs of 0 (if present) is also dropped.
# NOTE(review): the `% 2 == 0` line is the fix for the MTP error this
# commit addresses (#7569) — presumably odd sizes broke the even split;
# confirm against the issue.
if server_args.enable_two_batch_overlap:
capture_bs = [bs for bs in capture_bs if bs >= 2]
capture_bs = [bs for bs in capture_bs if bs % 2 == 0]
# Honor the user-configured CUDA-graph capture ceiling, when set
# (0/None disables the cap, since the truthiness check skips it).
if server_args.cuda_graph_max_bs:
capture_bs = [bs for bs in capture_bs if bs <= server_args.cuda_graph_max_bs]