Fix out of memory message. (#1771)

This commit is contained in:
Liangsheng Yin
2024-10-23 15:20:39 -07:00
committed by GitHub
parent 0d800090b4
commit 3f5ac88d02
2 changed files with 6 additions and 2 deletions

View File

@@ -514,7 +514,12 @@ class ScheduleBatch:
out_cache_loc = self.token_to_kv_pool.alloc(num_tokens)
if out_cache_loc is None:
logger.error("Prefill out of memory. Try to lower your batch size.")
phase_str = "Prefill" if self.forward_mode.is_extend() else "Decode"
logger.error(
f"{phase_str} out of memory. Try to lower your batch size.\n"
f"Try to allocate {num_tokens} tokens.\n"
f"Available tokens: {self.token_to_kv_pool.available_size() + self.tree_cache.evictable_size()}\n"
)
if self.tree_cache is not None:
self.tree_cache.pretty_print()
exit(1)

View File

@@ -71,7 +71,6 @@ from sglang.srt.utils import (
is_generation_model,
is_multimodal_model,
kill_parent_process,
pytorch_profile,
set_random_seed,
suppress_other_loggers,
)