Add skip_tokenizer_init arg. (#959)

Co-authored-by: lzhang <zhanglei@modelbest.cn>
This commit is contained in:
gryffindor-rr
2024-08-10 03:14:13 +08:00
committed by GitHub
parent b16e856f11
commit 9cf0a5bada
10 changed files with 218 additions and 71 deletions

View File

@@ -195,6 +195,8 @@ class Req:
return all_ids[self.surr_offset :], self.read_offset - self.surr_offset
def get_next_inc_detokenization(self):
if self.tokenizer is None:
return False, ""
read_ids, read_offset = self.init_incremental_detokenize()
surr_ids = read_ids[:read_offset]
@@ -225,16 +227,11 @@ class Req:
return
last_token_id = self.output_ids[-1]
if (
last_token_id == self.tokenizer.eos_token_id
and not self.sampling_params.ignore_eos
):
self.finished_reason = FINISH_MATCHED_TOKEN(
matched=self.tokenizer.eos_token_id
)
return
if last_token_id in self.sampling_params.stop_token_ids:
if self.tokenizer is None:
matched_eos = last_token_id in self.sampling_params.stop_token_ids
else:
matched_eos = last_token_id == self.tokenizer.eos_token_id
if matched_eos and not self.sampling_params.ignore_eos:
self.finished_reason = FINISH_MATCHED_TOKEN(matched=last_token_id)
return