Make stop reason a dict instead of str (#1407)

This commit is contained in:
Lianmin Zheng
2024-09-12 20:47:31 -07:00
committed by GitHub
parent eb02c1618a
commit b912de11b0
3 changed files with 60 additions and 44 deletions

View File

@@ -95,19 +95,6 @@ file_id_storage: Dict[str, str] = {}
storage_dir = None
def format_finish_reason(finish_reason) -> Optional[str]:
    """Translate a raw engine finish-reason string into an OpenAI-style label.

    Args:
        finish_reason: The raw reason string produced by the engine
            (e.g. "FINISH_MATCHED_STR", "FINISH_LENGTH", "None").

    Returns:
        None when the reason starts with "None"; otherwise "stop",
        "length", or "abort" for the matching FINISH_* prefix, and
        "unknown" for anything unrecognized.
    """
    # Prefix -> label lookup, checked in order; first match wins.
    _PREFIX_LABELS = (
        ("None", None),
        ("FINISH_MATCHED", "stop"),
        ("FINISH_LENGTH", "length"),
        ("FINISH_ABORT", "abort"),
    )
    for prefix, label in _PREFIX_LABELS:
        if finish_reason.startswith(prefix):
            return label
    return "unknown"
def create_error_response(
message: str,
err_type: str = "BadRequestError",
@@ -618,8 +605,10 @@ def v1_generate_response(request, ret, tokenizer_manager, to_file=False):
"index": 0,
"text": text,
"logprobs": logprobs,
"finish_reason": format_finish_reason(
ret_item["meta_info"]["finish_reason"]
"finish_reason": (
ret_item["meta_info"]["finish_reason"]["type"]
if ret_item["meta_info"]["finish_reason"]
else ""
),
}
else:
@@ -627,8 +616,10 @@ def v1_generate_response(request, ret, tokenizer_manager, to_file=False):
index=idx,
text=text,
logprobs=logprobs,
finish_reason=format_finish_reason(
ret_item["meta_info"]["finish_reason"]
finish_reason=(
ret_item["meta_info"]["finish_reason"]["type"]
if ret_item["meta_info"]["finish_reason"]
else ""
),
)
@@ -762,8 +753,10 @@ async def v1_completions(tokenizer_manager, raw_request: Request):
index=index,
text=delta,
logprobs=logprobs,
finish_reason=format_finish_reason(
content["meta_info"]["finish_reason"]
finish_reason=(
content["meta_info"]["finish_reason"]["type"]
if content["meta_info"]["finish_reason"]
else ""
),
)
chunk = CompletionStreamResponse(
@@ -999,8 +992,10 @@ def v1_chat_generate_response(request, ret, to_file=False):
"index": 0,
"message": {"role": "assistant", "content": ret_item["text"]},
"logprobs": choice_logprobs,
"finish_reason": format_finish_reason(
ret_item["meta_info"]["finish_reason"]
"finish_reason": (
ret_item["meta_info"]["finish_reason"]["type"]
if ret_item["meta_info"]["finish_reason"]
else ""
),
}
else:
@@ -1008,8 +1003,10 @@ def v1_chat_generate_response(request, ret, to_file=False):
index=idx,
message=ChatMessage(role="assistant", content=ret_item["text"]),
logprobs=choice_logprobs,
finish_reason=format_finish_reason(
ret_item["meta_info"]["finish_reason"]
finish_reason=(
ret_item["meta_info"]["finish_reason"]["type"]
if ret_item["meta_info"]["finish_reason"]
else ""
),
)
@@ -1134,8 +1131,10 @@ async def v1_chat_completions(tokenizer_manager, raw_request: Request):
choice_data = ChatCompletionResponseStreamChoice(
index=index,
delta=DeltaMessage(role="assistant"),
finish_reason=format_finish_reason(
content["meta_info"]["finish_reason"]
finish_reason=(
content["meta_info"]["finish_reason"]["type"]
if content["meta_info"]["finish_reason"]
else ""
),
logprobs=choice_logprobs,
)
@@ -1152,8 +1151,10 @@ async def v1_chat_completions(tokenizer_manager, raw_request: Request):
choice_data = ChatCompletionResponseStreamChoice(
index=index,
delta=DeltaMessage(content=delta),
finish_reason=format_finish_reason(
content["meta_info"]["finish_reason"]
finish_reason=(
content["meta_info"]["finish_reason"]["type"]
if content["meta_info"]["finish_reason"]
else ""
),
logprobs=choice_logprobs,
)