support llava video (#426)
This commit is contained in:
@@ -259,6 +259,8 @@ def match_vicuna(model_path: str):
|
||||
return get_chat_template("vicuna_v1.1")
|
||||
if "llava-v1.5" in model_path.lower():
|
||||
return get_chat_template("vicuna_v1.1")
|
||||
if "llava-next-video-7b" in model_path.lower():
|
||||
return get_chat_template("vicuna_v1.1")
|
||||
|
||||
|
||||
@register_chat_template_matching_function
|
||||
@@ -283,19 +285,24 @@ def match_llama3_instruct(model_path: str):
|
||||
|
||||
@register_chat_template_matching_function
def match_chat_ml(model_path: str):
    """Match model paths that should use a ChatML-family template.

    TinyLlama and Qwen chat models map to the plain "chatml" template;
    the 34B llava variants (llava-v1.6-34b, llava-v1.6-yi-34b,
    llava-next-video-34b) map to "chatml-llava".

    Returns the matching chat template, or None (implicitly) when no
    pattern matches so other registered matchers can be tried.
    """
    # Fix: removed leftover commented-out debugger invocation
    # (`import pdb; pdb.set_trace()`) accidentally committed here.
    model_path = model_path.lower()
    if "tinyllama" in model_path:
        return get_chat_template("chatml")
    # Qwen base models are excluded: only "*chat*" variants use ChatML here.
    if "qwen" in model_path and "chat" in model_path:
        return get_chat_template("chatml")
    if (
        "llava-v1.6-34b" in model_path
        or "llava-v1.6-yi-34b" in model_path
        or "llava-next-video-34b" in model_path
    ):
        return get_chat_template("chatml-llava")
|
||||
|
||||
@register_chat_template_matching_function
def match_chat_yi(model_path: str):
    """Match Yi models to the "yi" chat template.

    Llava-based Yi variants are deliberately excluded — those are handled
    by the ChatML matcher instead. Returns None (implicitly) when the
    path does not match.
    """
    lowered = model_path.lower()
    is_yi_model = "yi" in lowered
    is_llava_variant = "llava" in lowered
    if is_yi_model and not is_llava_variant:
        return get_chat_template("yi")
|
||||
|
||||
|
||||
Reference in New Issue
Block a user