Files
lfm2.5-me-merged/tokenizer_config.json

11 lines
921 B
JSON
Raw | Normal view | History

{
"backend": "tokenizers",
"bos_token": "<bos>",
"eos_token": "<end_of_turn>",
"mask_token": "<mask>",
"model_max_length": 1000000000000000019884624838656,
"pad_token": "<pad>",
"tokenizer_class": "GemmaTokenizer",
"unk_token": "<unk>",
"chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{'<start_of_turn>user\n' + messages[0]['content'] | trim + ' ' + messages[1]['content'] | trim + '<end_of_turn>\n'}}{% set messages = messages[2:] %}{% endif %}{% for message in messages %}{% if message['role'] == 'user' %}{{'<start_of_turn>user\n' + message['content'] | trim + '<end_of_turn>\n'}}{% elif message['role'] == 'assistant' %}{{'<start_of_turn>model\n' + message['content'] | trim + '<end_of_turn>\n' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<start_of_turn>model\n' }}{% endif %}"
}