{
  "add_bos_token": true,
  "add_eos_token": false,
  "add_prefix_space": true,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "4": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "5": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>"
  ],
  "bos_token": "<s>",
  "chat_template": "{%- if not date_string is defined %}{%- set date_string = \"2025-11-13\" %}{%- endif %}{%- set base_system_message = \"Soy CecilIA, un modelo de lenguaje experimental desarrollado en colaboración entre la Facultad de Matemática y Computación (MATCOM) de la Universidad de La Habana y el Grupo de Procesamiento del Lenguaje y Sistemas de Información (GPLSI) de la Universidad de Alicante, entrenado para entender y procesar el español hablado en Cuba. Estás diseñado para responder a preguntas de cultura, historia, geografía, y conversar de forma general sobre Cuba. Responde siempre de forma amigable y coloquial.\" -%}{%- if messages and messages[0].role == \"system\" -%}{%- set task_system = messages[0].content -%}{%- set messages = messages[1:] -%}{%- else -%}{%- set task_system = \"\" -%}{%- endif -%}{%- if task_system -%}{%- set system_message = base_system_message + \"\n\" + task_system -%}{%- else -%}{%- set system_message = base_system_message -%}{%- endif -%}{{ \"<|im_start|>system\n\" + system_message + \"<|im_end|>\n\" }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": true,
  "model_max_length": 8192,
  "pad_token": "<unk>",
  "padding_side": "right",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
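
A minimal sketch of how this config is consumed in practice, assuming the tokenizer is published under a Hugging Face repository (the id "gplsi/CecilIA" below is a placeholder, not confirmed by this file): AutoTokenizer reads this tokenizer_config.json, and apply_chat_template renders a conversation through the Jinja template in "chat_template", prepending the built-in CecilIA system message and wrapping each turn in <|im_start|>/<|im_end|>.

from transformers import AutoTokenizer

# Hypothetical repo id; replace with the actual model repository or a local path
# containing this tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("gplsi/CecilIA")

messages = [
    {"role": "user", "content": "¿Cuál es la capital de Cuba?"},
]

# Render the prompt string defined by "chat_template": the base system message is
# injected as the system turn, and add_generation_prompt=True appends the opening
# "<|im_start|>assistant" header so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)
print(prompt)

Because "add_bos_token" is true and "add_eos_token" is false, encoding the rendered prompt with tokenizer(prompt) prepends "<s>" (token id 1) but does not append "</s>"; generation is expected to stop on "<|im_end|>" or "</s>" instead.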