Files
deepseek-math-7B-lean-prove…/tokenizer_config.json

37 lines
946 B
JSON
Raw Permalink Normal View History

{
"add_bos_token": true,
"add_eos_token": false,
"add_prefix_space": null,
"added_tokens_decoder": {
"100000": {
"content": "<｜begin▁of▁sentence｜>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
},
"100001": {
"content": "<｜end▁of▁sentence｜>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
}
},
"bos_token": "<｜begin▁of▁sentence｜>",
"clean_up_tokenization_spaces": false,
"eos_token": "<｜end▁of▁sentence｜>",
"extra_special_tokens": {},
"legacy": true,
"model_max_length": 16383,
"pad_token": "<｜end▁of▁sentence｜>",
"padding_side": "left",
"sp_model_kwargs": {},
"split_special_tokens": false,
"tokenizer_class": "LlamaTokenizerFast",
"unk_token": null,
"use_default_system_prompt": false
}