{
  "additional_special_tokens": [
    "<|eot_id|>",
    "<|eom_id|>"
  ],
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|eot_id|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}