初始化项目,由ModelHub XC社区提供模型

Model: tifa-benchmark/llama2_tifa_question_generation
Source: Original Platform
This commit is contained in:
ModelHub XC
2026-05-07 02:00:50 +08:00
commit 111c48a5eb
10 changed files with 93982 additions and 0 deletions

33
tokenizer_config.json Normal file
View File

@@ -0,0 +1,33 @@
{
"bos_token": {
"__type": "AddedToken",
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"clean_up_tokenization_spaces": false,
"eos_token": {
"__type": "AddedToken",
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"legacy": false,
"model_max_length": 1000000000000000019884624838656,
"pad_token": null,
"padding_side": "right",
"sp_model_kwargs": {},
"tokenizer_class": "LlamaTokenizer",
"unk_token": {
"__type": "AddedToken",
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}