初始化项目,由ModelHub XC社区提供模型

Model: dizza01/qwen2.5-7b-finetunerag-merged
Source: Original Platform
This commit is contained in:
ModelHub XC
2026-05-06 22:59:16 +08:00
commit 77136e7d0f
10 changed files with 472 additions and 0 deletions

36
tokenizer_config.json Normal file
View File

@@ -0,0 +1,36 @@
{
"add_prefix_space": false,
"backend": "tokenizers",
"bos_token": null,
"clean_up_tokenization_spaces": false,
"eos_token": "<|im_end|>",
"errors": "replace",
"extra_special_tokens": {
"<|im_start|>": "<|im_start|>",
"<|im_end|>": "<|im_end|>",
"<|object_ref_start|>": "<|object_ref_start|>",
"<|object_ref_end|>": "<|object_ref_end|>",
"<|box_start|>": "<|box_start|>",
"<|box_end|>": "<|box_end|>",
"<|quad_start|>": "<|quad_start|>",
"<|quad_end|>": "<|quad_end|>",
"<|vision_start|>": "<|vision_start|>",
"<|vision_end|>": "<|vision_end|>",
"<|vision_pad|>": "<|vision_pad|>",
"<|image_pad|>": "<|image_pad|>",
"<|video_pad|>": "<|video_pad|>"
},
"is_local": false,
"max_length": 2048,
"model_max_length": 131072,
"pad_to_multiple_of": null,
"pad_token": "<|endoftext|>",
"pad_token_type_id": 0,
"padding_side": "right",
"split_special_tokens": false,
"stride": 0,
"tokenizer_class": "Qwen2Tokenizer",
"truncation_side": "right",
"truncation_strategy": "longest_first",
"unk_token": null
}