Initialize project; model provided by the ModelHub XC community

Model: ibm-research/merlinite-7b
Source: Original Platform
ModelHub XC
2026-05-03 17:22:37 +08:00
commit 0be1de1a72
21 changed files with 634 additions and 0 deletions

tokenizer_config.json Normal file

@@ -0,0 +1,88 @@
{
"add_bos_token": false,
"add_eos_token": false,
"added_tokens_decoder": {
"0": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"1": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"2": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32000": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32001": {
"content": "<|pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32002": {
"content": "<|user|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32003": {
"content": "<|assistant|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32004": {
"content": "<|system|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"additional_special_tokens": [
"<|pad|>",
"<|user|>",
"<|assistant|>",
"<|system|>"
],
"bos_token": "<s>",
"clean_up_tokenization_spaces": false,
"eos_token": "<|endoftext|>",
"fast_tokenizer": true,
"legacy": true,
"model_max_length": 1000000000000000019884624838656,
"pad_token": "<|pad|>",
"sp_model_kwargs": {},
"spaces_between_special_tokens": false,
"tokenizer_class": "LlamaTokenizer",
"unk_token": "<unk>",
"use_default_system_prompt": false
}
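
For reference, the settings above mean the tokenizer adds neither BOS nor EOS automatically (`add_bos_token` and `add_eos_token` are both false), uses `<|endoftext|>` (id 32000) as EOS, `<|pad|>` (id 32001) for padding, and registers the `<|system|>`/`<|user|>`/`<|assistant|>` chat markers as special tokens so they encode to single ids. A minimal sketch of how this plays out when loading with Hugging Face `transformers`; the repo id is taken from the commit header and is an assumption here, so substitute a local path if the files are not published under it:

from transformers import AutoTokenizer

# Assumed repo id (from the commit header); adjust to a local directory
# containing this tokenizer_config.json if it does not resolve on the Hub.
tok = AutoTokenizer.from_pretrained("ibm-research/merlinite-7b")

# eos/pad come straight from this config.
print(tok.eos_token, tok.eos_token_id)  # <|endoftext|> 32000
print(tok.pad_token, tok.pad_token_id)  # <|pad|> 32001

# The role markers are registered special tokens, so each maps to a single
# id (32002-32004 per the config) instead of being split into subwords.
for marker in ("<|user|>", "<|assistant|>", "<|system|>"):
    print(marker, tok.convert_tokens_to_ids(marker))

# With add_bos_token/add_eos_token false, encoding inserts no extra tokens;
# any chat framing must be written into the prompt text explicitly.
ids = tok("<|user|>\nHello!\n<|assistant|>\n").input_ids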