(Trained with Unsloth)
added_tokens.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "<|assistant|>": 32001,
  "<|endoftext|>": 32000,
  "<|end|>": 32007,
  "<|placeholder1|>": 32002,
  "<|placeholder2|>": 32003,
  "<|placeholder3|>": 32004,
  "<|placeholder4|>": 32005,
  "<|placeholder5|>": 32008,
  "<|placeholder6|>": 32009,
  "<|system|>": 32006,
  "<|user|>": 32010
}
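These added tokens sit directly above the base SentencePiece vocabulary, starting at ID 32000, and provide the chat-control markers used by the template below. A minimal sketch, assuming the transformers package and a hypothetical local directory "./model" containing these files, of checking that the mapping round-trips through the tokenizer:

```python
# Minimal sketch, assuming the transformers package and a hypothetical
# local directory "./model" containing the files from this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./model")

# The chat-control tokens map to the IDs listed in added_tokens.json.
assert tok.convert_tokens_to_ids("<|user|>") == 32010
assert tok.convert_tokens_to_ids("<|assistant|>") == 32001
assert tok.convert_tokens_to_ids("<|end|>") == 32007
assert tok.eos_token_id == 32000  # "<|endoftext|>", matching eos_token_id in config.json
```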
chat_template.jinja (new file, 8 lines)
@@ -0,0 +1,8 @@
{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>
' + message['content'] + '<|end|>
'}}{% elif message['role'] == 'user' %}{{'<|user|>
' + message['content'] + '<|end|>
'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>
' + message['content'] + '<|end|>
'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>
' }}{% else %}{{ eos_token }}{% endif %}
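For each turn the template emits a role header, the message content, and an <|end|> marker, then either opens an assistant turn (when add_generation_prompt is set) or closes with the EOS token. A minimal rendering sketch, assuming the jinja2 package; the messages list is purely illustrative:

```python
# Minimal sketch, assuming the jinja2 package; the messages are illustrative.
from jinja2 import Template

template = Template(open("chat_template.jinja").read())

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

prompt = template.render(
    messages=messages,
    add_generation_prompt=True,
    eos_token="<|endoftext|>",
)
print(prompt)
# <|system|>
# You are a helpful assistant.<|end|>
# <|user|>
# Hello!<|end|>
# <|assistant|>
```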
config.json (new file, 136 lines)
@@ -0,0 +1,136 @@
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "torch_dtype": "bfloat16",
  "eos_token_id": 32000,
  "head_dim": 96,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32009,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "attention_factor": 32.0,
    "factor": 32.0,
    "long_factor": [
      1.0800000429153442,
      1.1100000143051147,
      1.1399999856948853,
      1.340000033378601,
      1.5899999141693115,
      1.600000023841858,
      1.6200000047683716,
      2.620000123977661,
      3.2300000190734863,
      3.2300000190734863,
      4.789999961853027,
      7.400000095367432,
      7.700000286102295,
      9.09000015258789,
      12.199999809265137,
      17.670000076293945,
      24.46000099182129,
      28.57000160217285,
      30.420001983642578,
      30.840002059936523,
      32.590003967285156,
      32.93000411987305,
      42.320003509521484,
      44.96000289916992,
      50.340003967285156,
      50.45000457763672,
      57.55000305175781,
      57.93000411987305,
      58.21000289916992,
      60.1400032043457,
      62.61000442504883,
      62.62000274658203,
      62.71000289916992,
      63.1400032043457,
      63.1400032043457,
      63.77000427246094,
      63.93000411987305,
      63.96000289916992,
      63.970001220703125,
      64.02999877929688,
      64.06999969482422,
      64.08000183105469,
      64.12000274658203,
      64.41000366210938,
      64.4800033569336,
      64.51000213623047,
      64.52999877929688,
      64.83999633789062
    ],
    "rope_type": "longrope",
    "short_factor": [
      1.0,
      1.0199999809265137,
      1.0299999713897705,
      1.0299999713897705,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0499999523162842,
      1.0699999332427979,
      1.0999999046325684,
      1.1099998950958252,
      1.1599998474121094,
      1.1599998474121094,
      1.1699998378753662,
      1.2899998426437378,
      1.339999794960022,
      1.679999828338623,
      1.7899998426437378,
      1.8199998140335083,
      1.8499997854232788,
      1.8799997568130493,
      1.9099997282028198,
      1.9399996995925903,
      1.9899996519088745,
      2.0199997425079346,
      2.0199997425079346,
      2.0199997425079346,
      2.0199997425079346,
      2.0199997425079346,
      2.0199997425079346,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0299997329711914,
      2.0799996852874756,
      2.0899996757507324,
      2.189999580383301,
      2.2199995517730713,
      2.5899994373321533,
      2.729999542236328,
      2.749999523162842,
      2.8399994373321533
    ]
  },
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "transformers_version": "4.57.3",
  "unsloth_version": "2025.8.10",
  "use_cache": true,
  "vocab_size": 32064
}
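The config pairs an original 4096-token context (original_max_position_embeddings) with a 131072-token window (max_position_embeddings) via longrope scaling: short_factor covers in-range positions, long_factor covers the extended range, with one factor per rotary dimension pair (head_dim / 2 = 48 entries each). A minimal inspection sketch, assuming the transformers package and a hypothetical local directory "./model":

```python
# Minimal sketch, assuming the transformers package and a hypothetical
# local directory "./model" containing config.json.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("./model")

print(cfg.model_type)                        # llama
print(cfg.max_position_embeddings)           # 131072
print(cfg.original_max_position_embeddings)  # 4096
print(cfg.rope_scaling["rope_type"])         # longrope
# One scaling factor per rotary dimension pair: head_dim // 2 == 48
print(len(cfg.rope_scaling["long_factor"]),
      len(cfg.rope_scaling["short_factor"]))  # 48 48
```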
special_tokens_map.json (new file, 30 lines)
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|placeholder6|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": true,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json (new file, 277210 lines)
File diff suppressed because it is too large
tokenizer.model (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
size 499723
tokenizer_config.json (new file, 132 lines)
@@ -0,0 +1,132 @@
{
  "add_bos_token": false,
  "add_eos_token": false,
  "add_prefix_space": null,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": false
    },
    "32000": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "32001": {
      "content": "<|assistant|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32002": {
      "content": "<|placeholder1|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32003": {
      "content": "<|placeholder2|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32004": {
      "content": "<|placeholder3|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32005": {
      "content": "<|placeholder4|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32006": {
      "content": "<|system|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32007": {
      "content": "<|end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32008": {
      "content": "<|placeholder5|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32009": {
      "content": "<|placeholder6|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32010": {
      "content": "<|user|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "extra_special_tokens": {},
  "legacy": false,
  "model_max_length": 131072,
  "pad_token": "<|placeholder6|>",
  "padding_side": "right",
  "sp_model_kwargs": {},
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false,
  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}"
}
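tokenizer_config.json embeds the same chat template as chat_template.jinja, so the prompt can also be produced through the tokenizer itself. A minimal sketch, assuming the transformers package and a hypothetical local directory "./model":

```python
# Minimal sketch, assuming the transformers package and a hypothetical
# local directory "./model" containing these tokenizer files.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./model")
assert tok.pad_token == "<|placeholder6|>"  # as declared in special_tokens_map.json

messages = [{"role": "user", "content": "What is the capital of France?"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|user|>
# What is the capital of France?<|end|>
# <|assistant|>
```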