Initialize the project; model provided by the ModelHub XC community.
Model: FinaPolat/Mistral-Nemo-Instruct-2407_openED
Source: Original Platform
.gitattributes (vendored): new file, 36 lines
@@ -0,0 +1,36 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
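Every pattern above routes matching files through Git LFS instead of storing them as normal git blobs. A small sketch of which files from this commit the rules catch; fnmatch stands in for Git's wildmatch here, which agrees for patterns this simple, and the file list is just the files added in this commit.

# Sketch: which files from this commit fall under the LFS rules above?
from fnmatch import fnmatch

lfs_patterns = ["*.safetensors", "tokenizer.json"]   # two of the rules listed above
commit_files = [
    "config.json",
    "tokenizer.json",
    "model-00001-of-00005.safetensors",
]

for name in commit_files:
    via_lfs = any(fnmatch(name, p) for p in lfs_patterns)
    print(name, "-> stored via LFS" if via_lfs else "-> stored as a normal git blob")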
README.md: new file, 21 lines
@@ -0,0 +1,21 @@
---
base_model: unsloth/mistral-nemo-instruct-2407-bnb-4bit
tags:
- text-generation-inference
- transformers
- unsloth
- mistral
license: apache-2.0
language:
- en
---

# Uploaded finetuned model

- **Developed by:** FinaPolat
- **License:** apache-2.0
- **Finetuned from model:** unsloth/mistral-nemo-instruct-2407-bnb-4bit

This Mistral model was trained 2x faster with [Unsloth](https://github.com/unslothai/unsloth) and Hugging Face's TRL library.

[<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/unsloth%20made%20with%20love.png" width="200"/>](https://github.com/unslothai/unsloth)
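A minimal loading sketch for this checkpoint. It assumes the repo id below is resolvable by from_pretrained on whichever hub hosts it, that accelerate is installed for device_map="auto", and that enough memory is available for a roughly 12B-parameter bf16 model; the message text is illustrative.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "FinaPolat/Mistral-Nemo-Instruct-2407_openED"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",
)

messages = [{"role": "user", "content": "Give me one sentence about Mistral Nemo."}]
input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)
output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))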
chat_template.jinja: new file, 87 lines
@@ -0,0 +1,87 @@
{%- if messages[0]["role"] == "system" %}
{%- set system_message = messages[0]["content"] %}
{%- set loop_messages = messages[1:] %}
{%- else %}
{%- set loop_messages = messages %}
{%- endif %}
{%- if not tools is defined %}
{%- set tools = none %}
{%- endif %}
{%- set user_messages = loop_messages | selectattr("role", "equalto", "user") | list %}

{#- This block checks for alternating user/assistant messages, skipping tool calling messages #}
{%- set ns = namespace() %}
{%- set ns.index = 0 %}
{%- for message in loop_messages %}
{%- if not (message.role == "tool" or message.role == "tool_results" or (message.tool_calls is defined and message.tool_calls is not none)) %}
{%- if (message["role"] == "user") != (ns.index % 2 == 0) %}
{{- raise_exception("After the optional system message, conversation roles must alternate user/assistant/user/assistant/...") }}
{%- endif %}
{%- set ns.index = ns.index + 1 %}
{%- endif %}
{%- endfor %}

{{- bos_token }}
{%- for message in loop_messages %}
{%- if message["role"] == "user" %}
{%- if tools is not none and (message == user_messages[-1]) %}
{{- "[AVAILABLE_TOOLS][" }}
{%- for tool in tools %}
{%- set tool = tool.function %}
{{- '{"type": "function", "function": {' }}
{%- for key, val in tool.items() if key != "return" %}
{%- if val is string %}
{{- '"' + key + '": "' + val + '"' }}
{%- else %}
{{- '"' + key + '": ' + val|tojson }}
{%- endif %}
{%- if not loop.last %}
{{- ", " }}
{%- endif %}
{%- endfor %}
{{- "}}" }}
{%- if not loop.last %}
{{- ", " }}
{%- else %}
{{- "]" }}
{%- endif %}
{%- endfor %}
{{- "[/AVAILABLE_TOOLS]" }}
{%- endif %}
{%- if loop.last and system_message is defined %}
{{- "[INST]" + system_message + "\n\n" + message["content"] + "[/INST]" }}
{%- else %}
{{- "[INST]" + message["content"] + "[/INST]" }}
{%- endif %}
{%- elif (message.tool_calls is defined and message.tool_calls is not none) %}
{{- "[TOOL_CALLS][" }}
{%- for tool_call in message.tool_calls %}
{%- set out = tool_call.function|tojson %}
{{- out[:-1] }}
{%- if not tool_call.id is defined or tool_call.id|length != 9 %}
{{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }}
{%- endif %}
{{- ', "id": "' + tool_call.id + '"}' }}
{%- if not loop.last %}
{{- ", " }}
{%- else %}
{{- "]" + eos_token }}
{%- endif %}
{%- endfor %}
{%- elif message["role"] == "assistant" %}
{{- message["content"] + eos_token}}
{%- elif message["role"] == "tool_results" or message["role"] == "tool" %}
{%- if message.content is defined and message.content.content is defined %}
{%- set content = message.content.content %}
{%- else %}
{%- set content = message.content %}
{%- endif %}
{{- '[TOOL_RESULTS]{"content": ' + content|string + ", " }}
{%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %}
{{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }}
{%- endif %}
{{- '"call_id": "' + message.tool_call_id + '"}[/TOOL_RESULTS]' }}
{%- else %}
{{- raise_exception("Only user and assistant roles are supported, with the exception of an initial optional system message!") }}
{%- endif %}
{%- endfor %}
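The template can be rendered to a plain string without running the model, which is the easiest way to see what it produces. A small sketch, assuming a transformers version recent enough to pick up chat_template.jinja from the repo; the message contents are illustrative.

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("FinaPolat/Mistral-Nemo-Instruct-2407_openED")

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# Per the template, the system message is folded into the final user turn, so this prints:
# <s>[INST]You are a concise assistant.\n\nWhat is 2 + 2?[/INST]
print(tokenizer.apply_chat_template(messages, tokenize=False))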
config.json: new file, 29 lines
@@ -0,0 +1,29 @@
{
  "architectures": [
    "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "torch_dtype": "bfloat16",
  "eos_token_id": 2,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 131072,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 40,
  "num_key_value_heads": 8,
  "pad_token_id": 10,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "unsloth_fixed": true,
  "unsloth_version": "2026.4.4",
  "use_cache": true,
  "vocab_size": 131072
}
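These shapes pin down the parameter count. A quick back-of-the-envelope check (untied embeddings, per "tie_word_embeddings": false) reproduces the total_size recorded in model.safetensors.index.json below:

# Parameter count from the config values above; bf16 stores 2 bytes per parameter,
# so the byte total should match "total_size" in model.safetensors.index.json.
hidden, inter, vocab = 5120, 14336, 131072
heads, kv_heads, head_dim, layers = 32, 8, 128, 40

attn = hidden * heads * head_dim * 2        # q_proj + o_proj
attn += hidden * kv_heads * head_dim * 2    # k_proj + v_proj
mlp = hidden * inter * 3                    # gate_proj, up_proj, down_proj
norms = 2 * hidden                          # input + post-attention RMSNorm

per_layer = attn + mlp + norms
total = layers * per_layer + 2 * vocab * hidden + hidden  # + embeddings, lm_head, final norm

print(total)        # 12,247,782,400 parameters (~12.2B)
print(total * 2)    # 24,495,564,800 bytes in bfloat16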
model-00001-of-00005.safetensors: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a757f8f31a6276cc8dd9f09fdcdbe162f3c9dd8951361692b8607ea6996fe644
size 4865522496
model-00002-of-00005.safetensors: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:55912f12011ef4027a0fd7d1ceefeeb53dc907a37b9e7e432c96bdb964ef606f
size 4907529424
model-00003-of-00005.safetensors: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2d6e1b171a1f13a2ebfa809659e4a758851ca000fc36c74bab84f671feb0fbf5
size 4907529456
model-00004-of-00005.safetensors: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0039cc817974588d5f04fed13f029211f62b522e6d5cdb60a4b132a706482c6f
size 4907529456
model-00005-of-00005.safetensors: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c275e6e3cac13fcebed6a722d2fa44217569a42acfe9477be7c3ce4aa3b25716
size 4907496272
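Each of these three-line files is a Git LFS pointer: the real shard is fetched on checkout and can be verified against the pointer's oid and size. A small sketch, with the path and expected values copied from the first shard above; substitute the shard you actually downloaded.

import hashlib, os

path = "model-00001-of-00005.safetensors"
expected_oid = "a757f8f31a6276cc8dd9f09fdcdbe162f3c9dd8951361692b8607ea6996fe644"
expected_size = 4865522496

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches its LFS pointer")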
model.safetensors.index.json: new file, 370 lines
@@ -0,0 +1,370 @@
{
  "metadata": {
    "total_size": 24495564800
  },
  "weight_map": {
    "lm_head.weight": "model-00005-of-00005.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.norm.weight": "model-00005-of-00005.safetensors"
  }
}
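The weight_map tells a loader which shard holds each tensor, so a single tensor can be read without touching the other shards. A small sketch, assuming the index and shard files sit in the working directory and the safetensors package is installed; the tensor name is one of the keys listed above.

import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.self_attn.q_proj.weight"
shard = index["weight_map"][name]            # -> "model-00001-of-00005.safetensors"

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))            # expect (4096, 5120) for q_proj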
special_tokens_map.json: new file, 30 lines
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
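These token strings should line up with the ids declared in config.json (bos_token_id 1, eos_token_id 2, pad_token_id 10). A quick check, assuming the repo's tokenizer can be loaded:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("FinaPolat/Mistral-Nemo-Instruct-2407_openED")
# Should print [1, 2, 10] if the tokenizer vocabulary follows the ids in config.json.
print(tok.convert_tokens_to_ids(["<s>", "</s>", "<pad>"]))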
tokenizer.json: new file, 3 lines (Git LFS pointer)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b0240ce510f08e6c2041724e9043e33be9d251d1e4a4d94eb68cd47b954b61d2
size 17078292
tokenizer_config.json: new file, 8021 lines (diff not shown: file too large)