Initialize project; model provided by the ModelHub XC community

Model: shanearora/Flex-reddit-2x7B-1T
Source: Original Platform
ModelHub XC
2026-04-25 16:02:03 +08:00
commit 03ca567b46
21 changed files with 301342 additions and 0 deletions

35
.gitattributes vendored Normal file

@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
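The rules above route large binary artifacts (checkpoints, archives, tensorboard logs) through Git LFS, so a plain clone without git-lfs only retrieves small pointer files for them. As a rough illustration, here is a minimal Python sketch (not part of the repository; it approximates the .gitattributes glob matching on basenames only, with a subset of the patterns) of which files these rules would send through LFS:

```python
from fnmatch import fnmatch

# Subset of the LFS patterns from the .gitattributes rules above.
LFS_PATTERNS = [
    "*.bin", "*.safetensors", "*.pt", "*.pth", "*.onnx",
    "*.gz", "*.zip", "*.tar", "*.tar.*", "*tfevents*",
]

def is_lfs_tracked(path: str) -> bool:
    """True if the basename matches any of the LFS-tracked patterns."""
    name = path.rsplit("/", 1)[-1]
    return any(fnmatch(name, pat) for pat in LFS_PATTERNS)

# Hypothetical file names, purely to illustrate the matching.
for f in ["model-00001-of-00010.safetensors", "config.json", "merges.txt"]:
    print(f, "->", "LFS" if is_lfs_tracked(f) else "regular git")
```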

BIN
FlexOlmo_Logo.png Normal file

Binary file not shown (new image, 17 KiB).

6
README.md Normal file

@@ -0,0 +1,6 @@
---
library_name: transformers
tags: []
---
Test repo

32
config.json Normal file

@@ -0,0 +1,32 @@
{
"architectures": [
"FlexOlmoForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"clip_qkv": null,
"eos_token_id": 100257,
"hidden_act": "silu",
"hidden_size": 4096,
"initializer_range": 0.02,
"intermediate_size": 11008,
"max_position_embeddings": 4096,
"model_type": "flex_olmo",
"norm_topk_prob": false,
"num_attention_heads": 32,
"num_experts": 2,
"num_experts_per_tok": 2,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"output_router_logits": false,
"pad_token_id": 100277,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 500000,
"router_aux_loss_coef": 0.01,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.50.0.dev0",
"use_cache": true,
"vocab_size": 100352
}
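For reference, a minimal sketch of inspecting this configuration and loading the model with the transformers library. Assumptions: the repository id is the one in the commit header above, the installed transformers build is recent enough to ship the flex_olmo architecture (this config was written by 4.50.0.dev0), and there is enough memory for roughly 46 GB of float32 weights.

```python
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

model_id = "shanearora/Flex-reddit-2x7B-1T"  # from the commit metadata above

config = AutoConfig.from_pretrained(model_id)
print(config.model_type)                               # "flex_olmo"
print(config.num_experts, config.num_experts_per_tok)  # 2 experts, both routed per token

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto")
```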

4
generation_config.json Normal file

@@ -0,0 +1,4 @@
{
"_from_model_config": true,
"transformers_version": "4.50.0.dev0"
}

100001
merges.txt Normal file

File diff suppressed because it is too large.

3
model-00001-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7cd9ff2e78ea0791c807b280cb3729eb8fcf7b47216f8a745da34f45fd3e72cb
size 4974711408
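The ten model-*.safetensors shards referenced by the index further below are each stored as a Git LFS pointer like the one above: three lines giving the spec version, the SHA-256 of the payload, and its size in bytes. A small sketch of parsing such a pointer (hypothetical helper, not part of the repository):

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its version / oid / size fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),  # needs Python 3.9+
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7cd9ff2e78ea0791c807b280cb3729eb8fcf7b47216f8a745da34f45fd3e72cb
size 4974711408"""

info = parse_lfs_pointer(pointer)
print(f"{info['size_bytes'] / 1e9:.2f} GB, sha256 {info['sha256'][:12]}...")
```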

3
model-00002-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dc8411373d2305988fad6ba556477354503df5a16d7db284ea989ef0b9cb4fbd
size 4974778640

3
model-00003-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ad96a9b1850c2e26c0db6e433f018a3c2055f5b7a2a61413249a6334b1c1b31
size 4840593536

3
model-00004-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:83ddfd565fa64859c20572f264b4105b6748842a9710911e4a123491a3040e97
size 4861598344

3
model-00005-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1997b28011840c0153f1ad5ef8d3c3448290a13c653435a302e95472f829af1a
size 4953806944

3
model-00006-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3e57f842036cd31094cc6e80e4c3ea7aa501265a4c34965f14fae59bf67ccc69
size 4861565336

3
model-00007-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:51f0f98bc42a9d022b45dd0af8b675b10bd477a9f6d66d9c61ff4b94bac2af35
size 4861598344

3
model-00008-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:01c2ada616e898d39c2494178d4e1dbd52a4b64ffe0b2fbeae62c25435e1ca75
size 4953773936

3
model-00009-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3c808603de38189b3e9e039ed00d69a1453eef14aa08f9710594c86246cc32fb
size 4861598344

3
model-00010-of-00010.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2c5a8f308ebfc5091793525e60636f890428949874b12e3251b0cbacae4a6c19
size 2365637568

490
model.safetensors.index.json Normal file

@@ -0,0 +1,490 @@
{
"metadata": {
"total_size": 46509604864
},
"weight_map": {
"lm_head.weight": "model-00010-of-00010.safetensors",
"model.embed_tokens.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.0.down_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.0.gate_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.0.up_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.1.down_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.1.gate_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.experts.1.up_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.mlp.gate.weight": "model-00001-of-00010.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
"model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.k_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.q_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.0.down_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.0.gate_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.0.up_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.1.down_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.1.gate_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.experts.1.up_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.mlp.gate.weight": "model-00001-of-00010.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
"model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.k_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.q_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.10.mlp.experts.0.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.experts.0.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.experts.0.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.experts.1.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.experts.1.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.experts.1.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.mlp.gate.weight": "model-00004-of-00010.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.10.post_feedforward_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.k_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.q_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.0.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.0.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.0.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.1.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.1.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.experts.1.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.mlp.gate.weight": "model-00004-of-00010.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.11.post_feedforward_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.k_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.q_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.0.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.0.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.0.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.1.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.1.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.experts.1.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.mlp.gate.weight": "model-00004-of-00010.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.12.post_feedforward_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.k_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.q_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.13.mlp.experts.0.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.13.mlp.experts.0.gate_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.13.mlp.experts.0.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.13.mlp.experts.1.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.13.mlp.experts.1.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.13.mlp.experts.1.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.13.mlp.gate.weight": "model-00004-of-00010.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.13.post_feedforward_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.13.self_attn.k_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.13.self_attn.q_norm.weight": "model-00004-of-00010.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.14.mlp.experts.0.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.experts.0.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.experts.0.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.experts.1.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.experts.1.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.experts.1.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.mlp.gate.weight": "model-00005-of-00010.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.14.post_feedforward_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.k_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.q_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.0.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.0.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.0.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.1.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.1.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.experts.1.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.mlp.gate.weight": "model-00005-of-00010.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.15.post_feedforward_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.k_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.q_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.0.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.0.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.0.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.1.down_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.1.gate_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.experts.1.up_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.mlp.gate.weight": "model-00005-of-00010.safetensors",
"model.layers.16.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.16.post_feedforward_layernorm.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.k_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.q_norm.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.16.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
"model.layers.17.mlp.experts.0.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.experts.0.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.experts.0.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.experts.1.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.experts.1.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.experts.1.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.mlp.gate.weight": "model-00006-of-00010.safetensors",
"model.layers.17.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.17.post_feedforward_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.k_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.q_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.17.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.0.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.0.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.0.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.1.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.1.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.experts.1.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.mlp.gate.weight": "model-00006-of-00010.safetensors",
"model.layers.18.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.18.post_feedforward_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.k_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.q_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.18.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.0.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.0.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.0.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.1.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.1.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.experts.1.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.mlp.gate.weight": "model-00006-of-00010.safetensors",
"model.layers.19.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.19.post_feedforward_layernorm.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.k_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.q_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.19.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.2.mlp.experts.0.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.2.mlp.experts.0.gate_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.2.mlp.experts.0.up_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.2.mlp.experts.1.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.2.mlp.experts.1.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.2.mlp.experts.1.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.2.mlp.gate.weight": "model-00001-of-00010.safetensors",
"model.layers.2.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.2.post_feedforward_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.2.self_attn.k_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.2.self_attn.q_norm.weight": "model-00001-of-00010.safetensors",
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
"model.layers.20.mlp.experts.0.down_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.mlp.experts.0.gate_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.mlp.experts.0.up_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.mlp.experts.1.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.20.mlp.experts.1.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.20.mlp.experts.1.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.20.mlp.gate.weight": "model-00006-of-00010.safetensors",
"model.layers.20.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.20.post_feedforward_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.20.self_attn.k_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.20.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.self_attn.q_norm.weight": "model-00006-of-00010.safetensors",
"model.layers.20.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.20.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
"model.layers.21.mlp.experts.0.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.experts.0.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.experts.0.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.experts.1.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.experts.1.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.experts.1.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.mlp.gate.weight": "model-00007-of-00010.safetensors",
"model.layers.21.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.21.post_feedforward_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.k_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.q_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.21.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.0.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.0.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.0.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.1.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.1.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.experts.1.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.mlp.gate.weight": "model-00007-of-00010.safetensors",
"model.layers.22.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.22.post_feedforward_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.k_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.q_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.22.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.0.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.0.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.0.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.1.down_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.1.gate_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.experts.1.up_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.mlp.gate.weight": "model-00007-of-00010.safetensors",
"model.layers.23.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.23.post_feedforward_layernorm.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.k_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.q_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.23.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.24.mlp.experts.0.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.experts.0.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.experts.0.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.experts.1.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.experts.1.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.experts.1.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.24.mlp.gate.weight": "model-00007-of-00010.safetensors",
"model.layers.24.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.24.post_feedforward_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.24.self_attn.k_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.24.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.24.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.24.self_attn.q_norm.weight": "model-00007-of-00010.safetensors",
"model.layers.24.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.24.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
"model.layers.25.mlp.experts.0.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.experts.0.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.experts.0.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.experts.1.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.experts.1.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.experts.1.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.mlp.gate.weight": "model-00008-of-00010.safetensors",
"model.layers.25.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.25.post_feedforward_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.k_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.q_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.25.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.0.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.0.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.0.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.1.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.1.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.experts.1.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.mlp.gate.weight": "model-00008-of-00010.safetensors",
"model.layers.26.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.26.post_feedforward_layernorm.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.k_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.q_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.26.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.experts.0.down_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.experts.0.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.experts.0.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.experts.1.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.27.mlp.experts.1.gate_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.experts.1.up_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.mlp.gate.weight": "model-00008-of-00010.safetensors",
"model.layers.27.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.27.post_feedforward_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.27.self_attn.k_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.27.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.self_attn.q_norm.weight": "model-00008-of-00010.safetensors",
"model.layers.27.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.27.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
"model.layers.28.mlp.experts.0.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.experts.0.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.experts.0.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.experts.1.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.experts.1.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.experts.1.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.mlp.gate.weight": "model-00009-of-00010.safetensors",
"model.layers.28.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.28.post_feedforward_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.k_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.q_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.28.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.0.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.0.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.0.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.1.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.1.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.experts.1.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.mlp.gate.weight": "model-00009-of-00010.safetensors",
"model.layers.29.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.29.post_feedforward_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.k_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.q_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.29.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.3.mlp.experts.0.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.experts.0.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.experts.0.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.experts.1.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.experts.1.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.experts.1.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.mlp.gate.weight": "model-00002-of-00010.safetensors",
"model.layers.3.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.3.post_feedforward_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.k_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.q_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.3.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.30.mlp.experts.0.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.experts.0.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.experts.0.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.experts.1.down_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.experts.1.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.experts.1.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.mlp.gate.weight": "model-00009-of-00010.safetensors",
"model.layers.30.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.30.post_feedforward_layernorm.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.k_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.q_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.30.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.mlp.experts.0.down_proj.weight": "model-00010-of-00010.safetensors",
"model.layers.31.mlp.experts.0.gate_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.mlp.experts.0.up_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.mlp.experts.1.down_proj.weight": "model-00010-of-00010.safetensors",
"model.layers.31.mlp.experts.1.gate_proj.weight": "model-00010-of-00010.safetensors",
"model.layers.31.mlp.experts.1.up_proj.weight": "model-00010-of-00010.safetensors",
"model.layers.31.mlp.gate.weight": "model-00009-of-00010.safetensors",
"model.layers.31.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
"model.layers.31.post_feedforward_layernorm.weight": "model-00010-of-00010.safetensors",
"model.layers.31.self_attn.k_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.31.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.self_attn.q_norm.weight": "model-00009-of-00010.safetensors",
"model.layers.31.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.31.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
"model.layers.4.mlp.experts.0.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.experts.0.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.experts.0.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.experts.1.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.experts.1.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.experts.1.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.mlp.gate.weight": "model-00002-of-00010.safetensors",
"model.layers.4.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.4.post_feedforward_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.k_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.q_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.4.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.0.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.0.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.0.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.1.down_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.1.gate_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.experts.1.up_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.mlp.gate.weight": "model-00002-of-00010.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.5.post_feedforward_layernorm.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.k_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.q_norm.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.6.mlp.experts.0.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.experts.0.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.experts.0.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.experts.1.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.experts.1.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.experts.1.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.mlp.gate.weight": "model-00003-of-00010.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.6.post_feedforward_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.6.self_attn.k_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.6.self_attn.q_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
"model.layers.7.mlp.experts.0.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.experts.0.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.experts.0.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.experts.1.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.experts.1.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.experts.1.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.mlp.gate.weight": "model-00003-of-00010.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.7.post_feedforward_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.k_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.q_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.0.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.0.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.0.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.1.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.1.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.experts.1.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.mlp.gate.weight": "model-00003-of-00010.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.8.post_feedforward_layernorm.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.k_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.q_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.mlp.experts.0.down_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.mlp.experts.0.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.mlp.experts.0.up_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.mlp.experts.1.down_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.9.mlp.experts.1.gate_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.mlp.experts.1.up_proj.weight": "model-00004-of-00010.safetensors",
"model.layers.9.mlp.gate.weight": "model-00003-of-00010.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.9.post_feedforward_layernorm.weight": "model-00004-of-00010.safetensors",
"model.layers.9.self_attn.k_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.self_attn.q_norm.weight": "model-00003-of-00010.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
"model.norm.weight": "model-00010-of-00010.safetensors"
}
}
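The weight_map above tells a loader which shard holds each tensor. A minimal sketch of resolving one tensor with the safetensors library, assuming the index and the ten shards have already been downloaded into the working directory:

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.mlp.gate.weight"    # any key from the weight_map above
shard = index["weight_map"][name]          # -> "model-00001-of-00010.safetensors"

with safe_open(shard, framework="pt") as st:
    tensor = st.get_tensor(name)
print(name, tuple(tensor.shape))
```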

30
special_tokens_map.json Normal file

@@ -0,0 +1,30 @@
{
"bos_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<|pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"unk_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}

200524
tokenizer.json Normal file

File diff suppressed because it is too large.

189
tokenizer_config.json Normal file

@@ -0,0 +1,189 @@
{
"add_prefix_space": false,
"added_tokens_decoder": {
"100256": {
"content": "<|extra_id_0|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100257": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100258": {
"content": "<|fim_prefix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100259": {
"content": "<|fim_middle|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100260": {
"content": "<|fim_suffix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100261": {
"content": "|||PHONE_NUMBER|||",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100262": {
"content": "|||EMAIL_ADDRESS|||",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100263": {
"content": "|||IP_ADDRESS|||",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100264": {
"content": "<|im_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100265": {
"content": "<|im_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100266": {
"content": "<|extra_id_1|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100267": {
"content": "<|extra_id_2|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100268": {
"content": "<|extra_id_3|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100269": {
"content": "<|extra_id_4|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100270": {
"content": "<|extra_id_5|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100271": {
"content": "<|extra_id_6|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100272": {
"content": "<|extra_id_7|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100273": {
"content": "<|extra_id_8|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100274": {
"content": "<|extra_id_9|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100275": {
"content": "<|extra_id_10|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"100276": {
"content": "<|endofprompt|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"100277": {
"content": "<|pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"bos_token": "<|endoftext|>",
"chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
"clean_up_tokenization_spaces": false,
"eos_token": "<|endoftext|>",
"model_max_length": 8192,
"pad_token": "<|pad|>",
"tokenizer_class": "GPT2Tokenizer",
"unk_token": "<|endoftext|>"
}
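The chat_template above wraps each message in ChatML-style <|im_start|>/<|im_end|> markers and optionally appends an assistant header. A short sketch of applying it through transformers (model id assumed from the commit header above):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("shanearora/Flex-reddit-2x7B-1T")

messages = [{"role": "user", "content": "Hello!"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```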

1
vocab.json Normal file

File diff suppressed because one or more lines are too long