Delete model-00006-of-00006.safetensors

ai-modelscope
2025-04-24 09:15:51 +08:00
parent e7c78f0844
commit cac6fa7d02
16 changed files with 383 additions and 359 deletions

.gitattributes

@@ -45,5 +45,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
model-00001-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00002-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00003-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00004-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00005-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00006-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
model-00007-of-00007.safetensors filter=lfs diff=lfs merge=lfs -text
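
The patterns above tell Git LFS which paths to store as pointer files rather than regular blobs; this commit adds the seven new shard names alongside the existing wildcard rules. A rough local check of which filenames the rules would catch is sketched below, using Python's fnmatch as an approximation of gitattributes glob matching (gitattributes has its own matching semantics, so this is only illustrative, and the pattern list is a subset copied from the diff):

import fnmatch

# Subset of the LFS patterns from .gitattributes (copied from the diff above).
lfs_patterns = [
    "*.zst",
    "tokenizer.json",
    "model-00001-of-00007.safetensors",
    "model-00007-of-00007.safetensors",
]

def tracked_by_lfs(filename):
    # True if the filename matches any tracked pattern (approximate check only).
    return any(fnmatch.fnmatch(filename, pattern) for pattern in lfs_patterns)

print(tracked_by_lfs("model-00007-of-00007.safetensors"))  # True
print(tracked_by_lfs("config.json"))                       # False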

generation_config.json

@@ -0,0 +1,14 @@
{
"bos_token_id": 151643,
"do_sample": true,
"eos_token_id": [
151645,
151643
],
"pad_token_id": 151643,
"repetition_penalty": 1.05,
"temperature": 0.7,
"top_k": 20,
"top_p": 0.8,
"transformers_version": "4.48.3"
}
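
The new generation_config.json records the default sampling settings (sampling enabled with temperature 0.7, top_p 0.8, top_k 20, and a 1.05 repetition penalty) plus the special token ids. A minimal sketch of turning these values into a transformers GenerationConfig object, assuming the transformers package is installed; the values are copied from the file above, and passing the object to model.generate is the usual next step:

from transformers import GenerationConfig

# Values copied from the generation_config.json added in this commit.
gen_config = GenerationConfig(
    bos_token_id=151643,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
    do_sample=True,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    repetition_penalty=1.05,
)

# Typically passed to generation as: model.generate(**inputs, generation_config=gen_config)
print(gen_config)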


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5c41870d3070f5172926cecbd386ed42ebe60d573b92eaf9d291bb125b2b5653
size 4976686992


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3da3999265e556c6a17262c581cae5fd464c0662df761a6ce748f98d297db2cd
size 4976687216


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:55b37f472e62bebab20192719e5c6f3902f7688934cd6ae8bf88c07d71e6ba69
size 4778621952


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8b24f88c03736c71ff99f774d7ce7ce60f63f878602519d3d9f6d4d578ec401b
size 4778622352


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:39d5faaa4335054abdf79c8e38a8763cff487e83c612183bac175bde1a296a3a
size 4932743600


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9ca39c4e462ce3107dbeb0cfa352f85ad82b8be63b0d8a9cfec9db1cf299b399
size 4932743960


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:64052ef732913620f8339b733fa715c36b72400f94ef061143df7cd27f557c07
size 4932743624


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:436e19556290a63ce945cd9791ec84ca74ca51e2a14e440cfc976065f9110145
size 4932743992


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f3a17880a1af3081ed7524385680cf43ceae52171df61f59025a5de5004eed6b
size 4998851880


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae1f536219974c5425bad02de9fba2a50e00bacf7436320a6314f8e219fc7039
size 4998852296


@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:64e0097c87f2e83e43b370722d54922ace3fc709b6d8989ce4a016e6c2a99afd
size 3662864928


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a5a2610528e42e31068d608a0969aaa4b06fb39c9731c9473aa4c65887e6df11
size 3662865184


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e491ea771cda28076d0f4421e16f605135c29a7821d078894b43342f3be64bdd
size 2179989632
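
Each of the shard files above is stored as a Git LFS pointer: a three-line text file giving the spec version, the sha256 object id, and the byte size of the real payload. The old six-shard pointers are deleted and seven new pointers are added. Below is a minimal sketch of checking a downloaded shard against its pointer; the file paths and helper names are illustrative assumptions:

import hashlib

def read_lfs_pointer(pointer_path):
    # Parse the three "key value" lines of an LFS pointer file.
    fields = dict(line.strip().split(" ", 1) for line in open(pointer_path))
    oid = fields["oid"].split(":", 1)[1]   # "sha256:<hex>" -> "<hex>"
    size = int(fields["size"])
    return oid, size

def verify_shard(shard_path, pointer_path, chunk_size=1 << 20):
    # Recompute the sha256 digest and byte count of the shard and compare with the pointer.
    oid, size = read_lfs_pointer(pointer_path)
    digest = hashlib.sha256()
    total = 0
    with open(shard_path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            digest.update(block)
            total += len(block)
    return digest.hexdigest() == oid and total == size

# Hypothetical usage once a shard has been downloaded next to its pointer file:
# verify_shard("model-00007-of-00007.safetensors", "model-00007-of-00007.safetensors.pointer")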


@@ -1,345 +1,346 @@
{
"metadata": {
"total_size": 28282476544
"total_size": 30462466048
},
"weight_map": {
"embed_tokens.weight": "model-00001-of-00006.safetensors",
"layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.0.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
"layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
"layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
"layers.0.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
"layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.1.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
"layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
"layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
"layers.1.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
"layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
"layers.10.input_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.10.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.10.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
"layers.10.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
"layers.10.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
"layers.10.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
"layers.10.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.input_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.11.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.11.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
"layers.11.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
"layers.11.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
"layers.11.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
"layers.11.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.input_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.12.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.12.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
"layers.12.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
"layers.12.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
"layers.12.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
"layers.12.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
"layers.13.input_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.13.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
"layers.13.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.13.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.13.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.13.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
"layers.13.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
"layers.13.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
"layers.13.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
"layers.13.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
"layers.13.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
"layers.13.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
"layers.14.input_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.14.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.14.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
"layers.14.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
"layers.14.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
"layers.14.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
"layers.14.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.input_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.15.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.15.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
"layers.15.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
"layers.15.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
"layers.15.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
"layers.15.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.input_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.16.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.16.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
"layers.16.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
"layers.16.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
"layers.16.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
"layers.16.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.input_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.17.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
"layers.17.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
"layers.17.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
"layers.17.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
"layers.17.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
"layers.17.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.18.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.18.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.18.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
"layers.18.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
"layers.18.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
"layers.18.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
"layers.18.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
"layers.19.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.19.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.19.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.19.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.19.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.19.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.19.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
"layers.2.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
"layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
"layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
"layers.2.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
"layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
"layers.20.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.20.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.20.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.20.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.20.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.20.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.20.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.21.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.21.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.21.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.21.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.21.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.21.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.22.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.22.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.22.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.22.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.22.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.22.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.input_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.23.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
"layers.23.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.23.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.23.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.23.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.23.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.24.input_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.24.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
"layers.24.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"layers.24.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"layers.24.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.24.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
"layers.24.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
"layers.24.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"layers.24.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
"layers.24.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
"layers.24.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
"layers.24.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
"layers.25.input_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.25.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.25.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
"layers.25.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
"layers.25.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
"layers.25.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
"layers.25.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.input_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.26.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.26.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
"layers.26.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
"layers.26.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
"layers.26.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
"layers.26.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.input_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.27.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"layers.27.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
"layers.27.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
"layers.27.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
"layers.27.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
"layers.27.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
"layers.3.input_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.3.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.3.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.3.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.3.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.3.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.3.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.input_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.4.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.4.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.4.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.4.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.4.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.4.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.input_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.5.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.5.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.5.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.5.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.5.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.5.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.6.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.6.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.6.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.6.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.6.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"layers.7.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.7.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.8.input_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.8.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
"layers.8.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.8.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
"layers.8.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.8.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"layers.8.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"layers.8.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"layers.9.input_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.9.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
"layers.9.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
"layers.9.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
"layers.9.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
"layers.9.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
"layers.9.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
"norm.weight": "model-00006-of-00006.safetensors"
"lm_head.weight": "model-00007-of-00007.safetensors",
"model.embed_tokens.weight": "model-00001-of-00007.safetensors",
"model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.13.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.13.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.13.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.18.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.18.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.18.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.24.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.3.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.3.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.3.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.8.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.8.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.8.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.8.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.norm.weight": "model-00006-of-00007.safetensors"
}
}
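
The rewritten index prefixes every weight_map key with "model.", adds an "lm_head.weight" entry on the new seventh shard, and raises total_size from 28282476544 to 30462466048 bytes, roughly the size of that extra shard. The weight_map is what a loader uses to find which shard holds each parameter; a minimal sketch of looking up and reading one tensor follows, assuming the safetensors and torch packages and a local checkout containing the index and shards (paths are illustrative):

import json
from safetensors import safe_open

# Load the shard index produced in this commit.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.self_attn.q_proj.weight"
shard_file = index["weight_map"][name]          # -> "model-00001-of-00007.safetensors"

# Read just that tensor from its shard without loading the whole file.
with safe_open(shard_file, framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, "->", shard_file, tuple(tensor.shape))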