Initialize project; model provided by the ModelHub XC community

Model: PJMixers-Archive/LLaMa-1-MedicWizard-7B
Source: Original Platform
ModelHub XC
2026-04-11 02:11:57 +08:00
commit 21d854decd
24 changed files with 823 additions and 0 deletions

34
.gitattributes vendored Normal file

@@ -0,0 +1,34 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
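For orientation, a minimal Python sketch (not part of the commit) showing how these LFS filter rules split the files added here into LFS-tracked blobs and regular Git blobs. The patterns and file names are taken from this commit; the matching itself is only illustrative.

```python
from fnmatch import fnmatch

# A few of the extension rules from the .gitattributes above.
lfs_patterns = ["*.bin", "*.safetensors", "*.model", "*.pt", "*.ckpt"]

# A handful of files added in this commit.
files = [
    "config.json",
    "tokenizer.model",
    "model-00001-of-00007.safetensors",
    "pytorch_model-00001-of-00007.bin",
]

for path in files:
    via_lfs = any(fnmatch(path, pat) for pat in lfs_patterns)
    print(f"{path}: {'Git LFS pointer' if via_lfs else 'regular blob'}")
```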

12
README.md Normal file

@@ -0,0 +1,12 @@
---
tags:
- llama
- alpaca
---
# MedicWizard-7B Recipe
A 50%/50% weight merge of WizardLM-Uncensored-7B and MedAlpaca-7B.
## Original Models
- WizardLM-Uncensored-7B: https://huggingface.co/ehartford/WizardLM-7B-Uncensored
- MedAlpaca-7B: https://huggingface.co/medalpaca/medalpaca-7b
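As a rough sketch of what the recipe above could mean in practice, the snippet below does a simple 50/50 linear average of the two parent checkpoints with PyTorch and transformers. This is an assumption for illustration, not the authors' documented procedure, and the output directory is hypothetical.

```python
import torch
from transformers import AutoModelForCausalLM

# Load both parents in float16 (model IDs as linked above).
wizard = AutoModelForCausalLM.from_pretrained(
    "ehartford/WizardLM-7B-Uncensored", torch_dtype=torch.float16)
medalpaca = AutoModelForCausalLM.from_pretrained(
    "medalpaca/medalpaca-7b", torch_dtype=torch.float16)

w_sd, m_sd = wizard.state_dict(), medalpaca.state_dict()
merged = {}
for name, w in w_sd.items():
    m = m_sd.get(name)
    # 50/50 average where both models have the tensor with the same shape;
    # otherwise keep the WizardLM tensor (e.g. vocab-size mismatches).
    if m is not None and m.shape == w.shape:
        merged[name] = ((w.float() + m.float()) / 2).to(w.dtype)
    else:
        merged[name] = w

wizard.load_state_dict(merged)
wizard.save_pretrained("MedicWizard-7B")  # hypothetical output directory
```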

3
added_tokens.json Normal file

@@ -0,0 +1,3 @@
{
"[PAD]": 32000
}

22
config.json Normal file

@@ -0,0 +1,22 @@
{
"architectures": [
"LlamaForCausalLM"
],
"bos_token_id": 1,
"eos_token_id": 2,
"hidden_act": "silu",
"hidden_size": 4096,
"initializer_range": 0.02,
"intermediate_size": 11008,
"max_position_embeddings": 2048,
"model_type": "llama",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"pad_token_id": 0,
"rms_norm_eps": 1e-06,
"tie_word_embeddings": false,
"torch_dtype": "float16",
"transformers_version": "4.28.1",
"use_cache": true,
"vocab_size": 32001
}
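For context, the numbers in config.json pin down the standard LLaMA-1 7B shape. A back-of-the-envelope count of the parameters they imply (assuming the usual LLaMA layer layout) lines up with the ~13.5 GB total_size reported by the shard indexes further down.

```python
# Parameter count implied by config.json (standard LLaMA-1 layout,
# untied embeddings, RMSNorm weights only).
hidden, inter, layers, vocab = 4096, 11008, 32, 32001

embeddings = vocab * hidden            # model.embed_tokens
lm_head    = vocab * hidden            # lm_head (tie_word_embeddings = false)
attention  = 4 * hidden * hidden       # q/k/v/o projections per layer
mlp        = 3 * hidden * inter        # gate/up/down projections per layer
norms      = 2 * hidden                # input + post-attention RMSNorm per layer
per_layer  = attention + mlp + norms

total = embeddings + lm_head + layers * per_layer + hidden  # + final model.norm
print(f"{total:,} params, ~{total * 2 / 1e9:.1f} GB in float16")
# -> 6,738,423,808 params, ~13.5 GB in float16
```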

7
generation_config.json Normal file

@@ -0,0 +1,7 @@
{
"_from_model_config": true,
"bos_token_id": 1,
"eos_token_id": 2,
"pad_token_id": 0,
"transformers_version": "4.28.1"
}

3
model-00001-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f8e796829ce190b8d82f7f9a435601094f48f2540a55a0b8a8af6ce7181a9d4
size 2015442712
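Each weight shard in this commit is checked in as a Git LFS pointer like the three lines above (spec version, sha256 oid, byte size). Below is a small sketch for parsing such a pointer and verifying a downloaded blob against it; the file paths are hypothetical.

```python
import hashlib

def parse_lfs_pointer(text: str) -> dict:
    # Pointer format: "version <url>", "oid sha256:<hex>", "size <bytes>".
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].split(":", 1)[1], "size": int(fields["size"])}

def blob_matches(blob_path: str, pointer: dict) -> bool:
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == pointer["oid"]

# Hypothetical paths: pointer text from the repo, actual blob from LFS storage.
ptr = parse_lfs_pointer(open("model-00001-of-00007.safetensors.pointer").read())
print(ptr["size"], blob_matches("model-00001-of-00007.safetensors", ptr))
```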

3
model-00002-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:929e54e54bfb05825dd23d06aea75b1f5f8870bc57c34b5b763ec8499b72f272
size 2023839960

3
model-00003-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6eeb45f69daec2861f8e7c68d71583a8ae51e3b35a1b691e373d1cf2c8d881fd
size 2023840000

3
model-00004-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2c62f5ae1cf990457305502863c850961c9c37c28ec30c7c900f54c82831c513
size 2023840008

3
model-00005-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cbfd7b9356bde8334f0faa8179699b8bbe5e782c2d88d5b1f98e0cfa7b6e3254
size 2023840008

3
model-00006-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c428326a7ae33d0f92efac679eef40d38ba531861141b54711a7dc44ef6e0fab
size 2023840008

3
model-00007-of-00007.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6e6d7dcc7b9633695c03d149d0316b2ff620f05004d93f382867862868fac53d
size 1342246144

330
model.safetensors.index.json Normal file

@@ -0,0 +1,330 @@
{
"metadata": {
"total_size": 13476851712
},
"weight_map": {
"lm_head.weight": "model-00007-of-00007.safetensors",
"model.embed_tokens.weight": "model-00001-of-00007.safetensors",
"model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.rotary_emb.inv_freq": "model-00001-of-00007.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.rotary_emb.inv_freq": "model-00001-of-00007.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.rotary_emb.inv_freq": "model-00003-of-00007.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.rotary_emb.inv_freq": "model-00003-of-00007.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.rotary_emb.inv_freq": "model-00003-of-00007.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.13.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.rotary_emb.inv_freq": "model-00003-of-00007.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.14.self_attn.rotary_emb.inv_freq": "model-00003-of-00007.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.rotary_emb.inv_freq": "model-00004-of-00007.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.rotary_emb.inv_freq": "model-00004-of-00007.safetensors",
"model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.rotary_emb.inv_freq": "model-00004-of-00007.safetensors",
"model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.input_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.18.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.rotary_emb.inv_freq": "model-00004-of-00007.safetensors",
"model.layers.18.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.19.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.19.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.19.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.19.self_attn.rotary_emb.inv_freq": "model-00004-of-00007.safetensors",
"model.layers.19.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
"model.layers.2.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.rotary_emb.inv_freq": "model-00001-of-00007.safetensors",
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.rotary_emb.inv_freq": "model-00005-of-00007.safetensors",
"model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.rotary_emb.inv_freq": "model-00005-of-00007.safetensors",
"model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.rotary_emb.inv_freq": "model-00005-of-00007.safetensors",
"model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.input_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.rotary_emb.inv_freq": "model-00005-of-00007.safetensors",
"model.layers.23.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.24.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.rotary_emb.inv_freq": "model-00005-of-00007.safetensors",
"model.layers.24.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
"model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.rotary_emb.inv_freq": "model-00006-of-00007.safetensors",
"model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.rotary_emb.inv_freq": "model-00006-of-00007.safetensors",
"model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.rotary_emb.inv_freq": "model-00006-of-00007.safetensors",
"model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.input_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.28.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
"model.layers.28.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.28.self_attn.rotary_emb.inv_freq": "model-00006-of-00007.safetensors",
"model.layers.28.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.29.input_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.29.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.29.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.29.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.29.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.29.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.29.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.29.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.29.self_attn.rotary_emb.inv_freq": "model-00006-of-00007.safetensors",
"model.layers.29.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
"model.layers.3.input_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.3.self_attn.rotary_emb.inv_freq": "model-00001-of-00007.safetensors",
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.30.input_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.30.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.30.self_attn.k_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.self_attn.o_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.self_attn.q_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.30.self_attn.rotary_emb.inv_freq": "model-00007-of-00007.safetensors",
"model.layers.30.self_attn.v_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.input_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.31.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.mlp.gate_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.mlp.up_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
"model.layers.31.self_attn.k_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.self_attn.o_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.self_attn.q_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.31.self_attn.rotary_emb.inv_freq": "model-00007-of-00007.safetensors",
"model.layers.31.self_attn.v_proj.weight": "model-00007-of-00007.safetensors",
"model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.4.self_attn.rotary_emb.inv_freq": "model-00001-of-00007.safetensors",
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
"model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.rotary_emb.inv_freq": "model-00002-of-00007.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.rotary_emb.inv_freq": "model-00002-of-00007.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.rotary_emb.inv_freq": "model-00002-of-00007.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.input_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.rotary_emb.inv_freq": "model-00002-of-00007.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
"model.layers.9.self_attn.rotary_emb.inv_freq": "model-00002-of-00007.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
"model.norm.weight": "model-00007-of-00007.safetensors"
}
}
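The weight_map above is what lets loaders pull a single tensor without reading all seven shards: look the tensor name up, then open only the shard that holds it. A short sketch using the safetensors library, with local file paths assumed:

```python
import json
from safetensors import safe_open

# Resolve a tensor name to its shard via the index, then load just that tensor.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.self_attn.q_proj.weight"
shard = index["weight_map"][name]         # "model-00001-of-00007.safetensors"

with safe_open(shard, framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)

print(shard, tuple(tensor.shape))         # expect a (4096, 4096) matrix
```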

3
pytorch_model-00001-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:439432ab5f0dc2c917fc3e71d6e19b082b6079590f3d095fdf995b3a3ab09331
size 2015452811

3
pytorch_model-00002-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5bc2963f0b6bef87fbe3b35434bf2c9bc276f0ebada5ece80d93e415f6998afa
size 2023850899

3
pytorch_model-00003-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:424159f15afe682d02110c3cfa9e68ecec96bab9ce60b472b09846b83365dc1c
size 2023850963

3
pytorch_model-00004-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:df08f8756622a1d9e7b12eecf2e47180a3fd42b68c4fd0522c0e18afee43e6a4
size 2023850963

3
pytorch_model-00005-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:183ef1df57a40bdbdad9365d1e4eadcb545beb5895ecf7c815caaaf08f032faa
size 2023850963

3
pytorch_model-00006-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4d6f6ab8b08546de141b3c7c306ab9d03a4ca4356258cc86816b32d0c6e8669b
size 2023850963

3
pytorch_model-00007-of-00007.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:73ed9a2275c2ae3e665aa59b0974ffb21c774647586299473d92f8fa3663e8b2
size 1342252085

330
pytorch_model.bin.index.json Normal file

@@ -0,0 +1,330 @@
{
"metadata": {
"total_size": 13476851712
},
"weight_map": {
"lm_head.weight": "pytorch_model-00007-of-00007.bin",
"model.embed_tokens.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
"model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
"model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.10.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
"model.layers.10.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
"model.layers.11.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
"model.layers.12.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
"model.layers.13.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.14.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.14.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.14.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.14.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.14.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.14.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.14.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.14.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
"model.layers.14.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.15.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
"model.layers.15.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
"model.layers.16.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
"model.layers.17.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
"model.layers.18.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.19.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.19.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.19.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.19.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.19.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.19.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.19.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.19.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
"model.layers.19.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
"model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
"model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.20.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
"model.layers.20.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
"model.layers.21.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
"model.layers.22.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
"model.layers.23.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.24.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.24.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.24.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.24.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.24.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.24.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.24.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.24.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
"model.layers.24.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
"model.layers.25.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
"model.layers.25.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
"model.layers.26.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
"model.layers.27.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
"model.layers.28.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.29.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.29.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.29.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.29.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.29.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.29.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.29.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.29.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
"model.layers.29.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
"model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
"model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.30.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
"model.layers.30.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
"model.layers.31.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
"model.layers.4.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.4.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.4.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.4.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.4.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
"model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
"model.layers.5.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
"model.layers.5.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
"model.layers.6.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
"model.layers.7.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
"model.layers.8.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.9.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.9.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.9.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.9.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.9.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
"model.layers.9.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.9.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
"model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
"model.norm.weight": "pytorch_model-00007-of-00007.bin"
}
}

6
special_tokens_map.json Normal file

@@ -0,0 +1,6 @@
{
"bos_token": "</s>",
"eos_token": "</s>",
"pad_token": "[PAD]",
"unk_token": "</s>"
}

3
tokenizer.model Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
size 499723

34
tokenizer_config.json Normal file

@@ -0,0 +1,34 @@
{
"add_bos_token": true,
"add_eos_token": false,
"bos_token": {
"__type": "AddedToken",
"content": "<s>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
},
"clean_up_tokenization_spaces": false,
"eos_token": {
"__type": "AddedToken",
"content": "</s>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
},
"model_max_length": 2048,
"pad_token": null,
"padding_side": "right",
"sp_model_kwargs": {},
"tokenizer_class": "LlamaTokenizer",
"unk_token": {
"__type": "AddedToken",
"content": "<unk>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
}
}
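Putting the files above together, a hedged usage sketch: load the tokenizer (which picks up the added [PAD] token at id 32000 and the special-token map) and the sharded float16 weights, then generate. The Alpaca-style prompt template is an assumption based on the "alpaca" tag in the README, not something this repository specifies.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "PJMixers-Archive/LLaMa-1-MedicWizard-7B"  # the repo this commit populates
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype=torch.float16)

# Alpaca-style prompt (assumed format, not confirmed by this repo).
prompt = (
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request.\n\n"
    "### Instruction:\nList common symptoms of dehydration.\n\n### Response:\n"
)
inputs = tokenizer(prompt, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```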