Initialize project; model provided by the ModelHub XC community
Model: TinyPixel/testmodel2 Source: Original Platform

.gitattributes (vendored, Normal file, 35 lines added)
@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
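
The attribute rules above route every large binary format in this repository (weight shards, archives, serialized tensors, tensorboard event files) through Git LFS, so git itself only stores small pointer files. As a rough illustration only, the sketch below uses Python's fnmatch to show which files of this commit would fall under these rules; note that fnmatch only approximates gitattributes (gitwildmatch) pattern semantics, and both the pattern subset and the file list here are copied from this page, not read from disk.

    # Sketch: which files of this commit match the LFS rules above (approximation).
    from fnmatch import fnmatch

    lfs_patterns = ["*.bin", "*.safetensors", "*.h5", "*.pt", "*.pth", "*tfevents*"]
    files = [
        "config.json",
        "generation_config.json",
        "model-00001-of-00014.safetensors",
        "pytorch_model-00001-of-00014.bin",
    ]

    for name in files:
        tracked = any(fnmatch(name, pat) for pat in lfs_patterns)
        print(f"{name}: {'LFS pointer' if tracked else 'stored directly in git'}")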

config.json (Normal file, 27 lines added)
@@ -0,0 +1,27 @@
{
  "_name_or_path": "TinyPixel/Llama-2-7B-bf16-sharded",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 2048,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pad_token_id": 0,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.33.2",
  "use_cache": true,
  "vocab_size": 32000
}
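
config.json describes a standard 32-layer Llama architecture (hidden size 4096, intermediate size 11008, 32 attention heads, bfloat16 weights). A minimal loading sketch with the Hugging Face transformers API is given below; it assumes the repository resolves under the id shown on this page (TinyPixel/testmodel2) or via a local clone path, and note that this commit contains no tokenizer files, so a compatible Llama-2 tokenizer would have to come from elsewhere.

    # Minimal sketch: load the config and the sharded weights with transformers.
    # transformers selects the shards via model.safetensors.index.json automatically.
    import torch
    from transformers import AutoConfig, AutoModelForCausalLM

    repo = "TinyPixel/testmodel2"   # or a local path to this clone (assumption)

    config = AutoConfig.from_pretrained(repo)
    print(config.num_hidden_layers, config.hidden_size)   # 32 4096

    model = AutoModelForCausalLM.from_pretrained(
        repo,
        torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16"
        device_map="auto",            # optional; requires the accelerate package
    )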

generation_config.json (Normal file, 7 lines added)
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": 0,
  "transformers_version": "4.33.2"
}
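
generation_config.json only pins the special token ids (bos 1, eos 2, pad 0), mirroring config.json. A short hedged sketch of reading it explicitly and passing it to generate(), assuming the model object from the previous sketch and some prepared input_ids:

    # Sketch: load the generation defaults shipped with this commit.
    from transformers import GenerationConfig

    gen_cfg = GenerationConfig.from_pretrained("TinyPixel/testmodel2")
    print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 1 2 0

    # out = model.generate(input_ids, generation_config=gen_cfg, max_new_tokens=64)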

model-00001-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9f4bd81605f6ac30b81d6ae3e1cc87a8ab0d5cd75a6bbd29f0bd4d01a49289be
size 981485368
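
Every weight file in this commit is stored as a Git LFS pointer like the one above: three lines giving the spec version, the sha256 of the real payload, and its size in bytes. The sketch below (a hypothetical helper, standard library only) parses such a pointer and verifies a locally downloaded shard against it; for multi-gigabyte shards you would hash in chunks rather than read the whole file at once.

    # Sketch: parse a Git LFS pointer file and verify a downloaded blob against it.
    import hashlib
    import pathlib

    def read_pointer(pointer_path):
        fields = dict(
            line.split(" ", 1)
            for line in pathlib.Path(pointer_path).read_text().splitlines()
            if line
        )
        oid = fields["oid"].strip().split(":", 1)[1]   # drop the "sha256:" prefix
        return oid, int(fields["size"])

    def verify(blob_path, pointer_path):
        oid, size = read_pointer(pointer_path)
        data = pathlib.Path(blob_path).read_bytes()    # stream in chunks for huge files
        return len(data) == size and hashlib.sha256(data).hexdigest() == oid

    # verify("model-00001-of-00014.safetensors", "pointer.txt")
    # expected: oid 9f4bd816... and size 981485368 for the first shard above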

model-00002-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:05f8f8f7c91bc54b7f016de19ac67d58336325c69c8dc488369ab2e5625492ed
size 966838864

model-00003-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:859574c1d74e7bbe8f04b30e658099060418cab264a0971fe16b9e73aac58d44
size 966822256

model-00004-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f347fb2d583e132524ae25108f691191983a95765b8b144353dbc6464b9ae43a
size 989907416

model-00005-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ae58aa68cf602349d7d557163f9765a444c4f6a331e260f7855c93593b9f7fbc
size 943753712

model-00006-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8765ce0c8daf69bc7be34d391502a2ad850e9fb4cbd2c5634da005ba95d77421
size 989890824

model-00007-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:827064f16341b9022fa46379c0199d84ae26538e8b9bb940133b81b25b94aff5
size 966838888

model-00008-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7003755fe6186f0585ffd908065155a1b3c87ded31d91d2512108f39be321c8a
size 966822272

model-00009-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8b1c9583625ebe837be7e20eafc75ad75729139960e692bf2762ccf3895a6060
size 989907440

model-00010-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e510dcb6de1b2bae842bebd0e4e8e20b5dbf9a090385c9ede48919184b8e405c
size 943753720

model-00011-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:99e89dd1e2ada2ee2a93d760f833d4b7d44b9feb2ea39dad57eba3793dabaa30
size 989890824

model-00012-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fcdb830e91ce5a84fb0e047b85f3624daa6b68bb4f6ebb35238b92fc08a60634
size 966838888

model-00013-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:696741a10a743963e0d9a4d5f9db306cf04b40af5b32d2bcb576bd3cafc1d392
size 966822272

model-00014-of-00014.safetensors (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c9432f0cc1b4d4b6e042d5b257af516174f78ee11c4f004f9cfa35950f04cd93
size 847292072

model.safetensors.index.json (Normal file, 298 lines added)
@@ -0,0 +1,298 @@
{
  "metadata": {
    "total_size": 13476831232
  },
  "weight_map": {
    "lm_head.weight": "model-00014-of-00014.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00006-of-00014.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00011-of-00014.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00014-of-00014.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
    "model.norm.weight": "model-00014-of-00014.safetensors"
  }
}
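
The index maps each parameter name to the shard that contains it, and metadata.total_size (13,476,831,232 bytes, about 13.5 GB) is consistent with roughly 6.74 billion bfloat16 parameters at 2 bytes each, i.e. the usual Llama-2-7B size. As a hedged sketch of how a loader uses the weight_map without touching all 14 shards, assuming a local clone of this repo in repo_dir:

    # Sketch: resolve one tensor through model.safetensors.index.json.
    import json
    from pathlib import Path
    from safetensors import safe_open

    repo_dir = Path(".")   # hypothetical local clone path (assumption)
    index = json.loads((repo_dir / "model.safetensors.index.json").read_text())

    name = "model.layers.13.mlp.gate_proj.weight"   # mapped to shard 00006 above
    shard = index["weight_map"][name]
    with safe_open(str(repo_dir / shard), framework="pt") as f:
        tensor = f.get_tensor(name)                 # (11008, 4096), bfloat16 per config.json
    print(shard, tensor.shape)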

pytorch_model-00001-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4093c2cee185cda3ea87720f716c5ebdb413aee08fd97d1d173a1e6e214743bc
size 981489327

pytorch_model-00002-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a7ee8804dbcc3d5b5b1b6e1c7fe28e6b0ad3b9e89f43b17175885d6588dda5c0
size 966844269

pytorch_model-00003-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a1f82ddc3e66cfe6d6d3d8fe9198b2810e26c9513b29486c779f992f5cfef4b2
size 966827273

pytorch_model-00004-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a695ec5ecb22acb024897cab6c6b59f917eaf20d1ccedaed2b600c24365e3b5a
size 989912603

pytorch_model-00005-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:52f93ecd30e8a28f86307e81238931abda5e11472104f6041512fe43db7a194a
size 943758939

pytorch_model-00006-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7a2d5df99ebd17bb3b14d157f116670d53b0995bc130eef84a4a9c31e75e02d8
size 989895607

pytorch_model-00007-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:79228fb1138cf98f8e2b5826c1d3b19319d4c9c42d9b3218d7d90d096b49b706
size 966844269

pytorch_model-00008-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b8a951737a87da7af4d0ced6aa9bd83e9df7284d961b2bd15c8d9582de0d216d
size 966827273

pytorch_model-00009-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:75beca737b83f208b5ce0ff83ac2519efae738e0e0fa38883aa0144c9f45503c
size 989912667

pytorch_model-00010-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:319e8343c754520fdd7403a74c3a6d99ee5623c4c51717fb7b1b469e69872f6e
size 943758939

pytorch_model-00011-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f5b4d79f9e593245c7b50ebcb11e17a54e1d5cb1c55a37edff0785f394064b1a
size 989895607

pytorch_model-00012-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3bcbd041e057d93861b380f5660f4285354c929630f096a631ea266cf80b7acc
size 966844269

pytorch_model-00013-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9a4d0bc49e3a04b15c1114acb29ad09e93912c768a6e981c0f6a050c8b14fc92
size 966827273

pytorch_model-00014-of-00014.bin (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ffb5923521c250d58a63c965158c15224c663d9fe31b66fa7cb50127aba911b
size 847295773

pytorch_model.bin.index.json (Normal file, 298 lines added)
@@ -0,0 +1,298 @@
{
  "metadata": {
    "total_size": 13476831232
  },
  "weight_map": {
    "lm_head.weight": "pytorch_model-00014-of-00014.bin",
    "model.embed_tokens.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.input_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.1.mlp.down_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00014.bin",
    "model.layers.10.input_layernorm.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.mlp.down_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.mlp.up_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.11.input_layernorm.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.11.mlp.down_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.11.mlp.up_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00005-of-00014.bin",
    "model.layers.12.input_layernorm.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.mlp.down_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.mlp.up_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.input_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.13.mlp.down_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.mlp.up_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00006-of-00014.bin",
    "model.layers.14.input_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.mlp.down_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.mlp.up_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.input_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.mlp.down_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.mlp.up_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.16.input_layernorm.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.mlp.down_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.mlp.up_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00007-of-00014.bin",
    "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.input_layernorm.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.mlp.down_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.mlp.up_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.18.input_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.18.mlp.down_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.18.mlp.up_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00008-of-00014.bin",
    "model.layers.19.input_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.mlp.down_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.mlp.up_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.2.input_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.mlp.down_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.mlp.up_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.20.input_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.mlp.down_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.mlp.up_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00009-of-00014.bin",
    "model.layers.21.input_layernorm.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.mlp.down_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.mlp.up_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.input_layernorm.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.mlp.down_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.mlp.up_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.23.input_layernorm.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.23.mlp.down_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.23.mlp.up_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00010-of-00014.bin",
    "model.layers.24.input_layernorm.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.mlp.down_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.mlp.up_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.input_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.25.mlp.down_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.mlp.up_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00011-of-00014.bin",
    "model.layers.26.input_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.mlp.down_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.mlp.up_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.input_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.mlp.down_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.mlp.up_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.28.input_layernorm.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.mlp.down_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.mlp.up_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00012-of-00014.bin",
    "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.input_layernorm.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.mlp.down_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.mlp.up_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.3.input_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.mlp.down_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.mlp.up_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.30.input_layernorm.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.30.mlp.down_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.30.mlp.up_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00013-of-00014.bin",
    "model.layers.31.input_layernorm.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.mlp.down_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.mlp.up_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00014-of-00014.bin",
    "model.layers.4.input_layernorm.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.mlp.down_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.mlp.up_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00002-of-00014.bin",
    "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.input_layernorm.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.mlp.down_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.mlp.up_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00003-of-00014.bin",
    "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.6.input_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.6.mlp.down_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.6.mlp.gate_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.6.mlp.up_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.6.post_attention_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.6.self_attn.k_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.6.self_attn.o_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.6.self_attn.q_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.6.self_attn.v_proj.weight": "pytorch_model-00003-of-00014.bin",
|
||||||
|
"model.layers.7.input_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.mlp.down_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.mlp.gate_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.mlp.up_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.post_attention_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.self_attn.k_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.self_attn.o_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.self_attn.q_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.7.self_attn.v_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.input_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.mlp.down_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.mlp.gate_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.mlp.up_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.post_attention_layernorm.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.self_attn.k_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.self_attn.o_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.self_attn.q_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.8.self_attn.v_proj.weight": "pytorch_model-00004-of-00014.bin",
|
||||||
|
"model.layers.9.input_layernorm.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.mlp.down_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.mlp.gate_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.mlp.up_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.post_attention_layernorm.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.self_attn.k_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.self_attn.o_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.self_attn.q_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.layers.9.self_attn.v_proj.weight": "pytorch_model-00005-of-00014.bin",
|
||||||
|
"model.norm.weight": "pytorch_model-00014-of-00014.bin"
|
||||||
|
}
|
||||||
|
}
|
||||||
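The entries above close out the weight map of the shard index (presumably pytorch_model.bin.index.json): each parameter name points at the .bin shard that stores it, and a shard boundary can fall inside a layer, which is why layer 28's q_proj/k_proj sit in shard 00012 while the rest of layer 28 is in 00013. A minimal Python sketch of how such an index can be read directly; the local directory name is hypothetical, and transformers' from_pretrained performs this resolution automatically:

import json
import os

checkpoint_dir = "./testmodel2"  # hypothetical local clone of this repository

# Load the shard index and pull out the parameter -> shard-file mapping.
with open(os.path.join(checkpoint_dir, "pytorch_model.bin.index.json")) as f:
    index = json.load(f)
weight_map = index["weight_map"]

# Look up which shard holds a single parameter.
print(weight_map["model.layers.28.self_attn.k_proj.weight"])
# expected (per the map above): pytorch_model-00012-of-00014.bin

# Group parameter names by shard so each file only needs to be opened once.
shards = {}
for name, shard_file in weight_map.items():
    shards.setdefault(shard_file, []).append(name)
print(f"{len(shards)} shard files referenced")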
24
special_tokens_map.json
Normal file
@@ -0,0 +1,24 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "</s>",
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
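special_tokens_map.json above declares <s>, </s> and <unk> as the bos/eos/unk tokens and reuses </s> as the pad token. A minimal sketch of how those settings surface once the tokenizer is loaded; the repo id is taken from the commit header and is otherwise an assumption:

from transformers import AutoTokenizer

# Assumed repo id; a local clone of this repository works the same way.
tok = AutoTokenizer.from_pretrained("TinyPixel/testmodel2")

# The tokens declared in special_tokens_map.json are exposed as attributes.
print(tok.bos_token, tok.eos_token, tok.unk_token, tok.pad_token)
# the map above declares: <s> </s> <unk> </s>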
93405
tokenizer.json
Normal file
File diff suppressed because it is too large
36
tokenizer_config.json
Normal file
@@ -0,0 +1,36 @@
{
  "bos_token": {
    "__type": "AddedToken",
    "content": "<s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "clean_up_tokenization_spaces": false,
  "eos_token": {
    "__type": "AddedToken",
    "content": "</s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "max_length": 1024,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": null,
  "sp_model_kwargs": {},
  "stride": 0,
  "tokenizer_class": "LlamaTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "use_default_system_prompt": true
}
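tokenizer_config.json records model_max_length as transformers' "no limit recorded" sentinel (the large integer above) and leaves pad_token null, so any truncation cap effectively comes from the caller. A minimal sketch of supplying one explicitly; the repo id comes from the commit header, and the 1024 cap simply mirrors the max_length value above, both assumptions about intended usage:

from transformers import AutoTokenizer

# Assumed repo id; pass an explicit cap because model_max_length is the
# "unset" sentinel in this tokenizer_config.json.
tok = AutoTokenizer.from_pretrained("TinyPixel/testmodel2", model_max_length=1024)

enc = tok("Hello, world!", truncation=True)  # truncates at model_max_length
print(len(enc["input_ids"]))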