初始化项目,由ModelHub XC社区提供模型
Model: bofenghuang/vigogne-33b-instruct Source: Original Platform
This commit is contained in:
34
.gitattributes
vendored
Normal file
34
.gitattributes
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
*.7z filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.arrow filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.bin filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.ftz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.gz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.h5 filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.joblib filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.model filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.npy filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.npz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.onnx filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.ot filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.parquet filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pb filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pickle filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pkl filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pt filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.pth filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.rar filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
||||||
|
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tflite filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.tgz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.wasm filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.xz filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.zip filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*.zst filter=lfs diff=lfs merge=lfs -text
|
||||||
|
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
||||||
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
checkpoint-*/
|
||||||
|
|
||||||
|
tmp*
|
||||||
69
README.md
Normal file
69
README.md
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
---
|
||||||
|
license: openrail
|
||||||
|
language:
|
||||||
|
- fr
|
||||||
|
pipeline_tag: text-generation
|
||||||
|
library_name: transformers
|
||||||
|
tags:
|
||||||
|
- llama
|
||||||
|
- LLM
|
||||||
|
inference: false
|
||||||
|
---
|
||||||
|
|
||||||
|
<p align="center" width="100%">
|
||||||
|
<img src="https://huggingface.co/bofenghuang/vigogne-33b-instruct/resolve/main/vigogne_logo.png" alt="Vigogne" style="width: 40%; min-width: 300px; display: block; margin: auto;">
|
||||||
|
</p>
|
||||||
|
|
||||||
|
# Vigogne-33B-Instruct: A French Instruction-following LLaMA Model
|
||||||
|
|
||||||
|
Vigogne-33B-Instruct is a LLaMA-33B model fine-tuned to follow the French instructions.
|
||||||
|
|
||||||
|
For more information, please visit the Github repo: https://github.com/bofenghuang/vigogne
|
||||||
|
|
||||||
|
**Usage and License Notices**: Same as [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca), Vigogne is intended and licensed for research use only. The dataset is CC BY NC 4.0 (allowing only non-commercial use) and models trained using the dataset should not be used outside of research purposes.
|
||||||
|
|
||||||
|
## Changelog
|
||||||
|
|
||||||
|
All versions are available in branches.
|
||||||
|
|
||||||
|
- **V1.0**: Initial release, trained on the translated Stanford Alpaca dataset.
|
||||||
|
- **V2.0**: Expanded training dataset to 262k for better performance.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```python
|
||||||
|
import torch
|
||||||
|
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
|
||||||
|
from vigogne.preprocess import generate_instruct_prompt
|
||||||
|
|
||||||
|
model_name_or_path = "bofenghuang/vigogne-33b-instruct"
|
||||||
|
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, padding_side="right", use_fast=False)
|
||||||
|
model = AutoModelForCausalLM.from_pretrained(model_name_or_path, torch_dtype=torch.float16, device_map="auto")
|
||||||
|
|
||||||
|
user_query = "Expliquez la différence entre DoS et phishing."
|
||||||
|
prompt = generate_instruct_prompt(user_query)
|
||||||
|
input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"].to(model.device)
|
||||||
|
input_length = input_ids.shape[1]
|
||||||
|
|
||||||
|
generated_outputs = model.generate(
|
||||||
|
input_ids=input_ids,
|
||||||
|
generation_config=GenerationConfig(
|
||||||
|
temperature=0.1,
|
||||||
|
do_sample=True,
|
||||||
|
repetition_penalty=1.0,
|
||||||
|
max_new_tokens=512,
|
||||||
|
),
|
||||||
|
return_dict_in_generate=True,
|
||||||
|
)
|
||||||
|
generated_tokens = generated_outputs.sequences[0, input_length:]
|
||||||
|
generated_text = tokenizer.decode(generated_tokens, skip_special_tokens=True)
|
||||||
|
print(generated_text)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also infer this model by using the following Google Colab Notebook.
|
||||||
|
|
||||||
|
<a href="https://colab.research.google.com/github/bofenghuang/vigogne/blob/main/notebooks/infer_instruct.ipynb" target="_blank"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
Vigogne is still under development, and there are many limitations that have to be addressed. Please note that it is possible that the model generates harmful or biased content, incorrect information or generally unhelpful answers.
|
||||||
24
config.json
Normal file
24
config.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"_name_or_path": "huggyllama/llama-30b",
|
||||||
|
"architectures": [
|
||||||
|
"LlamaForCausalLM"
|
||||||
|
],
|
||||||
|
"bos_token_id": 1,
|
||||||
|
"eos_token_id": 2,
|
||||||
|
"hidden_act": "silu",
|
||||||
|
"hidden_size": 6656,
|
||||||
|
"initializer_range": 0.02,
|
||||||
|
"intermediate_size": 17920,
|
||||||
|
"max_position_embeddings": 2048,
|
||||||
|
"max_sequence_length": 2048,
|
||||||
|
"model_type": "llama",
|
||||||
|
"num_attention_heads": 52,
|
||||||
|
"num_hidden_layers": 60,
|
||||||
|
"pad_token_id": 0,
|
||||||
|
"rms_norm_eps": 1e-06,
|
||||||
|
"tie_word_embeddings": false,
|
||||||
|
"torch_dtype": "float16",
|
||||||
|
"transformers_version": "4.28.1",
|
||||||
|
"use_cache": true,
|
||||||
|
"vocab_size": 32000
|
||||||
|
}
|
||||||
7
generation_config.json
Normal file
7
generation_config.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"_from_model_config": true,
|
||||||
|
"bos_token_id": 1,
|
||||||
|
"eos_token_id": 2,
|
||||||
|
"pad_token_id": 0,
|
||||||
|
"transformers_version": "4.28.1"
|
||||||
|
}
|
||||||
3
pytorch_model-00001-of-00007.bin
Normal file
3
pytorch_model-00001-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:08fb4e90da9636aa6b556dbb7f0ad4d3e7b9add7832b42830b537cf59e5d7689
|
||||||
|
size 9818324627
|
||||||
3
pytorch_model-00002-of-00007.bin
Normal file
3
pytorch_model-00002-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:49ce19c68e05a4ddc428ebc70370b897c22a92cddd64b67f5b135794b9e8717d
|
||||||
|
size 9958102743
|
||||||
3
pytorch_model-00003-of-00007.bin
Normal file
3
pytorch_model-00003-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:586f9336186345e8d37798962efdafd754eb677711098c9099764bf39ef73c8f
|
||||||
|
size 9896734715
|
||||||
3
pytorch_model-00004-of-00007.bin
Normal file
3
pytorch_model-00004-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:e2c36788bef75ab2f30e08af012b0cf74b4c901ac461cc4fac6d923477245ee3
|
||||||
|
size 9869470481
|
||||||
3
pytorch_model-00005-of-00007.bin
Normal file
3
pytorch_model-00005-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:29648b4f39ce7a608de93cef66f968a80a2112d376fdcb51de70f114dfda6b6b
|
||||||
|
size 9869470445
|
||||||
3
pytorch_model-00006-of-00007.bin
Normal file
3
pytorch_model-00006-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:4882ac13813f921b58cd59d46c73f058657592f4a608e7f0d18fdcd890ed152e
|
||||||
|
size 9958102743
|
||||||
3
pytorch_model-00007-of-00007.bin
Normal file
3
pytorch_model-00007-of-00007.bin
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:c98d404ceb00ce9aa9c4fd1ced82cd0af769c778a7e9a4bc6e14c2b0327ba469
|
||||||
|
size 5687903281
|
||||||
610
pytorch_model.bin.index.json
Normal file
610
pytorch_model.bin.index.json
Normal file
@@ -0,0 +1,610 @@
|
|||||||
|
{
|
||||||
|
"metadata": {
|
||||||
|
"total_size": 65057902592
|
||||||
|
},
|
||||||
|
"weight_map": {
|
||||||
|
"lm_head.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.embed_tokens.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.10.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.10.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.11.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.18.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.18.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.19.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.20.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.20.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.21.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.22.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.23.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.24.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.25.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.mlp.gate_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.mlp.up_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.27.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.27.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.27.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.27.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.27.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.27.self_attn.k_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.27.self_attn.o_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.27.self_attn.q_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.27.self_attn.v_proj.weight": "pytorch_model-00003-of-00007.bin",
|
||||||
|
"model.layers.28.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.28.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.29.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.30.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.30.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.31.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.32.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.33.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.34.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.input_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.mlp.down_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.mlp.up_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.post_attention_layernorm.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.35.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.36.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.36.mlp.gate_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.36.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.36.self_attn.k_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.self_attn.o_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.self_attn.q_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.36.self_attn.v_proj.weight": "pytorch_model-00004-of-00007.bin",
|
||||||
|
"model.layers.37.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.37.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.38.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.39.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.40.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.40.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.41.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.42.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.43.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.input_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.mlp.up_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.post_attention_layernorm.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.44.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.45.mlp.down_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.mlp.gate_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.45.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.45.self_attn.k_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.self_attn.o_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.self_attn.q_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.45.self_attn.v_proj.weight": "pytorch_model-00005-of-00007.bin",
|
||||||
|
"model.layers.46.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.46.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.47.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.48.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.49.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.50.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.50.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.51.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.52.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.53.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.input_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.mlp.down_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.mlp.gate_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.mlp.up_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.post_attention_layernorm.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.self_attn.k_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.self_attn.o_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.54.self_attn.v_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.55.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.self_attn.q_proj.weight": "pytorch_model-00006-of-00007.bin",
|
||||||
|
"model.layers.55.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.55.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.56.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.57.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.58.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.input_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.mlp.down_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.mlp.gate_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.mlp.up_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.post_attention_layernorm.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.self_attn.k_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.self_attn.o_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.self_attn.q_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.self_attn.rotary_emb.inv_freq": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.59.self_attn.v_proj.weight": "pytorch_model-00007-of-00007.bin",
|
||||||
|
"model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.8.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00007.bin",
|
||||||
|
"model.layers.9.input_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.mlp.down_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.mlp.gate_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.mlp.up_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.post_attention_layernorm.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.self_attn.k_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.self_attn.o_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00007.bin",
|
||||||
|
"model.norm.weight": "pytorch_model-00007-of-00007.bin"
|
||||||
|
}
|
||||||
|
}
|
||||||
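The entries above are the tail of the `weight_map` in `pytorch_model.bin.index.json`, which tells loaders which of the seven shard files holds each tensor; a layer can straddle a shard boundary, as layer 55's `q_proj` does here. `transformers` resolves this index automatically at load time. The sketch below, which assumes a local checkout of this repository (the filename matches this commit), just inspects the mapping directly:

```python
# Minimal sketch, assuming pytorch_model.bin.index.json from this commit
# is present in the working directory (path is illustrative).
import json
from collections import defaultdict

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]

# Which shard holds a given tensor?
print(weight_map["model.layers.55.self_attn.q_proj.weight"])
# -> pytorch_model-00006-of-00007.bin

# How the seven shard files partition the model.
by_shard = defaultdict(int)
for name, shard in weight_map.items():
    by_shard[shard] += 1
for shard in sorted(by_shard):
    print(shard, by_shard[shard], "tensors")
```

In normal use, `AutoModelForCausalLM.from_pretrained("bofenghuang/vigogne-33b-instruct")` reads this index and fetches the shards transparently.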
23
special_tokens_map.json
Normal file
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
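`special_tokens_map.json` declares the BOS, EOS, and UNK markers the tokenizer exposes. A minimal sketch of how they surface through the `transformers` API, assuming the library is installed and the Hub id from this commit is reachable:

```python
# Minimal sketch, assuming network access to the Hugging Face Hub.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bofenghuang/vigogne-33b-instruct")

# These mirror the JSON above: <s>, </s>, <unk>.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)
```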
93385
tokenizer.json
Normal file
File diff suppressed because it is too large
3
tokenizer.model
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
size 499723
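`tokenizer.model` is committed as a Git LFS pointer: the SentencePiece model itself is stored out of band and identified by the SHA-256 above. A minimal sketch for checking a downloaded copy against the pointer (local filename assumed):

```python
# Minimal sketch: verify tokenizer.model against the LFS pointer above.
import hashlib
import os

EXPECTED_SHA256 = "9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347"
EXPECTED_SIZE = 499723  # bytes, from the pointer's "size" field

with open("tokenizer.model", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize("tokenizer.model") == EXPECTED_SIZE, "size mismatch"
assert digest == EXPECTED_SHA256, f"checksum mismatch: {digest}"
print("tokenizer.model matches the LFS pointer")
```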
33
tokenizer_config.json
Normal file
@@ -0,0 +1,33 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "clean_up_tokenization_spaces": false,
  "eos_token": {
    "__type": "AddedToken",
    "content": "</s>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "model_max_length": 2048,
  "pad_token": null,
  "sp_model_kwargs": {},
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
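Two settings above shape every encoded prompt: `add_bos_token: true` with `add_eos_token: false` means sequences are prefixed with `<s>` but not terminated with `</s>` (the model produces EOS itself during generation), and `model_max_length: 2048` caps input length. A minimal sketch of the observable effect, under the same Hub-id assumption as the earlier snippet:

```python
# Minimal sketch, assuming network access to the Hugging Face Hub.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bofenghuang/vigogne-33b-instruct")

ids = tokenizer("Bonjour").input_ids
print(ids[0] == tokenizer.bos_token_id)   # True: BOS is prepended
print(ids[-1] == tokenizer.eos_token_id)  # False: no EOS is appended
print(tokenizer.model_max_length)         # 2048
```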
BIN
vigogne_logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 311 KiB