Initialize the project; model provided by the ModelHub XC community
Model: macadeliccc/SOLAR-10.7b-Instruct-dpo (Source: Original Platform)
36 .gitattributes vendored Normal file
@@ -0,0 +1,36 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
orca-header.png filter=lfs diff=lfs merge=lfs -text
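Every rule above routes matching files through Git LFS, so the repository stores small pointer files in place of multi-gigabyte blobs. As a rough illustration (an addition, not part of the original commit), a Python sketch that checks filenames against a subset of these patterns; it assumes `fnmatch` globbing is close enough to gitattributes matching, which holds for these simple suffix rules:

```python
from fnmatch import fnmatch

# A subset of the patterns listed above; "saved_model/**/*" is omitted
# because fnmatch has no "**" directory semantics.
LFS_PATTERNS = ["*.safetensors", "*.bin", "*.model", "*tfevents*", "orca-header.png"]

def tracked_by_lfs(filename: str) -> bool:
    # A file goes through the LFS filter if any pattern matches it.
    return any(fnmatch(filename, pattern) for pattern in LFS_PATTERNS)

for name in ["model-00001-of-00005.safetensors", "config.json", "tokenizer.model"]:
    print(f"{name}: {'LFS' if tracked_by_lfs(name) else 'regular git'}")
```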
246 README.md Normal file
@@ -0,0 +1,246 @@
---
license: cc-by-nc-4.0
library_name: transformers
model-index:
- name: SOLAR-10.7b-Instruct-dpo
  results:
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: AI2 Reasoning Challenge (25-Shot)
      type: ai2_arc
      config: ARC-Challenge
      split: test
      args:
        num_few_shot: 25
    metrics:
    - type: acc_norm
      value: 71.76
      name: normalized accuracy
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: HellaSwag (10-Shot)
      type: hellaswag
      split: validation
      args:
        num_few_shot: 10
    metrics:
    - type: acc_norm
      value: 88.08
      name: normalized accuracy
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: MMLU (5-Shot)
      type: cais/mmlu
      config: all
      split: test
      args:
        num_few_shot: 5
    metrics:
    - type: acc
      value: 66.06
      name: accuracy
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: TruthfulQA (0-shot)
      type: truthful_qa
      config: multiple_choice
      split: validation
      args:
        num_few_shot: 0
    metrics:
    - type: mc2
      value: 71.98
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: Winogrande (5-shot)
      type: winogrande
      config: winogrande_xl
      split: validation
      args:
        num_few_shot: 5
    metrics:
    - type: acc
      value: 82.32
      name: accuracy
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: GSM8k (5-shot)
      type: gsm8k
      config: main
      split: test
      args:
        num_few_shot: 5
    metrics:
    - type: acc
      value: 61.03
      name: accuracy
    source:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=macadeliccc/SOLAR-10.7b-Instruct-dpo
      name: Open LLM Leaderboard
---

# SOLAR-10.7b-Instruct-dpo

This model is a fine-tune of upstage/SOLAR-10.7B-Instruct-v1.0 using Intel/orca_dpo_pairs.

## Chat Template

This model follows the ChatML chat template.

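As a usage sketch (an addition, not part of the original card), the snippet below loads the model with the Hugging Face transformers library; `apply_chat_template` formats the conversation with whatever template ships in this repo's tokenizer_config.json:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "macadeliccc/SOLAR-10.7b-Instruct-dpo"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.float16, device_map="auto"
)

messages = [{"role": "user", "content": "Explain DPO in one sentence."}]
# apply_chat_template renders the conversation with the bundled template and
# appends the generation prompt for the assistant turn.
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output = model.generate(input_ids, max_new_tokens=128)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```
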
## Evaluations

### EQ Bench comparison with base model

These scores are the average of 3 iterations.

----Benchmark Complete----
+ 2024-01-25 04:41:01
+ Time taken: 236.1 mins
+ Prompt Format: ChatML
+ Model: macadeliccc/SOLAR-10.7b-Instruct-dpo
+ Score (v2): 72.79
+ Parseable: 165.67
---------------
Batch completed
Time taken: 236.1 mins
---------------

as compared to the original model:

----Benchmark Complete----
+ 2024-01-25 08:45:02
+ Time taken: 244.0 mins
+ Prompt Format: ChatML
+ Model: [upstage/SOLAR-10.7B-Instruct-v1.0](https://huggingface.co/upstage/SOLAR-10.7B-Instruct-v1.0)
+ Score (v2): 71.03
+ Parseable: 165.67
---------------
Batch completed
Time taken: 480.1 mins
---------------

| Model |AGIEval|GPT4All|TruthfulQA|Bigbench|Average|
|---------------------------------------------------------------------------------------|------:|------:|---------:|-------:|------:|
|[SOLAR-10.7b-Instruct-dpo](https://huggingface.co/macadeliccc/SOLAR-10.7b-Instruct-dpo)| 47.57| 74.3| 72.73| 45.76| 60.09|

### AGIEval
| Task |Version| Metric |Value| |Stderr|
|------------------------------|------:|--------|----:|---|-----:|
|agieval_aqua_rat | 0|acc |27.56|± | 2.81|
| | |acc_norm|26.77|± | 2.78|
|agieval_logiqa_en | 0|acc |41.63|± | 1.93|
| | |acc_norm|41.32|± | 1.93|
|agieval_lsat_ar | 0|acc |25.22|± | 2.87|
| | |acc_norm|24.35|± | 2.84|
|agieval_lsat_lr | 0|acc |54.12|± | 2.21|
| | |acc_norm|54.31|± | 2.21|
|agieval_lsat_rc | 0|acc |68.77|± | 2.83|
| | |acc_norm|69.14|± | 2.82|
|agieval_sat_en | 0|acc |79.13|± | 2.84|
| | |acc_norm|79.13|± | 2.84|
|agieval_sat_en_without_passage| 0|acc |44.66|± | 3.47|
| | |acc_norm|44.66|± | 3.47|
|agieval_sat_math | 0|acc |40.45|± | 3.32|
| | |acc_norm|40.91|± | 3.32|

Average: 47.57%

### GPT4All
| Task |Version| Metric |Value| |Stderr|
|-------------|------:|--------|----:|---|-----:|
|arc_challenge| 0|acc |60.49|± | 1.43|
| | |acc_norm|63.74|± | 1.40|
|arc_easy | 0|acc |82.07|± | 0.79|
| | |acc_norm|79.92|± | 0.82|
|boolq | 1|acc |88.56|± | 0.56|
|hellaswag | 0|acc |68.47|± | 0.46|
| | |acc_norm|86.06|± | 0.35|
|openbookqa | 0|acc |36.20|± | 2.15|
| | |acc_norm|46.60|± | 2.23|
|piqa | 0|acc |79.38|± | 0.94|
| | |acc_norm|79.71|± | 0.94|
|winogrande | 0|acc |75.53|± | 1.21|

Average: 74.3%

### TruthfulQA
| Task |Version|Metric|Value| |Stderr|
|-------------|------:|------|----:|---|-----:|
|truthfulqa_mc| 1|mc1 |57.77|± | 1.73|
| | |mc2 |72.73|± | 1.49|

Average: 72.73%

### Bigbench
| Task |Version| Metric |Value| |Stderr|
|------------------------------------------------|------:|---------------------|----:|---|-----:|
|bigbench_causal_judgement | 0|multiple_choice_grade|55.26|± | 3.62|
|bigbench_date_understanding | 0|multiple_choice_grade|62.87|± | 2.52|
|bigbench_disambiguation_qa | 0|multiple_choice_grade|46.51|± | 3.11|
|bigbench_geometric_shapes | 0|multiple_choice_grade|25.63|± | 2.31|
| | |exact_str_match | 0.00|± | 0.00|
|bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|28.00|± | 2.01|
|bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|20.57|± | 1.53|
|bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|46.67|± | 2.89|
|bigbench_movie_recommendation | 0|multiple_choice_grade|41.80|± | 2.21|
|bigbench_navigate | 0|multiple_choice_grade|64.00|± | 1.52|
|bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|60.00|± | 1.10|
|bigbench_ruin_names | 0|multiple_choice_grade|39.96|± | 2.32|
|bigbench_salient_translation_error_detection | 0|multiple_choice_grade|47.90|± | 1.58|
|bigbench_snarks | 0|multiple_choice_grade|64.09|± | 3.58|
|bigbench_sports_understanding | 0|multiple_choice_grade|71.10|± | 1.44|
|bigbench_temporal_sequences | 0|multiple_choice_grade|59.90|± | 1.55|
|bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|24.96|± | 1.22|
|bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|17.89|± | 0.92|
|bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|46.67|± | 2.89|

Average: 45.76%

Average score: 60.09%

Elapsed time: 02:10:16

# [Open LLM Leaderboard Evaluation Results](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)
Detailed results can be found [here](https://huggingface.co/datasets/open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo)

| Metric |Value|
|---------------------------------|----:|
|Avg. |73.54|
|AI2 Reasoning Challenge (25-Shot)|71.76|
|HellaSwag (10-Shot) |88.08|
|MMLU (5-Shot) |66.06|
|TruthfulQA (0-shot) |71.98|
|Winogrande (5-shot) |82.32|
|GSM8k (5-shot) |61.03|
29 config.json Normal file
@@ -0,0 +1,29 @@
{
  "_name_or_path": "upstage/SOLAR-10.7B-Instruct-v1.0",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 4096,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "pad_token_id": 2,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.38.0.dev0",
  "use_cache": true,
  "vocab_size": 32000
}
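The config describes a 48-layer Llama-architecture network with grouped-query attention: 8 KV heads serve 32 query heads. A small sketch (an addition, assuming the config.json above has been saved locally) that derives those quantities:

```python
import json

with open("config.json") as f:  # the file shown above
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]            # 4096 // 32 = 128
gqa_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 32 // 8 = 4
print(f"{cfg['num_hidden_layers']} layers, head_dim={head_dim}, "
      f"{gqa_groups} query heads per KV head")
```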
8 generation_config.json Normal file
@@ -0,0 +1,8 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": 2,
  "transformers_version": "4.38.0.dev0",
  "use_cache": false
}
3 model-00001-of-00005.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e1c64a7637714f752540bba7b2ce5f99fc9995bb1184350b7b25429a6552891e
size 4943162240
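Each shard is checked in as a Git LFS pointer: a three-line stub recording the spec version, the blob's SHA-256 (`oid`), and its byte size. A minimal sketch (an addition) that verifies a downloaded shard against the pointer above:

```python
import hashlib
import os

def verify_lfs_pointer(path: str, oid: str, size: int) -> bool:
    # The real file must match both the byte size and the SHA-256
    # recorded in the pointer.
    if os.path.getsize(path) != size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == oid

print(verify_lfs_pointer(
    "model-00001-of-00005.safetensors",
    "e1c64a7637714f752540bba7b2ce5f99fc9995bb1184350b7b25429a6552891e",
    4943162240,
))
```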
3 model-00002-of-00005.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3b1ee3d7ef0417faccdd49a331bd7b9d1cb061e46280afe66da543199f089033
size 4999819232
3 model-00003-of-00005.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:da9d73ff63148a2e347e7bb4a841cbcb76a6c2c7b5fce5e7f26636f4fdd69eed
size 4915916080
3 model-00004-of-00005.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eab023a41e0b1fbf4873966baced427c5bd77dd53eae80439c2dfc66f2464ac4
size 4915916080
3 model-00005-of-00005.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:aee08c3afd689279875f1addfd663bea08993d5cacd9b71e515f35e0bc54a981
size 1688284744
442 model.safetensors.index.json Normal file
@@ -0,0 +1,442 @@
{
  "metadata": {
    "total_size": 21463048192
  },
  "weight_map": {
    "lm_head.weight": "model-00005-of-00005.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.input_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.33.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.33.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
    "model.layers.34.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.40.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.42.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.input_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.44.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.44.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.44.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
    "model.layers.45.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.46.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.input_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.47.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
    "model.norm.weight": "model-00005-of-00005.safetensors"
  }
}
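The `weight_map` above tells a loader which of the five shards holds each tensor, so only the shards that are actually needed have to be opened; `metadata.total_size` (21463048192 bytes, roughly 20 GiB) is consistent with ~10.7B parameters stored in float16. A short sketch (an addition, assuming the index file is local) that tallies tensors per shard:

```python
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print("total_size:", index["metadata"]["total_size"], "bytes")
# Count how many tensors each shard stores.
for shard, count in sorted(Counter(index["weight_map"].values()).items()):
    print(shard, count, "tensors")
```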
3 orca-header.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:576f05403453e2f30d6d78f9cb7a4cfa6a03effd7e2185fb7488e47f8db00b15
size 2339328
30 special_tokens_map.json Normal file
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
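Note that `pad_token` reuses "</s>", so padding and end-of-sequence share token id 2, which is common for Llama-family checkpoints. A hedged sketch (an addition; the left-padding choice is my assumption, not something this repo specifies) of batched tokenization under that setup:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("macadeliccc/SOLAR-10.7b-Instruct-dpo")
# pad_token and eos_token are the same piece, so batches pad with "</s>";
# left padding keeps each prompt flush against the generated continuation.
tok.padding_side = "left"
batch = tok(["short prompt", "a somewhat longer prompt"],
            padding=True, return_tensors="pt")
print(batch["input_ids"].shape, tok.pad_token, tok.eos_token)
```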
91122 tokenizer.json Normal file
File diff suppressed because it is too large
BIN tokenizer.model (Stored with Git LFS) Normal file
Binary file not shown.
43 tokenizer_config.json Normal file
@@ -0,0 +1,43 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' %}{% if message['content']%}{{'### System:\n' + message['content']+'\n\n'}}{% endif %}{% elif message['role'] == 'user' %}{{'### User:\n' + message['content']+'\n\n'}}{% elif message['role'] == 'assistant' %}{{'### Assistant:\n' + message['content']}}{% endif %}{% if loop.last and add_generation_prompt %}{{ '### Assistant:\n' }}{% endif %}{% endfor %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": true,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "</s>",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": true
}
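The `chat_template` above is a Jinja string. Rendering it directly (a sketch added here; the `jinja2` package is the same engine transformers uses under the hood) shows the prompt layout it produces for a one-turn conversation:

```python
from jinja2 import Template

# The template string from tokenizer_config.json, with JSON escapes decoded.
chat_template = (
    "{% for message in messages %}{% if message['role'] == 'system' %}"
    "{% if message['content']%}{{'### System:\n' + message['content']+'\n\n'}}{% endif %}"
    "{% elif message['role'] == 'user' %}{{'### User:\n' + message['content']+'\n\n'}}"
    "{% elif message['role'] == 'assistant' %}{{'### Assistant:\n' + message['content']}}{% endif %}"
    "{% if loop.last and add_generation_prompt %}{{ '### Assistant:\n' }}{% endif %}{% endfor %}"
)

messages = [{"role": "user", "content": "Hello!"}]
print(Template(chat_template).render(messages=messages, add_generation_prompt=True))
# ### User:
# Hello!
#
# ### Assistant:
```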