Initialize project; model provided by the ModelHub XC community
Model: shisa-ai/ablation-153-finalsft-shisa-v2-unphi-4-14b Source: Original Platform
.gitattributes (vendored, Normal file, 49 lines)
@@ -0,0 +1,49 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bin.* filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zstandard filter=lfs diff=lfs merge=lfs -text
*.tfevents* filter=lfs diff=lfs merge=lfs -text
*.db* filter=lfs diff=lfs merge=lfs -text
*.ark* filter=lfs diff=lfs merge=lfs -text
**/*ckpt*data* filter=lfs diff=lfs merge=lfs -text
**/*ckpt*.meta filter=lfs diff=lfs merge=lfs -text
**/*ckpt*.index filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.gguf* filter=lfs diff=lfs merge=lfs -text
*.ggml filter=lfs diff=lfs merge=lfs -text
*.llamafile* filter=lfs diff=lfs merge=lfs -text
*.pt2 filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text

tokenizer.json filter=lfs diff=lfs merge=lfs -text
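All of these rules route large binary artifacts through Git LFS, so the repository itself stores only small pointer stubs for them (the model-*.safetensors and tokenizer.json entries later in this commit are exactly such stubs). A minimal sketch of how the patterns apply, using Python's fnmatch as a simplified stand-in for real gitattributes matching:

```python
import fnmatch

# Representative subset of the LFS patterns above.
lfs_patterns = ["*.safetensors", "tokenizer.json", "*.bin", "*.gguf*"]

# Files added in this commit.
files = [
    ".gitattributes", "README.md", "config.json", "merges.txt",
    "model-00001-of-00006.safetensors", "tokenizer.json", "tokenizer_config.json",
]

for f in files:
    if any(fnmatch.fnmatch(f, p) for p in lfs_patterns):
        print(f, "-> stored as a Git LFS pointer")
# Prints model-00001-of-00006.safetensors and tokenizer.json, matching the
# pointer stubs shown below.
```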
README.md (Normal file, 269 lines)
@@ -0,0 +1,269 @@
---
library_name: transformers
license: mit
base_model: unsloth/phi-4
tags:
- generated_from_trainer
datasets:
- shisa-ai/shisa-v2-best-of-n-athenev2-tulu70b-llama33-only-no-sysprompt
- shisa-ai/shisa-v2-roleplaying-sft
- shisa-ai/translation_expanded_master_set_filtered
- shisa-ai/rewild-set-deepseek-subset
- shisa-ai/magpie-ultra-set
- shisa-ai/magpie-advanced-questions-set
- shisa-ai/japan-magpie-set
model-index:
- name: outputs/ablation-153-finalsft-shisa-v2-unphi-4-14b
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

[<img src="https://raw.githubusercontent.com/axolotl-ai-cloud/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="200" height="32"/>](https://github.com/axolotl-ai-cloud/axolotl)
<details><summary>See axolotl config</summary>

axolotl version: `0.8.0.dev0`
```yaml
base_model: unsloth/phi-4
model_type: AutoModelForCausalLM
tokenizer_type: AutoTokenizer

load_in_8bit: false
load_in_4bit: false
strict: false

# Use Liger
plugins:
  - axolotl.integrations.liger.LigerPlugin
liger_rope: true
liger_rms_norm: true
liger_glu_activation: true
liger_fused_linear_cross_entropy: true

chat_template: tokenizer_default
datasets:
  - path: shisa-ai/shisa-v2-best-of-n-athenev2-tulu70b-llama33-only-no-sysprompt
    type: chat_template
    field_messages: conversations
    message_field_role: from
    message_field_content: value
  - path: shisa-ai/shisa-v2-roleplaying-sft
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]
  - path: shisa-ai/translation_expanded_master_set_filtered
    split: train[:25%]
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]
  - path: shisa-ai/rewild-set-deepseek-subset
    split: train[:25%]
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]
  - path: shisa-ai/magpie-ultra-set
    split: train[:8%]
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]
  - path: shisa-ai/magpie-advanced-questions-set
    split: train[:8%]
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]
  - path: shisa-ai/japan-magpie-set
    split: train
    type: chat_template
    field_messages: conversations
    message_property_mappings:
      role: role
      content: content
    roles:
      system:
        - system
      assistant:
        - gpt
        - model
        - assistant
      user:
        - human
        - user
    roles_to_train: ["assistant"]

dataset_prepared_path: last_run_prepared
val_set_size: 0.05
output_dir: ./outputs/ablation-153-finalsft-shisa-v2-unphi-4-14b

sequence_len: 8192
sample_packing: true
pad_to_sequence_len: true

# marginal difference
neftune_noise_alpha: 5

use_wandb: true
wandb_project: shisa-v2
wandb_entity: augmxnt
wandb_name: ablation-153-finalsft-shisa-v2-unphi-4-14b

gradient_accumulation_steps: 2
micro_batch_size: 4
num_epochs: 3
optimizer: paged_adamw_8bit
lr_scheduler: linear
learning_rate: 7.5e-6

train_on_inputs: false
group_by_length: false
bf16: auto
fp16:
tf32: false

gradient_checkpointing: true
gradient_checkpointing_kwargs:
  use_reentrant: false
early_stopping_patience:
resume_from_checkpoint:
logging_steps: 1
xformers_attention:
flash_attention: true

warmup_steps: 100
evals_per_epoch: 2
eval_table_size:
saves_per_epoch: 0
save_total_limit: 1 # Only store a single checkpoint
debug:
deepspeed: zero3_bf16.json
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
# pad_token: "<|dummy_87|>"

```

</details><br>

# outputs/ablation-153-finalsft-shisa-v2-unphi-4-14b

This model is a fine-tuned version of [unsloth/phi-4](https://huggingface.co/unsloth/phi-4) on the shisa-ai/shisa-v2-best-of-n-athenev2-tulu70b-llama33-only-no-sysprompt, shisa-ai/shisa-v2-roleplaying-sft, shisa-ai/translation_expanded_master_set_filtered, shisa-ai/rewild-set-deepseek-subset, shisa-ai/magpie-ultra-set, shisa-ai/magpie-advanced-questions-set, and shisa-ai/japan-magpie-set datasets.
It achieves the following results on the evaluation set:
- Loss: 0.4735

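A minimal inference sketch with Transformers (the hub repo id is assumed from the model name above and may differ; the tokenizer ships a ChatML-style template, per the <|im_start|>/<|im_end|> tokens in tokenizer_config.json):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "shisa-ai/ablation-153-finalsft-shisa-v2-unphi-4-14b"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")

messages = [{"role": "user", "content": "日本の首都はどこですか?"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# do_sample defaults to true via generation_config.json; generation stops at <|im_end|>.
outputs = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```
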
## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 7.5e-06
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- distributed_type: multi-GPU
- num_devices: 16
- gradient_accumulation_steps: 2
- total_train_batch_size: 128
- total_eval_batch_size: 64
- optimizer: OptimizerNames.PAGED_ADAMW_8BIT with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 100
- num_epochs: 3.0

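For reference, total_train_batch_size follows directly from the entries above: train_batch_size (4) × gradient_accumulation_steps (2) × num_devices (16) = 128; total_eval_batch_size is 4 × 16 = 64, since no accumulation applies at evaluation.
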
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:----:|:---------------:|
| 0.7096 | 0.0020 | 1 | 0.7202 |
| 0.5031 | 0.5 | 246 | 0.5037 |
| 0.4589 | 1.0 | 492 | 0.4841 |
| 0.4245 | 1.5 | 738 | 0.4780 |
| 0.4423 | 2.0 | 984 | 0.4719 |
| 0.3957 | 2.5 | 1230 | 0.4754 |
| 0.3923 | 3.0 | 1476 | 0.4735 |

### Framework versions

- Transformers 4.50.0
- Pytorch 2.6.0+cu124
- Datasets 3.4.1
- Tokenizers 0.21.1
config.json (Normal file, 31 lines)
@@ -0,0 +1,31 @@
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 100257,
  "eos_token_id": 100265,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 17920,
  "max_position_embeddings": 16384,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 40,
  "num_hidden_layers": 40,
  "num_key_value_heads": 10,
  "original_max_position_embeddings": 16384,
  "pad_token_id": 100351,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 250000,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.50.0",
  "use_cache": false,
  "vocab_size": 100352
}
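Although the base model is a Phi-4 derivative, the checkpoint is serialized under the Llama architecture (LlamaForCausalLM, model_type "llama"), consistent with unsloth's Llamafied phi-4 release and presumably the "unphi" in the model name. Note also the grouped-query attention layout: 40 query heads share 10 key/value heads. A quick sanity-check sketch (repo id assumed as above):

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("shisa-ai/ablation-153-finalsft-shisa-v2-unphi-4-14b")
assert cfg.model_type == "llama"       # Phi-4 weights in the Llama layout
assert cfg.num_attention_heads == 40 and cfg.num_key_value_heads == 10  # GQA, 4:1
assert cfg.eos_token_id == 100265      # <|im_end|>, see tokenizer_config.json
```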
configuration.json (Normal file, 1 line)
@@ -0,0 +1 @@
{"framework": "pytorch", "task": "text-generation", "allow_remote": true}
generation_config.json (Normal file, 8 lines)
@@ -0,0 +1,8 @@
{
  "_from_model_config": true,
  "bos_token_id": 100257,
  "do_sample": true,
  "eos_token_id": 100265,
  "pad_token_id": 100351,
  "transformers_version": "4.50.0"
}
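Because do_sample is true here, model.generate() samples by default whenever these defaults are loaded; a short check sketch (repo id assumed as above):

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("shisa-ai/ablation-153-finalsft-shisa-v2-unphi-4-14b")
assert gen.do_sample               # sampling enabled by default
assert gen.eos_token_id == 100265  # generation stops at <|im_end|>
```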
merges.txt (Normal file, 100001 lines)
File diff suppressed because it is too large
model-00001-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b0757394edd1f52092a51136f3ab25c3056e3c4ee124a535db88ef1b07108f0
size 4933658528
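Each weight shard is committed as a Git LFS pointer: a three-line stub giving the LFS spec version, the SHA-256 of the real blob, and its size in bytes (here about 4.93 GB; git lfs pull fetches the actual file). A minimal parser sketch for these stubs:

```python
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer stub into its version, oid, and size fields."""
    fields = dict(
        line.split(" ", 1) for line in Path(path).read_text().splitlines() if line
    )
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),  # size of the real blob, in bytes
    }

ptr = parse_lfs_pointer("model-00001-of-00006.safetensors")
print(f"{ptr['size'] / 1e9:.2f} GB, sha256 {ptr['oid'][:12]}...")
```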
model-00002-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6495d1a4cbd1bdf0abfca300276dd25bbeaa9448724646bc3068aa9c17cdcee6
size 4954693112
model-00003-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7100705a36da298390c0347ba88ec8c08b2511c177274569a739c68232a247a0
size 4902243992
model-00004-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b43e4ad1d2578c16d97df5f9ce84b7aea89c963dff4d78c2ee2f4d2b49f3570d
size 4954672440
model-00005-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:03d765e3b8e5353ea7a3932280c1926dd91aab7091322b421b82221046496b23
size 4954672432
model-00006-of-00006.safetensors (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c86f64aafe146a6953fad25510418139829851fb71c341e7a34c34a86c16a844
size 4619116224
model.safetensors.index.json (Normal file, 370 lines)
@@ -0,0 +1,370 @@
{
  "metadata": {
    "total_size": 29319014400
  },
  "weight_map": {
    "lm_head.weight": "model-00006-of-00006.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.34.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
    "model.layers.35.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.35.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.36.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.37.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.38.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
    "model.norm.weight": "model-00006-of-00006.safetensors"
  }
}
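The metadata doubles as a sanity check: total_size is 29,319,014,400 bytes, and at bfloat16 (2 bytes per parameter) that is 14,659,507,200 ≈ 14.7B parameters, matching the "14b" in the model name. The weight map tells loaders which shard holds each tensor; a manual lookup sketch, assuming the shards have been downloaded locally:

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.0.self_attn.q_proj.weight"
shard = index["weight_map"][name]   # -> "model-00001-of-00006.safetensors"

with safe_open(shard, framework="pt") as st:
    tensor = st.get_tensor(name)
print(tensor.shape, tensor.dtype)   # [5120, 5120], torch.bfloat16 per config.json
```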
special_tokens_map.json (Normal file, 30 lines)
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": true,
    "normalized": false,
    "rstrip": true,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": true,
    "normalized": false,
    "rstrip": true,
    "single_word": false
  },
  "pad_token": {
    "content": "<|dummy_87|>",
    "lstrip": true,
    "normalized": false,
    "rstrip": true,
    "single_word": false
  },
  "unk_token": {
    "content": "�",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
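These contents line up with the ids in config.json: <|endoftext|> is token 100257 (bos) and <|im_end|> is 100265 (eos); extrapolating the added-token numbering in tokenizer_config.json below (truncated in this view), <|dummy_87|> should be id 100351, matching pad_token_id. A quick check sketch (repo id assumed as above):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("shisa-ai/ablation-153-finalsft-shisa-v2-unphi-4-14b")
print(tok.convert_tokens_to_ids("<|endoftext|>"))  # expected 100257 (bos)
print(tok.convert_tokens_to_ids("<|im_end|>"))     # expected 100265 (eos)
print(tok.convert_tokens_to_ids("<|dummy_87|>"))   # expected 100351 (pad)
```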
tokenizer.json (Normal file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dd21124d96954e8a185367ef6b14bc09639855d4674cf4c5b0e0eb346651afc4
size 7153264
tokenizer_config.json (Normal file, 791 lines)
@@ -0,0 +1,791 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "5809": {
      "content": "�",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "100256": {
      "content": "<|dummy_0|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100257": {
      "content": "<|endoftext|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100258": {
      "content": "<|fim_prefix|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100259": {
      "content": "<|fim_middle|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100260": {
      "content": "<|fim_suffix|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100261": {
      "content": "<|dummy_1|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100262": {
      "content": "<|dummy_2|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100263": {
      "content": "<|dummy_3|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100264": {
      "content": "<|im_start|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100265": {
      "content": "<|im_end|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100266": {
      "content": "<|im_sep|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100267": {
      "content": "<|dummy_4|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100268": {
      "content": "<|dummy_5|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100269": {
      "content": "<|dummy_6|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100270": {
      "content": "<|dummy_7|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100271": {
      "content": "<|dummy_8|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100272": {
      "content": "<|dummy_9|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100273": {
      "content": "<|dummy_10|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100274": {
      "content": "<|dummy_11|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100275": {
      "content": "<|dummy_12|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100276": {
      "content": "<|endofprompt|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100277": {
      "content": "<|dummy_13|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100278": {
      "content": "<|dummy_14|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100279": {
      "content": "<|dummy_15|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100280": {
      "content": "<|dummy_16|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100281": {
      "content": "<|dummy_17|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100282": {
      "content": "<|dummy_18|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100283": {
      "content": "<|dummy_19|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100284": {
      "content": "<|dummy_20|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100285": {
      "content": "<|dummy_21|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100286": {
      "content": "<|dummy_22|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100287": {
      "content": "<|dummy_23|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100288": {
      "content": "<|dummy_24|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100289": {
      "content": "<|dummy_25|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100290": {
      "content": "<|dummy_26|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100291": {
      "content": "<|dummy_27|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100292": {
      "content": "<|dummy_28|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100293": {
      "content": "<|dummy_29|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100294": {
      "content": "<|dummy_30|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100295": {
      "content": "<|dummy_31|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100296": {
      "content": "<|dummy_32|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100297": {
      "content": "<|dummy_33|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100298": {
      "content": "<|dummy_34|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100299": {
      "content": "<|dummy_35|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100300": {
      "content": "<|dummy_36|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100301": {
      "content": "<|dummy_37|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100302": {
      "content": "<|dummy_38|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100303": {
      "content": "<|dummy_39|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100304": {
      "content": "<|dummy_40|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100305": {
      "content": "<|dummy_41|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100306": {
      "content": "<|dummy_42|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100307": {
      "content": "<|dummy_43|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100308": {
      "content": "<|dummy_44|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100309": {
      "content": "<|dummy_45|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100310": {
      "content": "<|dummy_46|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100311": {
      "content": "<|dummy_47|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100312": {
      "content": "<|dummy_48|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100313": {
      "content": "<|dummy_49|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100314": {
      "content": "<|dummy_50|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100315": {
      "content": "<|dummy_51|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100316": {
      "content": "<|dummy_52|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100317": {
      "content": "<|dummy_53|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100318": {
      "content": "<|dummy_54|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100319": {
      "content": "<|dummy_55|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100320": {
      "content": "<|dummy_56|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100321": {
      "content": "<|dummy_57|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100322": {
      "content": "<|dummy_58|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100323": {
      "content": "<|dummy_59|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100324": {
      "content": "<|dummy_60|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100325": {
      "content": "<|dummy_61|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100326": {
      "content": "<|dummy_62|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100327": {
      "content": "<|dummy_63|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100328": {
      "content": "<|dummy_64|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100329": {
      "content": "<|dummy_65|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100330": {
      "content": "<|dummy_66|>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "100331": {
|
||||
"content": "<|dummy_67|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100332": {
|
||||
"content": "<|dummy_68|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100333": {
|
||||
"content": "<|dummy_69|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100334": {
|
||||
"content": "<|dummy_70|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100335": {
|
||||
"content": "<|dummy_71|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100336": {
|
||||
"content": "<|dummy_72|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100337": {
|
||||
"content": "<|dummy_73|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100338": {
|
||||
"content": "<|dummy_74|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100339": {
|
||||
"content": "<|dummy_75|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100340": {
|
||||
"content": "<|dummy_76|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100341": {
|
||||
"content": "<|dummy_77|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100342": {
|
||||
"content": "<|dummy_78|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100343": {
|
||||
"content": "<|dummy_79|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100344": {
|
||||
"content": "<|dummy_80|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100345": {
|
||||
"content": "<|dummy_81|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100346": {
|
||||
"content": "<|dummy_82|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100347": {
|
||||
"content": "<|dummy_83|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100348": {
|
||||
"content": "<|dummy_84|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100349": {
|
||||
"content": "<|dummy_85|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100350": {
|
||||
"content": "<|dummy_86|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
},
|
||||
"100351": {
|
||||
"content": "<|dummy_87|>",
|
||||
"lstrip": true,
|
||||
"normalized": false,
|
||||
"rstrip": true,
|
||||
"single_word": false,
|
||||
"special": true
|
||||
}
|
||||
},
|
||||
"bos_token": "<|endoftext|>",
|
||||
"chat_template": "{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|im_start|>system<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'user') %}{{'<|im_start|>user<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'assistant') %}{{'<|im_start|>assistant<|im_sep|>' + message['content'] + '<|im_end|>'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant<|im_sep|>' }}{% endif %}",
|
||||
"clean_up_tokenization_spaces": false,
|
||||
"eos_token": "<|im_end|>",
|
||||
"extra_special_tokens": {},
|
||||
"model_max_length": 16384,
|
||||
"pad_token": "<|dummy_87|>",
|
||||
"padding_side": "left",
|
||||
"tokenizer_class": "GPT2Tokenizer",
|
||||
"unk_token": "�"
|
||||
}
|
||||
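For reference, a minimal sketch (not part of the commit; it assumes the `transformers` library and a local checkout at a placeholder path) of how the `chat_template`, padding, and dummy special tokens configured above are consumed:

```python
from transformers import AutoTokenizer

# "/path/to/checkout" is a placeholder for a local clone of this repository.
tokenizer = AutoTokenizer.from_pretrained("/path/to/checkout")

# The added_tokens_decoder entries register <|dummy_30|>..<|dummy_87|> as
# special tokens; <|dummy_87|> (id 100351) doubles as the pad token.
print(tokenizer.pad_token)                              # <|dummy_87|>
print(tokenizer.convert_tokens_to_ids("<|dummy_87|>"))  # 100351

# The Jinja chat_template renders each turn as
# <|im_start|>{role}<|im_sep|>{content}<|im_end|>, with no separator between
# turns; add_generation_prompt=True appends the assistant header.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|im_start|>system<|im_sep|>You are a helpful assistant.<|im_end|><|im_start|>user<|im_sep|>Hello!<|im_end|><|im_start|>assistant<|im_sep|>

# padding_side is "left", so padded batches stay right-aligned, which is what
# decoder-only generation expects.
batch = tokenizer(["hi", "a much longer input"], padding=True)
```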
3
training_args.bin
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b713ee2a1a254adaf119330943998d994cee5e6e4d3a4c6c6de6f2322680a5ca
size 9080
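`training_args.bin` is stored as a Git LFS pointer, so the three lines above are what actually live in the Git history; the 9080-byte payload is only materialized by `git lfs pull`. A hypothetical helper (not part of the commit) for reading the pointer fields:

```python
# Parse the "key value" lines of a Git LFS pointer file like the one above.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key and value:
                fields[key] = value
    return fields

info = parse_lfs_pointer("training_args.bin")
print(info["version"])  # https://git-lfs.github.com/spec/v1
print(info["oid"])      # sha256:b713ee2a... (hash of the real payload)
print(info["size"])     # 9080 (payload size in bytes)
```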
1
vocab.json
Normal file
File diff suppressed because one or more lines are too long