From 0d780d5ee94f498af4af9a1849f1309e1f1e4862 Mon Sep 17 00:00:00 2001 From: ModelHub XC Date: Mon, 4 May 2026 20:09:47 +0800 Subject: [PATCH] Initialize project; model provided by the ModelHub XC community MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Model: Himitsui/Kaiju-11B Source: Original Platform --- .gitattributes | 35 ++ .ipynb_checkpoints/config-checkpoint.json | 28 ++ .../model.safetensors.index-checkpoint.json | 442 ++++++++++++++++++ README.md | 82 ++++ config.json | 28 ++ generation_config.json | 7 + model-00001-of-00005.safetensors | 3 + model-00002-of-00005.safetensors | 3 + model-00003-of-00005.safetensors | 3 + model-00004-of-00005.safetensors | 3 + model-00005-of-00005.safetensors | 3 + model.safetensors.index.json | 442 ++++++++++++++++++ special_tokens_map.json | 23 + tokenizer.model | 3 + tokenizer_config.json | 42 ++ 15 files changed, 1147 insertions(+) create mode 100644 .gitattributes create mode 100644 .ipynb_checkpoints/config-checkpoint.json create mode 100644 .ipynb_checkpoints/model.safetensors.index-checkpoint.json create mode 100644 README.md create mode 100644 config.json create mode 100644 generation_config.json create mode 100644 model-00001-of-00005.safetensors create mode 100644 model-00002-of-00005.safetensors create mode 100644 model-00003-of-00005.safetensors create mode 100644 model-00004-of-00005.safetensors create mode 100644 model-00005-of-00005.safetensors create mode 100644 model.safetensors.index.json create mode 100644 special_tokens_map.json create mode 100644 tokenizer.model create mode 100644 tokenizer_config.json diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..a6344aa --- /dev/null +++ b/.gitattributes @@ -0,0 +1,35 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text diff --git a/.ipynb_checkpoints/config-checkpoint.json b/.ipynb_checkpoints/config-checkpoint.json new file mode 100644
index 0000000..6bd2856 --- /dev/null +++ b/.ipynb_checkpoints/config-checkpoint.json @@ -0,0 +1,28 @@ +{ + "_name_or_path": "/workspace/MM/Fimbulvetr-11B-v2-Test-14", + "architectures": [ + "LlamaForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "bos_token_id": 1, + "eos_token_id": 2, + "hidden_act": "silu", + "hidden_size": 4096, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 4096, + "model_type": "llama", + "num_attention_heads": 32, + "num_hidden_layers": 48, + "num_key_value_heads": 8, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": null, + "rope_theta": 10000.0, + "tie_word_embeddings": false, + "torch_dtype": "float16", + "transformers_version": "4.37.2", + "use_cache": false, + "vocab_size": 32000 +} diff --git a/.ipynb_checkpoints/model.safetensors.index-checkpoint.json b/.ipynb_checkpoints/model.safetensors.index-checkpoint.json new file mode 100644 index 0000000..d81a244 --- /dev/null +++ b/.ipynb_checkpoints/model.safetensors.index-checkpoint.json @@ -0,0 +1,442 @@ +{ + "metadata": { + "total_size": 21463048192 + }, + "weight_map": { + "lm_head.weight": "model-00005-of-00005.safetensors", + "model.embed_tokens.weight": "model-00001-of-00005.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.11.mlp.down_proj.weight": 
"model-00002-of-00005.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.input_layernorm.weight": 
"model-00002-of-00005.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.v_proj.weight": 
"model-00001-of-00005.safetensors", + "model.layers.20.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.q_proj.weight": 
"model-00003-of-00005.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.o_proj.weight": 
"model-00003-of-00005.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.33.self_attn.k_proj.weight": 
"model-00003-of-00005.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.34.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.post_attention_layernorm.weight": 
"model-00004-of-00005.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.up_proj.weight": 
"model-00004-of-00005.safetensors", + "model.layers.42.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.44.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.44.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.45.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.gate_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.47.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.mlp.gate_proj.weight": 
"model-00005-of-00005.safetensors", + "model.layers.47.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + 
"model.layers.9.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.norm.weight": "model-00005-of-00005.safetensors" + } +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..a3e36d0 --- /dev/null +++ b/README.md @@ -0,0 +1,82 @@ +--- +license: cc-by-nc-4.0 +language: +- en +--- + +Included in this repo is the full precision model for Kaiju-11B + +(ノ≧∀≦)ノ ‥…━━━━━━━━━━━━━★ ||| ╲/\╭[ ᴼᴼ ౪ ᴼᴼ]╮/\╱\ + +Hiya! This is an experiment using Gryphe's [MergeMonster](https://github.com/Gryphe/MergeMonster). + +I decided to try and reduce what the community calls 'GPT-isms' or GPT Slop, Solar is a good model but does have fair share of positivity bias and 'slop' in roleplays. I used my friend [Sao](https://huggingface.co/Sao10K)'s models as bases as they are pretty popular, along with Kuromitsu and the popular Instruct-Uncensored tune. + +Alpaca Format should be fine as it is universal, Vicuna Format should work too. Universal-Light preset in SillyTavern is pretty nice too. :) + +💜 I hope this model may be useful to you 💜 + +*** + +Merge Details Below: + +
See Merge Config + +``` +----------------------------------------------------------------------------------------------------- +| Type | Phrase | Context | Raw Prob* | Used Prob** | Change | +----------------------------------------------------------------------------------------------------- +| BAD | anticipation | Her body quivers with | 9.99850% | 119.98% | -54.02% | +| BAD | anticipation | The atmosphere is thic.. | 8.82392% | 105.89% | -32.13% | +| BAD | unwavering | Filled with an | 0.09003% | 1.08% | -0.06% | +| BAD | determination | Her eyes were filled w.. | 0.19863% | 2.38% | -0.26% | +| BAD | determination | Her stubbornness only .. | 7.17110% | 86.05% | -39.86% | +| BAD | whisper | Her voice barely above.. | 96.55492% | 1158.66% | -8.91% | +| BAD | spine | shivers down her | 85.57597% | 1026.91% | -66.19% | +| BAD | sends shivers | The thrill of the act | 0.00230% | 0.03% | -0.00% | +| BAD | ministrations | She moans and twitches.. | 1.35264% | 16.23% | -10.49% | +| BAD | legs | wraps her | 2.45741% | 29.49% | -10.58% | +| BAD | imposing figure | He had an | 0.00356% | 0.04% | +0.00% | +| BAD | shared challenges | Their bond strengthene.. | 0.10075% | 1.21% | -0.03% | +| BAD | bond | forged a | 1.78930% | 21.47% | -9.07% | +| BAD | bond | an unspoken | 4.33001% | 51.96% | -28.17% | +| BAD | enhance our expe.. | I'm excited to see how | 0.00000% | 0.00% | +0.00% | +| BAD | sense of vulnera.. | create a | 0.00003% | 0.00% | -0.00% | +| BAD | dimensions of in.. | explore new | 0.00047% | 0.01% | -0.00% | +| BAD | deepening our co.. | while | 0.00003% | 0.00% | -0.00% | +| BAD | shared experiences | through | 0.00469% | 0.06% | -0.00% | +| BAD | societal expecta.. | that transcend | 0.00170% | 0.02% | -0.00% | +| BAD | conventional bou.. | that defy | 0.03593% | 0.43% | +0.04% | +| BAD | conventional bou.. | and defy | 0.00410% | 0.05% | +0.01% | +| BAD | open communication | an environment | 0.00000% | 0.00% | +0.00% | +| BAD | emotional vulner.. | an environment | 0.00000% | 0.00% | +0.00% | +| BAD | heightens our co.. | touch and the anticipa.. | 0.00000% | 0.00% | +0.00% | +| BAD | sensations you'r.. | I'm enjoying | 0.00000% | 0.00% | -0.00% | +| BAD | is truly arousing | attention to detail | 0.00000% | 0.00% | +0.00% | +| BAD | is truly arousing | way you explore my body | 0.00001% | 0.00% | +0.00% | +| BAD | challenge presen.. | my resolve unwavering .. | 0.00000% | 0.00% | +0.00% | +| BAD | humble vessel | surrendering to the ex.. | 0.00000% | 0.00% | +0.00% | +| BAD | bond | cherishing the unique | 1.37498% | 16.50% | +1.21% | +| BAD | bond | special | 0.05834% | 0.70% | +0.01% | +| BAD | grows stronger w.. | bond | 0.00000% | 0.00% | +0.00% | +| BAD | that cannot be b.. | bond | 0.00000% | 0.00% | -0.00% | +| BAD | becomes unbreaka.. | bond | 0.00000% | 0.00% | -0.00% | +| BAD | grew stronger wi.. | bond | 0.00000% | 0.00% | +0.00% | +| GOOD | The apple is in .. | Question: If I'm in th.. | 78.38934% | 78.39% | -10.79% | +------------------------------------------------------------------------------------------------------ +| Totals | 298.32% | 2717.54% | -269.30% | +------------------------------------------------------------------------------------------------------ +``` + +* = Unweighted, raw probability - ** = Probability after weight adjustments + +``` +-------- MERGE COMPOSITION --------- +Fimbulvetr-11B-v2-Test-14: 0.50 +KuroMitsu-11B: 0.18 +Fimbulvetr-10.7B-v1: 0.17 +SOLAR-10.7B-Instruct-v1.0-uncensored: 0.10 +Solstice-11B-v1: 0.05 +``` + +
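+A minimal usage sketch, assuming the standard `transformers` API; the repo ID `Himitsui/Kaiju-11B` is taken from this patch's header, and the Alpaca prompt wording is only illustrative:
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_id = "Himitsui/Kaiju-11B"  # repo ID from the patch header
+tokenizer = AutoTokenizer.from_pretrained(model_id)
+model = AutoModelForCausalLM.from_pretrained(
+    model_id,
+    torch_dtype="auto",  # config.json stores float16 weights
+    device_map="auto",   # requires the `accelerate` package
+)
+
+# Alpaca-style prompt, as suggested above.
+prompt = (
+    "Below is an instruction that describes a task. "
+    "Write a response that appropriately completes the request.\n\n"
+    "### Instruction:\nWrite a short greeting.\n\n"
+    "### Response:\n"
+)
+inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+output = model.generate(**inputs, max_new_tokens=128)
+print(tokenizer.decode(output[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
+```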
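+The weights ship as five safetensors shards; `model.safetensors.index.json` (included in this repo) maps every tensor name to the shard that stores it. A short inspection sketch, assuming the index file has been downloaded to the working directory:
+
+```python
+import json
+from collections import Counter
+
+with open("model.safetensors.index.json") as f:
+    index = json.load(f)
+
+# Tensor payload in bytes: 21463048192, i.e. roughly 10.7B float16 parameters.
+print(index["metadata"]["total_size"])
+
+# How many tensors each of the five shard files holds.
+for shard, count in sorted(Counter(index["weight_map"].values()).items()):
+    print(shard, count)
+```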

\ No newline at end of file diff --git a/config.json b/config.json new file mode 100644 index 0000000..96a5ccb --- /dev/null +++ b/config.json @@ -0,0 +1,28 @@ +{ + "_name_or_path": "/home/mimitsu/AI/textgen/Kaiju-11B", + "architectures": [ + "LlamaForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "bos_token_id": 1, + "eos_token_id": 2, + "hidden_act": "silu", + "hidden_size": 4096, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 4096, + "model_type": "llama", + "num_attention_heads": 32, + "num_hidden_layers": 48, + "num_key_value_heads": 8, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": null, + "rope_theta": 10000.0, + "tie_word_embeddings": false, + "torch_dtype": "float16", + "transformers_version": "4.37.2", + "use_cache": true, + "vocab_size": 32000 +} diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000..a8f5326 --- /dev/null +++ b/generation_config.json @@ -0,0 +1,7 @@ +{ + "_from_model_config": true, + "bos_token_id": 1, + "eos_token_id": 2, + "transformers_version": "4.37.2", + "use_cache": false +} diff --git a/model-00001-of-00005.safetensors b/model-00001-of-00005.safetensors new file mode 100644 index 0000000..7d0a8f8 --- /dev/null +++ b/model-00001-of-00005.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cd52c5af87b306d6b5d8ac85157f1dd25d955234dbd4a3be898cc351532dae8 +size 4943162240 diff --git a/model-00002-of-00005.safetensors b/model-00002-of-00005.safetensors new file mode 100644 index 0000000..2b37a41 --- /dev/null +++ b/model-00002-of-00005.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b2875ae5d0737a56b16ef04a27f1f48b5e5eb6d04b8257a3ad8cccd05338162 +size 4999819232 diff --git a/model-00003-of-00005.safetensors b/model-00003-of-00005.safetensors new file mode 100644 index 0000000..b68c64b --- /dev/null +++ b/model-00003-of-00005.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58d84821d277f957c7d34485b45e78f268e9bdbbd1012040cd40050836a4c99d +size 4915916080 diff --git a/model-00004-of-00005.safetensors b/model-00004-of-00005.safetensors new file mode 100644 index 0000000..9fba9ed --- /dev/null +++ b/model-00004-of-00005.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1b55505ba1dada4e080a914facfa9b1e12a89ed5d8f1eeecc5c48e40ced45c5 +size 4915916080 diff --git a/model-00005-of-00005.safetensors b/model-00005-of-00005.safetensors new file mode 100644 index 0000000..34602d3 --- /dev/null +++ b/model-00005-of-00005.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38fa5bad8c3dcc48da3c7a2a10e141bd56129d601ff18fff438f0dee18d46576 +size 1688284744 diff --git a/model.safetensors.index.json b/model.safetensors.index.json new file mode 100644 index 0000000..d81a244 --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,442 @@ +{ + "metadata": { + "total_size": 21463048192 + }, + "weight_map": { + "lm_head.weight": "model-00005-of-00005.safetensors", + "model.embed_tokens.weight": "model-00001-of-00005.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.post_attention_layernorm.weight": 
"model-00001-of-00005.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.mlp.up_proj.weight": 
"model-00002-of-00005.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.mlp.gate_proj.weight": 
"model-00002-of-00005.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.20.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.input_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.22.mlp.down_proj.weight": 
"model-00003-of-00005.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00005.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.input_layernorm.weight": 
"model-00003-of-00005.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.30.self_attn.v_proj.weight": 
"model-00003-of-00005.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.input_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00005.safetensors", + "model.layers.34.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.35.self_attn.q_proj.weight": 
"model-00004-of-00005.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.o_proj.weight": 
"model-00001-of-00005.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.42.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.input_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.down_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.44.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.mlp.up_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.44.self_attn.k_proj.weight": 
"model-00004-of-00005.safetensors", + "model.layers.44.self_attn.o_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.self_attn.q_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.44.self_attn.v_proj.weight": "model-00004-of-00005.safetensors", + "model.layers.45.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.gate_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.gate_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.46.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.input_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.47.mlp.down_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.mlp.gate_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.mlp.up_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.post_attention_layernorm.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.k_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.o_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.q_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.47.self_attn.v_proj.weight": "model-00005-of-00005.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.post_attention_layernorm.weight": 
"model-00001-of-00005.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.input_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00005.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00005.safetensors", + "model.norm.weight": "model-00005-of-00005.safetensors" + } +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000..451134b --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,23 @@ +{ + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tokenizer.model b/tokenizer.model new file mode 100644 index 0000000..8b443ef --- /dev/null +++ b/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055 +size 493443 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000..0fe140a --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,42 @@ +{ + 
"add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "legacy": true, + "model_max_length": 1000000000000000019884624838656, + "pad_token": null, + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "LlamaTokenizer", + "unk_token": "", + "use_default_system_prompt": true +}