commit d3bee10885cba4427a143867fbe45b18795bb5f6 Author: ModelHub XC Date: Sat Apr 11 07:34:55 2026 +0800 Initialize the project; model provided by the ModelHub XC community Model: inclusionAI/Ring-lite-distill-preview Source: Original Platform diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..21b3632 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,49 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bin.* filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zstandard filter=lfs diff=lfs merge=lfs -text +*.tfevents* filter=lfs diff=lfs merge=lfs -text +*.db* filter=lfs diff=lfs merge=lfs -text +*.ark* filter=lfs diff=lfs merge=lfs -text +**/*ckpt*data* filter=lfs diff=lfs merge=lfs -text +**/*ckpt*.meta filter=lfs diff=lfs merge=lfs -text +**/*ckpt*.index filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.gguf* filter=lfs diff=lfs merge=lfs -text +*.ggml filter=lfs diff=lfs merge=lfs -text +*.llamafile* filter=lfs diff=lfs merge=lfs -text +*.pt2 filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text + +tokenizer.json filter=lfs diff=lfs merge=lfs -text \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..fc637dc --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 inclusionAI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..562a0ed --- /dev/null +++ b/README.md @@ -0,0 +1,113 @@ +--- +license: mit +language: +- zh +- en +base_model: +- inclusionAI/Ling-lite +pipeline_tag: text-generation +library_name: transformers +--- + +# Ring-lite-distill-preview + +

+ 🤗 Hugging Face +

+ +## Introduction + +Ring-lite-distill-preview is an MoE LLM provided and open-sourced by InclusionAI, with 16.8B total parameters and 2.75B activated parameters. It was fine-tuned from [Ling-lite](https://modelscope.cn/models/inclusionAI/Ling-lite) using extensive reasoning-focused instruction data. The model delivers performance comparable to DeepSeek-R1-Distill-Qwen-7B on reasoning benchmarks while achieving better results on general benchmarks, with especially strong performance on function-calling benchmarks (e.g., T-Eval, BFCL_v2) and instruction-following benchmarks (e.g., IFEval). This demonstrates that Ring-lite-distill-preview is a more balanced and versatile model. Additionally, it maintains competitive latency and throughput compared with other reasoning LLMs of similar size. + +## Model Downloads + +

+ +| **Model** | **#Total Params** | **#Activated Params** | **Context Length** | **Download** | +| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: | +| Ring-lite-distill-preview | 16.8B | 2.75B | 64K | [🤗 HuggingFace](https://huggingface.co/inclusionAI/Ring-lite-distill) | + +
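The total and activated parameter counts above can be roughly reconstructed from the architecture fields in `config.json` later in this commit (28 layers, hidden size 2048, 64 routed experts with per-expert FFN size 1408, 6 routed plus 2 shared experts active per token, 16 attention heads with 4 KV heads, untied 126464-token embeddings). The following is a back-of-the-envelope sketch only; layer norms and other small terms are ignored, so the figures are approximate:

```python
# Approximate parameter accounting for the BailingMoe layout described in
# config.json; layer norms and other small terms are ignored.
hidden, vocab, layers = 2048, 126464, 28
experts, experts_per_tok, shared_experts = 64, 6, 2
moe_inter, heads, kv_heads = 1408, 16, 4
head_dim = hidden // heads

expert_ffn = 3 * hidden * moe_inter                                  # gate/up/down projections of one expert
attention = hidden * (hidden + 2 * kv_heads * head_dim) + hidden**2  # fused query_key_value + output dense
router = hidden * experts                                            # MoE gate
embeddings = 2 * vocab * hidden                                      # input embeddings + untied lm_head

per_layer_total = (experts + shared_experts) * expert_ffn + attention + router
per_layer_active = (experts_per_tok + shared_experts) * expert_ffn + attention + router

print(f"total     ~ {(layers * per_layer_total + embeddings) / 1e9:.2f}B")   # ~16.8B
print(f"activated ~ {(layers * per_layer_active + embeddings) / 1e9:.2f}B")  # ~2.75B
```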
+ +## Evaluation +To fully evaluate the model's performance, we examine Ring-lite-distill-preview on both reasoning ability and general ability. +### Reasoning ability + +
+ +| **Model** | **AIME24** | **MATH-500** | **GPQA-diamond** | **LiveCodeBench** | +| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: | +| DeepSeek-R1-Distill-Qwen-7B (reported) | 55.5 | 92.8 | 49.1 | 37.6 | +| DeepSeek-R1-Distill-Qwen-7B (reproduced) | 53.2 | 93.7 | 50.4 | 36.5 | +| Ring-lite-distill-preview | 56.3 | 93.7 | 46.2 | 31.9 | + +
+ +### General ability + +
+ +| **Model** | **IFEval** | **T-Eval** | **BFCL_v2** | **MMLU** | +| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: | +| DeepSeek-R1-Distill-Qwen-7B (reproduced) | 39.3 | 26.9 | 38.9 | 44.1 | +| Ring-lite-distill-preview | 75.3 | 81.3 | 63.0 | 63.3 | + +
+ +More details will be reported in our [technical report](https://github.com/inclusionAI/Ring/blob/main/Ring_Lite_Distill_Preview.pdf). + +## Quickstart + +### 🤗 Hugging Face Transformers +Here is a code snippet to show you how to use the chat model with `transformers`; note that `trust_remote_code=True` is required because this repository ships custom BailingMoe model code (see `auto_map` in `config.json` below): + +```python +from transformers import AutoModelForCausalLM, AutoTokenizer + +model_name = "inclusionAI/Ring-lite-distill-preview" + +model = AutoModelForCausalLM.from_pretrained( + model_name, + torch_dtype="auto", + device_map="auto", + trust_remote_code=True +) +tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True) + +prompt = "Give me a short introduction to large language models." +messages = [ + {"role": "system", "content": "You are Ring, an assistant created by inclusionAI"}, + {"role": "user", "content": prompt} +] +text = tokenizer.apply_chat_template( + messages, + tokenize=False, + add_generation_prompt=True +) +model_inputs = tokenizer([text], return_tensors="pt").to(model.device) + +generated_ids = model.generate( + **model_inputs, + max_new_tokens=8192 +) +generated_ids = [ + output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids) +] + +response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0] +``` + +## Dataset +The training data of Ring-lite-distill-preview will be released soon. + +## Deployment +Please refer to [GitHub](https://github.com/inclusionAI/Ring/blob/main/README.md) for deployment instructions. + +## License +This code repository is licensed under [the MIT License](https://huggingface.co/inclusionAI/Ring-lite-distill/blob/main/LICENSE). + +## Citation +[TBD] \ No newline at end of file diff --git a/ant-bailing.png b/ant-bailing.png new file mode 100644 index 0000000..603e33d Binary files /dev/null and b/ant-bailing.png differ diff --git a/config.json b/config.json new file mode 100644 index 0000000..5ed8676 --- /dev/null +++ b/config.json @@ -0,0 +1,44 @@ +{ + "architectures": [ + "BailingMoeForCausalLM" + ], + "attention_dropout": 0.0, + "auto_map": { + "AutoConfig": "configuration_bailing_moe.BailingMoeConfig", + "AutoModel": "modeling_bailing_moe.BailingMoeModel", + "AutoModelForCausalLM": "modeling_bailing_moe.BailingMoeForCausalLM" + }, + "eos_token_id": 126081, + "pad_token_id": 126081, + "first_k_dense_replace": 0, + "hidden_act": "silu", + "hidden_size": 2048, + "initializer_range": 0.006, + "intermediate_size": 5632, + "max_position_embeddings": 16384, + "model_type": "bailing_moe", + "moe_intermediate_size": 1408, + "num_experts": 64, + "num_shared_experts": 2, + "norm_topk_prob": true, + "num_attention_heads": 16, + "num_experts_per_tok": 6, + "num_hidden_layers": 28, + "num_key_value_heads": 4, + "pretraining_tp": 1, + "rms_norm_eps": 1e-05, + "rope_scaling": null, + "rope_theta": 600000, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.36.0", + "use_cache": true, + "use_bias": false, + "use_qkv_bias": false, + "vocab_size": 126464, + "output_router_logits": false, + "embedding_dropout": 0.0, + "norm_head": true, + "norm_softmax": false, + "output_dropout": 0.0 +} \ No newline at end of file diff --git a/configuration.json b/configuration.json new file mode 100644 index 0000000..f9291c3 --- /dev/null +++ b/configuration.json @@ -0,0 +1 @@ +{"framework":"Pytorch","task":"text-generation"} \ No newline at end of file diff --git a/configuration_bailing_moe.py b/configuration_bailing_moe.py new file mode 100644 index 0000000..3f028c8 --- /dev/null +++ b/configuration_bailing_moe.py @@ -0,0 +1,78 @@ +""" Bailing MoE model configuration 
""" + +from transformers.configuration_utils import PretrainedConfig + + +class BailingMoeConfig(PretrainedConfig): + model_type = "bailing_moe" + + def __init__( + self, + vocab_size=30592, + hidden_size=1024, + intermediate_size=None, + num_hidden_layers=24, + num_attention_heads=16, + num_key_value_heads=0, + hidden_act="silu", + use_qkv_bias=False, # bailing only + use_bias=True, # bailing only + rms_norm_eps=1e-05, + norm_head=False, # bailing only + tie_word_embeddings=False, # PretrainedConfig key, here change default value. + embedding_dropout=0.1, + attention_dropout=0.1, + output_dropout=0.1, + initializer_range=0.02, + max_position_embeddings=16384, + rope_theta=10000.0, + use_cache=True, + use_sliding_window=False, + sliding_window=4096, + max_window_layers=28, + rope_scaling=None, + pad_token_id=126081, + num_experts=16, + num_shared_experts=0, + num_experts_per_tok=2, + norm_topk_prob=True, + moe_intermediate_size=None, + first_k_dense_replace=0, + head_dim=None, + output_router_logits=False, + **kwargs, + ): + self.num_hidden_layers = num_hidden_layers + self.vocab_size = vocab_size + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.num_attention_heads = num_attention_heads + self.num_key_value_heads = num_key_value_heads + self.hidden_act = hidden_act + self.use_qkv_bias = use_qkv_bias + self.use_bias = use_bias + self.norm_head = norm_head + self.rms_norm_eps = rms_norm_eps + self.embedding_dropout = embedding_dropout + self.attention_dropout = attention_dropout + self.output_dropout = output_dropout + self.initializer_range = initializer_range + self.max_position_embeddings = max_position_embeddings + self.rope_theta = rope_theta + self.use_cache = use_cache + self.use_sliding_window = use_sliding_window + self.sliding_window = sliding_window + self.max_window_layers = max_window_layers + self.head_dim = head_dim or self.hidden_size // self.num_attention_heads + self.rope_scaling = rope_scaling + + # MoE configs + self.num_experts = num_experts + self.num_shared_experts = num_shared_experts + self.num_experts_per_tok = num_experts_per_tok + self.norm_topk_prob = norm_topk_prob + self.moe_intermediate_size = moe_intermediate_size + self.first_k_dense_replace = first_k_dense_replace + self.output_router_logits = output_router_logits + + super().__init__(pad_token_id=pad_token_id, tie_word_embeddings=tie_word_embeddings, **kwargs) diff --git a/model-00001-of-00004.safetensors b/model-00001-of-00004.safetensors new file mode 100644 index 0000000..7fbad69 --- /dev/null +++ b/model-00001-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf3d311c600c7dff74d973d95660c8464b310a1d177d694b0cac1a1fb81f309a +size 9305327072 diff --git a/model-00002-of-00004.safetensors b/model-00002-of-00004.safetensors new file mode 100644 index 0000000..6f3b5b5 --- /dev/null +++ b/model-00002-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70b127b1b5d3a808585edcffe78ebd60c52bf58e5b0d095af51c7cc62990a381 +size 9305328272 diff --git a/model-00003-of-00004.safetensors b/model-00003-of-00004.safetensors new file mode 100644 index 0000000..7a02c03 --- /dev/null +++ b/model-00003-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63eb2564a66331fb8cf5520782be797c467e4c890e4e44a338bfc90ee12bddbf +size 9305328672 diff --git a/model-00004-of-00004.safetensors b/model-00004-of-00004.safetensors new file mode 100644 index 0000000..b59f6d5 --- 
/dev/null +++ b/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1cc12c93b2b9d4f6880f96c9dc9c1ebe415ace99f39cbf398e673be1a23f3e4 +size 5688662080 diff --git a/model.safetensors.index.json b/model.safetensors.index.json new file mode 100644 index 0000000..6116041 --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,5611 @@ +{ + "metadata": { + "total_size": 33603948672 + }, + "weight_map": { + "model.layers.0.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.0.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.0.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.0.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.1.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.down_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.down_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.2.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.3.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.4.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.5.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.6.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.7.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.gate_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.10.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.10.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.11.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.12.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.13.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.14.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.15.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.8.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.8.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.8.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.9.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.9.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.9.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.16.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.17.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.17.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.18.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.18.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.19.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.19.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.20.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.21.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.22.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.23.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.gate_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.up_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.down_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "lm_head.weight": "model-00004-of-00004.safetensors", + "model.layers.24.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.24.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.24.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.24.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.24.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.24.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.24.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.25.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.25.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.up_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.gate_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.up_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.26.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.26.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.27.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.27.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.gate_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.up_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.gate_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors", + "model.rotary_emb.inv_freq": "model-00004-of-00004.safetensors", + "model.word_embeddings.weight": "model-00004-of-00004.safetensors" + } +} \ No newline at end of file diff --git a/modeling_bailing_moe.py b/modeling_bailing_moe.py new file mode 100644 index 0000000..f08a15a --- /dev/null +++ b/modeling_bailing_moe.py @@ -0,0 +1,1549 @@ +# coding=utf-8 +# Copyright 2023 Antgroup and The HuggingFace Inc. team. All rights reserved. +# +# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX +# and OPT implementations in this library. It has been modified from its +# original forms to accommodate minor architectural differences compared +# to GPT-NeoX and OPT used by the Meta AI team that trained the model. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" PyTorch BailingMoE model.""" +import math +import warnings +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn.functional as F +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss + +from transformers.activations import ACT2FN +from transformers.cache_utils import Cache, DynamicCache +from transformers.modeling_attn_mask_utils import ( + AttentionMaskConverter, + _prepare_4d_attention_mask, + _prepare_4d_causal_attention_mask, + _prepare_4d_causal_attention_mask_for_sdpa, +) +from transformers.modeling_outputs import ( + MoeModelOutputWithPast, + MoeCausalLMOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.pytorch_utils import ALL_LAYERNORM_LAYERS, is_torch_greater_or_equal_than_1_13 +from transformers.utils import ( + add_start_docstrings, + add_start_docstrings_to_model_forward, + is_flash_attn_2_available, + is_flash_attn_greater_or_equal_2_10, + logging, + replace_return_docstrings, +) +from transformers.utils.import_utils import is_torch_fx_available +from .configuration_bailing_moe import BailingMoeConfig + + +if is_flash_attn_2_available(): + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa + + +# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph. +# It means that the function will not be traced through and simply appear as a node in the graph. +if is_torch_fx_available(): + if not is_torch_greater_or_equal_than_1_13: + import torch.fx + + _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask) + + +logger = logging.get_logger(__name__) + +_CONFIG_FOR_DOC = "BailingMoeConfig" + + +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): + warnings.warn( + "Calling `transformers.models.BailingMoe.modeling_BailingMoe._prepare_4d_attention_mask` is deprecated and will be removed in v4.37. Use `transformers.modeling_attn_mask_utils._prepare_4d_attention_mask" + ) + return _prepare_4d_attention_mask(mask=mask, dtype=dtype, tgt_len=tgt_len) + + +def _make_causal_mask( + input_ids_shape: torch.Size, dtype: torch.dtype, device: torch.device, past_key_values_length: int = 0 +): + warnings.warn( + "Calling `transformers.models.BailingMoe.modeling_BailingMoe._make_causal_mask` is deprecated and will be removed in v4.37. 
Use `transformers.models.BailingMoe.modeling_BailingMoe.AttentionMaskConverter._make_causal_mask" + ) + return AttentionMaskConverter._make_causal_mask( + input_ids_shape=input_ids_shape, dtype=dtype, device=device, past_key_values_length=past_key_values_length + ) + + +class BailingMoeRMSNorm(nn.Module): + def __init__(self, hidden_size, eps=1e-6): + """ + BailingMoeRMSNorm is equivalent to T5LayerNorm + """ + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) + return self.weight * hidden_states.to(input_dtype) + + +ALL_LAYERNORM_LAYERS.append(BailingMoeRMSNorm) + + +class BailingMoeRotaryEmbedding(nn.Module): + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + inv_freq = 1.0 / (self.base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + # Build here to make `torch.jit.trace` work. + self._set_cos_sin_cache( + seq_len=max_position_embeddings, device=self.inv_freq.device, dtype=torch.get_default_dtype() + ) + self.max_seq_len_cached = None + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.outer(t, self.inv_freq.to(t.device)) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if self.max_seq_len_cached is None or seq_len > self.max_seq_len_cached: + self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=x.dtype) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaLinearScalingRotaryEmbedding with Llama->BailingMoe +class BailingMoeLinearScalingRotaryEmbedding(BailingMoeRotaryEmbedding): + """BailingMoeRotaryEmbedding extended with linear scaling. 
Credits to the Reddit user /u/kaiokendev""" + + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + t = t / self.scaling_factor + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + +# Copied from transformers.models.llama.modeling_llama.LlamaDynamicNTKScalingRotaryEmbedding with Llama->BailingMoe +class BailingMoeDynamicNTKScalingRotaryEmbedding(BailingMoeRotaryEmbedding): + """BailingMoeRotaryEmbedding extended with Dynamic NTK scaling. Credits to the Reddit users /u/bloc97 and /u/emozilla""" + + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + + if seq_len > self.max_position_embeddings: + base = self.base * ( + (self.scaling_factor * seq_len / self.max_position_embeddings) - (self.scaling_factor - 1) + ) ** (self.dim / (self.dim - 2)) + inv_freq = 1.0 / (base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + +# Inverse dim formula to find dim based on number of rotations +def yarn_find_correction_dim(num_rotations, dim, base=10000, max_position_embeddings=2048): + return (dim * math.log(max_position_embeddings / (num_rotations * 2 * math.pi))) / (2 * math.log(base)) + + +# Find dim range bounds based on rotations +def yarn_find_correction_range(low_rot, high_rot, dim, base=10000, max_position_embeddings=2048): + low = math.floor(yarn_find_correction_dim(low_rot, dim, base, max_position_embeddings)) + high = math.ceil(yarn_find_correction_dim(high_rot, dim, base, max_position_embeddings)) + return max(low, 0), min(high, dim - 1) # Clamp values just in case + + +def yarn_get_mscale(scale=1, mscale=1): + if scale <= 1: + return 1.0 + return 0.1 * mscale * math.log(scale) + 1.0 + + +def yarn_linear_ramp_mask(min, max, dim): + if min == max: + max += 0.001 # Prevent singularity + + linear_func = (torch.arange(dim, dtype=torch.float32) - min) / (max - min) + ramp_func = torch.clamp(linear_func, 0, 1) + return ramp_func + + +class BailingMoeYarnRotaryEmbedding(BailingMoeRotaryEmbedding): + + def __init__( + self, + dim, + max_position_embeddings=2048, + base=10000, + device=None, + scaling_factor=1.0, + original_max_position_embeddings=4096, + beta_fast=32, + beta_slow=1, + mscale=1, + mscale_all_dim=0, + ): + self.scaling_factor = 
scaling_factor + self.original_max_position_embeddings = original_max_position_embeddings + self.beta_fast = beta_fast + self.beta_slow = beta_slow + self.mscale = mscale + self.mscale_all_dim = mscale_all_dim + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + dim = self.dim + + freq_extra = 1.0 / (self.base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim)) + freq_inter = 1.0 / ( + self.scaling_factor * self.base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim) + ) + + low, high = yarn_find_correction_range( + self.beta_fast, + self.beta_slow, + dim, + self.base, + self.original_max_position_embeddings, + ) + inv_freq_mask = 1.0 - yarn_linear_ramp_mask(low, high, dim // 2).to(device=device, dtype=torch.float32) + inv_freq = freq_inter * (1 - inv_freq_mask) + freq_extra * inv_freq_mask + self.register_buffer("inv_freq", inv_freq, persistent=False) + + t = torch.arange(seq_len, device=device, dtype=torch.float32) + + freqs = torch.outer(t, inv_freq) + + _mscale = float( + yarn_get_mscale(self.scaling_factor, self.mscale) + / yarn_get_mscale(self.scaling_factor, self.mscale_all_dim) + ) + + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", (emb.cos() * _mscale).to(dtype), persistent=False) + self.register_buffer("sin_cached", (emb.sin() * _mscale).to(dtype), persistent=False) + + +# Copied from transformers.models.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., : x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2 :] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.models.llama.modeling_llama.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`): + The position indices of the tokens corresponding to the query and key tensors. For example, this can be + used to pass offsetted position ids when working with a KV-cache. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. + Returns: + `tuple(torch.Tensor)` comprising the query and key tensors rotated using the Rotary Position Embedding. 
+ """ + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +class BailingMoeMLP(nn.Module): + def __init__(self, config: BailingMoeConfig, intermediate_size: int): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size + self.intermediate_size = intermediate_size + + self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, x): + return self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x)) + + +class BailingMoeGate(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.top_k = config.num_experts_per_tok + self.num_experts = config.num_experts + + # topk selection algorithm + self.norm_topk_prob = config.norm_topk_prob + self.gating_dim = config.hidden_size + self.weight = nn.Parameter(torch.empty((self.num_experts, self.gating_dim))) + self.reset_parameters() + + def reset_parameters(self) -> None: + import torch.nn.init as init + + init.kaiming_uniform_(self.weight, a=math.sqrt(5)) + + def forward(self, hidden_states, sort=False): + bsz, seq_len, h = hidden_states.shape + # compute gating score + hidden_states = hidden_states.view(-1, h) + logits = F.linear(hidden_states, self.weight, None) + scores = logits.softmax(dim=-1, dtype=torch.float32) + + # select top-k experts + topk_weight, topk_idx = torch.topk(scores, k=self.top_k, dim=-1, sorted=sort) + + # norm gate to sum 1 + if self.top_k > 1 and self.norm_topk_prob: + denominator = topk_weight.sum(dim=-1, keepdim=True) + topk_weight = topk_weight / denominator + + return topk_idx, topk_weight, logits + + +class BailingMoeSparseMoeBlock(nn.Module): + """ + A mixed expert module containing shared experts. 
+ """ + + def __init__(self, config: BailingMoeConfig): + super().__init__() + self.config = config + self.num_experts_per_tok = config.num_experts_per_tok + self._setup_experts() + self.gate = BailingMoeGate(config) + if config.num_shared_experts is not None: + self.shared_experts = BailingMoeMLP( + config=config, intermediate_size=config.moe_intermediate_size * config.num_shared_experts + ) + + def _setup_experts(self): + self.experts = nn.ModuleList( + [ + BailingMoeMLP(config=self.config, intermediate_size=self.config.moe_intermediate_size) + for _ in range(self.config.num_experts) + ] + ) + + def forward(self, hidden_states): + identity = hidden_states + bsz, seq_len, h = hidden_states.shape + topk_idx, topk_weight, router_logits = self.gate(hidden_states) + hidden_states = hidden_states.view(-1, hidden_states.shape[-1]) + flat_topk_idx = topk_idx.view(-1) + if self.training: + hidden_states = hidden_states.repeat_interleave(self.num_experts_per_tok, dim=0) + y = torch.empty_like(hidden_states) + for i, expert in enumerate(self.experts): + y[flat_topk_idx == i] = expert(hidden_states[flat_topk_idx == i]) + y = (y.view(*topk_weight.shape, -1) * topk_weight.unsqueeze(-1)).sum(dim=1) + y = y.to(hidden_states.dtype).view(bsz, seq_len, h) + else: + y = self.moe_infer(hidden_states, topk_idx, topk_weight).view(bsz, seq_len, h) + if self.config.num_shared_experts is not None: + y = y + self.shared_experts(identity) + return y, (router_logits.view(bsz, seq_len, -1), topk_idx.view(bsz, seq_len, -1)) + + @torch.no_grad() + def moe_infer(self, x, topk_ids, topk_weight): + cnts = topk_ids.new_zeros((topk_ids.shape[0], len(self.experts))) + cnts.scatter_(1, topk_ids, 1) + tokens_per_expert = cnts.sum(dim=0) + idxs = topk_ids.view(-1).argsort() + sorted_tokens = x[idxs // topk_ids.shape[1]] + sorted_tokens_shape = sorted_tokens.shape + tokens_per_expert = tokens_per_expert.cpu().numpy() + outputs = [] + start_idx = 0 + for i, num_tokens in enumerate(tokens_per_expert): + end_idx = start_idx + num_tokens + if num_tokens == 0: + continue + expert = self.experts[i] + tokens_for_this_expert = sorted_tokens[start_idx:end_idx] + expert_out = expert(tokens_for_this_expert) + outputs.append(expert_out) + start_idx = end_idx + + outs = torch.cat(outputs, dim=0) if len(outputs) else sorted_tokens.new_empty(0) + new_x = torch.empty_like(outs) + new_x[idxs] = outs + final_out = ( + new_x.view(*topk_ids.shape, -1) + .type(topk_weight.dtype) + .mul_(topk_weight.unsqueeze(dim=-1)) + .sum(dim=1) + .type(new_x.dtype) + ) + return final_out + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). 
The hidden states go from (batch, + num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) + + +# Copied from transformers.models.llama.modeling_llama.LlamaAttention with Llama->BailingMoe +class BailingMoeAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__(self, config: BailingMoeConfig, layer_idx: Optional[int] = None): + super().__init__() + self.config = config + self.layer_idx = layer_idx + if layer_idx is None: + logger.warning_once( + f"Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will " + "to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` " + "when creating this class." + ) + + self.attention_dropout = config.attention_dropout + self.hidden_size = config.hidden_size + self.num_heads = config.num_attention_heads + self.head_dim = config.head_dim or self.hidden_size // self.num_heads + self.num_key_value_heads = config.num_key_value_heads + self.num_key_value_groups = self.num_heads // self.num_key_value_heads + self.max_position_embeddings = config.max_position_embeddings + self.rope_theta = config.rope_theta + self.is_causal = True + + self.query_key_value = nn.Linear( + self.hidden_size, + (self.num_heads + 2 * self.num_key_value_heads) * self.head_dim, + bias=config.use_qkv_bias, + ) + self.dense = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.use_bias) + self._init_rope() + + def _init_rope(self): + if self.config.rope_scaling is None: + self.rotary_emb = BailingMoeRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.rope_theta, + ) + else: + scaling_type = self.config.rope_scaling["type"] + scaling_factor = self.config.rope_scaling["factor"] + if scaling_type == "linear": + self.rotary_emb = BailingMoeLinearScalingRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + scaling_factor=scaling_factor, + base=self.rope_theta, + ) + elif scaling_type == "dynamic": + self.rotary_emb = BailingMoeDynamicNTKScalingRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + scaling_factor=scaling_factor, + base=self.rope_theta, + ) + elif scaling_type == "yarn": + kwargs = { + key: self.config.rope_scaling[key] + for key in [ + "original_max_position_embeddings", + "beta_fast", + "beta_slow", + "mscale", + "mscale_all_dim", + ] + if key in self.config.rope_scaling + } + self.rotary_emb = BailingMoeYarnRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + scaling_factor=scaling_factor, + base=self.rope_theta, + **kwargs, + ) + else: + raise ValueError(f"Unknown RoPE scaling type {scaling_type}") + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> 
Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`" + ) + + bsz, q_len, _ = hidden_states.size() + + qkv = self.query_key_value(hidden_states) + qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim) + + query_states, key_states, value_states = qkv.split( + [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2 + ) + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} " + "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class " + "with a layer index." + ) + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + attn_weights = torch.matmul(query_states / math.sqrt(self.head_dim), key_states.transpose(2, 3)) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) + attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + + attn_output = attn_output.reshape(bsz, q_len, -1) + + attn_output = self.dense(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +# Copied from transformers.models.llama.modeling_llama.LlamaFlashAttention2 with Llama->BailingMoe +class BailingMoeFlashAttention2(BailingMoeAttention): + """ + BailingMoe flash attention module. This module inherits from `BailingMoeAttention` as the weights of the module stays + untouched. The only required change would be on the forward pass where it needs to correctly call the public API of + flash attention and deal with padding tokens in case the input contains any of them. 
+ """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1. + # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignement, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0. + # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left). + self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + # BailingMoeFlashAttention2 attention does not support output_attentions + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`" + ) + + # overwrite attention_mask with padding_mask + attention_mask = kwargs.pop("padding_mask") + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + + qkv = self.query_key_value(hidden_states) + qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim) + + query_states, key_states, value_states = qkv.split( + [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2 + ) + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + # TODO: These transpose are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache + # to be able to avoid many of these transpose/reshape/view. + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training stability reasons + # therefore the input hidden states gets silently cast in float32. Hence, we need + # cast them back in the correct dtype just to be sure everything works as expected. + # This might slow down training & inference so it is recommended to not cast the LayerNorms + # in fp32. 
(BailingMoeRMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + # Handle the case where the model is quantized + if hasattr(self.config, "_pre_quantization_dtype"): + target_dtype = self.config._pre_quantization_dtype + elif torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + else: + target_dtype = self.q_proj.weight.dtype + + logger.warning_once( + f"The input hidden states seems to be silently casted in float32, this might be related to" + f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in" + f" {target_dtype}." + ) + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + attn_output = self._flash_attention_forward( + query_states, key_states, value_states, attention_mask, q_len, dropout=dropout_rate + ) + + attn_output = attn_output.reshape(bsz, q_len, -1).contiguous() + attn_output = self.dense(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + def _flash_attention_forward( + self, query_states, key_states, value_states, attention_mask, query_length, dropout=0.0, softmax_scale=None + ): + """ + Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token + first unpad the input, then computes the attention scores and pad the final attention scores. + + Args: + query_states (`torch.Tensor`): + Input query states to be passed to Flash Attention API + key_states (`torch.Tensor`): + Input key states to be passed to Flash Attention API + value_states (`torch.Tensor`): + Input value states to be passed to Flash Attention API + attention_mask (`torch.Tensor`): + The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the + position of padding tokens and 1 for the position of non-padding tokens. + dropout (`int`, *optional*): + Attention dropout + softmax_scale (`float`, *optional*): + The scaling of QK^T before applying softmax. Default to 1 / sqrt(head_dim) + query_length (`int`): + The length of the query sequence in terms of tokens. This represents the number of tokens in the + `query_states` tensor along the sequence dimension. It is used to determine the effective sequence + length for attention computations. + """ + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in BailingMoeFlashAttention2 __init__. 
+ causal = self.is_causal and query_length != 1 + + # Contains at least one padding token in the sequence + if attention_mask is not None: + batch_size = query_states.shape[0] + query_states, key_states, value_states, indices_q, cu_seq_lens, max_seq_lens = self._upad_input( + query_states, key_states, value_states, attention_mask, query_length + ) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, query_length) + else: + attn_output = flash_attn_func( + query_states, key_states, value_states, dropout, softmax_scale=softmax_scale, causal=causal + ) + + return attn_output + + def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim), indices_k + ) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. + attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaSdpaAttention with Llama->BailingMoe +class BailingMoeSdpaAttention(BailingMoeAttention): + """ + BailingMoe attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from + `BailingMoeAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to + SDPA API. + """ + + # Adapted from BailingMoeAttention.forward + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if output_attentions: + # TODO: Improve this warning with e.g. `model.config.attn_implementation = "manual"` once this is implemented. + logger.warning_once( + "BailingMoeModel is using BailingMoeSdpaAttention, but `torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. 
Falling back to the manual attention implementation, " + 'but specifying the manual implementation will be required from Transformers version v5.0.0 onwards. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.' + ) + return super().forward( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + bsz, q_len, _ = hidden_states.size() + + qkv = self.query_key_value(hidden_states) + qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim) + + query_states, key_states, value_states = qkv.split( + [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2 + ) + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + + # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous inputs with custom attn_mask, + # Reference: https://github.com/pytorch/pytorch/issues/112577. + if query_states.device.type == "cuda" and attention_mask is not None: + query_states = query_states.contiguous() + key_states = key_states.contiguous() + value_states = value_states.contiguous() + + attn_output = torch.nn.functional.scaled_dot_product_attention( + query_states, + key_states, + value_states, + attn_mask=attention_mask, + dropout_p=self.attention_dropout if self.training else 0.0, + # The q_len > 1 is necessary to match with AttentionMaskConverter.to_causal_4d that does not create a causal mask in case q_len == 1. 
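+                # When a 4D `attention_mask` is passed, causality is already encoded in that mask, and SDPA
+                # rejects an explicit `attn_mask` combined with `is_causal=True`, so `is_causal` is only
+                # enabled in the mask-free case.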
+ is_causal=self.is_causal and attention_mask is None and q_len > 1, + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, -1) + + attn_output = self.dense(attn_output) + + return attn_output, None, past_key_value + + +BAILING_MOE_ATTENTION_CLASSES = { + "eager": BailingMoeAttention, + "flash_attention_2": BailingMoeFlashAttention2, + "sdpa": BailingMoeSdpaAttention, +} + + +class BailingMoeDecoderLayer(nn.Module): + def __init__(self, config: BailingMoeConfig, layer_idx: int): + super().__init__() + self.hidden_size = config.hidden_size + + self.attention = BAILING_MOE_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) + + self.mlp = ( + BailingMoeSparseMoeBlock(config) + if (config.num_experts is not None and layer_idx >= config.first_k_dense_replace) + else BailingMoeMLP(config=config, intermediate_size=config.intermediate_size) + ) + self.input_layernorm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + self.post_attention_layernorm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + output_router_logits: Optional[bool] = False, + use_cache: Optional[bool] = False, + **kwargs, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): + attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1, + query_sequence_length, key_sequence_length)` if default attention is used. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + past_key_value (`Tuple(torch.FloatTensor)`, *optional*): + cached past key and value projection states + output_attentions (`bool`, *optional*): + Whether to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_router_logits (`bool`, *optional*): + Whether or not to return the logits of all the routers. They are useful for computing the router loss, + and should not be returned during inference. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + """ + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. 
Please make sure use `attention_mask` instead.`" + ) + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.attention( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states = self.mlp(hidden_states) + if isinstance(hidden_states, tuple): + hidden_states, router_logits = hidden_states + else: + router_logits = None + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += (self_attn_weights,) + + if use_cache: + outputs += (present_key_value,) + + if output_router_logits: + outputs += (router_logits,) + + return outputs + + +BAILINGMOE_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`BailingMoeConfig`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. +""" + + +@add_start_docstrings( + "The bare BailingMoe Model outputting raw hidden-states without any specific head on top.", + BAILINGMOE_START_DOCSTRING, +) +class BailingMoePreTrainedModel(PreTrainedModel): + config_class = BailingMoeConfig + base_model_prefix = "model" + supports_gradient_checkpointing = True + _no_split_modules = ["BailingMoeDecoderLayer"] + _skip_keys_device_placement = "past_key_values" + _supports_flash_attn_2 = True + _supports_sdpa = True + _supports_cache_class = True + + def _init_weights(self, module): + std = self.config.initializer_range + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + +BAILINGMOE_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`AutoTokenizer`]. 
See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + + [What are position IDs?](../glossary#position-ids) + past_key_values (`Cache` or `tuple(tuple(torch.FloatTensor))`, *optional*): + Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used to speed up sequential decoding. This typically consists in the `past_key_values` + returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`. + + Two formats are allowed: + - a [`~cache_utils.Cache`] instance; + - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). This is also known as the legacy + cache format. + + The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the + legacy cache format will be returned. + + If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't + have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids` + of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +@add_start_docstrings( + "The bare BailingMoe Model outputting raw hidden-states without any specific head on top.", + BAILINGMOE_START_DOCSTRING, +) +class BailingMoeModel(BailingMoePreTrainedModel): + """ + Transformer decoder consisting of *config.num_hidden_layers* layers. 
Each layer is a [`BailingMoeDecoderLayer`] + + Args: + config: BailingMoeConfig + """ + + def __init__(self, config: BailingMoeConfig): + super().__init__(config) + self.padding_idx = config.pad_token_id + self.vocab_size = config.vocab_size + + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx) + self.layers = nn.ModuleList( + [BailingMoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] + ) + self._use_sdpa = config._attn_implementation == "sdpa" + self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2" + self.norm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.word_embeddings + + def set_input_embeddings(self, value): + self.word_embeddings = value + + @add_start_docstrings_to_model_forward(BAILINGMOE_INPUTS_DOCSTRING) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + output_router_logits: Optional[bool] = None, + return_dict: Optional[bool] = None, + **kwargs, + ) -> Union[Tuple, MoeModelOutputWithPast]: + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + output_router_logits = ( + output_router_logits if output_router_logits is not None else self.config.output_router_logits + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") + elif input_ids is not None: + batch_size, seq_length = input_ids.shape[:2] + elif inputs_embeds is not None: + batch_size, seq_length = inputs_embeds.shape[:2] + else: + raise ValueError("You have to specify either input_ids or inputs_embeds") + + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`transformers." 
+ ) + use_cache = False + + past_key_values_length = 0 + if use_cache: + use_legacy_cache = not isinstance(past_key_values, Cache) + if use_legacy_cache: + past_key_values = DynamicCache.from_legacy_cache(past_key_values) + past_key_values_length = past_key_values.get_usable_length(seq_length) + + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, seq_length + past_key_values_length, dtype=torch.long, device=device + ) + position_ids = position_ids.unsqueeze(0) + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + if self._use_flash_attention_2: + # 2d mask is passed through the layers + attention_mask = attention_mask if (attention_mask is not None and 0 in attention_mask) else None + elif self._use_sdpa and not output_attentions: + # output_attentions=True can not be supported when using SDPA, and we fall back on + # the manual implementation that requires a 4D causal mask in all cases. + attention_mask = _prepare_4d_causal_attention_mask_for_sdpa( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + ) + else: + # 4d mask is passed through the layers + attention_mask = _prepare_4d_causal_attention_mask( + attention_mask, (batch_size, seq_length), inputs_embeds, past_key_values_length + ) + + # embed positions + hidden_states = inputs_embeds + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + all_router_logits = () if output_router_logits else None + next_decoder_cache = None + + for decoder_layer in self.layers: + if output_hidden_states: + all_hidden_states += (hidden_states,) + + if self.gradient_checkpointing and self.training: + layer_outputs = self._gradient_checkpointing_func( + decoder_layer.__call__, + hidden_states, + attention_mask, + position_ids, + past_key_values, + output_attentions, + output_router_logits, + use_cache, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_values, + output_attentions=output_attentions, + output_router_logits=output_router_logits, + use_cache=use_cache, + ) + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache = layer_outputs[2 if output_attentions else 1] + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + if output_router_logits and layer_outputs[-1] is not None: + all_router_logits += (layer_outputs[-1],) + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = None + if use_cache: + next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache + if not return_dict: + return tuple( + v + for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_router_logits] + if v is not None + ) + return MoeModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + router_logits=all_router_logits, + ) + + +class BailingMoeForCausalLM(BailingMoePreTrainedModel): + _tied_weights_keys = ["lm_head.weight"] + + def __init__(self, config: BailingMoeConfig): + super().__init__(config) + self.model = BailingMoeModel(config) + self.vocab_size = config.vocab_size + self.norm_head = config.norm_head + self.lm_head 
= nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.word_embeddings + + def set_input_embeddings(self, value): + self.model.word_embeddings = value + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def set_decoder(self, decoder): + self.model = decoder + + def get_decoder(self): + return self.model + + def compute_logit(self, hidden_states): + if self.norm_head: + if self.training: + norm_weight = ( + self.lm_head.weight / (torch.norm(self.lm_head.weight, p=2, dim=0, keepdim=True) + 1e-7).detach() + ) + logits = F.linear(hidden_states, norm_weight, None) + else: + self.lm_head.weight.data = ( + self.lm_head.weight.data.float() + / (torch.norm(self.lm_head.weight.data.float(), p=2, dim=0, keepdim=True) + 1e-7) + ).to(hidden_states.dtype) + logits = F.linear(hidden_states, self.lm_head.weight.data, None) + self.norm_head = False + else: + logits = self.lm_head(hidden_states) + return logits + + @add_start_docstrings_to_model_forward(BAILINGMOE_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=MoeCausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + output_router_logits: Optional[bool] = None, + return_dict: Optional[bool] = None, + **kwargs, + ) -> Union[Tuple, MoeCausalLMOutputWithPast]: + r""" + Args: + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + + Returns: + + Example: + + ```python + >>> from transformers import AutoTokenizer + + >>> model = BailingMoeForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS) + >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER) + + >>> prompt = "Hey, are you conscious? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you." 
+ ```""" + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + output_router_logits = ( + output_router_logits if output_router_logits is not None else self.config.output_router_logits + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + output_router_logits=output_router_logits, + return_dict=return_dict, + **kwargs, + ) + + hidden_states = outputs[0] + + logits = self.compute_logit(hidden_states=hidden_states) + logits = logits.float() + + loss = None + aux_loss = None + + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view(-1, self.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + if not return_dict: + output = (logits,) + outputs[1:] + if output_router_logits: + output = (aux_loss,) + output + return (loss,) + output if loss is not None else output + + return MoeCausalLMOutputWithPast( + loss=loss, + aux_loss=aux_loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + router_logits=outputs.router_logits, + ) + + def prepare_inputs_for_generation( + self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, token_type_ids=None, **kwargs + ): + if past_key_values is not None: + if isinstance(past_key_values, Cache): + cache_length = past_key_values.get_seq_length() + past_length = past_key_values.seen_tokens + max_cache_length = ( + past_key_values.get_max_length() + if hasattr(past_key_values, "get_max_length") + else past_key_values.get_max_cache_shape() + ) + else: + cache_length = past_length = past_key_values[0][0].shape[2] + max_cache_length = None + + # Keep only the unprocessed tokens: + # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where + # some of the inputs are exclusivelly passed as part of the cache (e.g. when passing input_embeds as input) + if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]: + input_ids = input_ids[:, -(attention_mask.shape[1] - past_length) :] + # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard + # input_ids based on the past_length. + elif past_length < input_ids.shape[1]: + input_ids = input_ids[:, past_length:] + # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens. + + # If we are about to go beyond the maximum cache length, we need to crop the input attention mask. 
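+            # Trimming to the last `max_cache_length` positions keeps the mask length consistent with the
+            # cache capacity, since entries older than that window no longer have keys/values in the cache.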
+ if ( + max_cache_length is not None + and attention_mask is not None + and cache_length + input_ids.shape[1] > max_cache_length + ): + attention_mask = attention_mask[:, -max_cache_length:] + + position_ids = kwargs.get("position_ids", None) + if attention_mask is not None and position_ids is None: + # create position_ids on the fly for batch generation + position_ids = attention_mask.long().cumsum(-1) - 1 + position_ids.masked_fill_(attention_mask == 0, 1) + if past_key_values: + position_ids = position_ids[:, -input_ids.shape[1] :] + + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and past_key_values is None: + model_inputs = {"inputs_embeds": inputs_embeds} + else: + model_inputs = {"input_ids": input_ids} + + model_inputs.update( + { + "position_ids": position_ids, + "past_key_values": past_key_values, + "use_cache": kwargs.get("use_cache"), + "attention_mask": attention_mask, + } + ) + return model_inputs + + @staticmethod + def _reorder_cache(past_key_values, beam_idx): + reordered_past = () + for layer_past in past_key_values: + reordered_past += ( + tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past), + ) + return reordered_past diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000..48153bf --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,15 @@ +{ + "additional_special_tokens": [ + "<|number_end|>", + "<|arithmetic_start|>", + "", + "<|arithmetic_end|>", + "", + "<|number_start|>" + ], + "bos_token": "<|startoftext|>", + "cls_token": "[CLS]", + "eos_token": "<|endoftext|>", + "gmask_token": "[gMASK]", + "pad_token": "<|endoftext|>" +} \ No newline at end of file diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 0000000..bf80fca --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4263f84d1ae750eb427be937562c33737b5bb035fe107fd414d27c766d1f629 +size 6098421 diff --git a/tokenizer_config.json b/tokenizer_config.json new file mode 100644 index 0000000..a1531eb --- /dev/null +++ b/tokenizer_config.json @@ -0,0 +1,25 @@ +{ + "add_bos_token": false, + "add_eos_token": false, + "additional_special_tokens": [ + "", + "", + "<|arithmetic_start|>", + "<|arithmetic_end|>", + "<|number_start|>", + "<|number_end|>" + ], + "bos_token": "<|startoftext|>", + "chat_template": "{% for message in messages %}{% set role = message['role'] | lower %}{% if role == 'user' %}{% set role = 'HUMAN' %}{% endif %}{% set role = role | upper %}{{ '' + role + '' + message['content'].split('')[-1].lstrip('\\n') }}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT' }}{% endif %}", + "clean_up_tokenization_spaces": false, + "cls_token": "[CLS]", + "eos_token": "<|endoftext|>", + "gmask_token": "[gMASK]", + "merges_file": null, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "<|endoftext|>", + "tokenizer_class": "PreTrainedTokenizerFast", + "trust_remote_code": true, + "vocab_file": null, + "fast_tokenizer": true +}
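A minimal usage sketch for the checkpoint assembled by the files above, assuming a local checkout; the path, dtype/device settings, and generation length below are illustrative placeholders rather than values taken from this repository:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "./path/to/checkpoint"  # placeholder: local directory containing these files

# trust_remote_code=True is required so the custom BailingMoe* modeling classes defined above are used.
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype="auto",
    device_map="auto",
    trust_remote_code=True,
)

# The chat_template in tokenizer_config.json upper-cases roles, maps "user" to "HUMAN",
# and appends an ASSISTANT turn when add_generation_prompt=True.
messages = [{"role": "user", "content": "Give me a short introduction to large language models."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output_ids = model.generate(input_ids, max_new_tokens=512)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```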