From d3bee10885cba4427a143867fbe45b18795bb5f6 Mon Sep 17 00:00:00 2001
From: ModelHub XC
Date: Sat, 11 Apr 2026 07:34:55 +0800
Subject: [PATCH] Initialize project; model provided by the ModelHub XC
 community
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Model: inclusionAI/Ring-lite-distill-preview
Source: Original Platform
---
 .gitattributes                   |   49 +
 LICENSE                          |   21 +
 README.md                        |  113 +
 ant-bailing.png                  |  Bin 0 -> 58712 bytes
 config.json                      |   44 +
 configuration.json               |    1 +
 configuration_bailing_moe.py     |   78 +
 model-00001-of-00004.safetensors |    3 +
 model-00002-of-00004.safetensors |    3 +
 model-00003-of-00004.safetensors |    3 +
 model-00004-of-00004.safetensors |    3 +
 model.safetensors.index.json     | 5611 ++++++++++++++++++++++++++++++
 modeling_bailing_moe.py          | 1549 +++++++++
 special_tokens_map.json          |   15 +
 tokenizer.json                   |    3 +
 tokenizer_config.json            |   25 +
 16 files changed, 7521 insertions(+)
 create mode 100644 .gitattributes
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 ant-bailing.png
 create mode 100644 config.json
 create mode 100644 configuration.json
 create mode 100644 configuration_bailing_moe.py
 create mode 100644 model-00001-of-00004.safetensors
 create mode 100644 model-00002-of-00004.safetensors
 create mode 100644 model-00003-of-00004.safetensors
 create mode 100644 model-00004-of-00004.safetensors
 create mode 100644 model.safetensors.index.json
 create mode 100644 modeling_bailing_moe.py
 create mode 100644 special_tokens_map.json
 create mode 100644 tokenizer.json
 create mode 100644 tokenizer_config.json

diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..21b3632
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,49 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bin.* filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zstandard filter=lfs diff=lfs merge=lfs -text
+*.tfevents* filter=lfs diff=lfs merge=lfs -text
+*.db* filter=lfs diff=lfs merge=lfs -text
+*.ark* filter=lfs diff=lfs merge=lfs -text
+**/*ckpt*data* filter=lfs diff=lfs merge=lfs -text
+**/*ckpt*.meta filter=lfs diff=lfs merge=lfs -text
+**/*ckpt*.index filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.gguf* filter=lfs diff=lfs merge=lfs -text
+*.ggml filter=lfs diff=lfs merge=lfs -text
+*.llamafile* filter=lfs diff=lfs merge=lfs -text
+*.pt2 filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
+
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..fc637dc
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 inclusionAI
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..562a0ed
--- /dev/null
+++ b/README.md
@@ -0,0 +1,113 @@
+---
+license: mit
+language:
+- zh
+- en
+base_model:
+- inclusionAI/Ling-lite
+pipeline_tag: text-generation
+library_name: transformers
+---
+
+# Ring-lite-distill-preview
+
+<p align="center">
+    <img src="ant-bailing.png" width="100"/>
+</p>
+
+<p align="center">
+    🤗 <a href="https://huggingface.co/inclusionAI/Ring-lite-distill">Hugging Face</a>
+</p>
+
+## Introduction
+
+Ring-lite-distill-preview is an MoE LLM provided and open-sourced by InclusionAI; it has 16.8B total parameters, of which 2.75B are activated per token. It was fine-tuned from [Ling-lite](https://modelscope.cn/models/inclusionAI/Ling-lite) using extensive reasoning-focused instruction data. The model delivers performance comparable to DeepSeek-R1-Distill-Qwen-7B on reasoning benchmarks while achieving better results on general benchmarks, and it is especially strong on function-calling benchmarks (e.g., T-Eval, BFCL_v2) and instruction-following benchmarks (e.g., IFEval), which makes Ring-lite-distill a more balanced and versatile model. Additionally, it maintains competitive latency and throughput compared to other reasoning LLMs of similar size.
+
+## Model Downloads
+
+| **Model** | **#Total Params** | **#Activated Params** | **Context Length** | **Download** |
+| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: |
+| Ring-lite-distill-preview | 16.8B | 2.75B | 64K | [🤗 HuggingFace](https://huggingface.co/inclusionAI/Ring-lite-distill) |
+
+## Evaluation
+To evaluate the model's performance fully, we examined Ring-lite-distill-preview on both reasoning ability and general ability.
+
+### Reasoning ability
+
+| **Model** | **AIME24** | **MATH-500** | **GPQA-diamond** | **LiveCodeBench** |
+| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: |
+| DeepSeek-R1-Distill-Qwen-7B (reported) | 55.5 | 92.8 | 49.1 | 37.6 |
+| DeepSeek-R1-Distill-Qwen-7B (reproduced) | 53.2 | 93.7 | 50.4 | 36.5 |
+| Ring-lite-distill-preview | 56.3 | 93.7 | 46.2 | 31.9 |
+
+### General ability
+
+| **Model** | **IFEval** | **T-Eval** | **BFCL_v2** | **MMLU** |
+| :----------------: | :---------------: | :-------------------: | :----------------: | :----------: |
+| DeepSeek-R1-Distill-Qwen-7B (reproduced) | 39.3 | 26.9 | 38.9 | 44.1 |
+| Ring-lite-distill-preview | 75.3 | 81.3 | 63.0 | 63.3 |
+
+More details will be reported in our [technical report](https://github.com/inclusionAI/Ring/blob/main/Ring_Lite_Distill_Preview.pdf).
+
+## Quickstart
+
+### 🤗 Hugging Face Transformers
+Here is a code snippet showing how to use the chat model with `transformers`:
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "inclusionAI/Ring-lite-distill-preview"
+
+# trust_remote_code is required: this repo ships custom BailingMoe code
+# (see the auto_map entries in config.json).
+model = AutoModelForCausalLM.from_pretrained(
+    model_name,
+    torch_dtype="auto",
+    device_map="auto",
+    trust_remote_code=True
+)
+tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+
+prompt = "Give me a short introduction to large language models."
+messages = [
+    {"role": "system", "content": "You are Ring, an assistant created by inclusionAI"},
+    {"role": "user", "content": prompt}
+]
+text = tokenizer.apply_chat_template(
+    messages,
+    tokenize=False,
+    add_generation_prompt=True
+)
+model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
+
+generated_ids = model.generate(
+    **model_inputs,
+    max_new_tokens=8192
+)
+generated_ids = [
+    output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+]
+
+response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+```
+
+## Dataset
+The training data of Ring-lite-distill-preview will be released soon.
+
+## Deployment
+Please refer to the project [GitHub](https://github.com/inclusionAI/Ring/blob/main/README.md).
+
+## License
+This code repository is licensed under [the MIT License](https://huggingface.co/inclusionAI/Ring-lite-distill/blob/main/LICENSE).
+
+## Citation
+[TBD]
\ No newline at end of file
diff --git a/ant-bailing.png b/ant-bailing.png
new file mode 100644
index 0000000000000000000000000000000000000000..603e33d25fb0b2668c094dbc5fbc777fafc70fba
GIT binary patch
literal 58712
[58712 bytes of base85-encoded binary image data omitted]
literal 0
HcmV?d00001
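The config.json added below wires the repo's custom Python files into `transformers` through its `auto_map` entries; that is why the quickstart above passes `trust_remote_code=True`. A minimal sketch of loading and inspecting the config (the model ID is the one used in the README; the expected values are taken from the config.json in this patch):

```python
from transformers import AutoConfig

# auto_map routes AutoConfig to configuration_bailing_moe.BailingMoeConfig,
# so loading from the Hub requires trusting the repo's own code.
config = AutoConfig.from_pretrained(
    "inclusionAI/Ring-lite-distill-preview",
    trust_remote_code=True,
)
print(config.model_type)           # bailing_moe
print(config.num_experts)          # 64 routed experts per MoE layer
print(config.num_experts_per_tok)  # 6 routed experts activated per token
print(config.num_shared_experts)   # 2 shared experts, always active
```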
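As a sanity check, the 16.8B total / 2.75B activated parameter counts quoted in the README can be reproduced to good approximation from the fields of the config.json that follows. A back-of-the-envelope sketch (it ignores the small RMSNorm weights, and all linear layers are bias-free since `use_bias` is false):

```python
# Values taken from config.json below.
hidden, layers, vocab = 2048, 28, 126464
heads, kv_heads = 16, 4
head_dim = hidden // heads                # 128
experts, active, shared = 64, 6, 2        # routed / per-token / shared experts
moe_inter = 1408                          # moe_intermediate_size

attn = hidden * heads * head_dim          # query projection
attn += 2 * hidden * kv_heads * head_dim  # key and value projections (GQA)
attn += heads * head_dim * hidden         # output ("dense") projection

expert = 3 * hidden * moe_inter           # gate_proj + up_proj + down_proj
router = hidden * experts                 # gating weights
shared_mlp = shared * expert              # shared experts run for every token

per_layer_total = attn + router + shared_mlp + experts * expert
per_layer_active = attn + router + shared_mlp + active * expert
embeddings = 2 * vocab * hidden           # input embeddings + untied lm_head

print(f"total     ~ {(layers * per_layer_total + embeddings) / 1e9:.1f}B")   # ~16.8B
print(f"activated ~ {(layers * per_layer_active + embeddings) / 1e9:.2f}B")  # ~2.75B
```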
diff --git a/config.json b/config.json
new file mode 100644
index 0000000..5ed8676
--- /dev/null
+++ b/config.json
@@ -0,0 +1,44 @@
+{
+  "architectures": [
+    "BailingMoeForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_bailing_moe.BailingMoeConfig",
+    "AutoModel": "modeling_bailing_moe.BailingMoeModel",
+    "AutoModelForCausalLM": "modeling_bailing_moe.BailingMoeForCausalLM"
+  },
+  "eos_token_id": 126081,
+  "pad_token_id": 126081,
+  "first_k_dense_replace": 0,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.006,
+  "intermediate_size": 5632,
+  "max_position_embeddings": 16384,
+  "model_type": "bailing_moe",
+  "moe_intermediate_size": 1408,
+  "num_experts": 64,
+  "num_shared_experts": 2,
+  "norm_topk_prob": true,
+  "num_attention_heads": 16,
+  "num_experts_per_tok": 6,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 600000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.36.0",
+  "use_cache": true,
+  "use_bias": false,
+  "use_qkv_bias": false,
+  "vocab_size": 126464,
+  "output_router_logits": false,
+  "embedding_dropout": 0.0,
+  "norm_head": true,
+  "norm_softmax": false,
+  "output_dropout": 0.0
+}
\ No newline at end of file
diff --git a/configuration.json b/configuration.json
new file mode 100644
index 0000000..f9291c3
--- /dev/null
+++ b/configuration.json
@@ -0,0 +1 @@
+{"framework":"Pytorch","task":"text-generation"}
\ No newline at end of file
diff --git a/configuration_bailing_moe.py b/configuration_bailing_moe.py
new file mode 100644
index 0000000..3f028c8
--- /dev/null
+++ b/configuration_bailing_moe.py
@@ -0,0 +1,78 @@
+""" Bailing MoE model configuration """
+
+from transformers.configuration_utils import PretrainedConfig
+
+
+class BailingMoeConfig(PretrainedConfig):
+    model_type = "bailing_moe"
+
+    def __init__(
+        self,
+        vocab_size=30592,
+        hidden_size=1024,
+        intermediate_size=None,
+        num_hidden_layers=24,
+        num_attention_heads=16,
+        num_key_value_heads=0,
+        hidden_act="silu",
+        use_qkv_bias=False,  # bailing only
+        use_bias=True,  # bailing only
+        rms_norm_eps=1e-05,
+        norm_head=False,  # bailing only
+        tie_word_embeddings=False,  # PretrainedConfig key; the default value is changed here.
diff --git a/configuration_bailing_moe.py b/configuration_bailing_moe.py
new file mode 100644
index 0000000..3f028c8
--- /dev/null
+++ b/configuration_bailing_moe.py
@@ -0,0 +1,78 @@
+""" Bailing MoE model configuration """
+
+from transformers.configuration_utils import PretrainedConfig
+
+
+class BailingMoeConfig(PretrainedConfig):
+    model_type = "bailing_moe"
+
+    def __init__(
+        self,
+        vocab_size=30592,
+        hidden_size=1024,
+        intermediate_size=None,
+        num_hidden_layers=24,
+        num_attention_heads=16,
+        num_key_value_heads=0,
+        hidden_act="silu",
+        use_qkv_bias=False,  # bailing only
+        use_bias=True,  # bailing only
+        rms_norm_eps=1e-05,
+        norm_head=False,  # bailing only
+        tie_word_embeddings=False,  # PretrainedConfig key, here change default value.
+        embedding_dropout=0.1,
+        attention_dropout=0.1,
+        output_dropout=0.1,
+        initializer_range=0.02,
+        max_position_embeddings=16384,
+        rope_theta=10000.0,
+        use_cache=True,
+        use_sliding_window=False,
+        sliding_window=4096,
+        max_window_layers=28,
+        rope_scaling=None,
+        pad_token_id=126081,
+        num_experts=16,
+        num_shared_experts=0,
+        num_experts_per_tok=2,
+        norm_topk_prob=True,
+        moe_intermediate_size=None,
+        first_k_dense_replace=0,
+        head_dim=None,
+        output_router_logits=False,
+        **kwargs,
+    ):
+        self.num_hidden_layers = num_hidden_layers
+        self.vocab_size = vocab_size
+        self.hidden_size = hidden_size
+        self.intermediate_size = intermediate_size
+        self.num_attention_heads = num_attention_heads
+        self.num_key_value_heads = num_key_value_heads
+        self.hidden_act = hidden_act
+        self.use_qkv_bias = use_qkv_bias
+        self.use_bias = use_bias
+        self.norm_head = norm_head
+        self.rms_norm_eps = rms_norm_eps
+        self.embedding_dropout = embedding_dropout
+        self.attention_dropout = attention_dropout
+        self.output_dropout = output_dropout
+        self.initializer_range = initializer_range
+        self.max_position_embeddings = max_position_embeddings
+        self.rope_theta = rope_theta
+        self.use_cache = use_cache
+        self.use_sliding_window = use_sliding_window
+        self.sliding_window = sliding_window
+        self.max_window_layers = max_window_layers
+        self.head_dim = head_dim or self.hidden_size // self.num_attention_heads
+        self.rope_scaling = rope_scaling
+
+        # MoE configs
+        self.num_experts = num_experts
+        self.num_shared_experts = num_shared_experts
+        self.num_experts_per_tok = num_experts_per_tok
+        self.norm_topk_prob = norm_topk_prob
+        self.moe_intermediate_size = moe_intermediate_size
+        self.first_k_dense_replace = first_k_dense_replace
+        self.output_router_logits = output_router_logits
+
+        super().__init__(pad_token_id=pad_token_id, tie_word_embeddings=tie_word_embeddings, **kwargs)
diff --git a/model-00001-of-00004.safetensors b/model-00001-of-00004.safetensors
new file mode 100644
index 0000000..7fbad69
--- /dev/null
+++ b/model-00001-of-00004.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf3d311c600c7dff74d973d95660c8464b310a1d177d694b0cac1a1fb81f309a
+size 9305327072
diff --git a/model-00002-of-00004.safetensors b/model-00002-of-00004.safetensors
new file mode 100644
index 0000000..6f3b5b5
--- /dev/null
+++ b/model-00002-of-00004.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70b127b1b5d3a808585edcffe78ebd60c52bf58e5b0d095af51c7cc62990a381
+size 9305328272
diff --git a/model-00003-of-00004.safetensors b/model-00003-of-00004.safetensors
new file mode 100644
index 0000000..7a02c03
--- /dev/null
+++ b/model-00003-of-00004.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63eb2564a66331fb8cf5520782be797c467e4c890e4e44a338bfc90ee12bddbf
+size 9305328672
diff --git a/model-00004-of-00004.safetensors b/model-00004-of-00004.safetensors
new file mode 100644
index 0000000..b59f6d5
--- /dev/null
+++ b/model-00004-of-00004.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1cc12c93b2b9d4f6880f96c9dc9c1ebe415ace99f39cbf398e673be1a23f3e4
+size 5688662080
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000..6116041
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,5611 @@
+{
+  "metadata": {
+    "total_size": 33603948672
+  },
+  "weight_map": {
+    "model.layers.0.attention.dense.weight": "model-00001-of-00004.safetensors",
"model.layers.0.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.2.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.0.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.44.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.0.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.1.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.20.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.45.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.1.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.2.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.21.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.46.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.3.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.22.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.47.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.3.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.4.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.23.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.48.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.5.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.24.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.49.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.6.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.25.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.26.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.5.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.50.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.attention.dense.weight": "model-00001-of-00004.safetensors", + "model.layers.7.attention.query_key_value.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.0.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.1.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.10.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.11.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.12.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.12.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.12.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.13.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.14.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.14.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.14.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.15.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.16.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.17.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.18.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.19.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.2.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.20.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.21.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.22.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.23.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.24.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.25.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.26.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.27.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.28.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.29.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.3.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.30.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.31.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.32.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.33.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.34.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.35.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.36.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.37.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.37.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.37.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.38.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.39.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.39.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.39.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.4.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.40.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.41.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.42.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.43.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.44.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.45.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.46.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.47.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.48.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.49.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.5.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.50.up_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.51.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.52.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.53.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.54.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.55.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.56.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.57.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.58.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.59.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.6.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.60.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.61.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.61.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.61.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.62.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.63.down_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.7.mlp.experts.63.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.63.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.7.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.8.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.experts.9.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.gate.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.shared_experts.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.10.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.10.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.39.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.11.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.12.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.27.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.63.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.13.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.13.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.14.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.15.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.51.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.15.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.8.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.8.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.8.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.attention.dense.weight": "model-00002-of-00004.safetensors", + "model.layers.9.attention.query_key_value.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.0.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.1.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.10.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.11.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.12.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.13.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.14.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.15.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.16.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.17.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.18.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.19.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.2.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.20.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.21.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.22.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.23.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.24.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.25.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.26.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.27.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.27.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.27.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.28.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.29.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.29.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.9.mlp.experts.29.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.3.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.30.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.31.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.32.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.33.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.34.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.35.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.36.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.37.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.38.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.39.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.4.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.40.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.41.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.42.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.43.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.44.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.45.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.46.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.47.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.48.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.49.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.5.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.50.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.51.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.51.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.51.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.52.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.53.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.53.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.9.mlp.experts.53.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.54.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.55.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.56.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.57.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.58.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.59.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.6.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.60.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.61.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.62.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.63.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.7.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.8.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.experts.9.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.shared_experts.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.16.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.16.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.17.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.17.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.17.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.17.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.17.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.17.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.17.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.18.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.18.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.18.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.19.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.19.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.19.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.19.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.19.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.19.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.19.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.20.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.20.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.21.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.22.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.attention.dense.weight": "model-00003-of-00004.safetensors", + "model.layers.23.attention.query_key_value.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.0.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.1.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.10.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.11.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.12.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.13.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.14.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.15.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.16.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.17.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.experts.18.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.18.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.18.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.19.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.2.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.20.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.21.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.22.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.23.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.24.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.25.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.26.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.27.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.28.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.29.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.3.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.experts.3.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.3.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.30.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.31.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.32.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.33.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.34.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.35.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.36.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.37.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.38.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.39.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.4.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.40.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.41.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.41.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.experts.41.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.42.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.43.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.44.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.45.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.46.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.47.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.48.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.49.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.5.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.50.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.51.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.52.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.53.up_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.experts.54.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.54.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.54.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.55.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.56.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.57.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.58.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.59.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.6.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.60.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.61.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.62.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.63.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.7.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.8.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.9.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.experts.9.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.experts.9.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.shared_experts.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "lm_head.weight": "model-00004-of-00004.safetensors", + "model.layers.24.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.24.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.17.up_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.gate_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.53.up_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.down_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.25.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.25.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.25.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.25.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.25.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.25.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.25.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.26.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.26.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.26.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.27.attention.dense.weight": "model-00004-of-00004.safetensors", + "model.layers.27.attention.query_key_value.weight": "model-00004-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.0.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.1.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.10.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.11.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.12.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.13.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.14.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.15.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.16.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.17.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.27.mlp.experts.18.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.18.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.18.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.19.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.2.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.20.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.21.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.22.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.23.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.24.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.25.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.26.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.27.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.28.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.29.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.3.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.27.mlp.experts.3.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.3.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.30.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.31.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.32.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.33.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.34.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.35.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.36.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.37.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.38.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.39.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.4.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.40.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.41.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.41.gate_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.27.mlp.experts.41.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.42.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.43.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.44.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.45.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.46.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.47.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.48.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.49.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.5.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.50.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.51.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.52.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.53.up_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.27.mlp.experts.54.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.54.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.54.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.55.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.56.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.57.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.58.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.59.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.6.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.60.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.61.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.62.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.63.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.7.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.8.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.9.down_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.27.mlp.experts.9.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.experts.9.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.gate.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.mlp.shared_experts.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors", + "model.rotary_emb.inv_freq": "model-00004-of-00004.safetensors", + "model.word_embeddings.weight": "model-00004-of-00004.safetensors" + } +} \ No newline at end of file diff --git a/modeling_bailing_moe.py b/modeling_bailing_moe.py new file mode 100644 index 0000000..f08a15a --- /dev/null +++ b/modeling_bailing_moe.py @@ -0,0 +1,1549 @@ +# coding=utf-8 +# Copyright 2023 Antgroup and The HuggingFace Inc. team. All rights reserved. +# +# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX +# and OPT implementations in this library. It has been modified from its +# original forms to accommodate minor architectural differences compared +# to GPT-NeoX and OPT used by the Meta AI team that trained the model. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" PyTorch BailingMoE model.""" +import math +import warnings +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn.functional as F +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss + +from transformers.activations import ACT2FN +from transformers.cache_utils import Cache, DynamicCache +from transformers.modeling_attn_mask_utils import ( + AttentionMaskConverter, + _prepare_4d_attention_mask, + _prepare_4d_causal_attention_mask, + _prepare_4d_causal_attention_mask_for_sdpa, +) +from transformers.modeling_outputs import ( + MoeModelOutputWithPast, + MoeCausalLMOutputWithPast, +) +from transformers.modeling_utils import PreTrainedModel +from transformers.pytorch_utils import ALL_LAYERNORM_LAYERS, is_torch_greater_or_equal_than_1_13 +from transformers.utils import ( + add_start_docstrings, + add_start_docstrings_to_model_forward, + is_flash_attn_2_available, + is_flash_attn_greater_or_equal_2_10, + logging, + replace_return_docstrings, +) +from transformers.utils.import_utils import is_torch_fx_available +from .configuration_bailing_moe import BailingMoeConfig + + +if is_flash_attn_2_available(): + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa + + +# This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph. +# It means that the function will not be traced through and simply appear as a node in the graph. 
+if is_torch_fx_available():
+    if not is_torch_greater_or_equal_than_1_13:
+        import torch.fx
+
+    _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
+
+
+logger = logging.get_logger(__name__)
+
+_CONFIG_FOR_DOC = "BailingMoeConfig"
+
+
+def _get_unpad_data(attention_mask):
+    seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32)
+    indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten()
+    max_seqlen_in_batch = seqlens_in_batch.max().item()
+    cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0))
+    return (
+        indices,
+        cu_seqlens,
+        max_seqlen_in_batch,
+    )
+
+
+def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
+    warnings.warn(
+        "Calling `transformers.models.BailingMoe.modeling_BailingMoe._expand_mask` is deprecated and will be removed in v4.37. Use `transformers.modeling_attn_mask_utils._prepare_4d_attention_mask` instead."
+    )
+    return _prepare_4d_attention_mask(mask=mask, dtype=dtype, tgt_len=tgt_len)
+
+
+def _make_causal_mask(
+    input_ids_shape: torch.Size, dtype: torch.dtype, device: torch.device, past_key_values_length: int = 0
+):
+    warnings.warn(
+        "Calling `transformers.models.BailingMoe.modeling_BailingMoe._make_causal_mask` is deprecated and will be removed in v4.37. Use `transformers.models.BailingMoe.modeling_BailingMoe.AttentionMaskConverter._make_causal_mask` instead."
+    )
+    return AttentionMaskConverter._make_causal_mask(
+        input_ids_shape=input_ids_shape, dtype=dtype, device=device, past_key_values_length=past_key_values_length
+    )
+
+
+class BailingMoeRMSNorm(nn.Module):
+    def __init__(self, hidden_size, eps=1e-6):
+        """
+        BailingMoeRMSNorm is equivalent to T5LayerNorm
+        """
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(hidden_size))
+        self.variance_epsilon = eps
+
+    def forward(self, hidden_states):
+        input_dtype = hidden_states.dtype
+        hidden_states = hidden_states.to(torch.float32)
+        variance = hidden_states.pow(2).mean(-1, keepdim=True)
+        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
+        return self.weight * hidden_states.to(input_dtype)
+
+
+ALL_LAYERNORM_LAYERS.append(BailingMoeRMSNorm)
+
+
+class BailingMoeRotaryEmbedding(nn.Module):
+    def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None):
+        super().__init__()
+
+        self.dim = dim
+        self.max_position_embeddings = max_position_embeddings
+        self.base = base
+        inv_freq = 1.0 / (self.base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim))
+        self.register_buffer("inv_freq", inv_freq, persistent=False)
+
+        # Build here to make `torch.jit.trace` work.
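+        # Editor's note (added illustration, not original code): with dim=128 and
+        # base=10000, inv_freq holds the 64 frequencies 10000 ** (-2 * i / 128)
+        # for i = 0..63. The cache built below stores cos/sin of
+        # position * frequency for every position up to max_position_embeddings,
+        # so forward() can serve any in-range seq_len with a cheap slice.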
+        self._set_cos_sin_cache(
+            seq_len=max_position_embeddings, device=self.inv_freq.device, dtype=torch.get_default_dtype()
+        )
+        # Invalidate the cache so that the first forward() rebuilds cos/sin with the
+        # runtime device and dtype (torch.get_default_dtype() above may differ).
+        self.max_seq_len_cached = None
+
+    def _set_cos_sin_cache(self, seq_len, device, dtype):
+        self.max_seq_len_cached = seq_len
+        t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype)
+
+        freqs = torch.outer(t, self.inv_freq.to(t.device))
+        # Different from paper, but it uses a different permutation in order to obtain the same calculation
+        emb = torch.cat((freqs, freqs), dim=-1)
+        self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+        self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+    def forward(self, x, seq_len=None):
+        # x: [bs, num_attention_heads, seq_len, head_size]
+        if self.max_seq_len_cached is None or seq_len > self.max_seq_len_cached:
+            self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=x.dtype)
+
+        return (
+            self.cos_cached[:seq_len].to(dtype=x.dtype),
+            self.sin_cached[:seq_len].to(dtype=x.dtype),
+        )
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaLinearScalingRotaryEmbedding with Llama->BailingMoe
+class BailingMoeLinearScalingRotaryEmbedding(BailingMoeRotaryEmbedding):
+    """BailingMoeRotaryEmbedding extended with linear scaling. Credits to the Reddit user /u/kaiokendev"""
+
+    def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0):
+        self.scaling_factor = scaling_factor
+        super().__init__(dim, max_position_embeddings, base, device)
+
+    def _set_cos_sin_cache(self, seq_len, device, dtype):
+        self.max_seq_len_cached = seq_len
+        t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype)
+        t = t / self.scaling_factor
+
+        freqs = torch.outer(t, self.inv_freq)
+        # Different from paper, but it uses a different permutation in order to obtain the same calculation
+        emb = torch.cat((freqs, freqs), dim=-1)
+        self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False)
+        self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False)
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaDynamicNTKScalingRotaryEmbedding with Llama->BailingMoe
+class BailingMoeDynamicNTKScalingRotaryEmbedding(BailingMoeRotaryEmbedding):
+    """BailingMoeRotaryEmbedding extended with Dynamic NTK scaling. 
Credits to the Reddit users /u/bloc97 and /u/emozilla""" + + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0): + self.scaling_factor = scaling_factor + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + + if seq_len > self.max_position_embeddings: + base = self.base * ( + (self.scaling_factor * seq_len / self.max_position_embeddings) - (self.scaling_factor - 1) + ) ** (self.dim / (self.dim - 2)) + inv_freq = 1.0 / (base ** (torch.arange(0, self.dim, 2).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + t = torch.arange(self.max_seq_len_cached, device=device, dtype=self.inv_freq.dtype) + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + +# Inverse dim formula to find dim based on number of rotations +def yarn_find_correction_dim(num_rotations, dim, base=10000, max_position_embeddings=2048): + return (dim * math.log(max_position_embeddings / (num_rotations * 2 * math.pi))) / (2 * math.log(base)) + + +# Find dim range bounds based on rotations +def yarn_find_correction_range(low_rot, high_rot, dim, base=10000, max_position_embeddings=2048): + low = math.floor(yarn_find_correction_dim(low_rot, dim, base, max_position_embeddings)) + high = math.ceil(yarn_find_correction_dim(high_rot, dim, base, max_position_embeddings)) + return max(low, 0), min(high, dim - 1) # Clamp values just in case + + +def yarn_get_mscale(scale=1, mscale=1): + if scale <= 1: + return 1.0 + return 0.1 * mscale * math.log(scale) + 1.0 + + +def yarn_linear_ramp_mask(min, max, dim): + if min == max: + max += 0.001 # Prevent singularity + + linear_func = (torch.arange(dim, dtype=torch.float32) - min) / (max - min) + ramp_func = torch.clamp(linear_func, 0, 1) + return ramp_func + + +class BailingMoeYarnRotaryEmbedding(BailingMoeRotaryEmbedding): + + def __init__( + self, + dim, + max_position_embeddings=2048, + base=10000, + device=None, + scaling_factor=1.0, + original_max_position_embeddings=4096, + beta_fast=32, + beta_slow=1, + mscale=1, + mscale_all_dim=0, + ): + self.scaling_factor = scaling_factor + self.original_max_position_embeddings = original_max_position_embeddings + self.beta_fast = beta_fast + self.beta_slow = beta_slow + self.mscale = mscale + self.mscale_all_dim = mscale_all_dim + super().__init__(dim, max_position_embeddings, base, device) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + dim = self.dim + + freq_extra = 1.0 / (self.base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim)) + freq_inter = 1.0 / ( + self.scaling_factor * self.base ** (torch.arange(0, dim, 2, dtype=torch.float32, device=device) / dim) + ) + + low, high = yarn_find_correction_range( + self.beta_fast, + self.beta_slow, + dim, + self.base, + self.original_max_position_embeddings, + ) + inv_freq_mask = 1.0 - yarn_linear_ramp_mask(low, high, dim // 2).to(device=device, dtype=torch.float32) + inv_freq = freq_inter * (1 - inv_freq_mask) + freq_extra * inv_freq_mask + self.register_buffer("inv_freq", inv_freq, persistent=False) + + t = torch.arange(seq_len, device=device, 
dtype=torch.float32) + + freqs = torch.outer(t, inv_freq) + + _mscale = float( + yarn_get_mscale(self.scaling_factor, self.mscale) + / yarn_get_mscale(self.scaling_factor, self.mscale_all_dim) + ) + + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", (emb.cos() * _mscale).to(dtype), persistent=False) + self.register_buffer("sin_cached", (emb.sin() * _mscale).to(dtype), persistent=False) + + +# Copied from transformers.models.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., : x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2 :] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.models.llama.modeling_llama.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`): + The position indices of the tokens corresponding to the query and key tensors. For example, this can be + used to pass offsetted position ids when working with a KV-cache. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. + Returns: + `tuple(torch.Tensor)` comprising the query and key tensors rotated using the Rotary Position Embedding. 
+ """ + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +class BailingMoeMLP(nn.Module): + def __init__(self, config: BailingMoeConfig, intermediate_size: int): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size + self.intermediate_size = intermediate_size + + self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, x): + return self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x)) + + +class BailingMoeGate(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.top_k = config.num_experts_per_tok + self.num_experts = config.num_experts + + # topk selection algorithm + self.norm_topk_prob = config.norm_topk_prob + self.gating_dim = config.hidden_size + self.weight = nn.Parameter(torch.empty((self.num_experts, self.gating_dim))) + self.reset_parameters() + + def reset_parameters(self) -> None: + import torch.nn.init as init + + init.kaiming_uniform_(self.weight, a=math.sqrt(5)) + + def forward(self, hidden_states, sort=False): + bsz, seq_len, h = hidden_states.shape + # compute gating score + hidden_states = hidden_states.view(-1, h) + logits = F.linear(hidden_states, self.weight, None) + scores = logits.softmax(dim=-1, dtype=torch.float32) + + # select top-k experts + topk_weight, topk_idx = torch.topk(scores, k=self.top_k, dim=-1, sorted=sort) + + # norm gate to sum 1 + if self.top_k > 1 and self.norm_topk_prob: + denominator = topk_weight.sum(dim=-1, keepdim=True) + topk_weight = topk_weight / denominator + + return topk_idx, topk_weight, logits + + +class BailingMoeSparseMoeBlock(nn.Module): + """ + A mixed expert module containing shared experts. 
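+
+    Each token is routed by the gate to its top-k experts and the expert outputs
+    are combined using the gate's routing weights; if `num_shared_experts` is set,
+    the shared experts additionally run on every token and their output is added
+    to the routed result.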
+ """ + + def __init__(self, config: BailingMoeConfig): + super().__init__() + self.config = config + self.num_experts_per_tok = config.num_experts_per_tok + self._setup_experts() + self.gate = BailingMoeGate(config) + if config.num_shared_experts is not None: + self.shared_experts = BailingMoeMLP( + config=config, intermediate_size=config.moe_intermediate_size * config.num_shared_experts + ) + + def _setup_experts(self): + self.experts = nn.ModuleList( + [ + BailingMoeMLP(config=self.config, intermediate_size=self.config.moe_intermediate_size) + for _ in range(self.config.num_experts) + ] + ) + + def forward(self, hidden_states): + identity = hidden_states + bsz, seq_len, h = hidden_states.shape + topk_idx, topk_weight, router_logits = self.gate(hidden_states) + hidden_states = hidden_states.view(-1, hidden_states.shape[-1]) + flat_topk_idx = topk_idx.view(-1) + if self.training: + hidden_states = hidden_states.repeat_interleave(self.num_experts_per_tok, dim=0) + y = torch.empty_like(hidden_states) + for i, expert in enumerate(self.experts): + y[flat_topk_idx == i] = expert(hidden_states[flat_topk_idx == i]) + y = (y.view(*topk_weight.shape, -1) * topk_weight.unsqueeze(-1)).sum(dim=1) + y = y.to(hidden_states.dtype).view(bsz, seq_len, h) + else: + y = self.moe_infer(hidden_states, topk_idx, topk_weight).view(bsz, seq_len, h) + if self.config.num_shared_experts is not None: + y = y + self.shared_experts(identity) + return y, (router_logits.view(bsz, seq_len, -1), topk_idx.view(bsz, seq_len, -1)) + + @torch.no_grad() + def moe_infer(self, x, topk_ids, topk_weight): + cnts = topk_ids.new_zeros((topk_ids.shape[0], len(self.experts))) + cnts.scatter_(1, topk_ids, 1) + tokens_per_expert = cnts.sum(dim=0) + idxs = topk_ids.view(-1).argsort() + sorted_tokens = x[idxs // topk_ids.shape[1]] + sorted_tokens_shape = sorted_tokens.shape + tokens_per_expert = tokens_per_expert.cpu().numpy() + outputs = [] + start_idx = 0 + for i, num_tokens in enumerate(tokens_per_expert): + end_idx = start_idx + num_tokens + if num_tokens == 0: + continue + expert = self.experts[i] + tokens_for_this_expert = sorted_tokens[start_idx:end_idx] + expert_out = expert(tokens_for_this_expert) + outputs.append(expert_out) + start_idx = end_idx + + outs = torch.cat(outputs, dim=0) if len(outputs) else sorted_tokens.new_empty(0) + new_x = torch.empty_like(outs) + new_x[idxs] = outs + final_out = ( + new_x.view(*topk_ids.shape, -1) + .type(topk_weight.dtype) + .mul_(topk_weight.unsqueeze(dim=-1)) + .sum(dim=1) + .type(new_x.dtype) + ) + return final_out + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). 
The hidden states go from (batch,
+    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
+    """
+    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
+    if n_rep == 1:
+        return hidden_states
+    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
+    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaAttention with Llama->BailingMoe
+class BailingMoeAttention(nn.Module):
+    """Multi-headed attention from 'Attention Is All You Need' paper"""
+
+    def __init__(self, config: BailingMoeConfig, layer_idx: Optional[int] = None):
+        super().__init__()
+        self.config = config
+        self.layer_idx = layer_idx
+        if layer_idx is None:
+            logger.warning_once(
+                f"Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will "
+                "lead to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` "
+                "when creating this class."
+            )
+
+        self.attention_dropout = config.attention_dropout
+        self.hidden_size = config.hidden_size
+        self.num_heads = config.num_attention_heads
+        self.head_dim = config.head_dim or self.hidden_size // self.num_heads
+        self.num_key_value_heads = config.num_key_value_heads
+        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
+        self.max_position_embeddings = config.max_position_embeddings
+        self.rope_theta = config.rope_theta
+        self.is_causal = True
+
+        self.query_key_value = nn.Linear(
+            self.hidden_size,
+            (self.num_heads + 2 * self.num_key_value_heads) * self.head_dim,
+            bias=config.use_qkv_bias,
+        )
+        self.dense = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.use_bias)
+        self._init_rope()
+
+    def _init_rope(self):
+        if self.config.rope_scaling is None:
+            self.rotary_emb = BailingMoeRotaryEmbedding(
+                self.head_dim,
+                max_position_embeddings=self.max_position_embeddings,
+                base=self.rope_theta,
+            )
+        else:
+            scaling_type = self.config.rope_scaling["type"]
+            scaling_factor = self.config.rope_scaling["factor"]
+            if scaling_type == "linear":
+                self.rotary_emb = BailingMoeLinearScalingRotaryEmbedding(
+                    self.head_dim,
+                    max_position_embeddings=self.max_position_embeddings,
+                    scaling_factor=scaling_factor,
+                    base=self.rope_theta,
+                )
+            elif scaling_type == "dynamic":
+                self.rotary_emb = BailingMoeDynamicNTKScalingRotaryEmbedding(
+                    self.head_dim,
+                    max_position_embeddings=self.max_position_embeddings,
+                    scaling_factor=scaling_factor,
+                    base=self.rope_theta,
+                )
+            elif scaling_type == "yarn":
+                kwargs = {
+                    key: self.config.rope_scaling[key]
+                    for key in [
+                        "original_max_position_embeddings",
+                        "beta_fast",
+                        "beta_slow",
+                        "mscale",
+                        "mscale_all_dim",
+                    ]
+                    if key in self.config.rope_scaling
+                }
+                self.rotary_emb = BailingMoeYarnRotaryEmbedding(
+                    self.head_dim,
+                    max_position_embeddings=self.max_position_embeddings,
+                    scaling_factor=scaling_factor,
+                    base=self.rope_theta,
+                    **kwargs,
+                )
+            else:
+                raise ValueError(f"Unknown RoPE scaling type {scaling_type}")
+
+    def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
+        return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
+
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_value: Optional[Cache] = None,
+        output_attentions: bool = False,
+        use_cache: bool = False,
+        **kwargs,
+    ) -> 
Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+        if "padding_mask" in kwargs:
+            warnings.warn(
+                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure to use `attention_mask` instead."
+            )
+
+        bsz, q_len, _ = hidden_states.size()
+
+        qkv = self.query_key_value(hidden_states)
+        qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim)
+
+        query_states, key_states, value_states = qkv.split(
+            [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2
+        )
+        query_states = query_states.transpose(1, 2)
+        key_states = key_states.transpose(1, 2)
+        value_states = value_states.transpose(1, 2)
+
+        kv_seq_len = key_states.shape[-2]
+        if past_key_value is not None:
+            if self.layer_idx is None:
+                raise ValueError(
+                    f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} "
+                    "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class "
+                    "with a layer index."
+                )
+            kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
+        cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
+        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids)
+
+        if past_key_value is not None:
+            cache_kwargs = {"sin": sin, "cos": cos}  # Specific to RoPE models
+            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)
+
+        key_states = repeat_kv(key_states, self.num_key_value_groups)
+        value_states = repeat_kv(value_states, self.num_key_value_groups)
+
+        attn_weights = torch.matmul(query_states / math.sqrt(self.head_dim), key_states.transpose(2, 3))
+
+        if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len):
+            raise ValueError(
+                f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is"
+                f" {attn_weights.size()}"
+            )
+
+        if attention_mask is not None:
+            if attention_mask.size() != (bsz, 1, q_len, kv_seq_len):
+                raise ValueError(
+                    f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}"
+                )
+            attn_weights = attn_weights + attention_mask
+
+        # upcast attention to fp32
+        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
+        attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)
+        attn_output = torch.matmul(attn_weights, value_states)
+
+        if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim):
+            raise ValueError(
+                f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is"
+                f" {attn_output.size()}"
+            )
+
+        attn_output = attn_output.transpose(1, 2).contiguous()
+
+        attn_output = attn_output.reshape(bsz, q_len, -1)
+
+        attn_output = self.dense(attn_output)
+
+        if not output_attentions:
+            attn_weights = None
+
+        return attn_output, attn_weights, past_key_value
+
+
+# Copied from transformers.models.llama.modeling_llama.LlamaFlashAttention2 with Llama->BailingMoe
+class BailingMoeFlashAttention2(BailingMoeAttention):
+    """
+    BailingMoe flash attention module. This module inherits from `BailingMoeAttention` as the weights of the module stay
+    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
+    flash attention and deal with padding tokens in case the input contains any of them. 
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        # TODO: Should be removed once Flash Attention for RoCm is bumped to 2.1.
+        # flash_attn<2.1 generates top-left aligned causal mask, while what is needed here is bottom-right alignment, that was made default for flash_attn>=2.1. This attribute is used to handle this difference. Reference: https://github.com/Dao-AILab/flash-attention/releases/tag/v2.1.0.
+        # Beware that with flash_attn<2.1, using q_seqlen != k_seqlen (except for the case q_seqlen == 1) produces a wrong mask (top-left).
+        self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10()
+
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        attention_mask: Optional[torch.LongTensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_value: Optional[Cache] = None,
+        output_attentions: bool = False,
+        use_cache: bool = False,
+        **kwargs,
+    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+        # BailingMoeFlashAttention2 attention does not support output_attentions
+        if "padding_mask" in kwargs:
+            warnings.warn(
+                "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure to use `attention_mask` instead."
+            )
+
+            # overwrite attention_mask with padding_mask
+            attention_mask = kwargs.pop("padding_mask")
+
+        output_attentions = False
+
+        bsz, q_len, _ = hidden_states.size()
+
+        # Flash attention requires the input to have the shape
+        # batch_size x seq_length x num_heads x head_dim
+        # therefore we just need to keep the original shape
+
+        qkv = self.query_key_value(hidden_states)
+        qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim)
+
+        query_states, key_states, value_states = qkv.split(
+            [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2
+        )
+        query_states = query_states.transpose(1, 2)
+        key_states = key_states.transpose(1, 2)
+        value_states = value_states.transpose(1, 2)
+
+        kv_seq_len = key_states.shape[-2]
+        if past_key_value is not None:
+            kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx)
+        cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
+        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids)
+
+        if past_key_value is not None:
+            cache_kwargs = {"sin": sin, "cos": cos}  # Specific to RoPE models
+            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)
+
+        # TODO: These transposes are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache
+        # to be able to avoid many of these transpose/reshape/view.
+        query_states = query_states.transpose(1, 2)
+        key_states = key_states.transpose(1, 2)
+        value_states = value_states.transpose(1, 2)
+
+        dropout_rate = self.attention_dropout if self.training else 0.0
+
+        # In PEFT, usually we cast the layer norms in float32 for training stability reasons,
+        # therefore the input hidden states get silently cast in float32. Hence, we need to
+        # cast them back to the correct dtype just to be sure everything works as expected.
+        # This might slow down training & inference so it is recommended to not cast the LayerNorms
+        # in fp32. 
(BailingMoeRMSNorm handles it correctly)
+
+        input_dtype = query_states.dtype
+        if input_dtype == torch.float32:
+            # Handle the case where the model is quantized
+            if hasattr(self.config, "_pre_quantization_dtype"):
+                target_dtype = self.config._pre_quantization_dtype
+            elif torch.is_autocast_enabled():
+                target_dtype = torch.get_autocast_gpu_dtype()
+            else:
+                target_dtype = self.query_key_value.weight.dtype
+
+            logger.warning_once(
+                f"The input hidden states seem to be silently cast in float32; this might be related to"
+                f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
+                f" {target_dtype}."
+            )
+
+            query_states = query_states.to(target_dtype)
+            key_states = key_states.to(target_dtype)
+            value_states = value_states.to(target_dtype)
+
+        attn_output = self._flash_attention_forward(
+            query_states, key_states, value_states, attention_mask, q_len, dropout=dropout_rate
+        )
+
+        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
+        attn_output = self.dense(attn_output)
+
+        if not output_attentions:
+            attn_weights = None
+
+        return attn_output, attn_weights, past_key_value
+
+    def _flash_attention_forward(
+        self, query_states, key_states, value_states, attention_mask, query_length, dropout=0.0, softmax_scale=None
+    ):
+        """
+        Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token,
+        it first unpads the input, then computes the attention scores and pads the final attention scores.
+
+        Args:
+            query_states (`torch.Tensor`):
+                Input query states to be passed to Flash Attention API
+            key_states (`torch.Tensor`):
+                Input key states to be passed to Flash Attention API
+            value_states (`torch.Tensor`):
+                Input value states to be passed to Flash Attention API
+            attention_mask (`torch.Tensor`):
+                The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the
+                position of padding tokens and 1 for the position of non-padding tokens.
+            dropout (`float`, *optional*):
+                Attention dropout
+            softmax_scale (`float`, *optional*):
+                The scaling of QK^T before applying softmax. Defaults to 1 / sqrt(head_dim)
+            query_length (`int`):
+                The length of the query sequence in terms of tokens. This represents the number of tokens in the
+                `query_states` tensor along the sequence dimension. It is used to determine the effective sequence
+                length for attention computations.
+        """
+        if not self._flash_attn_uses_top_left_mask:
+            causal = self.is_causal
+        else:
+            # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in BailingMoeFlashAttention2 __init__. 
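+            # Editor's note (added for clarity): with flash_attn<2.1 the causal mask
+            # is top-left aligned, which only matches the bottom-right alignment this
+            # model needs when q_seqlen == k_seqlen; for single-token decoding
+            # (query_length == 1) the causal flag is dropped, which is safe because a
+            # lone query attending to the full key sequence needs no causal mask.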
+ causal = self.is_causal and query_length != 1 + + # Contains at least one padding token in the sequence + if attention_mask is not None: + batch_size = query_states.shape[0] + query_states, key_states, value_states, indices_q, cu_seq_lens, max_seq_lens = self._upad_input( + query_states, key_states, value_states, attention_mask, query_length + ) + + cu_seqlens_q, cu_seqlens_k = cu_seq_lens + max_seqlen_in_batch_q, max_seqlen_in_batch_k = max_seq_lens + + attn_output_unpad = flash_attn_varlen_func( + query_states, + key_states, + value_states, + cu_seqlens_q=cu_seqlens_q, + cu_seqlens_k=cu_seqlens_k, + max_seqlen_q=max_seqlen_in_batch_q, + max_seqlen_k=max_seqlen_in_batch_k, + dropout_p=dropout, + softmax_scale=softmax_scale, + causal=causal, + ) + + attn_output = pad_input(attn_output_unpad, indices_q, batch_size, query_length) + else: + attn_output = flash_attn_func( + query_states, key_states, value_states, dropout, softmax_scale=softmax_scale, causal=causal + ) + + return attn_output + + def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim), indices_k + ) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. + attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaSdpaAttention with Llama->BailingMoe +class BailingMoeSdpaAttention(BailingMoeAttention): + """ + BailingMoe attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from + `BailingMoeAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to + SDPA API. + """ + + # Adapted from BailingMoeAttention.forward + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if output_attentions: + # TODO: Improve this warning with e.g. `model.config.attn_implementation = "manual"` once this is implemented. + logger.warning_once( + "BailingMoeModel is using BailingMoeSdpaAttention, but `torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. 
Falling back to the manual attention implementation, " + 'but specifying the manual implementation will be required from Transformers version v5.0.0 onwards. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.' + ) + return super().forward( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + bsz, q_len, _ = hidden_states.size() + + qkv = self.query_key_value(hidden_states) + qkv = qkv.view(bsz, q_len, self.num_heads + 2 * self.num_key_value_heads, self.head_dim) + + query_states, key_states, value_states = qkv.split( + [self.num_heads, self.num_key_value_heads, self.num_key_value_heads], dim=-2 + ) + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + + # SDPA with memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous inputs with custom attn_mask, + # Reference: https://github.com/pytorch/pytorch/issues/112577. + if query_states.device.type == "cuda" and attention_mask is not None: + query_states = query_states.contiguous() + key_states = key_states.contiguous() + value_states = value_states.contiguous() + + attn_output = torch.nn.functional.scaled_dot_product_attention( + query_states, + key_states, + value_states, + attn_mask=attention_mask, + dropout_p=self.attention_dropout if self.training else 0.0, + # The q_len > 1 is necessary to match with AttentionMaskConverter.to_causal_4d that does not create a causal mask in case q_len == 1. 
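+            # Editor's note (added for clarity): is_causal=True lets SDPA synthesize
+            # the causal mask internally, which is only valid when no explicit
+            # attn_mask was built; with q_len == 1 (cached decoding) the single query
+            # may attend to every previous position, so no mask is required either.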
+ is_causal=self.is_causal and attention_mask is None and q_len > 1, + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, -1) + + attn_output = self.dense(attn_output) + + return attn_output, None, past_key_value + + +BAILING_MOE_ATTENTION_CLASSES = { + "eager": BailingMoeAttention, + "flash_attention_2": BailingMoeFlashAttention2, + "sdpa": BailingMoeSdpaAttention, +} + + +class BailingMoeDecoderLayer(nn.Module): + def __init__(self, config: BailingMoeConfig, layer_idx: int): + super().__init__() + self.hidden_size = config.hidden_size + + self.attention = BAILING_MOE_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) + + self.mlp = ( + BailingMoeSparseMoeBlock(config) + if (config.num_experts is not None and layer_idx >= config.first_k_dense_replace) + else BailingMoeMLP(config=config, intermediate_size=config.intermediate_size) + ) + self.input_layernorm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + self.post_attention_layernorm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + output_router_logits: Optional[bool] = False, + use_cache: Optional[bool] = False, + **kwargs, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): + attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1, + query_sequence_length, key_sequence_length)` if default attention is used. + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.n_positions - 1]`. + past_key_value (`Tuple(torch.FloatTensor)`, *optional*): + cached past key and value projection states + output_attentions (`bool`, *optional*): + Whether to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_router_logits (`bool`, *optional*): + Whether or not to return the logits of all the routers. They are useful for computing the router loss, + and should not be returned during inference. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + """ + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. 
Please make sure to use `attention_mask` instead."
+            )
+        residual = hidden_states
+
+        hidden_states = self.input_layernorm(hidden_states)
+
+        # Self Attention
+        hidden_states, self_attn_weights, present_key_value = self.attention(
+            hidden_states=hidden_states,
+            attention_mask=attention_mask,
+            position_ids=position_ids,
+            past_key_value=past_key_value,
+            output_attentions=output_attentions,
+            use_cache=use_cache,
+        )
+        hidden_states = residual + hidden_states
+
+        # Fully Connected
+        residual = hidden_states
+        hidden_states = self.post_attention_layernorm(hidden_states)
+        hidden_states = self.mlp(hidden_states)
+        if isinstance(hidden_states, tuple):
+            hidden_states, router_logits = hidden_states
+        else:
+            router_logits = None
+        hidden_states = residual + hidden_states
+
+        outputs = (hidden_states,)
+
+        if output_attentions:
+            outputs += (self_attn_weights,)
+
+        if use_cache:
+            outputs += (present_key_value,)
+
+        if output_router_logits:
+            outputs += (router_logits,)
+
+        return outputs
+
+
+BAILINGMOE_START_DOCSTRING = r"""
+    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
+    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning heads
+    etc.)
+
+    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
+    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to general usage
+    and behavior.
+
+    Parameters:
+        config ([`BailingMoeConfig`]):
+            Model configuration class with all the parameters of the model. Initializing with a config file does not
+            load the weights associated with the model, only the configuration. Check out the
+            [`~PreTrainedModel.from_pretrained`] method to load the model weights.
+"""
+
+
+@add_start_docstrings(
+    "The bare BailingMoe Model outputting raw hidden-states without any specific head on top.",
+    BAILINGMOE_START_DOCSTRING,
+)
+class BailingMoePreTrainedModel(PreTrainedModel):
+    config_class = BailingMoeConfig
+    base_model_prefix = "model"
+    supports_gradient_checkpointing = True
+    _no_split_modules = ["BailingMoeDecoderLayer"]
+    _skip_keys_device_placement = "past_key_values"
+    _supports_flash_attn_2 = True
+    _supports_sdpa = True
+    _supports_cache_class = True
+
+    def _init_weights(self, module):
+        std = self.config.initializer_range
+        if isinstance(module, nn.Linear):
+            module.weight.data.normal_(mean=0.0, std=std)
+            if module.bias is not None:
+                module.bias.data.zero_()
+        elif isinstance(module, nn.Embedding):
+            module.weight.data.normal_(mean=0.0, std=std)
+            if module.padding_idx is not None:
+                module.weight.data[module.padding_idx].zero_()
+
+
+BAILINGMOE_INPUTS_DOCSTRING = r"""
+    Args:
+        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
+            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
+            it.
+
+            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
+            [`PreTrainedTokenizer.__call__`] for details.
+
+            [What are input IDs?](../glossary#input-ids)
+        attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
+            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
+
+            - 1 for tokens that are **not masked**,
+            - 0 for tokens that are **masked**.
+
+            [What are attention masks?](../glossary#attention-mask)
+
+            Indices can be obtained using [`AutoTokenizer`]. 
+            [`PreTrainedTokenizer.__call__`] for details.
+
+            If `past_key_values` is used, optionally only the last `input_ids` have to be input (see
+            `past_key_values`).
+
+            If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`]
+            and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more
+            information on the default strategy.
+        position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+            Indices of positions of each input sequence token in the position embeddings. Selected in the range `[0,
+            config.n_positions - 1]`.
+
+            [What are position IDs?](../glossary#position-ids)
+        past_key_values (`Cache` or `tuple(tuple(torch.FloatTensor))`, *optional*):
+            Pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
+            blocks) that can be used to speed up sequential decoding. This typically consists of the `past_key_values`
+            returned by the model at a previous stage of decoding, when `use_cache=True` or `config.use_cache=True`.
+
+            Two formats are allowed:
+            - a [`~cache_utils.Cache`] instance;
+            - Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
+            shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`). This is also known as the legacy
+            cache format.
+
+            The model will output the same cache format that is fed as input. If no `past_key_values` are passed, the
+            legacy cache format will be returned.
+
+            If `past_key_values` are used, the user can optionally input only the last `input_ids` (those that don't
+            have their past key value states given to this model) of shape `(batch_size, 1)` instead of all `input_ids`
+            of shape `(batch_size, sequence_length)`.
+        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
+            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
+            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
+            model's internal embedding lookup matrix.
+        use_cache (`bool`, *optional*):
+            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
+            `past_key_values`).
+        output_attentions (`bool`, *optional*):
+            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
+            tensors for more detail.
+        output_hidden_states (`bool`, *optional*):
+            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
+            more detail.
+        return_dict (`bool`, *optional*):
+            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
+"""
+
+
+@add_start_docstrings(
+    "The bare BailingMoe Model outputting raw hidden-states without any specific head on top.",
+    BAILINGMOE_START_DOCSTRING,
+)
+class BailingMoeModel(BailingMoePreTrainedModel):
+    """
+    Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`BailingMoeDecoderLayer`]
+
+    Args:
+        config: BailingMoeConfig
+    """
+
+    def __init__(self, config: BailingMoeConfig):
+        super().__init__(config)
+        self.padding_idx = config.pad_token_id
+        self.vocab_size = config.vocab_size
+
+        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
+        self.layers = nn.ModuleList(
+            [BailingMoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
+        )
+        self._use_sdpa = config._attn_implementation == "sdpa"
+        self._use_flash_attention_2 = config._attn_implementation == "flash_attention_2"
+        self.norm = BailingMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
+
+        self.gradient_checkpointing = False
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    def get_input_embeddings(self):
+        return self.word_embeddings
+
+    def set_input_embeddings(self, value):
+        self.word_embeddings = value
+
+    @add_start_docstrings_to_model_forward(BAILINGMOE_INPUTS_DOCSTRING)
+    def forward(
+        self,
+        input_ids: torch.LongTensor = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        output_router_logits: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+        **kwargs,
+    ) -> Union[Tuple, MoeModelOutputWithPast]:
+        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
+        output_hidden_states = (
+            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+        )
+        output_router_logits = (
+            output_router_logits if output_router_logits is not None else self.config.output_router_logits
+        )
+        use_cache = use_cache if use_cache is not None else self.config.use_cache
+
+        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+        # retrieve input_ids and inputs_embeds
+        if input_ids is not None and inputs_embeds is not None:
+            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
+        elif input_ids is not None:
+            batch_size, seq_length = input_ids.shape[:2]
+        elif inputs_embeds is not None:
+            batch_size, seq_length = inputs_embeds.shape[:2]
+        else:
+            raise ValueError("You have to specify either input_ids or inputs_embeds")
+
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
+                )
+                use_cache = False
+
+        past_key_values_length = 0
+        if use_cache:
+            use_legacy_cache = not isinstance(past_key_values, Cache)
+            if use_legacy_cache:
+                past_key_values = DynamicCache.from_legacy_cache(past_key_values)
+            past_key_values_length = past_key_values.get_usable_length(seq_length)
+
+        if position_ids is None:
+            device = input_ids.device if input_ids is not None else inputs_embeds.device
+            position_ids = torch.arange(
+                past_key_values_length, seq_length + past_key_values_length, dtype=torch.long, device=device
+            )
+            position_ids = position_ids.unsqueeze(0)
+
+        if inputs_embeds is None:
+            inputs_embeds = self.word_embeddings(input_ids)
+
+        if self._use_flash_attention_2:
+            # 2d mask is passed through the layers
+            attention_mask = attention_mask if (attention_mask is not None and 0 in attention_mask) else None
+        elif self._use_sdpa and not output_attentions:
+            # output_attentions=True can not be supported when using SDPA, and we fall back on
+            # the manual implementation that requires a 4D causal mask in all cases.
+            attention_mask = _prepare_4d_causal_attention_mask_for_sdpa(
+                attention_mask,
+                (batch_size, seq_length),
+                inputs_embeds,
+                past_key_values_length,
+            )
+        else:
+            # 4d mask is passed through the layers
+            attention_mask = _prepare_4d_causal_attention_mask(
+                attention_mask, (batch_size, seq_length), inputs_embeds, past_key_values_length
+            )
+
+        # embed positions
+        hidden_states = inputs_embeds
+
+        # decoder layers
+        all_hidden_states = () if output_hidden_states else None
+        all_self_attns = () if output_attentions else None
+        all_router_logits = () if output_router_logits else None
+        next_decoder_cache = None
+
+        for decoder_layer in self.layers:
+            if output_hidden_states:
+                all_hidden_states += (hidden_states,)
+
+            if self.gradient_checkpointing and self.training:
+                layer_outputs = self._gradient_checkpointing_func(
+                    decoder_layer.__call__,
+                    hidden_states,
+                    attention_mask,
+                    position_ids,
+                    past_key_values,
+                    output_attentions,
+                    output_router_logits,
+                    use_cache,
+                )
+            else:
+                layer_outputs = decoder_layer(
+                    hidden_states,
+                    attention_mask=attention_mask,
+                    position_ids=position_ids,
+                    past_key_value=past_key_values,
+                    output_attentions=output_attentions,
+                    output_router_logits=output_router_logits,
+                    use_cache=use_cache,
+                )
+            hidden_states = layer_outputs[0]
+
+            if use_cache:
+                next_decoder_cache = layer_outputs[2 if output_attentions else 1]
+
+            if output_attentions:
+                all_self_attns += (layer_outputs[1],)
+
+            if output_router_logits and layer_outputs[-1] is not None:
+                all_router_logits += (layer_outputs[-1],)
+
+        hidden_states = self.norm(hidden_states)
+
+        # add hidden states from the last decoder layer
+        if output_hidden_states:
+            all_hidden_states += (hidden_states,)
+
+        next_cache = None
+        if use_cache:
+            next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache
+        if not return_dict:
+            return tuple(
+                v
+                for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_router_logits]
+                if v is not None
+            )
+        return MoeModelOutputWithPast(
+            last_hidden_state=hidden_states,
+            past_key_values=next_cache,
+            hidden_states=all_hidden_states,
+            attentions=all_self_attns,
+            router_logits=all_router_logits,
+        )
+
+
+class BailingMoeForCausalLM(BailingMoePreTrainedModel):
+    _tied_weights_keys = ["lm_head.weight"]
+
+    def __init__(self, config: BailingMoeConfig):
+        super().__init__(config)
+        self.model = BailingMoeModel(config)
+        self.vocab_size = config.vocab_size
+        self.norm_head = config.norm_head
+        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
+
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    def get_input_embeddings(self):
+        return self.model.word_embeddings
+
+    def set_input_embeddings(self, value):
+        self.model.word_embeddings = value
+
+    def get_output_embeddings(self):
+        return self.lm_head
+
+    def set_output_embeddings(self, new_embeddings):
+        self.lm_head = new_embeddings
+
+    def set_decoder(self, decoder):
+        self.model = decoder
+
+    def get_decoder(self):
+        return self.model
+
+    def compute_logit(self, hidden_states):
+        if self.norm_head:
+            if self.training:
+                norm_weight = (
+                    self.lm_head.weight / (torch.norm(self.lm_head.weight, p=2, dim=0, keepdim=True) + 1e-7).detach()
+                )
+                logits = F.linear(hidden_states, norm_weight, None)
+            else:
+                self.lm_head.weight.data = (
+                    self.lm_head.weight.data.float()
+                    / (torch.norm(self.lm_head.weight.data.float(), p=2, dim=0, keepdim=True) + 1e-7)
+                ).to(hidden_states.dtype)
+                logits = F.linear(hidden_states, self.lm_head.weight.data, None)
+                self.norm_head = False
+        else:
+            logits = self.lm_head(hidden_states)
+        return logits
+
+    @add_start_docstrings_to_model_forward(BAILINGMOE_INPUTS_DOCSTRING)
+    @replace_return_docstrings(output_type=MoeCausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC)
+    def forward(
+        self,
+        input_ids: torch.LongTensor = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        labels: Optional[torch.LongTensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        output_router_logits: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+        **kwargs,
+    ) -> Union[Tuple, MoeCausalLMOutputWithPast]:
+        r"""
+        Args:
+            labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+                Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
+                config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
+                (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+
+        Returns:
+
+        Example:
+
+        ```python
+        >>> from transformers import AutoTokenizer
+
+        >>> model = BailingMoeForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
+        >>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER)
+
+        >>> prompt = "Hey, are you conscious? Can you talk to me?"
+        >>> inputs = tokenizer(prompt, return_tensors="pt")
+
+        >>> # Generate
+        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
+        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
+        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
+        ```"""
+        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
+        output_hidden_states = (
+            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+        )
+        output_router_logits = (
+            output_router_logits if output_router_logits is not None else self.config.output_router_logits
+        )
+        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+        # decoder outputs consist of (dec_features, layer_state, dec_hidden, dec_attn)
+        outputs = self.model(
+            input_ids=input_ids,
+            attention_mask=attention_mask,
+            position_ids=position_ids,
+            past_key_values=past_key_values,
+            inputs_embeds=inputs_embeds,
+            use_cache=use_cache,
+            output_attentions=output_attentions,
+            output_hidden_states=output_hidden_states,
+            output_router_logits=output_router_logits,
+            return_dict=return_dict,
+            **kwargs,
+        )
+
+        hidden_states = outputs[0]
+
+        logits = self.compute_logit(hidden_states=hidden_states)
+        logits = logits.float()
+
+        loss = None
+        aux_loss = None
+
+        if labels is not None:
+            # Shift so that tokens < n predict n
+            shift_logits = logits[..., :-1, :].contiguous()
+            shift_labels = labels[..., 1:].contiguous()
+            # Flatten the tokens
+            loss_fct = CrossEntropyLoss()
+            shift_logits = shift_logits.view(-1, self.config.vocab_size)
+            shift_labels = shift_labels.view(-1)
+            # Enable model parallelism
+            shift_labels = shift_labels.to(shift_logits.device)
+            loss = loss_fct(shift_logits, shift_labels)
+
+        if not return_dict:
+            output = (logits,) + outputs[1:]
+            if output_router_logits:
+                output = (aux_loss,) + output
+            return (loss,) + output if loss is not None else output
+
+        return MoeCausalLMOutputWithPast(
+            loss=loss,
+            aux_loss=aux_loss,
+            logits=logits,
+            past_key_values=outputs.past_key_values,
+            hidden_states=outputs.hidden_states,
+            attentions=outputs.attentions,
+            router_logits=outputs.router_logits,
+        )
+
+    def prepare_inputs_for_generation(
+        self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, token_type_ids=None, **kwargs
+    ):
+        if past_key_values is not None:
+            if isinstance(past_key_values, Cache):
+                cache_length = past_key_values.get_seq_length()
+                past_length = past_key_values.seen_tokens
+                max_cache_length = (
+                    past_key_values.get_max_length()
+                    if hasattr(past_key_values, "get_max_length")
+                    else past_key_values.get_max_cache_shape()
+                )
+            else:
+                cache_length = past_length = past_key_values[0][0].shape[2]
+                max_cache_length = None
+
+            # Keep only the unprocessed tokens:
+            # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where
+            # some of the inputs are exclusively passed as part of the cache (e.g. when passing input_embeds as input)
+            if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]:
+                input_ids = input_ids[:, -(attention_mask.shape[1] - past_length) :]
+            # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard
+            # input_ids based on the past_length.
+            elif past_length < input_ids.shape[1]:
+                input_ids = input_ids[:, past_length:]
+            # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens.
+
+            # If we are about to go beyond the maximum cache length, we need to crop the input attention mask.
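+            # Cropping keeps the mask in step with a bounded cache: once the window is full,
+            # only the most recent `max_cache_length` positions remain visible to attention,
+            # so the mask is truncated to its last `max_cache_length` columns to stay aligned.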
+            if (
+                max_cache_length is not None
+                and attention_mask is not None
+                and cache_length + input_ids.shape[1] > max_cache_length
+            ):
+                attention_mask = attention_mask[:, -max_cache_length:]
+
+        position_ids = kwargs.get("position_ids", None)
+        if attention_mask is not None and position_ids is None:
+            # create position_ids on the fly for batch generation
+            position_ids = attention_mask.long().cumsum(-1) - 1
+            position_ids.masked_fill_(attention_mask == 0, 1)
+            if past_key_values:
+                position_ids = position_ids[:, -input_ids.shape[1] :]
+
+        # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+        if inputs_embeds is not None and past_key_values is None:
+            model_inputs = {"inputs_embeds": inputs_embeds}
+        else:
+            model_inputs = {"input_ids": input_ids}
+
+        model_inputs.update(
+            {
+                "position_ids": position_ids,
+                "past_key_values": past_key_values,
+                "use_cache": kwargs.get("use_cache"),
+                "attention_mask": attention_mask,
+            }
+        )
+        return model_inputs
+
+    @staticmethod
+    def _reorder_cache(past_key_values, beam_idx):
+        reordered_past = ()
+        for layer_past in past_key_values:
+            reordered_past += (
+                tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past),
+            )
+        return reordered_past
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000..48153bf
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,15 @@
+{
+  "additional_special_tokens": [
+    "<|number_end|>",
+    "<|arithmetic_start|>",
+    "<role>",
+    "<|arithmetic_end|>",
+    "</role>",
+    "<|number_start|>"
+  ],
+  "bos_token": "<|startoftext|>",
+  "cls_token": "[CLS]",
+  "eos_token": "<|endoftext|>",
+  "gmask_token": "[gMASK]",
+  "pad_token": "<|endoftext|>"
+}
\ No newline at end of file
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000..bf80fca
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4263f84d1ae750eb427be937562c33737b5bb035fe107fd414d27c766d1f629
+size 6098421
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000..a1531eb
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,25 @@
+{
+  "add_bos_token": false,
+  "add_eos_token": false,
+  "additional_special_tokens": [
+    "<role>",
+    "</role>",
+    "<|arithmetic_start|>",
+    "<|arithmetic_end|>",
+    "<|number_start|>",
+    "<|number_end|>"
+  ],
+  "bos_token": "<|startoftext|>",
+  "chat_template": "{% for message in messages %}{% set role = message['role'] | lower %}{% if role == 'user' %}{% set role = 'HUMAN' %}{% endif %}{% set role = role | upper %}{{ '<role>' + role + '</role>' + message['content'].split('</think>')[-1].lstrip('\\n') }}{% endfor %}{% if add_generation_prompt %}{{ '<role>ASSISTANT</role>' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "cls_token": "[CLS]",
+  "eos_token": "<|endoftext|>",
+  "gmask_token": "[gMASK]",
+  "merges_file": null,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|endoftext|>",
+  "tokenizer_class": "PreTrainedTokenizerFast",
+  "trust_remote_code": true,
+  "vocab_file": null,
+  "fast_tokenizer": true
+}
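
Two implementation details in `modeling_bailing_moe.py` above are worth unpacking. First, `BailingMoeDecoderLayer` chooses between a dense MLP and a sparse MoE block per layer: a layer gets `BailingMoeSparseMoeBlock` only when `config.num_experts` is set and its index is at least `config.first_k_dense_replace`. A minimal sketch of that placement rule follows; the config values are illustrative assumptions, not ones read from this checkpoint's `config.json`:

```python
# Sketch of the dense-vs-MoE placement rule from BailingMoeDecoderLayer.__init__.
# The three values below are illustrative, not this checkpoint's actual config.
num_hidden_layers = 28
first_k_dense_replace = 1
num_experts = 64

layer_kinds = [
    "moe" if (num_experts is not None and layer_idx >= first_k_dense_replace) else "dense"
    for layer_idx in range(num_hidden_layers)
]
print(layer_kinds[:4])  # ['dense', 'moe', 'moe', 'moe'] -- the first k layers stay dense
```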
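Second, `compute_logit` implements an optional weight-normalized output head. When `config.norm_head` is set, each `dim=0` column of `lm_head.weight` is divided by its L2 norm before the final projection; during training the norm is detached so gradients treat it as a constant, and at inference the normalization is baked into the stored weights once and the flag is flipped off so later calls use a plain linear layer. A toy sketch (random sizes, not the real checkpoint) showing that the two paths agree:

```python
# Toy illustration of the norm_head logic in compute_logit.
import torch
import torch.nn.functional as F

vocab_size, hidden_size = 7, 4
weight = torch.randn(vocab_size, hidden_size)  # stands in for lm_head.weight
hidden = torch.randn(2, hidden_size)           # stands in for final hidden states

# Training path: normalize each dim=0 column on the fly (the real code detaches the norm).
norm_weight = weight / (torch.norm(weight, p=2, dim=0, keepdim=True) + 1e-7)
logits_train = F.linear(hidden, norm_weight)

# Inference path: bake the same normalization into the weights once, then do a plain matmul.
weight = weight / (torch.norm(weight, p=2, dim=0, keepdim=True) + 1e-7)
logits_infer = F.linear(hidden, weight)

assert torch.allclose(logits_train, logits_infer)
```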
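`prepare_inputs_for_generation` derives `position_ids` from the padding mask with a cumulative sum, which keeps positions contiguous for left-padded batches. A small worked example of that trick:

```python
# The cumsum trick used to build position_ids for a left-padded batch.
import torch

attention_mask = torch.tensor([[0, 0, 1, 1, 1],
                               [1, 1, 1, 1, 1]])
position_ids = attention_mask.long().cumsum(-1) - 1  # running count of real tokens, minus one
position_ids.masked_fill_(attention_mask == 0, 1)    # padding slots get a harmless placeholder
print(position_ids)
# tensor([[1, 1, 0, 1, 2],
#         [0, 1, 2, 3, 4]])
```

During incremental decoding the method then keeps only the positions for the tokens actually being fed this step, via `position_ids[:, -input_ids.shape[1]:]`.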
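Finally, for readers wiring the checkpoint up end to end, here is a minimal, unofficial usage sketch. It assumes the repository id from the patch header (`inclusionAI/Ring-lite-distill-preview`) and the `<role>...</role>` chat format reconstructed in `chat_template` above; because the modeling code ships with the checkpoint, `trust_remote_code=True` is required.

```python
# Minimal usage sketch (unofficial; repo id taken from the patch header).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "inclusionAI/Ring-lite-distill-preview"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
# device_map="auto" assumes the accelerate package is installed.
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True, device_map="auto")

messages = [{"role": "user", "content": "Briefly explain mixture-of-experts routing."}]
# Renders to roughly "<role>HUMAN</role>...<role>ASSISTANT</role>" per the chat template.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```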