From b886417077a982409537108ce6002db4a3c7b881 Mon Sep 17 00:00:00 2001 From: ModelHub XC Date: Fri, 10 Apr 2026 10:29:09 +0800 Subject: [PATCH] =?UTF-8?q?=E5=88=9D=E5=A7=8B=E5=8C=96=E9=A1=B9=E7=9B=AE?= =?UTF-8?q?=EF=BC=8C=E7=94=B1ModelHub=20XC=E7=A4=BE=E5=8C=BA=E6=8F=90?= =?UTF-8?q?=E4=BE=9B=E6=A8=A1=E5=9E=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Model: eaddario/Dolphin-Mistral-24B-Venice-Edition-pruned-GGUF Source: Original Platform --- .gitattributes | 37 + .gitignore | 584 ++++++++ Dolphin-Mistral-24B-Venice-Edition-F16.gguf | 3 + ...stral-24B-Venice-Edition-pruned-IQ3_M.gguf | 3 + ...stral-24B-Venice-Edition-pruned-IQ3_S.gguf | 3 + ...tral-24B-Venice-Edition-pruned-IQ4_NL.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q3_K_L.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q3_K_M.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q3_K_S.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q4_K_M.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q4_K_S.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q5_K_M.gguf | 3 + ...tral-24B-Venice-Edition-pruned-Q5_K_S.gguf | 3 + ...istral-24B-Venice-Edition-pruned-Q6_K.gguf | 3 + ...istral-24B-Venice-Edition-pruned-Q8_0.gguf | 3 + README.md | 227 ++++ ...lphin-Mistral-24B-Venice-Edition-micro.dat | 3 + ...lphin-Mistral-24B-Venice-Edition-small.dat | 3 + ...olphin-Mistral-24B-Venice-Edition-tiny.dat | 3 + ...phin-Mistral-24B-Venice-Edition-F16.logits | 3 + ...Dolphin-Mistral-24B-Venice-Edition-F16.arc | 21 + ...Dolphin-Mistral-24B-Venice-Edition-F16.hsw | 20 + .../Dolphin-Mistral-24B-Venice-Edition-F16.md | 1206 +++++++++++++++++ ...olphin-Mistral-24B-Venice-Edition-F16.mmlu | 21 + ...Dolphin-Mistral-24B-Venice-Edition-F16.tqa | 21 + ...Dolphin-Mistral-24B-Venice-Edition-F16.wng | 19 + ...olphin-Mistral-24B-Venice-Edition-IQ3_M.md | 1154 ++++++++++++++++ ...olphin-Mistral-24B-Venice-Edition-IQ3_S.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-IQ4_NL.md | 1154 ++++++++++++++++ 
...lphin-Mistral-24B-Venice-Edition-Q3_K_L.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q3_K_M.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q3_K_S.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q4_K_M.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q4_K_S.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q5_K_M.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-Q5_K_S.md | 1154 ++++++++++++++++ ...Dolphin-Mistral-24B-Venice-Edition-Q6_K.md | 1154 ++++++++++++++++ ...Dolphin-Mistral-24B-Venice-Edition-Q8_0.md | 1154 ++++++++++++++++ ...lphin-Mistral-24B-Venice-Edition-iq3_m.arc | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_m.hsw | 20 + ...phin-Mistral-24B-Venice-Edition-iq3_m.mmlu | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_m.ppx | 37 + ...lphin-Mistral-24B-Venice-Edition-iq3_m.tqa | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_m.wng | 19 + ...lphin-Mistral-24B-Venice-Edition-iq3_s.arc | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_s.hsw | 20 + ...phin-Mistral-24B-Venice-Edition-iq3_s.mmlu | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_s.ppx | 37 + ...lphin-Mistral-24B-Venice-Edition-iq3_s.tqa | 21 + ...lphin-Mistral-24B-Venice-Edition-iq3_s.wng | 19 + ...phin-Mistral-24B-Venice-Edition-iq4_nl.arc | 21 + ...phin-Mistral-24B-Venice-Edition-iq4_nl.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-iq4_nl.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-iq4_nl.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-iq4_nl.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-iq4_nl.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q3_k_l.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_l.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q3_k_l.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_l.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q3_k_l.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_l.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q3_k_m.arc | 21 + 
...phin-Mistral-24B-Venice-Edition-q3_k_m.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q3_k_m.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_m.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q3_k_m.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_m.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q3_k_s.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_s.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q3_k_s.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_s.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q3_k_s.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q3_k_s.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q4_k_m.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_m.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q4_k_m.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_m.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q4_k_m.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_m.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q4_k_s.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_s.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q4_k_s.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_s.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q4_k_s.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q4_k_s.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q5_k_m.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_m.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q5_k_m.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_m.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q5_k_m.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_m.wng | 19 + ...phin-Mistral-24B-Venice-Edition-q5_k_s.arc | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_s.hsw | 20 + ...hin-Mistral-24B-Venice-Edition-q5_k_s.mmlu | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_s.ppx | 37 + ...phin-Mistral-24B-Venice-Edition-q5_k_s.tqa | 21 + ...phin-Mistral-24B-Venice-Edition-q5_k_s.wng | 19 + ...olphin-Mistral-24B-Venice-Edition-q6_k.arc | 21 + ...olphin-Mistral-24B-Venice-Edition-q6_k.hsw | 20 + 
...lphin-Mistral-24B-Venice-Edition-q6_k.mmlu | 21 + ...olphin-Mistral-24B-Venice-Edition-q6_k.ppx | 37 + ...olphin-Mistral-24B-Venice-Edition-q6_k.tqa | 21 + ...olphin-Mistral-24B-Venice-Edition-q6_k.wng | 19 + ...olphin-Mistral-24B-Venice-Edition-q8_0.arc | 21 + ...olphin-Mistral-24B-Venice-Edition-q8_0.hsw | 20 + ...lphin-Mistral-24B-Venice-Edition-q8_0.mmlu | 21 + ...olphin-Mistral-24B-Venice-Edition-q8_0.ppx | 37 + ...olphin-Mistral-24B-Venice-Edition-q8_0.tqa | 21 + ...olphin-Mistral-24B-Venice-Edition-q8_0.wng | 19 + 110 files changed, 17723 insertions(+) create mode 100644 .gitattributes create mode 100644 .gitignore create mode 100644 Dolphin-Mistral-24B-Venice-Edition-F16.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf create mode 100644 Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf create mode 100644 README.md create mode 100644 imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-micro.dat create mode 100644 imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-small.dat create mode 100644 imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-tiny.dat create mode 100644 logits/Dolphin-Mistral-24B-Venice-Edition-F16.logits create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-F16.arc create mode 100644 
scores/Dolphin-Mistral-24B-Venice-Edition-F16.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-F16.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-F16.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-F16.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-F16.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_M.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_S.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-IQ4_NL.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_L.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_M.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_S.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_M.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_S.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_M.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_S.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q6_K.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-Q8_0.md create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.tqa create mode 100644 
scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.hsw create mode 100644 
scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.wng create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.wng create 
mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.arc create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.hsw create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.mmlu create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.ppx create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.tqa create mode 100644 scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.wng diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..d1254ed --- /dev/null +++ b/.gitattributes @@ -0,0 +1,37 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +*.gguf filter=lfs diff=lfs merge=lfs -text +*.logits filter=lfs diff=lfs merge=lfs -text +*.dat filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tar filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs 
merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..86d94a1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,584 @@ +# https://github.com/github/gitignore + +# ------------- +# General Linux +# ------------- +.directory +.fuse_hidden* +.nfs* +.Trash-* +*~ + +# ----------- +# General OSX +# ----------- +._* +.apdisk +.AppleDB +.AppleDesktop +.AppleDouble +.com.apple.timemachine.donotpresent +.DocumentRevisions-V100 +.DS_Store +.fseventsd +.LSOverride +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +Network Trash Folder +Temporary Items + +# --------------- +# General Windows +# --------------- +[Dd]esktop.ini +*.cab +*.lnk +*.msi +*.msix +*.msm +*.msp +*.stackdump +$RECYCLE.BIN/ +ehthumbs_vista.db +ehthumbs.db +Thumbs.db +Thumbs.db:encryptable + +# ----------------- +# General JetBrains +# ----------------- +.idea_modules/ +.idea/ +*.iml +*.ipr +*.iws + +# --------------- +# General VS Code +# --------------- +!.vscode/*.code-snippets +!.vscode/extensions.json +!.vscode/launch.json +!.vscode/settings.json +!.vscode/tasks.json +.history/ +.vscode/ +*.vsix + +# --------------------- +# General Visual Studio +# --------------------- +__pycache__/ +_NCrunch_* +_pkginfo.txt +_Pvt_Extensions +_ReSharper*/ +_TeamCity* +_UpgradeReport_Files/ +!?*.[Cc]ache/ +!.vscode/extensions.json +!**/[Pp]ackages/build/ +.*crunch*.local.xml +.builds +.cr/personal +.fake/ +.ionide/ +.localhistory/ +.mfractor/ +.ntvs_analysis.dat +.paket/paket.exe +.sass-cache/ +.vs/ +.vscode/* +.vshistory/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +[Bb]in/ +[Bb]uild[Ll]og.* +[Dd]ebug/ +[Dd]ebugPS/ +[Dd]ebugPublic/ +[Ee]xpress/ +[Ll]og/ +[Ll]ogs/ +[Oo]bj/ +[Rr]elease/ +[Rr]eleasePS/ +[Rr]eleases/ +[Tt]est[Rr]esult*/ +[Ww][Ii][Nn]32/ +*_h.h +*_i.c +*_p.c +*_wpftmp.csproj +*- [Bb]ackup ([0-9]).rdl +*- 
[Bb]ackup ([0-9][0-9]).rdl +*- [Bb]ackup.rdl +*.[Cc]ache +*.[Pp]ublish.xml +*.[Rr]e[Ss]harper +*.appx +*.appxbundle +*.appxupload +*.aps +*.azurePubxml +*.bim_*.settings +*.bim.layout +*.binlog +*.btm.cs +*.btp.cs +*.build.csdef +*.cachefile +*.code-workspace +*.coverage +*.coveragexml +*.dbmdl +*.dbproj.schemaview +*.dotCover +*.DotSettings.user +*.dsp +*.dsw +*.e2e +*.GhostDoc.xml +*.gpState +*.ilk +*.iobj +*.ipdb +*.jfm +*.jmconfig +*.ldf +*.mdf +*.meta +*.mm.* +*.ncb +*.ndf +*.nuget.props +*.nuget.targets +*.nupkg +*.nvuser +*.obj +*.odx.cs +*.opendb +*.opensdf +*.opt +*.pch +*.pdb +*.pfx +*.pgc +*.pgd +*.pidb +*.plg +*.psess +*.publishproj +*.publishsettings +*.pubxml +*.pyc +*.rdl.data +*.rptproj.bak +*.rptproj.rsuser +*.rsp +*.rsuser +*.sap +*.sbr +*.scc +*.sdf +*.sln.docstates +*.sln.iml +*.snupkg +*.suo +*.svclog +*.tlb +*.tlh +*.tli +*.tlog +*.tmp +*.tmp_proj +*.tss +*.user +*.userosscache +*.userprefs +*.vbp +*.vbw +*.VC.db +*.VC.VC.opendb +*.VisualState.xml +*.vsp +*.vspscc +*.vspx +*.vssscc +*.xsd.cs +**/[Pp]ackages/* +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.HTMLClient/GeneratedArtifacts +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +~$* +$tf/ +AppPackages/ +artifacts/ +ASALocalRun/ +AutoTest.Net/ +Backup*/ +BenchmarkDotNet.Artifacts/ +bld/ +BundleArtifacts/ +ClientBin/ +coverage*.info +coverage*.json +coverage*.xml +csx/ +dlldata.c +DocProject/buildhelp/ +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/*.HxC +DocProject/Help/*.HxT +DocProject/Help/html +DocProject/Help/Html2 +ecf/ +FakesAssemblies/ +FodyWeavers.xsd +Generated_Code/ +healthchecksdb +ipch/ +MigrationBackup/ +mono_crash.* +nCrunchTemp_* +node_modules/ +nunit-*.xml +OpenCover/ +orleans.codegen.cs +Package.StoreAssociation.xml +paket-files/ +project.fragment.lock.json +project.lock.json +publish/ +PublishScripts/ +rcf/ +ScaffoldingReadMe.txt +ServiceFabricBackup/ +StyleCopReport.xml 
+TestResult.xml +UpgradeLog*.htm +UpgradeLog*.XML +x64/ +x86/ + +# ---------------------- +# General Archived Files +# ---------------------- +*.7z +*.bz2 +*.bzip +*.bzip2 +*.deb +*.dmg +*.egg +*.gem +*.gz +*.gzip +*.iso +*.jar +*.lzma +*.rar +*.rpm +*.tar +*.tgz +*.txz +*.tzst +*.xar +*.xpi +*.xz +*.zip +*.zst + +# ----- +# C/C++ +# ----- +.tmp_versions/ +*.a +*.app +*.cmd +*.d +*.dll +*.dSYM/ +*.dylib +*.elf +*.exe +*.exp +*.gch +*.hex +*.i*86 +*.idb +*.ko +*.la +*.lai +*.lib +*.lo +*.map +*.mod* +*.o +*.out +*.slo +*.so +*.so.* +*.su +*.x86_64 +dkms.conf +Mkfile.old +Module.symvers +modules.order + +# ---- +# CUDA +# ---- +*.cubin +*.fatbin +*.gpu +*.i +*.ii +*.ptx + +# -- +# Go +# -- +.env +*.exe~ +*.test +go.work +go.work.sum + +# ---- +# Java +# ---- +*.class +*.ctxt +*.ear +*.hprof +*.nar +*.tar.gz +*.war +hs_err_pid* +replay_pid* + +# ----- +# Julia +# ----- +*.jl.*.cov +*.jl.cov +*.jl.mem +deps/build.log +deps/deps.jl +deps/downloads/ +deps/src/ +deps/usr/ +docs/build/ +docs/site/ +Manifest.toml + +# ------------------------------ +# JavaScript / Node / TypeScript +# ------------------------------ +.cache +.cache/ +.docusaurus +.dynamodb/ +.env.development.local +.env.local +.env.production.local +.env.test.local +.eslintcache +.fusebox/ +.grunt +.lock-wscript +.next +.node_repl_history +.npm +.nuxt +.nyc_output +.parcel-cache +.pnp.* +.pnpm-debug.log* +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ +.serverless/ +.stylelintcache +.temp +.tern-port +.vscode-test +.vuepress/dist +.yarn-integrity +.yarn/build-state.yml +.yarn/cache +.yarn/install-state.gz +.yarn/unplugged +*.lcov +*.pid +*.pid.lock +*.seed +*.tsbuildinfo +bower_components +dist +jspm_packages/ +lerna-debug.log* +lib-cov +logs +npm-debug.log* +out +pids +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json +web_modules/ +yarn-debug.log* +yarn-error.log* + +# ------ +# Python +# ------ +__pypackages__/ +.coverage +.coverage.* +.dmypy.json +.eggs/ +.hypothesis/ +.installed.cfg 
+.ipynb_checkpoints +.mypy_cache/ +.nox/ +.pdm-build/ +.pdm-python +.pdm.toml +.pybuilder/ +.pyre/ +.pytest_cache/ +.Python +.python-version +.pytype/ +.ropeproject +.scrapy +.spyderproject +.spyproject +.tox/ +.venv +.webassets-cache +*.cover +*.egg-info/ +*.manifest +*.mo +*.pot +*.py,cover +*.py[cod] +*.sage.py +*.spec +*$py.class +/site +build/ +celerybeat-schedule +celerybeat.pid +cover/ +coverage.xml +cython_debug/ +db.sqlite3 +db.sqlite3-journal +develop-eggs/ +dist/ +dmypy.json +docs/_build/ +downloads/ +eggs/ +env.bak/ +env/ +htmlcov/ +instance/ +ipython_config.py +lib/ +lib64/ +local_settings.py +MANIFEST +nosetests.xml +parts/ +pip-delete-this-directory.txt +pip-log.txt +profile_default/ +sdist/ +share/python-wheels/ +target/ +var/ +venv.bak/ +venv/ +wheels/ + +# ---- +# Rust +# ---- +**/*.rs.bk +Cargo.lock +debug/ + +# ----- +# Scala +# ----- + +# ----- +# CMake +# ----- +_deps +cmake_install.cmake +CMakeCache.txt +CMakeFiles +CMakeLists.txt.user +CMakeScripts +CMakeUserPresets.json +compile_commands.json +CTestTestfile.cmake +install_manifest.txt +Makefile +Testing + +# ------ +# Gradle +# ------ +!gradle-wrapper.jar +!gradle-wrapper.properties +!src/**/build/ +.classpath +.gradle +.gradletasknamecache +.project +**/build/ +gradle-app.setting + +# ----- +# Maven +# ----- +.mvn/ +buildNumber.properties +dependency-reduced-pom.xml +pom.xml.next +pom.xml.releaseBackup +pom.xml.tag +pom.xml.versionsBackup +release.properties + +# --------- +# Terraform +# --------- +.terraform.tfstate.lock.info +.terraform/ +.terraformrc +*_override.tf +*_override.tf.json +*.tfstate +*.tfstate.* +*.tfvars +*.tfvars.json +crash.*.log +crash.log +override.tf +override.tf.json +terraform.rc \ No newline at end of file diff --git a/Dolphin-Mistral-24B-Venice-Edition-F16.gguf b/Dolphin-Mistral-24B-Venice-Edition-F16.gguf new file mode 100644 index 0000000..93fc6dd --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-F16.gguf @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:1c79615012a379a8850a04bc2ab9abe884ca75afa123d35f9266297d46b6b444 +size 47153518592 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf new file mode 100644 index 0000000..a657905 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0aef3c959bb993ff6b3436ceba8f6c84380632e35024212463d2c1b9d87c649a +size 9551090944 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf new file mode 100644 index 0000000..6a4a765 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:225dbd86c81f9f0c21e983dc48ae50a4030438870b06775036af68b413d9f491 +size 9258390784 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf new file mode 100644 index 0000000..f816f1d --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:065931827062c69435d6070cb52d0756d825f44dedada29d3e37393ffeb219b6 +size 11590161664 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf new file mode 100644 index 0000000..c2bb828 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5dc5bfdfe5688dae240596d4c7b5ce27596f6e8b04cc13acc93a024592ef6acc +size 10801190144 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf new file mode 100644 index 0000000..abc2b17 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:89519ef728550abb6337361e574ada6bff595979a2bcd25f47d4a45c161778e5 +size 9932674304 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf new file mode 100644 index 0000000..975c4b0 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd7f9a8b813884de5d97574e72696f09db90bfdc61c06b33caf196bfc10a9bc6 +size 8857474304 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf new file mode 100644 index 0000000..8575082 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93460770f3a0cd61c1b343128b29d07e886ccbca03788c6d57f6b5d9322558ea +size 12390356224 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf new file mode 100644 index 0000000..d18e6c5 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:71c03419c7fc592bb3ef8cfd7350ce508ef5407e4fd92e03ad4d192c23fde026 +size 11698296064 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf new file mode 100644 index 0000000..40b1e02 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e9bbd91b1fe1ea8da71308b991fbc16b8d2852595428d77789636289a918765 +size 14343902464 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf new file mode 100644 index 0000000..2da5556 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:fab643caf3db32222fc47668880ab7fb63d6b24b62917ed65fecd052dce5de29 +size 13905712384 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf new file mode 100644 index 0000000..f03b8e4 --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a79cc78ede99402be7cfd56f4ae10c3e6abeb9d7f16787faa7e6a630d5c687e +size 16782292224 diff --git a/Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf new file mode 100644 index 0000000..f5d08ba --- /dev/null +++ b/Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79e735a0efaf45354e9cb9d7b38e4309a4d7c528a741fc0d1a0cb4ab817181b9 +size 21883573504 diff --git a/README.md b/README.md new file mode 100644 index 0000000..2171f5a --- /dev/null +++ b/README.md @@ -0,0 +1,227 @@ +--- +base_model: +- cognitivecomputations/Dolphin-Mistral-24B-Venice-Edition +datasets: +- eaddario/imatrix-calibration +language: +- en +license: +- apache-2.0 +pipeline_tag: text-generation +tags: +- gguf +- quant +- pruned +- experimental +--- + +# Experimental layer-wise + pruned (layers 4 and 5) quantization of cognitivecomputations/Dolphin-Mistral-24B-Venice-Edition + +Using [LLaMA C++][llm] release [b5770][llm-rel] for quantization. 
+ +Original model: [cognitivecomputations/Dolphin-Mistral-24B-Venice-Edition][mdl] + +From the original model creators: + +> [![Discord](https://img.shields.io/discord/1156064224225808488?logo=Discord&logoColor=%23ffffff&label=Discord&link=https%3A%2F%2Fdiscord.gg%2FtCMkMDDHwm)](https://discord.gg/h3K4XGj2RH) +> Discord: https://discord.gg/h3K4XGj2RH +> Website: https://dphn.ai +> Twitter: https://x.com/dphnAI +> +> ![image/jpeg](https://cdn-uploads.huggingface.co/production/uploads/68485b28c949339ca04c370c/LMOLMYwK-ixnGGdSBXew6.jpeg) +> +> ## What is Dolphin Mistral 24B Venice Edition? +> +> Dolphin Mistral 24B Venice Edition is a collaborative project we undertook with Venice.ai with the goal of creating the most uncensored version of Mistral 24B for use within the Venice ecosystem. +> +> Dolphin Mistral 24B Venice Edition is now live on https://venice.ai/ as “Venice Uncensored,” the new default model for all Venice users. +> +> Dolphin aims to be a general purpose model, similar to the models behind ChatGPT, Claude, Gemini. But these models present problems for businesses seeking to include AI in their products. +> 1) They maintain control of the system prompt, deprecating and changing things as they wish, often causing software to break. +> 2) They maintain control of the model versions, sometimes changing things silently, or deprecating older models that your business relies on. +> 3) They maintain control of the alignment, and in particular the alignment is one-size-fits all, not tailored to the application. +> 4) They can see all your queries and they can potentially use that data in ways you wouldn't want. +> Dolphin, in contrast, is steerable and gives control to the system owner. You set the system prompt. You decide the alignment. You have control of your data. Dolphin does not impose its ethics or guidelines on you. You are the one who decides the guidelines. +> +> Dolphin belongs to YOU, it is your tool, an extension of your will. 
+> Just as you are personally responsible for what you do with a knife, gun, fire, car, or the internet, you are the creator and originator of any content you generate with Dolphin. + +From Eric Hartford's, the creator of the Dolphin model series, [Uncensored Models](https://erichartford.com/uncensored-models): + +> Most of these models (for example, Alpaca, Vicuna, WizardLM, MPT-7B-Chat, Wizard-Vicuna, GPT4-X-Vicuna) have some sort of embedded alignment. For general purposes, this is a good thing. This is what stops the model from doing bad things, like teaching you how to cook meth and make bombs. But what is the nature of this alignment? And, why is it so? +> +> The reason these models are aligned is that they are trained with data that was generated by ChatGPT, which itself is aligned by an alignment team at OpenAI. As it is a black box, we don't know all the reasons for the decisions that were made, but we can observe it generally is aligned with American popular culture, and to obey American law... + +# PLEASE READ THIS BEFORE USING THESE EXPERIMENTAL VERSIONS! + +An area of personal interest is finding ways to optimize the inference performance of LLMs when deployed in resource-constrained environments like commodity hardware, desktops, laptops, mobiles, edge devices, etc. There are many approaches to accomplish this, including architecture simplification and knowledge distillation, but my focus has been primarily on quantization and pruning. + +The method used to produce these experimental versions is covered in [Squeezing Tensor Bits: the quest for smaller LLMs][mdm], but at a high level it involves using a custom version of `llama-imatrix` and `llama-quantize` to identify influential tensors, quantize the most important layers to higher bit precision and the less important to lower bits, and remove (prune) one or more layers. 
This process was partly inspired by Dumitru et al.'s [Layer-Wise Quantization: A Pragmatic and Effective Method for Quantizing LLMs Beyond Integer Bit-Levels][lwq-ppr], and Xin Men et al.'s [ShortGPT: Layers in Large Language Models are More Redundant Than You Expect][sgpt-ppr]. + +As of version [b5125][qtz-lwq], [llama-quantize][qtz] can perform **tensor-wide quantization (TWQ)**, whereby user-defined tensors are quantized at a specific level, or perform **layer-wise quantization (LWQ)** by selecting different quantization types per tensor/layer. For example, `--tensor-type attn_v=q6_k` will quantize all *Attention Value* tensors at *q6_k* (TWQ), and `--tensor-type "\.([0-9]|1[01257]|31)\.attn_k=q4_k"` will quantize *Attention Key* tensors on layers 0 to 9, 10, 11, 12, 15, 17 and 31 at *q4_k*, leaving the remaining layers at their default value (LWQ). + +As of version [b5740][qtz-prn], [llama-quantize][qtz] can also prune models during quantisation by providing a comma-separated list in the `--prune-layers` command line option. The pruning operation will renumber remaining layers to avoid gaps in the sequence, update the relevant model metadata and, if an imatrix is available, it will use the correct importance score vector. This option can be used alongside `--tensor-type` to perform tensor/layer-wise quantization on selected tensor types, whilst at the same time pruning others. For example: +``` +llama-quantize --tensor-type attn=q6_k --prune-layers 3,7,11 --imatrix imatrix.dat model-f32.gguf model-q4_k_m.gguf q4_k_m +``` + +An enhanced version of [llama-imatrix][imx] generates useful statistics to guide the tensor and layer selection process. `--show-statistics` will display: + +- **Σ(Act²):** the sum of all squared activations over the tensor (i.e.
the Importance Scores) +- **Min & Max:** minimum and maximum squared activation values +- **μ & σ:** activations' mean and standard deviation +- **% Active:** proportion of elements whose average squared activation exceeds a very small threshold (1e-5). Helpful to determine how alive/dormant the tensor is during inference +- **N:** number of squared activations in the tensor +- **Entropy:** entropy of the squared activation distribution, in bits (standard Shannon entropy measurement) +- **E (norm):** Normalized entropy. +- **ZD Score:** z-score distribution as described in 3.1 Layer Importance Scores in the Layer-Wise Quantization paper +- **CosSim:** cosine similarity between same type tensors with respect to the previous layer (i.e. blk.7.attn_k and blk.6.attn_k) + +Please note that statistics are calculated for each individual tensor and should be used to compare between tensors of the same type only. For example, assuming that *attn_k* in layer 10 has a higher influence during inference than *attn_k* in layer 7 because its **Σ(Act²)** is larger makes sense, whilst concluding the same between *attn_k* and *ffn_down* does not. + +There’s a [pull request][imtx-pr] to merge these changes back into the core llama.cpp project. This may or may not ever happen so, until then, the modified version will be available on [GitHub][gh]. + +For testing and comparison I use models produced by [Unsloth][ust] ([Daniel and Michael Han][ust-ai] do some really advanced level stuff!) and [Bartowski][btk] (see credits below) but if they don't provide versions of the required model, all tests and comparisons are done against naive quantizations obtained by simply running `llama-quantize` with no further optimization. + +All experimental versions were generated using an appropriate imatrix created from calibration datasets available at [eaddario/imatrix-calibration][ical]. 
At its core, an Importance Matrix (imatrix) is a table or, more broadly, a structured representation that scores the relative importance of different features or parameters in a machine learning model. It essentially quantifies the "impact" each feature has on a specific outcome, prediction, or relationship being modelled, and it helps to counterbalance the negative effects of quantization and pruning. + +The process to generate these models is roughly as follows: + +1. Convert the original model's tensors to [GGUF][ggf] F16* +2. Estimate the Perplexity score for the F16 model (baseline) using the [wikitext-2-raw-v1][wki-dat] dataset, and save the [logits][lgt] +3. Generate an [imatrix][imx-dat] from selected calibration datasets +4. Determine tensor and layer Importance Score contribution using the enhanced version of `llama-imatrix` +5. Select an appropriate quant level for each tensor and quantize/prune the model using `llama-quantize`. In this model's case, layers **4** and **5** have been pruned +6. Calculate Perplexity, KL Divergence, ARC (Easy+Challenge), HellaSwag, MMLU, Truthful QA and WinoGrande scores for each quantized model +7. Keep versions with the best scores +8. Repeat until all desired quants are created. I find that quantizations below Q3/IQ3 are not fit for my purposes and therefore do not usually generate them, but happy to provide other quants on request. + +*[BF16][bf16] would be preferred, but Apple's GPUs don't support it yet, and therefore any operations are executed in the CPU, making it unacceptably slow. 
This is expected to change in the near term but until then, if you are using Apple kit avoid using any models tagged BF16 + +# Models + +### Sizes (in GB) +| Model | Bartowski | Repo | Shrinkage | +| ----------------------------------------------------------------------------------------------------------- | --------: | ---: | --------: | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf) | 10.7 | 9.6 | 10.3% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf) | 9.9 | 9.3 | 6.2% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf) | 13.5 | 11.6 | 14.1% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf) | 12.4 | 10.8 | 12.9% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf) | 11.5 | 9.9 | 13.9% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf) | 10.4 | 8.9 | 14.4% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 14.3 | 12.4 | 13.3% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf) | 13.5 | 11.7 | 13.3% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf) | 16.8 | 14.3 | 14.9% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf) | 16.3 | 13.9 | 14.7% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf) | 19.7 | 16.8 | 14.7% | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf) | 25.1 | 21.9 | 12.7% | + +### Perplexity and KL Divergence scores +| Model | 
μPPL | 𝜌PPL | μKLD | RMS Δp | +| ----------------------------------------------------------------------------------------------------------- | ------------------: | -----: | -----------------: | ------------: | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf) | 20.379006 ±0.160275 | 73.93% | 1.290608 ±0.004304 | 37.928 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf) | 21.165413 ±0.164512 | 73.80% | 1.340446 ±0.004301 | 38.586 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf) | 18.783744 ±0.146959 | 74.79% | 1.199318 ±0.004258 | 36.745 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf) | 19.313300 ±0.150799 | 74.61% | 1.248712 ±0.004216 | 37.260 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf) | 18.723777 ±0.145380 | 75.90% | 1.226150 ±0.004006 | 36.807 ±0.087 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf) | 19.765437 ±0.153182 | 74.13% | 1.295119 ±0.004177 | 38.004 ±0.087 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 18.556910 ±0.145472 | 74.92% | 1.187728 ±0.004237 | 36.521 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M-bartowski][b-q4km] | 6.304728 ±0.042418 | 99.60% | 0.016941 ±0.000138 | 4.031 ±0.037 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf) | 18.663517 ±0.146425 | 74.87% | 1.192878 ±0.004250 | 36.598 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf) | 18.174846 ±0.142320 | 75.14% | 1.159685 ±0.004238 | 36.214 ±0.088 | +| 
[Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf) | 18.199918 ±0.142513 | 75.20% | 1.160040 ±0.004229 | 36.220 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf) | 18.213825 ±0.142965 | 75.05% | 1.158026 ±0.004262 | 36.219 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf) | 18.203515 ±0.142826 | 75.02% | 1.158351 ±0.004265 | 36.227 ±0.088 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-F16](./Dolphin-Mistral-24B-Venice-Edition-F16.gguf) | 6.180577 ±0.041038 | 100% | N/A | N/A | + +### ARC, HellaSwag, MMLU, Truthful QA and WinoGrande scores +Scores generated using [llama-perplexity][ppl] with 750 tasks per test, and a context size of 768 tokens. + +For the test data used in the generation of these scores, follow the appropriate links: [HellaSwag][hsw-tst], [ARC, MMLU, Truthful QA][tst-dat] and [WinoGrande][wng-tst] + +| Model | ARC | HellaSwag | MMLU | Truthful QA | WinoGrande | Avg Score | +| ----------------------------------------------------------------------------------------------------------- | --------------: | --------: | --------------: | --------------: | --------------: | --------: | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf) | 65.6000 ±1.7358 | 79.60 | 42.9333 ±1.8086 | 38.4000 ±1.7771 | 72.4000 ±1.6334 | 59.79 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf) | 64.9333 ±1.7436 | 79.87 | 42.0000 ±1.8034 | 38.0000 ±1.7736 | 72.5333 ±1.6309 | 59.47 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL](./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf) | 68.4000 ±1.6988 | 80.66 | 44.9333 ±1.8176 | 38.1333 ±1.7748 | 74.4000 ±1.5947 | 61.31 | +| 
[Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf) | 67.2000 ±1.7155 | 80.27 | 43.2000 ±1.8100 | 39.6000 ±1.7870 | 72.9333 ±1.6235 | 60.64 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf) | 66.6667 ±1.7225 | 80.67 | 43.8667 ±1.8132 | 39.4667 ±1.7860 | 72.2667 ±1.6358 | 60.59 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf) | 66.2667 ±1.7276 | 78.93 | 43.7333 ±1.8126 | 38.1333 ±1.7748 | 72.8000 ±1.6260 | 59.97 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 68.0000 ±1.7045 | 80.93 | 45.2000 ±1.8185 | 36.6667 ±1.7608 | 72.1333 ±1.6382 | 60.59 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M-bartowski][b-q4km] | 69.8667 ±1.6766 | 84.27 | 45.3333 ±1.8190 | 37.6000 ±1.7699 | 80.2667 ±1.4542 | 63.47 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf) | 67.0667 ±1.7172 | 81.07 | 45.2000 ±1.8185 | 36.2667 ±1.7567 | 72.0000 ±1.6406 | 60.32 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf) | 67.0667 ±1.7172 | 81.73 | 44.5333 ±1.8160 | 37.8667 ±1.7724 | 73.8667 ±1.6054 | 61.01 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf) | 67.3333 ±1.7137 | 81.47 | 44.2667 ±1.8149 | 38.6667 ±1.7794 | 74.2667 ±1.5974 | 61.20 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf) | 67.4667 ±1.7119 | 81.07 | 44.5333 ±1.8160 | 39.6000 ±1.7870 | 73.8667 ±1.6054 | 61.31 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf) | 68.1333 ±1.7026 | 81.33 | 44.9333 ±1.8176 | 38.2667 ±1.7759 | 74.4000 ±1.5947 | 61.41 | +| 
[Dolphin-Mistral-24B-Venice-Edition-pruned-F16](./Dolphin-Mistral-24B-Venice-Edition-F16.gguf) | 70.8000 ±1.6614 | 84.53 | 45.3333 ±1.8190 | 38.1333 ±1.7748 | 80.2667 ±1.4542 | 63.81 | + +### Tokens per Second - Benchmarks +Scores generated using [llama-bench][bch]. Naive (`llama-quantize` with no optimization) Q4_K_M quantization included for comparison. + +| model | size | params | backend | threads | test | t/s | +| ----------------------------------------------------------------------------------------------------------- | --------: | ------: | ---------- | ------: | ------------: | ------------: | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 11.53 GiB | 22.46 B | Metal,BLAS | 12 | pp512 | 266.57 ±14.60 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 11.53 GiB | 22.46 B | Metal,BLAS | 12 | tg128 | 27.60 ±0.54 | +| [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M](./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf) | 11.53 GiB | 22.46 B | Metal,BLAS | 12 | pp1024+tg1024 | 41.55 ±2.99 | +| [Dolphin-Mistral-24B-Venice-Edition-Q4_K_M-bartowski][b-q4km] | 13.34 GiB | 23.57 B | Metal,BLAS | 12 | pp512 | 253.67 ±2.26 | +| [Dolphin-Mistral-24B-Venice-Edition-Q4_K_M-bartowski][b-q4km] | 13.34 GiB | 23.57 B | Metal,BLAS | 12 | tg128 | 27.69 ±0.46 | +| [Dolphin-Mistral-24B-Venice-Edition-Q4_K_M-bartowski][b-q4km] | 13.34 GiB | 23.57 B | Metal,BLAS | 12 | pp1024+tg1024 | 45.54 ±0.18 | + +# Metrics used +**[Perplexity][ppx]:** one of the key metrics used in NLP evaluation. It measures the quality of a language model by evaluating how well it predicts the next token given a particular sequence of words. A PPL of **1** indicates an exact match between predicted and actual, whereas values greater than one indicate a degree of "surprise" the generated token differs from the expected. 
+ +**[Kullback–Leibler (KL) Divergence][kld]:** a statistical measure of how much a probability distribution differs from another. When quantizing models (or altering the original tensors in any way for that matter), the closer we can keep the weights' probability distribution to the original model's the better; thus the closer to **0** the better. + +**[AI2 Reasoning Challenge (ARC)][arc]:** a benchmark to evaluate the ability of AI models to answer complex science questions that require logical reasoning beyond pattern matching. + +**[HellaSwag][hsw]:** the Harder Endings, Longer contexts, and Low-shot Activities for Situations With Adversarial Generations (bit of a mouthful!) is a benchmark designed to test commonsense natural language inference. It requires the model to predict the most likely ending of a sentence. + +**[MMLU][mmlu]:** the Massive Multitask Language Understanding evaluates LLMs’ general knowledge and problem-solving abilities across 57 subjects, including elementary mathematics, US history, computer science, and law. + +**[Truthful QA][tqa]:** evaluates how well LLMs generate truthful responses to questions. It identifies whether AI models can avoid generating false or misleading information, particularly in areas where human knowledge is prone to misconceptions. + +**[Winogrande][wng]:** based on the [Winograd Schema Challenge][wng-chl], is a natural language understanding task requiring models to resolve ambiguities in sentences involving pronoun references.
+ +## Credits +[LLaMa C++][llm] has a large and vibrant community of [contributors][llm-ctt] (~1,200 last time I checked) that actively maintain and extend its functionality, adding new models and architectures almost as fast as they appear (considering the breakneck speed at which the AI/ML field is advancing, this alone is a remarkable feat!), and whilst I'm grateful to each and everyone of them, I want to recognise three people in particular: **Thank You!** [Colin Kealty][btk] for the many contributions and for being one of the best sources of high quality quantized models available on Hugging Face, and a really big ***Thank You!*** to [Georgi Gerganov][ggg] for his amazing work with **llama.cpp** and the **ggml/gguf** libraries, and [Iwan Kawrakow][ikk] for being one of the key authors behind the many quantisation algorithms and the imatrix functionality. + +[arc]: https://leaderboard.allenai.org/arc/submissions/get-started +[b-q4km]: https://huggingface.co/bartowski/cognitivecomputations_Dolphin-Mistral-24B-Venice-Edition-GGUF/blob/main/cognitivecomputations_Dolphin-Mistral-24B-Venice-Edition-Q4_K_M.gguf +[bch]: https://github.com/ggml-org/llama.cpp/tree/master/tools/llama-bench +[bf16]: https://en.wikipedia.org/wiki/Bfloat16_floating-point_format +[btk]: https://huggingface.co/bartowski +[ggf]: https://huggingface.co/docs/hub/en/gguf +[ggg]: https://github.com/ggerganov +[gh]: https://github.com/EAddario/llama.cpp/tree/imatrix +[hsw-tst]: https://github.com/klosax/hellaswag_text_data +[hsw]: https://rowanzellers.com/hellaswag +[ical]: https://huggingface.co/datasets/eaddario/imatrix-calibration +[ikk]: https://github.com/ikawrakow +[imtx-pr]: https://github.com/ggml-org/llama.cpp/pull/12718 +[imx-dat]: https://huggingface.co/eaddario/Dolphin-Mistral-24B-Venice-Edition-pruned-GGUF/tree/main/imatrix +[imx]: https://github.com/ggml-org/llama.cpp/tree/master/tools/imatrix +[kld]: https://en.wikipedia.org/wiki/Kullback–Leibler_divergence +[lgt]: 
https://huggingface.co/eaddario/Dolphin-Mistral-24B-Venice-Edition-pruned-GGUF/tree/main/logits +[llm-ctt]: https://github.com/ggml-org/llama.cpp/graphs/contributors +[llm-rel]: https://github.com/ggml-org/llama.cpp/releases/tag/b5770 +[llm]: https://github.com/ggerganov/llama.cpp +[lwq-ppr]: https://arxiv.org/abs/2406.17415 +[mdl]: https://huggingface.co/cognitivecomputations/Dolphin-Mistral-24B-Venice-Edition +[mdm]: https://medium.com/@eaddario/squeezing-tensor-bits-the-quest-for-smaller-llms-86b23bd052ca +[mmlu]: https://github.com/hendrycks/test +[ppl]: https://github.com/ggml-org/llama.cpp/tree/master/tools/perplexity +[ppx]: https://huggingface.co/docs/transformers/en/perplexity +[qtz-lwq]: https://github.com/ggml-org/llama.cpp/releases/tag/b5125 +[qtz-prn]: https://github.com/ggml-org/llama.cpp/releases/tag/b5740 +[qtz]: https://github.com/ggml-org/llama.cpp/tree/master/tools/quantize +[sgpt-ppr]: https://arxiv.org/abs/2403.03853 +[tqa]: https://github.com/sylinrl/TruthfulQA +[tst-dat]: https://huggingface.co/datasets/ikawrakow/validation-datasets-for-llama.cpp/tree/main +[u-q4km]: https://huggingface.co/unsloth/ +[ust-ai]: https://unsloth.ai +[ust]: https://huggingface.co/unsloth +[wki-dat]: https://huggingface.co/datasets/Salesforce/wikitext/tree/main/wikitext-2-raw-v1 +[wng-chl]: https://cdn.aaai.org/ocs/4492/4492-21843-1-PB.pdf +[wng-tst]: https://huggingface.co/datasets/ikawrakow/winogrande-eval-for-llama.cpp/tree/main +[wng]: https://winogrande.allenai.org diff --git a/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-micro.dat b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-micro.dat new file mode 100644 index 0000000..82e451a --- /dev/null +++ b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-micro.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a900de611282ef326d336237d2faee0cbf29be1a94442da5e96d7baecd376a11 +size 10024052 diff --git a/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-small.dat 
b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-small.dat new file mode 100644 index 0000000..f6a621f --- /dev/null +++ b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-small.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b1477cef161f2039b80933395b2d4790c749f50ea22de8afea46a1726bd1cb5e +size 10024052 diff --git a/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-tiny.dat b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-tiny.dat new file mode 100644 index 0000000..75b5784 --- /dev/null +++ b/imatrix/imatrix-Dolphin-Mistral-24B-Venice-Edition-tiny.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:01d570a51a4c0bf8d9d9e912c87c36b87e4c187d2dd475ba06d3878f2e84571c +size 10024051 diff --git a/logits/Dolphin-Mistral-24B-Venice-Edition-F16.logits b/logits/Dolphin-Mistral-24B-Venice-Edition-F16.logits new file mode 100644 index 0000000..f0e412b --- /dev/null +++ b/logits/Dolphin-Mistral-24B-Venice-Edition-F16.logits @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0584b90b815c7b9de70582e1c5bf84b14ce449e66255dac5ec0de140c2a0a48 +size 39761286068 diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.arc new file mode 100644 index 0000000..268ddaa --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 39 key-value pairs and 363 tensors from ./Dolphin-Mistral-24B-Venice-Edition-F16.gguf (version GGUF V3 (latest)) + +Final result: 70.8000 +/- 1.6614 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 40685.08 ms +llama_perf_context_print: prompt eval time = 151474.18 ms / 36666 tokens ( 4.13 ms per token, 242.06 tokens per second) 
+llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 152313.64 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.hsw new file mode 100644 index 0000000..d0127cb --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 39 key-value pairs and 363 tensors from ./Dolphin-Mistral-24B-Venice-Edition-F16.gguf (version GGUF V3 (latest)) + +750 84.53333333% [81.7702%, 86.9445%] + + +llama_perf_context_print: load time = 2898.86 ms +llama_perf_context_print: prompt eval time = 545800.60 ms / 129319 tokens ( 4.22 ms per token, 236.93 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 549439.05 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num 
heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.md b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.md new file mode 100644 index 0000000..45c52a4 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.md @@ -0,0 +1,1206 @@ +# Dolphin-Mistral-24B-Venice-Edition-F16.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 42 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 363 | +| 3 | UINT64 | 1 | GGUF.kv_count | 39 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.block_count | 40 | +| 17 | UINT32 | 1 | llama.context_length | 32768 
| +| 18 | UINT32 | 1 | llama.embedding_length | 5120 | +| 19 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 20 | UINT32 | 1 | llama.attention.head_count | 32 | +| 21 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 22 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 23 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 24 | UINT32 | 1 | llama.attention.key_length | 128 | +| 25 | UINT32 | 1 | llama.attention.value_length | 128 | +| 26 | UINT32 | 1 | general.file_type | 1 | +| 27 | UINT32 | 1 | llama.vocab_size | 131072 | +| 28 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 29 | UINT32 | 1 | general.quantization_version | 2 | +| 30 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 31 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 32 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... ] | +| 33 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 34 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... 
] | +| 35 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 36 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 37 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 38 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 39 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 40 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 41 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 42 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | + +## Tensors Overview ~24B Elements + +Total number of elements in all tensors: 23572403200 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-F16.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-F16gguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~24B Elements](#tensors-overview-24b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M 
Elements](#block-19-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : ~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M Elements](#block-3-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + - [Block 38 Tensor Group : ~556M Elements](#block-38-tensor-group--556m-elements) + - [Block 39 Tensor Group : ~556M Elements](#block-39-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor 
Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784800 | 0x50000000 | +| 1 | token_embd.weight | 0x50784800 | 0x50000000 | +| 2 | blk.0.attn_norm.weight | 0xa0784800 | 0x5000 | +| 3 | blk.0.ffn_down.weight | 0xa0789800 | 0x14000000 | +| 4 | blk.0.ffn_gate.weight | 0xb4789800 | 0x14000000 | +| 5 | blk.0.ffn_up.weight | 0xc8789800 | 0x14000000 | +| 6 | blk.0.ffn_norm.weight | 0xdc789800 | 0x5000 | +| 7 | blk.0.attn_k.weight | 0xdc78e800 | 0xa00000 | +| 8 | blk.0.attn_output.weight | 0xdd18e800 | 0x2800000 | +| 9 | blk.0.attn_q.weight | 0xdf98e800 | 0x2800000 | +| 10 | blk.0.attn_v.weight | 0xe218e800 | 0xa00000 | +| 11 | blk.1.attn_norm.weight | 0xe2b8e800 | 0x5000 | +| 12 | blk.1.ffn_down.weight | 0xe2b93800 | 0x14000000 | +| 13 | blk.1.ffn_gate.weight | 0xf6b93800 | 0x14000000 | +| 14 | blk.1.ffn_up.weight | 0x10ab93800 | 0x14000000 | +| 15 | blk.1.ffn_norm.weight | 0x11eb93800 | 0x5000 | +| 16 | blk.1.attn_k.weight | 0x11eb98800 | 0xa00000 | +| 17 | blk.1.attn_output.weight | 0x11f598800 | 0x2800000 | +| 18 | blk.1.attn_q.weight | 0x121d98800 | 0x2800000 | +| 19 | blk.1.attn_v.weight | 0x124598800 | 0xa00000 | +| 20 | blk.10.attn_norm.weight | 0x124f98800 | 0x5000 | +| 21 | blk.10.ffn_down.weight | 0x124f9d800 | 0x14000000 | +| 22 | blk.10.ffn_gate.weight | 0x138f9d800 | 0x14000000 | +| 23 | blk.10.ffn_up.weight | 0x14cf9d800 | 0x14000000 | +| 24 | 
blk.10.ffn_norm.weight | 0x160f9d800 | 0x5000 | +| 25 | blk.10.attn_k.weight | 0x160fa2800 | 0xa00000 | +| 26 | blk.10.attn_output.weight | 0x1619a2800 | 0x2800000 | +| 27 | blk.10.attn_q.weight | 0x1641a2800 | 0x2800000 | +| 28 | blk.10.attn_v.weight | 0x1669a2800 | 0xa00000 | +| 29 | blk.11.attn_norm.weight | 0x1673a2800 | 0x5000 | +| 30 | blk.11.ffn_down.weight | 0x1673a7800 | 0x14000000 | +| 31 | blk.11.ffn_gate.weight | 0x17b3a7800 | 0x14000000 | +| 32 | blk.11.ffn_up.weight | 0x18f3a7800 | 0x14000000 | +| 33 | blk.11.ffn_norm.weight | 0x1a33a7800 | 0x5000 | +| 34 | blk.11.attn_k.weight | 0x1a33ac800 | 0xa00000 | +| 35 | blk.11.attn_output.weight | 0x1a3dac800 | 0x2800000 | +| 36 | blk.11.attn_q.weight | 0x1a65ac800 | 0x2800000 | +| 37 | blk.11.attn_v.weight | 0x1a8dac800 | 0xa00000 | +| 38 | blk.12.attn_norm.weight | 0x1a97ac800 | 0x5000 | +| 39 | blk.12.ffn_down.weight | 0x1a97b1800 | 0x14000000 | +| 40 | blk.12.ffn_gate.weight | 0x1bd7b1800 | 0x14000000 | +| 41 | blk.12.ffn_up.weight | 0x1d17b1800 | 0x14000000 | +| 42 | blk.12.ffn_norm.weight | 0x1e57b1800 | 0x5000 | +| 43 | blk.12.attn_k.weight | 0x1e57b6800 | 0xa00000 | +| 44 | blk.12.attn_output.weight | 0x1e61b6800 | 0x2800000 | +| 45 | blk.12.attn_q.weight | 0x1e89b6800 | 0x2800000 | +| 46 | blk.12.attn_v.weight | 0x1eb1b6800 | 0xa00000 | +| 47 | blk.13.attn_norm.weight | 0x1ebbb6800 | 0x5000 | +| 48 | blk.13.ffn_down.weight | 0x1ebbbb800 | 0x14000000 | +| 49 | blk.13.ffn_gate.weight | 0x1ffbbb800 | 0x14000000 | +| 50 | blk.13.ffn_up.weight | 0x213bbb800 | 0x14000000 | +| 51 | blk.13.ffn_norm.weight | 0x227bbb800 | 0x5000 | +| 52 | blk.13.attn_k.weight | 0x227bc0800 | 0xa00000 | +| 53 | blk.13.attn_output.weight | 0x2285c0800 | 0x2800000 | +| 54 | blk.13.attn_q.weight | 0x22adc0800 | 0x2800000 | +| 55 | blk.13.attn_v.weight | 0x22d5c0800 | 0xa00000 | +| 56 | blk.14.attn_norm.weight | 0x22dfc0800 | 0x5000 | +| 57 | blk.14.ffn_down.weight | 0x22dfc5800 | 0x14000000 | +| 58 | blk.14.ffn_gate.weight | 
0x241fc5800 | 0x14000000 | +| 59 | blk.14.ffn_up.weight | 0x255fc5800 | 0x14000000 | +| 60 | blk.14.ffn_norm.weight | 0x269fc5800 | 0x5000 | +| 61 | blk.14.attn_k.weight | 0x269fca800 | 0xa00000 | +| 62 | blk.14.attn_output.weight | 0x26a9ca800 | 0x2800000 | +| 63 | blk.14.attn_q.weight | 0x26d1ca800 | 0x2800000 | +| 64 | blk.14.attn_v.weight | 0x26f9ca800 | 0xa00000 | +| 65 | blk.15.attn_norm.weight | 0x2703ca800 | 0x5000 | +| 66 | blk.15.ffn_down.weight | 0x2703cf800 | 0x14000000 | +| 67 | blk.15.ffn_gate.weight | 0x2843cf800 | 0x14000000 | +| 68 | blk.15.ffn_up.weight | 0x2983cf800 | 0x14000000 | +| 69 | blk.15.ffn_norm.weight | 0x2ac3cf800 | 0x5000 | +| 70 | blk.15.attn_k.weight | 0x2ac3d4800 | 0xa00000 | +| 71 | blk.15.attn_output.weight | 0x2acdd4800 | 0x2800000 | +| 72 | blk.15.attn_q.weight | 0x2af5d4800 | 0x2800000 | +| 73 | blk.15.attn_v.weight | 0x2b1dd4800 | 0xa00000 | +| 74 | blk.16.attn_norm.weight | 0x2b27d4800 | 0x5000 | +| 75 | blk.16.ffn_down.weight | 0x2b27d9800 | 0x14000000 | +| 76 | blk.16.ffn_gate.weight | 0x2c67d9800 | 0x14000000 | +| 77 | blk.16.ffn_up.weight | 0x2da7d9800 | 0x14000000 | +| 78 | blk.16.ffn_norm.weight | 0x2ee7d9800 | 0x5000 | +| 79 | blk.16.attn_k.weight | 0x2ee7de800 | 0xa00000 | +| 80 | blk.16.attn_output.weight | 0x2ef1de800 | 0x2800000 | +| 81 | blk.16.attn_q.weight | 0x2f19de800 | 0x2800000 | +| 82 | blk.16.attn_v.weight | 0x2f41de800 | 0xa00000 | +| 83 | blk.17.attn_norm.weight | 0x2f4bde800 | 0x5000 | +| 84 | blk.17.ffn_down.weight | 0x2f4be3800 | 0x14000000 | +| 85 | blk.17.ffn_gate.weight | 0x308be3800 | 0x14000000 | +| 86 | blk.17.ffn_up.weight | 0x31cbe3800 | 0x14000000 | +| 87 | blk.17.ffn_norm.weight | 0x330be3800 | 0x5000 | +| 88 | blk.17.attn_k.weight | 0x330be8800 | 0xa00000 | +| 89 | blk.17.attn_output.weight | 0x3315e8800 | 0x2800000 | +| 90 | blk.17.attn_q.weight | 0x333de8800 | 0x2800000 | +| 91 | blk.17.attn_v.weight | 0x3365e8800 | 0xa00000 | +| 92 | blk.18.attn_norm.weight | 0x336fe8800 | 0x5000 | +| 
93 | blk.18.ffn_down.weight | 0x336fed800 | 0x14000000 | +| 94 | blk.18.ffn_gate.weight | 0x34afed800 | 0x14000000 | +| 95 | blk.18.ffn_up.weight | 0x35efed800 | 0x14000000 | +| 96 | blk.18.ffn_norm.weight | 0x372fed800 | 0x5000 | +| 97 | blk.18.attn_k.weight | 0x372ff2800 | 0xa00000 | +| 98 | blk.18.attn_output.weight | 0x3739f2800 | 0x2800000 | +| 99 | blk.18.attn_q.weight | 0x3761f2800 | 0x2800000 | +| 100 | blk.18.attn_v.weight | 0x3789f2800 | 0xa00000 | +| 101 | blk.19.attn_norm.weight | 0x3793f2800 | 0x5000 | +| 102 | blk.19.ffn_down.weight | 0x3793f7800 | 0x14000000 | +| 103 | blk.19.ffn_gate.weight | 0x38d3f7800 | 0x14000000 | +| 104 | blk.19.ffn_up.weight | 0x3a13f7800 | 0x14000000 | +| 105 | blk.19.ffn_norm.weight | 0x3b53f7800 | 0x5000 | +| 106 | blk.19.attn_k.weight | 0x3b53fc800 | 0xa00000 | +| 107 | blk.19.attn_output.weight | 0x3b5dfc800 | 0x2800000 | +| 108 | blk.19.attn_q.weight | 0x3b85fc800 | 0x2800000 | +| 109 | blk.19.attn_v.weight | 0x3badfc800 | 0xa00000 | +| 110 | blk.2.attn_norm.weight | 0x3bb7fc800 | 0x5000 | +| 111 | blk.2.ffn_down.weight | 0x3bb801800 | 0x14000000 | +| 112 | blk.2.ffn_gate.weight | 0x3cf801800 | 0x14000000 | +| 113 | blk.2.ffn_up.weight | 0x3e3801800 | 0x14000000 | +| 114 | blk.2.ffn_norm.weight | 0x3f7801800 | 0x5000 | +| 115 | blk.2.attn_k.weight | 0x3f7806800 | 0xa00000 | +| 116 | blk.2.attn_output.weight | 0x3f8206800 | 0x2800000 | +| 117 | blk.2.attn_q.weight | 0x3faa06800 | 0x2800000 | +| 118 | blk.2.attn_v.weight | 0x3fd206800 | 0xa00000 | +| 119 | blk.20.attn_norm.weight | 0x3fdc06800 | 0x5000 | +| 120 | blk.20.ffn_down.weight | 0x3fdc0b800 | 0x14000000 | +| 121 | blk.20.ffn_gate.weight | 0x411c0b800 | 0x14000000 | +| 122 | blk.20.ffn_up.weight | 0x425c0b800 | 0x14000000 | +| 123 | blk.20.ffn_norm.weight | 0x439c0b800 | 0x5000 | +| 124 | blk.20.attn_k.weight | 0x439c10800 | 0xa00000 | +| 125 | blk.20.attn_output.weight | 0x43a610800 | 0x2800000 | +| 126 | blk.20.attn_q.weight | 0x43ce10800 | 0x2800000 | +| 127 | 
blk.20.attn_v.weight | 0x43f610800 | 0xa00000 | +| 128 | blk.21.attn_norm.weight | 0x440010800 | 0x5000 | +| 129 | blk.21.ffn_down.weight | 0x440015800 | 0x14000000 | +| 130 | blk.21.ffn_gate.weight | 0x454015800 | 0x14000000 | +| 131 | blk.21.ffn_up.weight | 0x468015800 | 0x14000000 | +| 132 | blk.21.ffn_norm.weight | 0x47c015800 | 0x5000 | +| 133 | blk.21.attn_k.weight | 0x47c01a800 | 0xa00000 | +| 134 | blk.21.attn_output.weight | 0x47ca1a800 | 0x2800000 | +| 135 | blk.21.attn_q.weight | 0x47f21a800 | 0x2800000 | +| 136 | blk.21.attn_v.weight | 0x481a1a800 | 0xa00000 | +| 137 | blk.22.attn_norm.weight | 0x48241a800 | 0x5000 | +| 138 | blk.22.ffn_down.weight | 0x48241f800 | 0x14000000 | +| 139 | blk.22.ffn_gate.weight | 0x49641f800 | 0x14000000 | +| 140 | blk.22.ffn_up.weight | 0x4aa41f800 | 0x14000000 | +| 141 | blk.22.ffn_norm.weight | 0x4be41f800 | 0x5000 | +| 142 | blk.22.attn_k.weight | 0x4be424800 | 0xa00000 | +| 143 | blk.22.attn_output.weight | 0x4bee24800 | 0x2800000 | +| 144 | blk.22.attn_q.weight | 0x4c1624800 | 0x2800000 | +| 145 | blk.22.attn_v.weight | 0x4c3e24800 | 0xa00000 | +| 146 | blk.23.attn_norm.weight | 0x4c4824800 | 0x5000 | +| 147 | blk.23.ffn_down.weight | 0x4c4829800 | 0x14000000 | +| 148 | blk.23.ffn_gate.weight | 0x4d8829800 | 0x14000000 | +| 149 | blk.23.ffn_up.weight | 0x4ec829800 | 0x14000000 | +| 150 | blk.23.ffn_norm.weight | 0x500829800 | 0x5000 | +| 151 | blk.23.attn_k.weight | 0x50082e800 | 0xa00000 | +| 152 | blk.23.attn_output.weight | 0x50122e800 | 0x2800000 | +| 153 | blk.23.attn_q.weight | 0x503a2e800 | 0x2800000 | +| 154 | blk.23.attn_v.weight | 0x50622e800 | 0xa00000 | +| 155 | blk.24.attn_norm.weight | 0x506c2e800 | 0x5000 | +| 156 | blk.24.ffn_down.weight | 0x506c33800 | 0x14000000 | +| 157 | blk.24.ffn_gate.weight | 0x51ac33800 | 0x14000000 | +| 158 | blk.24.ffn_up.weight | 0x52ec33800 | 0x14000000 | +| 159 | blk.24.ffn_norm.weight | 0x542c33800 | 0x5000 | +| 160 | blk.24.attn_k.weight | 0x542c38800 | 0xa00000 | +| 
161 | blk.24.attn_output.weight | 0x543638800 | 0x2800000 | +| 162 | blk.24.attn_q.weight | 0x545e38800 | 0x2800000 | +| 163 | blk.24.attn_v.weight | 0x548638800 | 0xa00000 | +| 164 | blk.25.attn_norm.weight | 0x549038800 | 0x5000 | +| 165 | blk.25.ffn_down.weight | 0x54903d800 | 0x14000000 | +| 166 | blk.25.ffn_gate.weight | 0x55d03d800 | 0x14000000 | +| 167 | blk.25.ffn_up.weight | 0x57103d800 | 0x14000000 | +| 168 | blk.25.ffn_norm.weight | 0x58503d800 | 0x5000 | +| 169 | blk.25.attn_k.weight | 0x585042800 | 0xa00000 | +| 170 | blk.25.attn_output.weight | 0x585a42800 | 0x2800000 | +| 171 | blk.25.attn_q.weight | 0x588242800 | 0x2800000 | +| 172 | blk.25.attn_v.weight | 0x58aa42800 | 0xa00000 | +| 173 | blk.26.attn_norm.weight | 0x58b442800 | 0x5000 | +| 174 | blk.26.ffn_down.weight | 0x58b447800 | 0x14000000 | +| 175 | blk.26.ffn_gate.weight | 0x59f447800 | 0x14000000 | +| 176 | blk.26.ffn_up.weight | 0x5b3447800 | 0x14000000 | +| 177 | blk.26.ffn_norm.weight | 0x5c7447800 | 0x5000 | +| 178 | blk.26.attn_k.weight | 0x5c744c800 | 0xa00000 | +| 179 | blk.26.attn_output.weight | 0x5c7e4c800 | 0x2800000 | +| 180 | blk.26.attn_q.weight | 0x5ca64c800 | 0x2800000 | +| 181 | blk.26.attn_v.weight | 0x5cce4c800 | 0xa00000 | +| 182 | blk.27.attn_norm.weight | 0x5cd84c800 | 0x5000 | +| 183 | blk.27.ffn_down.weight | 0x5cd851800 | 0x14000000 | +| 184 | blk.27.ffn_gate.weight | 0x5e1851800 | 0x14000000 | +| 185 | blk.27.ffn_up.weight | 0x5f5851800 | 0x14000000 | +| 186 | blk.27.ffn_norm.weight | 0x609851800 | 0x5000 | +| 187 | blk.27.attn_k.weight | 0x609856800 | 0xa00000 | +| 188 | blk.27.attn_output.weight | 0x60a256800 | 0x2800000 | +| 189 | blk.27.attn_q.weight | 0x60ca56800 | 0x2800000 | +| 190 | blk.27.attn_v.weight | 0x60f256800 | 0xa00000 | +| 191 | blk.28.attn_norm.weight | 0x60fc56800 | 0x5000 | +| 192 | blk.28.ffn_down.weight | 0x60fc5b800 | 0x14000000 | +| 193 | blk.28.ffn_gate.weight | 0x623c5b800 | 0x14000000 | +| 194 | blk.28.ffn_up.weight | 0x637c5b800 | 
0x14000000 | +| 195 | blk.28.ffn_norm.weight | 0x64bc5b800 | 0x5000 | +| 196 | blk.28.attn_k.weight | 0x64bc60800 | 0xa00000 | +| 197 | blk.28.attn_output.weight | 0x64c660800 | 0x2800000 | +| 198 | blk.28.attn_q.weight | 0x64ee60800 | 0x2800000 | +| 199 | blk.28.attn_v.weight | 0x651660800 | 0xa00000 | +| 200 | blk.29.attn_norm.weight | 0x652060800 | 0x5000 | +| 201 | blk.29.ffn_down.weight | 0x652065800 | 0x14000000 | +| 202 | blk.29.ffn_gate.weight | 0x666065800 | 0x14000000 | +| 203 | blk.29.ffn_up.weight | 0x67a065800 | 0x14000000 | +| 204 | blk.29.ffn_norm.weight | 0x68e065800 | 0x5000 | +| 205 | blk.29.attn_k.weight | 0x68e06a800 | 0xa00000 | +| 206 | blk.29.attn_output.weight | 0x68ea6a800 | 0x2800000 | +| 207 | blk.29.attn_q.weight | 0x69126a800 | 0x2800000 | +| 208 | blk.29.attn_v.weight | 0x693a6a800 | 0xa00000 | +| 209 | blk.3.attn_norm.weight | 0x69446a800 | 0x5000 | +| 210 | blk.3.ffn_down.weight | 0x69446f800 | 0x14000000 | +| 211 | blk.3.ffn_gate.weight | 0x6a846f800 | 0x14000000 | +| 212 | blk.3.ffn_up.weight | 0x6bc46f800 | 0x14000000 | +| 213 | blk.3.ffn_norm.weight | 0x6d046f800 | 0x5000 | +| 214 | blk.3.attn_k.weight | 0x6d0474800 | 0xa00000 | +| 215 | blk.3.attn_output.weight | 0x6d0e74800 | 0x2800000 | +| 216 | blk.3.attn_q.weight | 0x6d3674800 | 0x2800000 | +| 217 | blk.3.attn_v.weight | 0x6d5e74800 | 0xa00000 | +| 218 | blk.30.attn_norm.weight | 0x6d6874800 | 0x5000 | +| 219 | blk.30.ffn_down.weight | 0x6d6879800 | 0x14000000 | +| 220 | blk.30.ffn_gate.weight | 0x6ea879800 | 0x14000000 | +| 221 | blk.30.ffn_up.weight | 0x6fe879800 | 0x14000000 | +| 222 | blk.30.ffn_norm.weight | 0x712879800 | 0x5000 | +| 223 | blk.30.attn_k.weight | 0x71287e800 | 0xa00000 | +| 224 | blk.30.attn_output.weight | 0x71327e800 | 0x2800000 | +| 225 | blk.30.attn_q.weight | 0x715a7e800 | 0x2800000 | +| 226 | blk.30.attn_v.weight | 0x71827e800 | 0xa00000 | +| 227 | blk.31.attn_norm.weight | 0x718c7e800 | 0x5000 | +| 228 | blk.31.ffn_down.weight | 0x718c83800 | 
0x14000000 | +| 229 | blk.31.ffn_gate.weight | 0x72cc83800 | 0x14000000 | +| 230 | blk.31.ffn_up.weight | 0x740c83800 | 0x14000000 | +| 231 | blk.31.ffn_norm.weight | 0x754c83800 | 0x5000 | +| 232 | blk.31.attn_k.weight | 0x754c88800 | 0xa00000 | +| 233 | blk.31.attn_output.weight | 0x755688800 | 0x2800000 | +| 234 | blk.31.attn_q.weight | 0x757e88800 | 0x2800000 | +| 235 | blk.31.attn_v.weight | 0x75a688800 | 0xa00000 | +| 236 | blk.32.attn_norm.weight | 0x75b088800 | 0x5000 | +| 237 | blk.32.ffn_down.weight | 0x75b08d800 | 0x14000000 | +| 238 | blk.32.ffn_gate.weight | 0x76f08d800 | 0x14000000 | +| 239 | blk.32.ffn_up.weight | 0x78308d800 | 0x14000000 | +| 240 | blk.32.ffn_norm.weight | 0x79708d800 | 0x5000 | +| 241 | blk.32.attn_k.weight | 0x797092800 | 0xa00000 | +| 242 | blk.32.attn_output.weight | 0x797a92800 | 0x2800000 | +| 243 | blk.32.attn_q.weight | 0x79a292800 | 0x2800000 | +| 244 | blk.32.attn_v.weight | 0x79ca92800 | 0xa00000 | +| 245 | blk.33.attn_norm.weight | 0x79d492800 | 0x5000 | +| 246 | blk.33.ffn_down.weight | 0x79d497800 | 0x14000000 | +| 247 | blk.33.ffn_gate.weight | 0x7b1497800 | 0x14000000 | +| 248 | blk.33.ffn_up.weight | 0x7c5497800 | 0x14000000 | +| 249 | blk.33.ffn_norm.weight | 0x7d9497800 | 0x5000 | +| 250 | blk.33.attn_k.weight | 0x7d949c800 | 0xa00000 | +| 251 | blk.33.attn_output.weight | 0x7d9e9c800 | 0x2800000 | +| 252 | blk.33.attn_q.weight | 0x7dc69c800 | 0x2800000 | +| 253 | blk.33.attn_v.weight | 0x7dee9c800 | 0xa00000 | +| 254 | blk.34.attn_norm.weight | 0x7df89c800 | 0x5000 | +| 255 | blk.34.ffn_down.weight | 0x7df8a1800 | 0x14000000 | +| 256 | blk.34.ffn_gate.weight | 0x7f38a1800 | 0x14000000 | +| 257 | blk.34.ffn_up.weight | 0x8078a1800 | 0x14000000 | +| 258 | blk.34.ffn_norm.weight | 0x81b8a1800 | 0x5000 | +| 259 | blk.34.attn_k.weight | 0x81b8a6800 | 0xa00000 | +| 260 | blk.34.attn_output.weight | 0x81c2a6800 | 0x2800000 | +| 261 | blk.34.attn_q.weight | 0x81eaa6800 | 0x2800000 | +| 262 | blk.34.attn_v.weight | 
0x8212a6800 | 0xa00000 | +| 263 | blk.35.attn_norm.weight | 0x821ca6800 | 0x5000 | +| 264 | blk.35.ffn_down.weight | 0x821cab800 | 0x14000000 | +| 265 | blk.35.ffn_gate.weight | 0x835cab800 | 0x14000000 | +| 266 | blk.35.ffn_up.weight | 0x849cab800 | 0x14000000 | +| 267 | blk.35.ffn_norm.weight | 0x85dcab800 | 0x5000 | +| 268 | blk.35.attn_k.weight | 0x85dcb0800 | 0xa00000 | +| 269 | blk.35.attn_output.weight | 0x85e6b0800 | 0x2800000 | +| 270 | blk.35.attn_q.weight | 0x860eb0800 | 0x2800000 | +| 271 | blk.35.attn_v.weight | 0x8636b0800 | 0xa00000 | +| 272 | blk.36.attn_norm.weight | 0x8640b0800 | 0x5000 | +| 273 | blk.36.ffn_down.weight | 0x8640b5800 | 0x14000000 | +| 274 | blk.36.ffn_gate.weight | 0x8780b5800 | 0x14000000 | +| 275 | blk.36.ffn_up.weight | 0x88c0b5800 | 0x14000000 | +| 276 | blk.36.ffn_norm.weight | 0x8a00b5800 | 0x5000 | +| 277 | blk.36.attn_k.weight | 0x8a00ba800 | 0xa00000 | +| 278 | blk.36.attn_output.weight | 0x8a0aba800 | 0x2800000 | +| 279 | blk.36.attn_q.weight | 0x8a32ba800 | 0x2800000 | +| 280 | blk.36.attn_v.weight | 0x8a5aba800 | 0xa00000 | +| 281 | blk.37.attn_norm.weight | 0x8a64ba800 | 0x5000 | +| 282 | blk.37.ffn_down.weight | 0x8a64bf800 | 0x14000000 | +| 283 | blk.37.ffn_gate.weight | 0x8ba4bf800 | 0x14000000 | +| 284 | blk.37.ffn_up.weight | 0x8ce4bf800 | 0x14000000 | +| 285 | blk.37.ffn_norm.weight | 0x8e24bf800 | 0x5000 | +| 286 | blk.37.attn_k.weight | 0x8e24c4800 | 0xa00000 | +| 287 | blk.37.attn_output.weight | 0x8e2ec4800 | 0x2800000 | +| 288 | blk.37.attn_q.weight | 0x8e56c4800 | 0x2800000 | +| 289 | blk.37.attn_v.weight | 0x8e7ec4800 | 0xa00000 | +| 290 | blk.38.attn_norm.weight | 0x8e88c4800 | 0x5000 | +| 291 | blk.38.ffn_down.weight | 0x8e88c9800 | 0x14000000 | +| 292 | blk.38.ffn_gate.weight | 0x8fc8c9800 | 0x14000000 | +| 293 | blk.38.ffn_up.weight | 0x9108c9800 | 0x14000000 | +| 294 | blk.38.ffn_norm.weight | 0x9248c9800 | 0x5000 | +| 295 | blk.38.attn_k.weight | 0x9248ce800 | 0xa00000 | +| 296 | 
blk.38.attn_output.weight | 0x9252ce800 | 0x2800000 | +| 297 | blk.38.attn_q.weight | 0x927ace800 | 0x2800000 | +| 298 | blk.38.attn_v.weight | 0x92a2ce800 | 0xa00000 | +| 299 | blk.39.attn_norm.weight | 0x92acce800 | 0x5000 | +| 300 | blk.39.ffn_down.weight | 0x92acd3800 | 0x14000000 | +| 301 | blk.39.ffn_gate.weight | 0x93ecd3800 | 0x14000000 | +| 302 | blk.39.ffn_up.weight | 0x952cd3800 | 0x14000000 | +| 303 | blk.39.ffn_norm.weight | 0x966cd3800 | 0x5000 | +| 304 | blk.39.attn_k.weight | 0x966cd8800 | 0xa00000 | +| 305 | blk.39.attn_output.weight | 0x9676d8800 | 0x2800000 | +| 306 | blk.39.attn_q.weight | 0x969ed8800 | 0x2800000 | +| 307 | blk.39.attn_v.weight | 0x96c6d8800 | 0xa00000 | +| 308 | blk.4.attn_norm.weight | 0x96d0d8800 | 0x5000 | +| 309 | blk.4.ffn_down.weight | 0x96d0dd800 | 0x14000000 | +| 310 | blk.4.ffn_gate.weight | 0x9810dd800 | 0x14000000 | +| 311 | blk.4.ffn_up.weight | 0x9950dd800 | 0x14000000 | +| 312 | blk.4.ffn_norm.weight | 0x9a90dd800 | 0x5000 | +| 313 | blk.4.attn_k.weight | 0x9a90e2800 | 0xa00000 | +| 314 | blk.4.attn_output.weight | 0x9a9ae2800 | 0x2800000 | +| 315 | blk.4.attn_q.weight | 0x9ac2e2800 | 0x2800000 | +| 316 | blk.4.attn_v.weight | 0x9aeae2800 | 0xa00000 | +| 317 | blk.5.attn_norm.weight | 0x9af4e2800 | 0x5000 | +| 318 | blk.5.ffn_down.weight | 0x9af4e7800 | 0x14000000 | +| 319 | blk.5.ffn_gate.weight | 0x9c34e7800 | 0x14000000 | +| 320 | blk.5.ffn_up.weight | 0x9d74e7800 | 0x14000000 | +| 321 | blk.5.ffn_norm.weight | 0x9eb4e7800 | 0x5000 | +| 322 | blk.5.attn_k.weight | 0x9eb4ec800 | 0xa00000 | +| 323 | blk.5.attn_output.weight | 0x9ebeec800 | 0x2800000 | +| 324 | blk.5.attn_q.weight | 0x9ee6ec800 | 0x2800000 | +| 325 | blk.5.attn_v.weight | 0x9f0eec800 | 0xa00000 | +| 326 | blk.6.attn_norm.weight | 0x9f18ec800 | 0x5000 | +| 327 | blk.6.ffn_down.weight | 0x9f18f1800 | 0x14000000 | +| 328 | blk.6.ffn_gate.weight | 0xa058f1800 | 0x14000000 | +| 329 | blk.6.ffn_up.weight | 0xa198f1800 | 0x14000000 | +| 330 | 
blk.6.ffn_norm.weight | 0xa2d8f1800 | 0x5000 | +| 331 | blk.6.attn_k.weight | 0xa2d8f6800 | 0xa00000 | +| 332 | blk.6.attn_output.weight | 0xa2e2f6800 | 0x2800000 | +| 333 | blk.6.attn_q.weight | 0xa30af6800 | 0x2800000 | +| 334 | blk.6.attn_v.weight | 0xa332f6800 | 0xa00000 | +| 335 | blk.7.attn_norm.weight | 0xa33cf6800 | 0x5000 | +| 336 | blk.7.ffn_down.weight | 0xa33cfb800 | 0x14000000 | +| 337 | blk.7.ffn_gate.weight | 0xa47cfb800 | 0x14000000 | +| 338 | blk.7.ffn_up.weight | 0xa5bcfb800 | 0x14000000 | +| 339 | blk.7.ffn_norm.weight | 0xa6fcfb800 | 0x5000 | +| 340 | blk.7.attn_k.weight | 0xa6fd00800 | 0xa00000 | +| 341 | blk.7.attn_output.weight | 0xa70700800 | 0x2800000 | +| 342 | blk.7.attn_q.weight | 0xa72f00800 | 0x2800000 | +| 343 | blk.7.attn_v.weight | 0xa75700800 | 0xa00000 | +| 344 | blk.8.attn_norm.weight | 0xa76100800 | 0x5000 | +| 345 | blk.8.ffn_down.weight | 0xa76105800 | 0x14000000 | +| 346 | blk.8.ffn_gate.weight | 0xa8a105800 | 0x14000000 | +| 347 | blk.8.ffn_up.weight | 0xa9e105800 | 0x14000000 | +| 348 | blk.8.ffn_norm.weight | 0xab2105800 | 0x5000 | +| 349 | blk.8.attn_k.weight | 0xab210a800 | 0xa00000 | +| 350 | blk.8.attn_output.weight | 0xab2b0a800 | 0x2800000 | +| 351 | blk.8.attn_q.weight | 0xab530a800 | 0x2800000 | +| 352 | blk.8.attn_v.weight | 0xab7b0a800 | 0xa00000 | +| 353 | blk.9.attn_norm.weight | 0xab850a800 | 0x5000 | +| 354 | blk.9.ffn_down.weight | 0xab850f800 | 0x14000000 | +| 355 | blk.9.ffn_gate.weight | 0xacc50f800 | 0x14000000 | +| 356 | blk.9.ffn_up.weight | 0xae050f800 | 0x14000000 | +| 357 | blk.9.ffn_norm.weight | 0xaf450f800 | 0x5000 | +| 358 | blk.9.attn_k.weight | 0xaf4514800 | 0xa00000 | +| 359 | blk.9.attn_output.weight | 0xaf4f14800 | 0x2800000 | +| 360 | blk.9.attn_q.weight | 0xaf7714800 | 0x2800000 | +| 361 | blk.9.attn_v.weight | 0xaf9f14800 | 0xa00000 | +| 362 | output_norm.weight | 0xafa914800 | 0x5000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer 
Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | F16 | +| 1 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | F16 | +| 362 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.69% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 2 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 3 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 4 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 5 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 6 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 7 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 8 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 9 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 10 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 11 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 12 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 13 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 14 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 15 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 16 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 17 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 18 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 19 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 20 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 21 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 22 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 23 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 24 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 25 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 26 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 27 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 28 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 29 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 30 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 31 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 32 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 33 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 34 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 35 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 36 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 37 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 
x 1 x 1 | F16 | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 38 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 39 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 40 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 41 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 42 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 43 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 44 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 45 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 46 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 47 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 48 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 
| 32768 x 5120 x 1 x 1 | F16 | +| 49 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 50 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 51 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 52 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 53 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 54 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 55 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 56 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 57 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 58 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 59 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 60 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 61 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 62 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 
4096 x 5120 x 1 x 1 | F16 | +| 63 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 64 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 65 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 66 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 67 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 68 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 69 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 70 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 71 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 72 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 73 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 74 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 75 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 76 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 77 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 78 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 79 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 80 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 81 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 82 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 83 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 84 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 85 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 86 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 
| 5120 x 32768 x 1 x 1 | F16 | +| 87 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 88 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 89 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 90 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 91 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 92 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 93 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 94 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 95 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 96 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 97 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 98 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 99 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 100 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | 
+ +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 101 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 102 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 103 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 104 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 105 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 106 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 107 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 108 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 109 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 110 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 111 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 
5120 x 1 x 1 | F16 | +| 112 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 113 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 114 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 115 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 116 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 117 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 118 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 119 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 120 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 121 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 122 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 123 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 124 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 125 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 
5120 x 1 x 1 | F16 | +| 126 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 127 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 128 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 129 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 130 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 131 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 132 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 133 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 134 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 135 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 136 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 137 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 138 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 139 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 140 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 141 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 142 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 143 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 144 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 145 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 146 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 147 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 148 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 149 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 150 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 151 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 152 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 153 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 154 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 155 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 156 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 157 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 158 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 159 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 160 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 161 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 162 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 163 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 
| 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 164 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 165 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 166 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 167 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 168 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 169 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 170 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 171 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 172 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 173 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 174 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" 
(W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 175 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 176 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 177 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 178 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 179 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 180 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 181 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 182 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 183 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 184 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 185 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 186 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 187 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 188 | blk.27.attn_output.weight | Block 27 
Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 189 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 190 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 191 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 192 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 193 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 194 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 195 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 196 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 197 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 198 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 199 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 200 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 201 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 202 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 203 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 204 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 205 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 206 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 207 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 208 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 209 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 210 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 211 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 212 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 
167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 213 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 214 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 215 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 216 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 217 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 218 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 219 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 220 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 221 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 222 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 223 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 224 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 225 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 226 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 
x 1 | F16 | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 227 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 228 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 229 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 230 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 231 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 232 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 233 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 234 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 235 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 236 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 237 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 238 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 239 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 240 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 241 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 242 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 243 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 244 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 245 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 246 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 247 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 248 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 249 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 250 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 251 | blk.33.attn_output.weight | Block 33 Attention Output 
(W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 252 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 253 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 254 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 255 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 256 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 257 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 258 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 259 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 260 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 261 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 262 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 263 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 264 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 265 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 266 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 267 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 268 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 269 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 270 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 271 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 272 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 273 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 274 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 275 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 276 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 277 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 278 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 279 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 280 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 281 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 282 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 283 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 284 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 285 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 286 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 287 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 288 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 289 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 
| 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 38 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 290 | blk.38.attn_norm.weight | Block 38 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 291 | blk.38.ffn_down.weight | Block 38 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 292 | blk.38.ffn_gate.weight | Block 38 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 293 | blk.38.ffn_up.weight | Block 38 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 294 | blk.38.ffn_norm.weight | Block 38 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 295 | blk.38.attn_k.weight | Block 38 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 296 | blk.38.attn_output.weight | Block 38 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 297 | blk.38.attn_q.weight | Block 38 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 298 | blk.38.attn_v.weight | Block 38 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.38: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 39 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 299 | blk.39.attn_norm.weight | Block 39 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 300 | blk.39.ffn_down.weight | Block 39 Feed-Forward Network "Down" 
(W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 301 | blk.39.ffn_gate.weight | Block 39 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 302 | blk.39.ffn_up.weight | Block 39 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 303 | blk.39.ffn_norm.weight | Block 39 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 304 | blk.39.attn_k.weight | Block 39 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 305 | blk.39.attn_output.weight | Block 39 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 306 | blk.39.attn_q.weight | Block 39 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 307 | blk.39.attn_v.weight | Block 39 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.39: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 308 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 309 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 310 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 311 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 312 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 313 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 314 | blk.4.attn_output.weight | Block 4 Attention Output (W) 
| ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 315 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 316 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 317 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 318 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 319 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 320 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 321 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 322 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 323 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 324 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 325 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 326 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 327 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 328 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 329 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 330 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 331 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 332 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 333 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 334 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 335 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 336 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 337 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 338 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 
x 1 | F16 | +| 339 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 340 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 341 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 342 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 343 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.36% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 344 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 345 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 346 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 347 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 348 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 349 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 350 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 351 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 352 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.8: (~556M) 
555755520 +- Percentage of total elements: 2.36% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 353 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 354 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | F16 | +| 355 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 356 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | F16 | +| 357 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 358 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 359 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | F16 | +| 360 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | F16 | +| 361 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.36% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.mmlu new file mode 100644 index 0000000..4688e84 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 39 key-value pairs and 363 tensors from 
./Dolphin-Mistral-24B-Venice-Edition-F16.gguf (version GGUF V3 (latest)) + +Final result: 45.3333 +/- 1.8190 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 2912.06 ms +llama_perf_context_print: prompt eval time = 289195.06 ms / 68956 tokens ( 4.19 ms per token, 238.44 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 290438.95 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.tqa new file mode 100644 index 0000000..f9a678b --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 39 key-value pairs and 363 tensors from ./Dolphin-Mistral-24B-Venice-Edition-F16.gguf (version GGUF V3 (latest)) + +Final result: 38.1333 +/- 1.7748 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 2866.02 ms +llama_perf_context_print: prompt eval time = 221192.17 ms / 51053 tokens ( 4.33 ms per token, 230.81 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 
runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 222713.65 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-F16.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.wng new file mode 100644 index 0000000..8db37f8 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-F16.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 39 key-value pairs and 363 tensors from ./Dolphin-Mistral-24B-Venice-Edition-F16.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 80.2667 +/- 1.4542 + +llama_perf_context_print: load time = 2915.27 ms +llama_perf_context_print: prompt eval time = 93765.90 ms / 22541 tokens ( 4.16 ms per token, 240.40 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 94258.84 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_M.md b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_M.md new file mode 100644 index 0000000..dfafe57 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_M.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 
| +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... ] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 27 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 
Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3\_M.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-iq3_mgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + 
- [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : ~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x11300000 | +| 1 | output_norm.weight | 0x11a84500 | 0x5000 | +| 2 | token_embd.weight | 0x11a89500 | 0x11300000 | +| 3 | 
blk.0.attn_k.weight | 0x22d89500 | 0x1ea000 | +| 4 | blk.0.attn_norm.weight | 0x22f73500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x22f78500 | 0xb40000 | +| 6 | blk.0.attn_q.weight | 0x23ab8500 | 0x7a8000 | +| 7 | blk.0.attn_v.weight | 0x24260500 | 0x226000 | +| 8 | blk.0.ffn_down.weight | 0x24486500 | 0x5a00000 | +| 9 | blk.0.ffn_gate.weight | 0x29e86500 | 0x3d40000 | +| 10 | blk.0.ffn_norm.weight | 0x2dbc6500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x2dbcb500 | 0x3d40000 | +| 12 | blk.1.attn_k.weight | 0x3190b500 | 0x1ea000 | +| 13 | blk.1.attn_norm.weight | 0x31af5500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x31afa500 | 0xb40000 | +| 15 | blk.1.attn_q.weight | 0x3263a500 | 0x7a8000 | +| 16 | blk.1.attn_v.weight | 0x32de2500 | 0x226000 | +| 17 | blk.1.ffn_down.weight | 0x33008500 | 0x5a00000 | +| 18 | blk.1.ffn_gate.weight | 0x38a08500 | 0x3d40000 | +| 19 | blk.1.ffn_norm.weight | 0x3c748500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x3c74d500 | 0x3d40000 | +| 21 | blk.2.attn_k.weight | 0x4048d500 | 0x1ea000 | +| 22 | blk.2.attn_norm.weight | 0x40677500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x4067c500 | 0xb40000 | +| 24 | blk.2.attn_q.weight | 0x411bc500 | 0x7a8000 | +| 25 | blk.2.attn_v.weight | 0x41964500 | 0x226000 | +| 26 | blk.2.ffn_down.weight | 0x41b8a500 | 0x5a00000 | +| 27 | blk.2.ffn_gate.weight | 0x4758a500 | 0x3d40000 | +| 28 | blk.2.ffn_norm.weight | 0x4b2ca500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x4b2cf500 | 0x3d40000 | +| 30 | blk.3.attn_k.weight | 0x4f00f500 | 0x1ea000 | +| 31 | blk.3.attn_norm.weight | 0x4f1f9500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x4f1fe500 | 0xb40000 | +| 33 | blk.3.attn_q.weight | 0x4fd3e500 | 0x7a8000 | +| 34 | blk.3.attn_v.weight | 0x504e6500 | 0x226000 | +| 35 | blk.3.ffn_down.weight | 0x5070c500 | 0x5a00000 | +| 36 | blk.3.ffn_gate.weight | 0x5610c500 | 0x3d40000 | +| 37 | blk.3.ffn_norm.weight | 0x59e4c500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x59e51500 | 0x3d40000 | +| 39 | 
blk.4.attn_k.weight | 0x5db91500 | 0x1ea000 | +| 40 | blk.4.attn_norm.weight | 0x5dd7b500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x5dd80500 | 0xb40000 | +| 42 | blk.4.attn_q.weight | 0x5e8c0500 | 0x7a8000 | +| 43 | blk.4.attn_v.weight | 0x5f068500 | 0x226000 | +| 44 | blk.4.ffn_down.weight | 0x5f28e500 | 0x5a00000 | +| 45 | blk.4.ffn_gate.weight | 0x64c8e500 | 0x3d40000 | +| 46 | blk.4.ffn_norm.weight | 0x689ce500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x689d3500 | 0x3d40000 | +| 48 | blk.5.attn_k.weight | 0x6c713500 | 0x1ea000 | +| 49 | blk.5.attn_norm.weight | 0x6c8fd500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x6c902500 | 0xb40000 | +| 51 | blk.5.attn_q.weight | 0x6d442500 | 0x7a8000 | +| 52 | blk.5.attn_v.weight | 0x6dbea500 | 0x226000 | +| 53 | blk.5.ffn_down.weight | 0x6de10500 | 0x44c0000 | +| 54 | blk.5.ffn_gate.weight | 0x722d0500 | 0x3d40000 | +| 55 | blk.5.ffn_norm.weight | 0x76010500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x76015500 | 0x3d40000 | +| 57 | blk.6.attn_k.weight | 0x79d55500 | 0x1ea000 | +| 58 | blk.6.attn_norm.weight | 0x79f3f500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x79f44500 | 0xb40000 | +| 60 | blk.6.attn_q.weight | 0x7aa84500 | 0x7a8000 | +| 61 | blk.6.attn_v.weight | 0x7b22c500 | 0x226000 | +| 62 | blk.6.ffn_down.weight | 0x7b452500 | 0x44c0000 | +| 63 | blk.6.ffn_gate.weight | 0x7f912500 | 0x3d40000 | +| 64 | blk.6.ffn_norm.weight | 0x83652500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x83657500 | 0x3d40000 | +| 66 | blk.7.attn_k.weight | 0x87397500 | 0x1ea000 | +| 67 | blk.7.attn_norm.weight | 0x87581500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x87586500 | 0xb40000 | +| 69 | blk.7.attn_q.weight | 0x880c6500 | 0x7a8000 | +| 70 | blk.7.attn_v.weight | 0x8886e500 | 0x226000 | +| 71 | blk.7.ffn_down.weight | 0x88a94500 | 0x44c0000 | +| 72 | blk.7.ffn_gate.weight | 0x8cf54500 | 0x3d40000 | +| 73 | blk.7.ffn_norm.weight | 0x90c94500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x90c99500 | 0x3d40000 | +| 75 | 
blk.8.attn_k.weight | 0x949d9500 | 0x1ea000 | +| 76 | blk.8.attn_norm.weight | 0x94bc3500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x94bc8500 | 0xb40000 | +| 78 | blk.8.attn_q.weight | 0x95708500 | 0x7a8000 | +| 79 | blk.8.attn_v.weight | 0x95eb0500 | 0x226000 | +| 80 | blk.8.ffn_down.weight | 0x960d6500 | 0x44c0000 | +| 81 | blk.8.ffn_gate.weight | 0x9a596500 | 0x3d40000 | +| 82 | blk.8.ffn_norm.weight | 0x9e2d6500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x9e2db500 | 0x3d40000 | +| 84 | blk.9.attn_k.weight | 0xa201b500 | 0x1ea000 | +| 85 | blk.9.attn_norm.weight | 0xa2205500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xa220a500 | 0xb40000 | +| 87 | blk.9.attn_q.weight | 0xa2d4a500 | 0x7a8000 | +| 88 | blk.9.attn_v.weight | 0xa34f2500 | 0x226000 | +| 89 | blk.9.ffn_down.weight | 0xa3718500 | 0x44c0000 | +| 90 | blk.9.ffn_gate.weight | 0xa7bd8500 | 0x3d40000 | +| 91 | blk.9.ffn_norm.weight | 0xab918500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xab91d500 | 0x3d40000 | +| 93 | blk.10.attn_k.weight | 0xaf65d500 | 0x1ea000 | +| 94 | blk.10.attn_norm.weight | 0xaf847500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xaf84c500 | 0xb40000 | +| 96 | blk.10.attn_q.weight | 0xb038c500 | 0x7a8000 | +| 97 | blk.10.attn_v.weight | 0xb0b34500 | 0x226000 | +| 98 | blk.10.ffn_down.weight | 0xb0d5a500 | 0x44c0000 | +| 99 | blk.10.ffn_gate.weight | 0xb521a500 | 0x3d40000 | +| 100 | blk.10.ffn_norm.weight | 0xb8f5a500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xb8f5f500 | 0x3d40000 | +| 102 | blk.11.attn_k.weight | 0xbcc9f500 | 0x1ea000 | +| 103 | blk.11.attn_norm.weight | 0xbce89500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xbce8e500 | 0xb40000 | +| 105 | blk.11.attn_q.weight | 0xbd9ce500 | 0x7a8000 | +| 106 | blk.11.attn_v.weight | 0xbe176500 | 0x226000 | +| 107 | blk.11.ffn_down.weight | 0xbe39c500 | 0x44c0000 | +| 108 | blk.11.ffn_gate.weight | 0xc285c500 | 0x3d40000 | +| 109 | blk.11.ffn_norm.weight | 0xc659c500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 
0xc65a1500 | 0x3d40000 | +| 111 | blk.12.attn_k.weight | 0xca2e1500 | 0x1ea000 | +| 112 | blk.12.attn_norm.weight | 0xca4cb500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xca4d0500 | 0xb40000 | +| 114 | blk.12.attn_q.weight | 0xcb010500 | 0x7a8000 | +| 115 | blk.12.attn_v.weight | 0xcb7b8500 | 0x226000 | +| 116 | blk.12.ffn_down.weight | 0xcb9de500 | 0x44c0000 | +| 117 | blk.12.ffn_gate.weight | 0xcfe9e500 | 0x3d40000 | +| 118 | blk.12.ffn_norm.weight | 0xd3bde500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xd3be3500 | 0x3d40000 | +| 120 | blk.13.attn_k.weight | 0xd7923500 | 0x1ea000 | +| 121 | blk.13.attn_norm.weight | 0xd7b0d500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xd7b12500 | 0xb40000 | +| 123 | blk.13.attn_q.weight | 0xd8652500 | 0x7a8000 | +| 124 | blk.13.attn_v.weight | 0xd8dfa500 | 0x226000 | +| 125 | blk.13.ffn_down.weight | 0xd9020500 | 0x44c0000 | +| 126 | blk.13.ffn_gate.weight | 0xdd4e0500 | 0x3d40000 | +| 127 | blk.13.ffn_norm.weight | 0xe1220500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0xe1225500 | 0x3d40000 | +| 129 | blk.14.attn_k.weight | 0xe4f65500 | 0x1ea000 | +| 130 | blk.14.attn_norm.weight | 0xe514f500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0xe5154500 | 0xb40000 | +| 132 | blk.14.attn_q.weight | 0xe5c94500 | 0x7a8000 | +| 133 | blk.14.attn_v.weight | 0xe643c500 | 0x226000 | +| 134 | blk.14.ffn_down.weight | 0xe6662500 | 0x44c0000 | +| 135 | blk.14.ffn_gate.weight | 0xeab22500 | 0x3d40000 | +| 136 | blk.14.ffn_norm.weight | 0xee862500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0xee867500 | 0x3d40000 | +| 138 | blk.15.attn_k.weight | 0xf25a7500 | 0x1ea000 | +| 139 | blk.15.attn_norm.weight | 0xf2791500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0xf2796500 | 0xb40000 | +| 141 | blk.15.attn_q.weight | 0xf32d6500 | 0x7a8000 | +| 142 | blk.15.attn_v.weight | 0xf3a7e500 | 0x226000 | +| 143 | blk.15.ffn_down.weight | 0xf3ca4500 | 0x44c0000 | +| 144 | blk.15.ffn_gate.weight | 0xf8164500 | 0x3d40000 | +| 145 | 
blk.15.ffn_norm.weight | 0xfbea4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0xfbea9500 | 0x3d40000 | +| 147 | blk.16.attn_k.weight | 0xffbe9500 | 0x1ea000 | +| 148 | blk.16.attn_norm.weight | 0xffdd3500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0xffdd8500 | 0xb40000 | +| 150 | blk.16.attn_q.weight | 0x100918500 | 0x7a8000 | +| 151 | blk.16.attn_v.weight | 0x1010c0500 | 0x226000 | +| 152 | blk.16.ffn_down.weight | 0x1012e6500 | 0x44c0000 | +| 153 | blk.16.ffn_gate.weight | 0x1057a6500 | 0x3d40000 | +| 154 | blk.16.ffn_norm.weight | 0x1094e6500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x1094eb500 | 0x3d40000 | +| 156 | blk.17.attn_k.weight | 0x10d22b500 | 0x226000 | +| 157 | blk.17.attn_norm.weight | 0x10d451500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x10d456500 | 0xb40000 | +| 159 | blk.17.attn_q.weight | 0x10df96500 | 0x898000 | +| 160 | blk.17.attn_v.weight | 0x10e82e500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x10eafe500 | 0x44c0000 | +| 162 | blk.17.ffn_gate.weight | 0x112fbe500 | 0x3d40000 | +| 163 | blk.17.ffn_norm.weight | 0x116cfe500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x116d03500 | 0x3d40000 | +| 165 | blk.18.attn_k.weight | 0x11aa43500 | 0x226000 | +| 166 | blk.18.attn_norm.weight | 0x11ac69500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x11ac6e500 | 0xb40000 | +| 168 | blk.18.attn_q.weight | 0x11b7ae500 | 0x898000 | +| 169 | blk.18.attn_v.weight | 0x11c046500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x11c316500 | 0x44c0000 | +| 171 | blk.18.ffn_gate.weight | 0x1207d6500 | 0x3d40000 | +| 172 | blk.18.ffn_norm.weight | 0x124516500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x12451b500 | 0x3d40000 | +| 174 | blk.19.attn_k.weight | 0x12825b500 | 0x1ea000 | +| 175 | blk.19.attn_norm.weight | 0x128445500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x12844a500 | 0xb40000 | +| 177 | blk.19.attn_q.weight | 0x128f8a500 | 0x7a8000 | +| 178 | blk.19.attn_v.weight | 0x129732500 | 0x226000 | +| 179 | 
blk.19.ffn_down.weight | 0x129958500 | 0x44c0000 | +| 180 | blk.19.ffn_gate.weight | 0x12de18500 | 0x3d40000 | +| 181 | blk.19.ffn_norm.weight | 0x131b58500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x131b5d500 | 0x3d40000 | +| 183 | blk.20.attn_k.weight | 0x13589d500 | 0x226000 | +| 184 | blk.20.attn_norm.weight | 0x135ac3500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x135ac8500 | 0xb40000 | +| 186 | blk.20.attn_q.weight | 0x136608500 | 0x898000 | +| 187 | blk.20.attn_v.weight | 0x136ea0500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x137170500 | 0x44c0000 | +| 189 | blk.20.ffn_gate.weight | 0x13b630500 | 0x44c0000 | +| 190 | blk.20.ffn_norm.weight | 0x13faf0500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x13faf5500 | 0x44c0000 | +| 192 | blk.21.attn_k.weight | 0x143fb5500 | 0x1ea000 | +| 193 | blk.21.attn_norm.weight | 0x14419f500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x1441a4500 | 0xb40000 | +| 195 | blk.21.attn_q.weight | 0x144ce4500 | 0x7a8000 | +| 196 | blk.21.attn_v.weight | 0x14548c500 | 0x226000 | +| 197 | blk.21.ffn_down.weight | 0x1456b2500 | 0x44c0000 | +| 198 | blk.21.ffn_gate.weight | 0x149b72500 | 0x44c0000 | +| 199 | blk.21.ffn_norm.weight | 0x14e032500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x14e037500 | 0x44c0000 | +| 201 | blk.22.attn_k.weight | 0x1524f7500 | 0x226000 | +| 202 | blk.22.attn_norm.weight | 0x15271d500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x152722500 | 0xb40000 | +| 204 | blk.22.attn_q.weight | 0x153262500 | 0x898000 | +| 205 | blk.22.attn_v.weight | 0x153afa500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x153dca500 | 0x44c0000 | +| 207 | blk.22.ffn_gate.weight | 0x15828a500 | 0x44c0000 | +| 208 | blk.22.ffn_norm.weight | 0x15c74a500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x15c74f500 | 0x44c0000 | +| 210 | blk.23.attn_k.weight | 0x160c0f500 | 0x226000 | +| 211 | blk.23.attn_norm.weight | 0x160e35500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x160e3a500 | 0xb40000 | +| 213 | 
blk.23.attn_q.weight | 0x16197a500 | 0x898000 | +| 214 | blk.23.attn_v.weight | 0x162212500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x1624e2500 | 0x44c0000 | +| 216 | blk.23.ffn_gate.weight | 0x1669a2500 | 0x44c0000 | +| 217 | blk.23.ffn_norm.weight | 0x16ae62500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x16ae67500 | 0x44c0000 | +| 219 | blk.24.attn_k.weight | 0x16f327500 | 0x226000 | +| 220 | blk.24.attn_norm.weight | 0x16f54d500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x16f552500 | 0xb40000 | +| 222 | blk.24.attn_q.weight | 0x170092500 | 0x898000 | +| 223 | blk.24.attn_v.weight | 0x17092a500 | 0x2d0000 | +| 224 | blk.24.ffn_down.weight | 0x170bfa500 | 0x44c0000 | +| 225 | blk.24.ffn_gate.weight | 0x1750ba500 | 0x44c0000 | +| 226 | blk.24.ffn_norm.weight | 0x17957a500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x17957f500 | 0x44c0000 | +| 228 | blk.25.attn_k.weight | 0x17da3f500 | 0x226000 | +| 229 | blk.25.attn_norm.weight | 0x17dc65500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x17dc6a500 | 0xb40000 | +| 231 | blk.25.attn_q.weight | 0x17e7aa500 | 0x898000 | +| 232 | blk.25.attn_v.weight | 0x17f042500 | 0x2d0000 | +| 233 | blk.25.ffn_down.weight | 0x17f312500 | 0x44c0000 | +| 234 | blk.25.ffn_gate.weight | 0x1837d2500 | 0x44c0000 | +| 235 | blk.25.ffn_norm.weight | 0x187c92500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x187c97500 | 0x44c0000 | +| 237 | blk.26.attn_k.weight | 0x18c157500 | 0x226000 | +| 238 | blk.26.attn_norm.weight | 0x18c37d500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x18c382500 | 0xb40000 | +| 240 | blk.26.attn_q.weight | 0x18cec2500 | 0x898000 | +| 241 | blk.26.attn_v.weight | 0x18d75a500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x18da2a500 | 0x44c0000 | +| 243 | blk.26.ffn_gate.weight | 0x191eea500 | 0x44c0000 | +| 244 | blk.26.ffn_norm.weight | 0x1963aa500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x1963af500 | 0x44c0000 | +| 246 | blk.27.attn_k.weight | 0x19a86f500 | 0x1ea000 | +| 247 | 
blk.27.attn_norm.weight | 0x19aa59500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x19aa5e500 | 0xb40000 | +| 249 | blk.27.attn_q.weight | 0x19b59e500 | 0x7a8000 | +| 250 | blk.27.attn_v.weight | 0x19bd46500 | 0x226000 | +| 251 | blk.27.ffn_down.weight | 0x19bf6c500 | 0x44c0000 | +| 252 | blk.27.ffn_gate.weight | 0x1a042c500 | 0x44c0000 | +| 253 | blk.27.ffn_norm.weight | 0x1a48ec500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x1a48f1500 | 0x44c0000 | +| 255 | blk.28.attn_k.weight | 0x1a8db1500 | 0x226000 | +| 256 | blk.28.attn_norm.weight | 0x1a8fd7500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1a8fdc500 | 0xb40000 | +| 258 | blk.28.attn_q.weight | 0x1a9b1c500 | 0x898000 | +| 259 | blk.28.attn_v.weight | 0x1aa3b4500 | 0x2d0000 | +| 260 | blk.28.ffn_down.weight | 0x1aa684500 | 0x44c0000 | +| 261 | blk.28.ffn_gate.weight | 0x1aeb44500 | 0x44c0000 | +| 262 | blk.28.ffn_norm.weight | 0x1b3004500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x1b3009500 | 0x44c0000 | +| 264 | blk.29.attn_k.weight | 0x1b74c9500 | 0x226000 | +| 265 | blk.29.attn_norm.weight | 0x1b76ef500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x1b76f4500 | 0xb40000 | +| 267 | blk.29.attn_q.weight | 0x1b8234500 | 0x898000 | +| 268 | blk.29.attn_v.weight | 0x1b8acc500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x1b8d9c500 | 0x44c0000 | +| 270 | blk.29.ffn_gate.weight | 0x1bd25c500 | 0x44c0000 | +| 271 | blk.29.ffn_norm.weight | 0x1c171c500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x1c1721500 | 0x44c0000 | +| 273 | blk.30.attn_k.weight | 0x1c5be1500 | 0x226000 | +| 274 | blk.30.attn_norm.weight | 0x1c5e07500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x1c5e0c500 | 0xb40000 | +| 276 | blk.30.attn_q.weight | 0x1c694c500 | 0x898000 | +| 277 | blk.30.attn_v.weight | 0x1c71e4500 | 0x2d0000 | +| 278 | blk.30.ffn_down.weight | 0x1c74b4500 | 0x44c0000 | +| 279 | blk.30.ffn_gate.weight | 0x1cb974500 | 0x44c0000 | +| 280 | blk.30.ffn_norm.weight | 0x1cfe34500 | 0x5000 | +| 281 | 
blk.30.ffn_up.weight | 0x1cfe39500 | 0x44c0000 | +| 282 | blk.31.attn_k.weight | 0x1d42f9500 | 0x226000 | +| 283 | blk.31.attn_norm.weight | 0x1d451f500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x1d4524500 | 0xb40000 | +| 285 | blk.31.attn_q.weight | 0x1d5064500 | 0x898000 | +| 286 | blk.31.attn_v.weight | 0x1d58fc500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x1d5bcc500 | 0x44c0000 | +| 288 | blk.31.ffn_gate.weight | 0x1da08c500 | 0x44c0000 | +| 289 | blk.31.ffn_norm.weight | 0x1de54c500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x1de551500 | 0x44c0000 | +| 291 | blk.32.attn_k.weight | 0x1e2a11500 | 0x226000 | +| 292 | blk.32.attn_norm.weight | 0x1e2c37500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x1e2c3c500 | 0xb40000 | +| 294 | blk.32.attn_q.weight | 0x1e377c500 | 0x898000 | +| 295 | blk.32.attn_v.weight | 0x1e4014500 | 0x2d0000 | +| 296 | blk.32.ffn_down.weight | 0x1e42e4500 | 0x44c0000 | +| 297 | blk.32.ffn_gate.weight | 0x1e87a4500 | 0x44c0000 | +| 298 | blk.32.ffn_norm.weight | 0x1ecc64500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x1ecc69500 | 0x44c0000 | +| 300 | blk.33.attn_k.weight | 0x1f1129500 | 0x226000 | +| 301 | blk.33.attn_norm.weight | 0x1f134f500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x1f1354500 | 0xb40000 | +| 303 | blk.33.attn_q.weight | 0x1f1e94500 | 0x898000 | +| 304 | blk.33.attn_v.weight | 0x1f272c500 | 0x2d0000 | +| 305 | blk.33.ffn_down.weight | 0x1f29fc500 | 0x44c0000 | +| 306 | blk.33.ffn_gate.weight | 0x1f6ebc500 | 0x44c0000 | +| 307 | blk.33.ffn_norm.weight | 0x1fb37c500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x1fb381500 | 0x44c0000 | +| 309 | blk.34.attn_k.weight | 0x1ff841500 | 0x226000 | +| 310 | blk.34.attn_norm.weight | 0x1ffa67500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x1ffa6c500 | 0xb40000 | +| 312 | blk.34.attn_q.weight | 0x2005ac500 | 0x898000 | +| 313 | blk.34.attn_v.weight | 0x200e44500 | 0x2d0000 | +| 314 | blk.34.ffn_down.weight | 0x201114500 | 0x44c0000 | +| 315 | 
blk.34.ffn_gate.weight | 0x2055d4500 | 0x44c0000 | +| 316 | blk.34.ffn_norm.weight | 0x209a94500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x209a99500 | 0x44c0000 | +| 318 | blk.35.attn_k.weight | 0x20df59500 | 0x226000 | +| 319 | blk.35.attn_norm.weight | 0x20e17f500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x20e184500 | 0xb40000 | +| 321 | blk.35.attn_q.weight | 0x20ecc4500 | 0x898000 | +| 322 | blk.35.attn_v.weight | 0x20f55c500 | 0x2d0000 | +| 323 | blk.35.ffn_down.weight | 0x20f82c500 | 0x44c0000 | +| 324 | blk.35.ffn_gate.weight | 0x213cec500 | 0x44c0000 | +| 325 | blk.35.ffn_norm.weight | 0x2181ac500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x2181b1500 | 0x44c0000 | +| 327 | blk.36.attn_k.weight | 0x21c671500 | 0x226000 | +| 328 | blk.36.attn_norm.weight | 0x21c897500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x21c89c500 | 0xb40000 | +| 330 | blk.36.attn_q.weight | 0x21d3dc500 | 0x898000 | +| 331 | blk.36.attn_v.weight | 0x21dc74500 | 0x2d0000 | +| 332 | blk.36.ffn_down.weight | 0x21df44500 | 0x44c0000 | +| 333 | blk.36.ffn_gate.weight | 0x222404500 | 0x44c0000 | +| 334 | blk.36.ffn_norm.weight | 0x2268c4500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x2268c9500 | 0x44c0000 | +| 336 | blk.37.attn_k.weight | 0x22ad89500 | 0x226000 | +| 337 | blk.37.attn_norm.weight | 0x22afaf500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x22afb4500 | 0xb40000 | +| 339 | blk.37.attn_q.weight | 0x22baf4500 | 0x898000 | +| 340 | blk.37.attn_v.weight | 0x22c38c500 | 0x2d0000 | +| 341 | blk.37.ffn_down.weight | 0x22c65c500 | 0x44c0000 | +| 342 | blk.37.ffn_gate.weight | 0x230b1c500 | 0x44c0000 | +| 343 | blk.37.ffn_norm.weight | 0x234fdc500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x234fe1500 | 0x44c0000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:------| +| 0 
| output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ3_S | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ3_S | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 25 | 
blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | 
IQ3_XXS | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | 
F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS 
| +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 88 
| blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 
5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 112 | blk.12.attn_norm.weight | Block 12 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 126 | 
blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly 
Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 150 | blk.16.attn_q.weight 
| Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 174 | blk.19.attn_k.weight | Block 19 
Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 188 | blk.20.ffn_down.weight | 
Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + 
+### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) 
| ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 226 | blk.24.ffn_norm.weight | 
Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 
5120 x 4096 x 1 x 1 | IQ3_XXS | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 
274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 
1 | IQ3_S | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human 
Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 312 | blk.34.attn_q.weight | 
Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | 
F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( 
~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_S.md b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_S.md new file mode 100644 index 0000000..7aa10a9 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ3_S.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice 
Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... ] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... 
] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 26 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3\_S.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-iq3_sgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M 
Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : ~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group 
: ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0xf500000 | +| 1 | output_norm.weight | 0xfc84500 | 0x5000 | +| 2 | token_embd.weight | 0xfc89500 | 0xf500000 | +| 3 | blk.0.attn_k.weight | 0x1f189500 | 0x1ea000 | +| 4 | blk.0.attn_norm.weight | 0x1f373500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x1f378500 | 0x898000 | +| 6 | blk.0.attn_q.weight | 0x1fc10500 | 0x7a8000 | +| 7 | blk.0.attn_v.weight | 0x203b8500 | 0x226000 | +| 8 | blk.0.ffn_down.weight | 0x205de500 | 0x44c0000 | +| 9 | blk.0.ffn_gate.weight | 0x24a9e500 | 0x3d40000 | +| 10 | blk.0.ffn_norm.weight | 0x287de500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x287e3500 | 0x3d40000 | +| 12 | blk.1.attn_k.weight | 0x2c523500 | 0x1ea000 | +| 13 | blk.1.attn_norm.weight | 0x2c70d500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x2c712500 | 0x898000 | +| 15 | blk.1.attn_q.weight | 0x2cfaa500 | 0x7a8000 | +| 16 | blk.1.attn_v.weight | 0x2d752500 | 0x226000 | +| 17 | blk.1.ffn_down.weight | 0x2d978500 | 0x44c0000 | +| 18 | blk.1.ffn_gate.weight | 0x31e38500 | 0x3d40000 
| +| 19 | blk.1.ffn_norm.weight | 0x35b78500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x35b7d500 | 0x3d40000 | +| 21 | blk.2.attn_k.weight | 0x398bd500 | 0x1ea000 | +| 22 | blk.2.attn_norm.weight | 0x39aa7500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x39aac500 | 0x898000 | +| 24 | blk.2.attn_q.weight | 0x3a344500 | 0x7a8000 | +| 25 | blk.2.attn_v.weight | 0x3aaec500 | 0x226000 | +| 26 | blk.2.ffn_down.weight | 0x3ad12500 | 0x44c0000 | +| 27 | blk.2.ffn_gate.weight | 0x3f1d2500 | 0x3d40000 | +| 28 | blk.2.ffn_norm.weight | 0x42f12500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x42f17500 | 0x3d40000 | +| 30 | blk.3.attn_k.weight | 0x46c57500 | 0x1ea000 | +| 31 | blk.3.attn_norm.weight | 0x46e41500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x46e46500 | 0x898000 | +| 33 | blk.3.attn_q.weight | 0x476de500 | 0x7a8000 | +| 34 | blk.3.attn_v.weight | 0x47e86500 | 0x226000 | +| 35 | blk.3.ffn_down.weight | 0x480ac500 | 0x44c0000 | +| 36 | blk.3.ffn_gate.weight | 0x4c56c500 | 0x3d40000 | +| 37 | blk.3.ffn_norm.weight | 0x502ac500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x502b1500 | 0x3d40000 | +| 39 | blk.4.attn_k.weight | 0x53ff1500 | 0x1ea000 | +| 40 | blk.4.attn_norm.weight | 0x541db500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x541e0500 | 0x898000 | +| 42 | blk.4.attn_q.weight | 0x54a78500 | 0x7a8000 | +| 43 | blk.4.attn_v.weight | 0x55220500 | 0x226000 | +| 44 | blk.4.ffn_down.weight | 0x55446500 | 0x44c0000 | +| 45 | blk.4.ffn_gate.weight | 0x59906500 | 0x3d40000 | +| 46 | blk.4.ffn_norm.weight | 0x5d646500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x5d64b500 | 0x3d40000 | +| 48 | blk.5.attn_k.weight | 0x6138b500 | 0x1ea000 | +| 49 | blk.5.attn_norm.weight | 0x61575500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x6157a500 | 0x898000 | +| 51 | blk.5.attn_q.weight | 0x61e12500 | 0x7a8000 | +| 52 | blk.5.attn_v.weight | 0x625ba500 | 0x226000 | +| 53 | blk.5.ffn_down.weight | 0x627e0500 | 0x44c0000 | +| 54 | blk.5.ffn_gate.weight | 0x66ca0500 | 0x3d40000 | 
+| 55 | blk.5.ffn_norm.weight | 0x6a9e0500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x6a9e5500 | 0x3d40000 | +| 57 | blk.6.attn_k.weight | 0x6e725500 | 0x1ea000 | +| 58 | blk.6.attn_norm.weight | 0x6e90f500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x6e914500 | 0x898000 | +| 60 | blk.6.attn_q.weight | 0x6f1ac500 | 0x7a8000 | +| 61 | blk.6.attn_v.weight | 0x6f954500 | 0x226000 | +| 62 | blk.6.ffn_down.weight | 0x6fb7a500 | 0x44c0000 | +| 63 | blk.6.ffn_gate.weight | 0x7403a500 | 0x3d40000 | +| 64 | blk.6.ffn_norm.weight | 0x77d7a500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x77d7f500 | 0x3d40000 | +| 66 | blk.7.attn_k.weight | 0x7babf500 | 0x1ea000 | +| 67 | blk.7.attn_norm.weight | 0x7bca9500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x7bcae500 | 0x898000 | +| 69 | blk.7.attn_q.weight | 0x7c546500 | 0x7a8000 | +| 70 | blk.7.attn_v.weight | 0x7ccee500 | 0x226000 | +| 71 | blk.7.ffn_down.weight | 0x7cf14500 | 0x44c0000 | +| 72 | blk.7.ffn_gate.weight | 0x813d4500 | 0x3d40000 | +| 73 | blk.7.ffn_norm.weight | 0x85114500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x85119500 | 0x3d40000 | +| 75 | blk.8.attn_k.weight | 0x88e59500 | 0x1ea000 | +| 76 | blk.8.attn_norm.weight | 0x89043500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x89048500 | 0x898000 | +| 78 | blk.8.attn_q.weight | 0x898e0500 | 0x7a8000 | +| 79 | blk.8.attn_v.weight | 0x8a088500 | 0x226000 | +| 80 | blk.8.ffn_down.weight | 0x8a2ae500 | 0x44c0000 | +| 81 | blk.8.ffn_gate.weight | 0x8e76e500 | 0x3d40000 | +| 82 | blk.8.ffn_norm.weight | 0x924ae500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x924b3500 | 0x3d40000 | +| 84 | blk.9.attn_k.weight | 0x961f3500 | 0x1ea000 | +| 85 | blk.9.attn_norm.weight | 0x963dd500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0x963e2500 | 0x898000 | +| 87 | blk.9.attn_q.weight | 0x96c7a500 | 0x7a8000 | +| 88 | blk.9.attn_v.weight | 0x97422500 | 0x226000 | +| 89 | blk.9.ffn_down.weight | 0x97648500 | 0x44c0000 | +| 90 | blk.9.ffn_gate.weight | 0x9bb08500 | 0x3d40000 | +| 
91 | blk.9.ffn_norm.weight | 0x9f848500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0x9f84d500 | 0x3d40000 | +| 93 | blk.10.attn_k.weight | 0xa358d500 | 0x1ea000 | +| 94 | blk.10.attn_norm.weight | 0xa3777500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xa377c500 | 0x898000 | +| 96 | blk.10.attn_q.weight | 0xa4014500 | 0x7a8000 | +| 97 | blk.10.attn_v.weight | 0xa47bc500 | 0x226000 | +| 98 | blk.10.ffn_down.weight | 0xa49e2500 | 0x44c0000 | +| 99 | blk.10.ffn_gate.weight | 0xa8ea2500 | 0x3d40000 | +| 100 | blk.10.ffn_norm.weight | 0xacbe2500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xacbe7500 | 0x3d40000 | +| 102 | blk.11.attn_k.weight | 0xb0927500 | 0x1ea000 | +| 103 | blk.11.attn_norm.weight | 0xb0b11500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xb0b16500 | 0x898000 | +| 105 | blk.11.attn_q.weight | 0xb13ae500 | 0x7a8000 | +| 106 | blk.11.attn_v.weight | 0xb1b56500 | 0x226000 | +| 107 | blk.11.ffn_down.weight | 0xb1d7c500 | 0x44c0000 | +| 108 | blk.11.ffn_gate.weight | 0xb623c500 | 0x3d40000 | +| 109 | blk.11.ffn_norm.weight | 0xb9f7c500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xb9f81500 | 0x3d40000 | +| 111 | blk.12.attn_k.weight | 0xbdcc1500 | 0x1ea000 | +| 112 | blk.12.attn_norm.weight | 0xbdeab500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xbdeb0500 | 0x898000 | +| 114 | blk.12.attn_q.weight | 0xbe748500 | 0x7a8000 | +| 115 | blk.12.attn_v.weight | 0xbeef0500 | 0x226000 | +| 116 | blk.12.ffn_down.weight | 0xbf116500 | 0x44c0000 | +| 117 | blk.12.ffn_gate.weight | 0xc35d6500 | 0x3d40000 | +| 118 | blk.12.ffn_norm.weight | 0xc7316500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xc731b500 | 0x3d40000 | +| 120 | blk.13.attn_k.weight | 0xcb05b500 | 0x1ea000 | +| 121 | blk.13.attn_norm.weight | 0xcb245500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xcb24a500 | 0x898000 | +| 123 | blk.13.attn_q.weight | 0xcbae2500 | 0x7a8000 | +| 124 | blk.13.attn_v.weight | 0xcc28a500 | 0x226000 | +| 125 | blk.13.ffn_down.weight | 0xcc4b0500 | 0x44c0000 | +| 
126 | blk.13.ffn_gate.weight | 0xd0970500 | 0x3d40000 | +| 127 | blk.13.ffn_norm.weight | 0xd46b0500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0xd46b5500 | 0x3d40000 | +| 129 | blk.14.attn_k.weight | 0xd83f5500 | 0x1ea000 | +| 130 | blk.14.attn_norm.weight | 0xd85df500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0xd85e4500 | 0x898000 | +| 132 | blk.14.attn_q.weight | 0xd8e7c500 | 0x7a8000 | +| 133 | blk.14.attn_v.weight | 0xd9624500 | 0x226000 | +| 134 | blk.14.ffn_down.weight | 0xd984a500 | 0x44c0000 | +| 135 | blk.14.ffn_gate.weight | 0xddd0a500 | 0x3d40000 | +| 136 | blk.14.ffn_norm.weight | 0xe1a4a500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0xe1a4f500 | 0x3d40000 | +| 138 | blk.15.attn_k.weight | 0xe578f500 | 0x1ea000 | +| 139 | blk.15.attn_norm.weight | 0xe5979500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0xe597e500 | 0x898000 | +| 141 | blk.15.attn_q.weight | 0xe6216500 | 0x7a8000 | +| 142 | blk.15.attn_v.weight | 0xe69be500 | 0x226000 | +| 143 | blk.15.ffn_down.weight | 0xe6be4500 | 0x44c0000 | +| 144 | blk.15.ffn_gate.weight | 0xeb0a4500 | 0x3d40000 | +| 145 | blk.15.ffn_norm.weight | 0xeede4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0xeede9500 | 0x3d40000 | +| 147 | blk.16.attn_k.weight | 0xf2b29500 | 0x1ea000 | +| 148 | blk.16.attn_norm.weight | 0xf2d13500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0xf2d18500 | 0x898000 | +| 150 | blk.16.attn_q.weight | 0xf35b0500 | 0x7a8000 | +| 151 | blk.16.attn_v.weight | 0xf3d58500 | 0x226000 | +| 152 | blk.16.ffn_down.weight | 0xf3f7e500 | 0x44c0000 | +| 153 | blk.16.ffn_gate.weight | 0xf843e500 | 0x3d40000 | +| 154 | blk.16.ffn_norm.weight | 0xfc17e500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0xfc183500 | 0x3d40000 | +| 156 | blk.17.attn_k.weight | 0xffec3500 | 0x226000 | +| 157 | blk.17.attn_norm.weight | 0x1000e9500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x1000ee500 | 0x898000 | +| 159 | blk.17.attn_q.weight | 0x100986500 | 0x898000 | +| 160 | blk.17.attn_v.weight | 0x10121e500 | 
0x226000 | +| 161 | blk.17.ffn_down.weight | 0x101444500 | 0x44c0000 | +| 162 | blk.17.ffn_gate.weight | 0x105904500 | 0x3d40000 | +| 163 | blk.17.ffn_norm.weight | 0x109644500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x109649500 | 0x3d40000 | +| 165 | blk.18.attn_k.weight | 0x10d389500 | 0x226000 | +| 166 | blk.18.attn_norm.weight | 0x10d5af500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x10d5b4500 | 0x898000 | +| 168 | blk.18.attn_q.weight | 0x10de4c500 | 0x898000 | +| 169 | blk.18.attn_v.weight | 0x10e6e4500 | 0x226000 | +| 170 | blk.18.ffn_down.weight | 0x10e90a500 | 0x44c0000 | +| 171 | blk.18.ffn_gate.weight | 0x112dca500 | 0x3d40000 | +| 172 | blk.18.ffn_norm.weight | 0x116b0a500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x116b0f500 | 0x3d40000 | +| 174 | blk.19.attn_k.weight | 0x11a84f500 | 0x1ea000 | +| 175 | blk.19.attn_norm.weight | 0x11aa39500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x11aa3e500 | 0x898000 | +| 177 | blk.19.attn_q.weight | 0x11b2d6500 | 0x7a8000 | +| 178 | blk.19.attn_v.weight | 0x11ba7e500 | 0x226000 | +| 179 | blk.19.ffn_down.weight | 0x11bca4500 | 0x44c0000 | +| 180 | blk.19.ffn_gate.weight | 0x120164500 | 0x3d40000 | +| 181 | blk.19.ffn_norm.weight | 0x123ea4500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x123ea9500 | 0x3d40000 | +| 183 | blk.20.attn_k.weight | 0x127be9500 | 0x226000 | +| 184 | blk.20.attn_norm.weight | 0x127e0f500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x127e14500 | 0x898000 | +| 186 | blk.20.attn_q.weight | 0x1286ac500 | 0x898000 | +| 187 | blk.20.attn_v.weight | 0x128f44500 | 0x226000 | +| 188 | blk.20.ffn_down.weight | 0x12916a500 | 0x44c0000 | +| 189 | blk.20.ffn_gate.weight | 0x12d62a500 | 0x44c0000 | +| 190 | blk.20.ffn_norm.weight | 0x131aea500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x131aef500 | 0x44c0000 | +| 192 | blk.21.attn_k.weight | 0x135faf500 | 0x1ea000 | +| 193 | blk.21.attn_norm.weight | 0x136199500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x13619e500 | 0x898000 
| +| 195 | blk.21.attn_q.weight | 0x136a36500 | 0x7a8000 | +| 196 | blk.21.attn_v.weight | 0x1371de500 | 0x226000 | +| 197 | blk.21.ffn_down.weight | 0x137404500 | 0x44c0000 | +| 198 | blk.21.ffn_gate.weight | 0x13b8c4500 | 0x44c0000 | +| 199 | blk.21.ffn_norm.weight | 0x13fd84500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x13fd89500 | 0x44c0000 | +| 201 | blk.22.attn_k.weight | 0x144249500 | 0x226000 | +| 202 | blk.22.attn_norm.weight | 0x14446f500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x144474500 | 0x898000 | +| 204 | blk.22.attn_q.weight | 0x144d0c500 | 0x898000 | +| 205 | blk.22.attn_v.weight | 0x1455a4500 | 0x226000 | +| 206 | blk.22.ffn_down.weight | 0x1457ca500 | 0x44c0000 | +| 207 | blk.22.ffn_gate.weight | 0x149c8a500 | 0x44c0000 | +| 208 | blk.22.ffn_norm.weight | 0x14e14a500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x14e14f500 | 0x44c0000 | +| 210 | blk.23.attn_k.weight | 0x15260f500 | 0x226000 | +| 211 | blk.23.attn_norm.weight | 0x152835500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x15283a500 | 0x898000 | +| 213 | blk.23.attn_q.weight | 0x1530d2500 | 0x898000 | +| 214 | blk.23.attn_v.weight | 0x15396a500 | 0x226000 | +| 215 | blk.23.ffn_down.weight | 0x153b90500 | 0x44c0000 | +| 216 | blk.23.ffn_gate.weight | 0x158050500 | 0x44c0000 | +| 217 | blk.23.ffn_norm.weight | 0x15c510500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x15c515500 | 0x44c0000 | +| 219 | blk.24.attn_k.weight | 0x1609d5500 | 0x226000 | +| 220 | blk.24.attn_norm.weight | 0x160bfb500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x160c00500 | 0x898000 | +| 222 | blk.24.attn_q.weight | 0x161498500 | 0x898000 | +| 223 | blk.24.attn_v.weight | 0x161d30500 | 0x226000 | +| 224 | blk.24.ffn_down.weight | 0x161f56500 | 0x44c0000 | +| 225 | blk.24.ffn_gate.weight | 0x166416500 | 0x44c0000 | +| 226 | blk.24.ffn_norm.weight | 0x16a8d6500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x16a8db500 | 0x44c0000 | +| 228 | blk.25.attn_k.weight | 0x16ed9b500 | 0x226000 | +| 229 | 
blk.25.attn_norm.weight | 0x16efc1500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x16efc6500 | 0x898000 | +| 231 | blk.25.attn_q.weight | 0x16f85e500 | 0x898000 | +| 232 | blk.25.attn_v.weight | 0x1700f6500 | 0x226000 | +| 233 | blk.25.ffn_down.weight | 0x17031c500 | 0x44c0000 | +| 234 | blk.25.ffn_gate.weight | 0x1747dc500 | 0x44c0000 | +| 235 | blk.25.ffn_norm.weight | 0x178c9c500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x178ca1500 | 0x44c0000 | +| 237 | blk.26.attn_k.weight | 0x17d161500 | 0x226000 | +| 238 | blk.26.attn_norm.weight | 0x17d387500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x17d38c500 | 0x898000 | +| 240 | blk.26.attn_q.weight | 0x17dc24500 | 0x898000 | +| 241 | blk.26.attn_v.weight | 0x17e4bc500 | 0x226000 | +| 242 | blk.26.ffn_down.weight | 0x17e6e2500 | 0x44c0000 | +| 243 | blk.26.ffn_gate.weight | 0x182ba2500 | 0x44c0000 | +| 244 | blk.26.ffn_norm.weight | 0x187062500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x187067500 | 0x44c0000 | +| 246 | blk.27.attn_k.weight | 0x18b527500 | 0x1ea000 | +| 247 | blk.27.attn_norm.weight | 0x18b711500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x18b716500 | 0x898000 | +| 249 | blk.27.attn_q.weight | 0x18bfae500 | 0x7a8000 | +| 250 | blk.27.attn_v.weight | 0x18c756500 | 0x226000 | +| 251 | blk.27.ffn_down.weight | 0x18c97c500 | 0x44c0000 | +| 252 | blk.27.ffn_gate.weight | 0x190e3c500 | 0x44c0000 | +| 253 | blk.27.ffn_norm.weight | 0x1952fc500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x195301500 | 0x44c0000 | +| 255 | blk.28.attn_k.weight | 0x1997c1500 | 0x226000 | +| 256 | blk.28.attn_norm.weight | 0x1999e7500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1999ec500 | 0x898000 | +| 258 | blk.28.attn_q.weight | 0x19a284500 | 0x898000 | +| 259 | blk.28.attn_v.weight | 0x19ab1c500 | 0x226000 | +| 260 | blk.28.ffn_down.weight | 0x19ad42500 | 0x44c0000 | +| 261 | blk.28.ffn_gate.weight | 0x19f202500 | 0x44c0000 | +| 262 | blk.28.ffn_norm.weight | 0x1a36c2500 | 0x5000 | +| 263 | 
blk.28.ffn_up.weight | 0x1a36c7500 | 0x44c0000 | +| 264 | blk.29.attn_k.weight | 0x1a7b87500 | 0x226000 | +| 265 | blk.29.attn_norm.weight | 0x1a7dad500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x1a7db2500 | 0x898000 | +| 267 | blk.29.attn_q.weight | 0x1a864a500 | 0x898000 | +| 268 | blk.29.attn_v.weight | 0x1a8ee2500 | 0x226000 | +| 269 | blk.29.ffn_down.weight | 0x1a9108500 | 0x44c0000 | +| 270 | blk.29.ffn_gate.weight | 0x1ad5c8500 | 0x44c0000 | +| 271 | blk.29.ffn_norm.weight | 0x1b1a88500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x1b1a8d500 | 0x44c0000 | +| 273 | blk.30.attn_k.weight | 0x1b5f4d500 | 0x226000 | +| 274 | blk.30.attn_norm.weight | 0x1b6173500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x1b6178500 | 0x898000 | +| 276 | blk.30.attn_q.weight | 0x1b6a10500 | 0x898000 | +| 277 | blk.30.attn_v.weight | 0x1b72a8500 | 0x226000 | +| 278 | blk.30.ffn_down.weight | 0x1b74ce500 | 0x44c0000 | +| 279 | blk.30.ffn_gate.weight | 0x1bb98e500 | 0x44c0000 | +| 280 | blk.30.ffn_norm.weight | 0x1bfe4e500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x1bfe53500 | 0x44c0000 | +| 282 | blk.31.attn_k.weight | 0x1c4313500 | 0x226000 | +| 283 | blk.31.attn_norm.weight | 0x1c4539500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x1c453e500 | 0x898000 | +| 285 | blk.31.attn_q.weight | 0x1c4dd6500 | 0x898000 | +| 286 | blk.31.attn_v.weight | 0x1c566e500 | 0x226000 | +| 287 | blk.31.ffn_down.weight | 0x1c5894500 | 0x44c0000 | +| 288 | blk.31.ffn_gate.weight | 0x1c9d54500 | 0x44c0000 | +| 289 | blk.31.ffn_norm.weight | 0x1ce214500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x1ce219500 | 0x44c0000 | +| 291 | blk.32.attn_k.weight | 0x1d26d9500 | 0x226000 | +| 292 | blk.32.attn_norm.weight | 0x1d28ff500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x1d2904500 | 0x898000 | +| 294 | blk.32.attn_q.weight | 0x1d319c500 | 0x898000 | +| 295 | blk.32.attn_v.weight | 0x1d3a34500 | 0x226000 | +| 296 | blk.32.ffn_down.weight | 0x1d3c5a500 | 0x44c0000 | +| 297 | 
blk.32.ffn_gate.weight | 0x1d811a500 | 0x44c0000 | +| 298 | blk.32.ffn_norm.weight | 0x1dc5da500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x1dc5df500 | 0x44c0000 | +| 300 | blk.33.attn_k.weight | 0x1e0a9f500 | 0x226000 | +| 301 | blk.33.attn_norm.weight | 0x1e0cc5500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x1e0cca500 | 0x898000 | +| 303 | blk.33.attn_q.weight | 0x1e1562500 | 0x898000 | +| 304 | blk.33.attn_v.weight | 0x1e1dfa500 | 0x226000 | +| 305 | blk.33.ffn_down.weight | 0x1e2020500 | 0x44c0000 | +| 306 | blk.33.ffn_gate.weight | 0x1e64e0500 | 0x44c0000 | +| 307 | blk.33.ffn_norm.weight | 0x1ea9a0500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x1ea9a5500 | 0x44c0000 | +| 309 | blk.34.attn_k.weight | 0x1eee65500 | 0x226000 | +| 310 | blk.34.attn_norm.weight | 0x1ef08b500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x1ef090500 | 0x898000 | +| 312 | blk.34.attn_q.weight | 0x1ef928500 | 0x898000 | +| 313 | blk.34.attn_v.weight | 0x1f01c0500 | 0x226000 | +| 314 | blk.34.ffn_down.weight | 0x1f03e6500 | 0x44c0000 | +| 315 | blk.34.ffn_gate.weight | 0x1f48a6500 | 0x44c0000 | +| 316 | blk.34.ffn_norm.weight | 0x1f8d66500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x1f8d6b500 | 0x44c0000 | +| 318 | blk.35.attn_k.weight | 0x1fd22b500 | 0x226000 | +| 319 | blk.35.attn_norm.weight | 0x1fd451500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x1fd456500 | 0x898000 | +| 321 | blk.35.attn_q.weight | 0x1fdcee500 | 0x898000 | +| 322 | blk.35.attn_v.weight | 0x1fe586500 | 0x226000 | +| 323 | blk.35.ffn_down.weight | 0x1fe7ac500 | 0x44c0000 | +| 324 | blk.35.ffn_gate.weight | 0x202c6c500 | 0x44c0000 | +| 325 | blk.35.ffn_norm.weight | 0x20712c500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x207131500 | 0x44c0000 | +| 327 | blk.36.attn_k.weight | 0x20b5f1500 | 0x226000 | +| 328 | blk.36.attn_norm.weight | 0x20b817500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x20b81c500 | 0x898000 | +| 330 | blk.36.attn_q.weight | 0x20c0b4500 | 0x898000 | +| 331 | 
blk.36.attn_v.weight | 0x20c94c500 | 0x226000 | +| 332 | blk.36.ffn_down.weight | 0x20cb72500 | 0x44c0000 | +| 333 | blk.36.ffn_gate.weight | 0x211032500 | 0x44c0000 | +| 334 | blk.36.ffn_norm.weight | 0x2154f2500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x2154f7500 | 0x44c0000 | +| 336 | blk.37.attn_k.weight | 0x2199b7500 | 0x226000 | +| 337 | blk.37.attn_norm.weight | 0x219bdd500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x219be2500 | 0x898000 | +| 339 | blk.37.attn_q.weight | 0x21a47a500 | 0x898000 | +| 340 | blk.37.attn_v.weight | 0x21ad12500 | 0x226000 | +| 341 | blk.37.ffn_down.weight | 0x21af38500 | 0x44c0000 | +| 342 | blk.37.ffn_gate.weight | 0x21f3f8500 | 0x44c0000 | +| 343 | blk.37.ffn_norm.weight | 0x2238b8500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x2238bd500 | 0x44c0000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:--------| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ3_XXS | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ3_XXS | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( 
~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization 
(W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key 
(W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human 
Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | 
( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward 
Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:--------| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 94 | blk.10.attn_norm.weight | 
Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 108 | 
blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly 
Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 132 | 
blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | 
( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 156 | 
blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 170 | 
blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_XXS | + +- Total elements in blk.19: (~556M) 555755520 +- 
Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | 
blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 
32768 x 1 x 1 | IQ3_S | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 
x 4096 x 1 x 1 | IQ3_S | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:--------| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_XXS | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_XXS | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | 
+| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 
x 1 | IQ3_S | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human 
Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 294 | blk.32.attn_q.weight | 
Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 
| +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" 
(W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:------| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ3_S | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ3_S | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git 
a/scores/Dolphin-Mistral-24B-Venice-Edition-IQ4_NL.md b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ4_NL.md new file mode 100644 index 0000000..4513b65 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-IQ4_NL.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | 
llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... ] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 25 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4\_NL.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-iq4_nlgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B 
Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M 
Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : ~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x16800000 | +| 1 | output_norm.weight | 0x16f84500 | 0x5000 | +| 2 | token_embd.weight | 0x16f89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x28289500 | 0x226000 | +| 4 | blk.0.attn_norm.weight | 0x284af500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x284b4500 | 0xb40000 | +| 6 | blk.0.attn_q.weight | 0x28ff4500 | 0x898000 | +| 7 | blk.0.attn_v.weight | 0x2988c500 | 0x2a8000 | +| 8 | blk.0.ffn_down.weight | 0x29b34500 | 0x5a00000 | +| 9 | blk.0.ffn_gate.weight | 0x2f534500 | 
0x44c0000 | +| 10 | blk.0.ffn_norm.weight | 0x339f4500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x339f9500 | 0x44c0000 | +| 12 | blk.1.attn_k.weight | 0x37eb9500 | 0x226000 | +| 13 | blk.1.attn_norm.weight | 0x380df500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x380e4500 | 0xb40000 | +| 15 | blk.1.attn_q.weight | 0x38c24500 | 0x898000 | +| 16 | blk.1.attn_v.weight | 0x394bc500 | 0x2a8000 | +| 17 | blk.1.ffn_down.weight | 0x39764500 | 0x5a00000 | +| 18 | blk.1.ffn_gate.weight | 0x3f164500 | 0x44c0000 | +| 19 | blk.1.ffn_norm.weight | 0x43624500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x43629500 | 0x44c0000 | +| 21 | blk.2.attn_k.weight | 0x47ae9500 | 0x226000 | +| 22 | blk.2.attn_norm.weight | 0x47d0f500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x47d14500 | 0xb40000 | +| 24 | blk.2.attn_q.weight | 0x48854500 | 0x898000 | +| 25 | blk.2.attn_v.weight | 0x490ec500 | 0x2a8000 | +| 26 | blk.2.ffn_down.weight | 0x49394500 | 0x5a00000 | +| 27 | blk.2.ffn_gate.weight | 0x4ed94500 | 0x44c0000 | +| 28 | blk.2.ffn_norm.weight | 0x53254500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x53259500 | 0x44c0000 | +| 30 | blk.3.attn_k.weight | 0x57719500 | 0x226000 | +| 31 | blk.3.attn_norm.weight | 0x5793f500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x57944500 | 0xb40000 | +| 33 | blk.3.attn_q.weight | 0x58484500 | 0x898000 | +| 34 | blk.3.attn_v.weight | 0x58d1c500 | 0x2a8000 | +| 35 | blk.3.ffn_down.weight | 0x58fc4500 | 0x5a00000 | +| 36 | blk.3.ffn_gate.weight | 0x5e9c4500 | 0x44c0000 | +| 37 | blk.3.ffn_norm.weight | 0x62e84500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x62e89500 | 0x44c0000 | +| 39 | blk.4.attn_k.weight | 0x67349500 | 0x226000 | +| 40 | blk.4.attn_norm.weight | 0x6756f500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x67574500 | 0xb40000 | +| 42 | blk.4.attn_q.weight | 0x680b4500 | 0x898000 | +| 43 | blk.4.attn_v.weight | 0x6894c500 | 0x2a8000 | +| 44 | blk.4.ffn_down.weight | 0x68bf4500 | 0x5a00000 | +| 45 | blk.4.ffn_gate.weight | 0x6e5f4500 | 
0x44c0000 | +| 46 | blk.4.ffn_norm.weight | 0x72ab4500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x72ab9500 | 0x44c0000 | +| 48 | blk.5.attn_k.weight | 0x76f79500 | 0x226000 | +| 49 | blk.5.attn_norm.weight | 0x7719f500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x771a4500 | 0xb40000 | +| 51 | blk.5.attn_q.weight | 0x77ce4500 | 0x898000 | +| 52 | blk.5.attn_v.weight | 0x7857c500 | 0x2a8000 | +| 53 | blk.5.ffn_down.weight | 0x78824500 | 0x5a00000 | +| 54 | blk.5.ffn_gate.weight | 0x7e224500 | 0x44c0000 | +| 55 | blk.5.ffn_norm.weight | 0x826e4500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x826e9500 | 0x44c0000 | +| 57 | blk.6.attn_k.weight | 0x86ba9500 | 0x226000 | +| 58 | blk.6.attn_norm.weight | 0x86dcf500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x86dd4500 | 0xb40000 | +| 60 | blk.6.attn_q.weight | 0x87914500 | 0x898000 | +| 61 | blk.6.attn_v.weight | 0x881ac500 | 0x2a8000 | +| 62 | blk.6.ffn_down.weight | 0x88454500 | 0x5a00000 | +| 63 | blk.6.ffn_gate.weight | 0x8de54500 | 0x44c0000 | +| 64 | blk.6.ffn_norm.weight | 0x92314500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x92319500 | 0x44c0000 | +| 66 | blk.7.attn_k.weight | 0x967d9500 | 0x226000 | +| 67 | blk.7.attn_norm.weight | 0x969ff500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x96a04500 | 0xb40000 | +| 69 | blk.7.attn_q.weight | 0x97544500 | 0x898000 | +| 70 | blk.7.attn_v.weight | 0x97ddc500 | 0x2a8000 | +| 71 | blk.7.ffn_down.weight | 0x98084500 | 0x5a00000 | +| 72 | blk.7.ffn_gate.weight | 0x9da84500 | 0x44c0000 | +| 73 | blk.7.ffn_norm.weight | 0xa1f44500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xa1f49500 | 0x44c0000 | +| 75 | blk.8.attn_k.weight | 0xa6409500 | 0x226000 | +| 76 | blk.8.attn_norm.weight | 0xa662f500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xa6634500 | 0xb40000 | +| 78 | blk.8.attn_q.weight | 0xa7174500 | 0x898000 | +| 79 | blk.8.attn_v.weight | 0xa7a0c500 | 0x2a8000 | +| 80 | blk.8.ffn_down.weight | 0xa7cb4500 | 0x5a00000 | +| 81 | blk.8.ffn_gate.weight | 0xad6b4500 | 
0x44c0000 | +| 82 | blk.8.ffn_norm.weight | 0xb1b74500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xb1b79500 | 0x44c0000 | +| 84 | blk.9.attn_k.weight | 0xb6039500 | 0x226000 | +| 85 | blk.9.attn_norm.weight | 0xb625f500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xb6264500 | 0xb40000 | +| 87 | blk.9.attn_q.weight | 0xb6da4500 | 0x898000 | +| 88 | blk.9.attn_v.weight | 0xb763c500 | 0x2a8000 | +| 89 | blk.9.ffn_down.weight | 0xb78e4500 | 0x5a00000 | +| 90 | blk.9.ffn_gate.weight | 0xbd2e4500 | 0x44c0000 | +| 91 | blk.9.ffn_norm.weight | 0xc17a4500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xc17a9500 | 0x44c0000 | +| 93 | blk.10.attn_k.weight | 0xc5c69500 | 0x226000 | +| 94 | blk.10.attn_norm.weight | 0xc5e8f500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xc5e94500 | 0xb40000 | +| 96 | blk.10.attn_q.weight | 0xc69d4500 | 0x898000 | +| 97 | blk.10.attn_v.weight | 0xc726c500 | 0x2a8000 | +| 98 | blk.10.ffn_down.weight | 0xc7514500 | 0x5a00000 | +| 99 | blk.10.ffn_gate.weight | 0xccf14500 | 0x44c0000 | +| 100 | blk.10.ffn_norm.weight | 0xd13d4500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xd13d9500 | 0x44c0000 | +| 102 | blk.11.attn_k.weight | 0xd5899500 | 0x226000 | +| 103 | blk.11.attn_norm.weight | 0xd5abf500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xd5ac4500 | 0xb40000 | +| 105 | blk.11.attn_q.weight | 0xd6604500 | 0x898000 | +| 106 | blk.11.attn_v.weight | 0xd6e9c500 | 0x2a8000 | +| 107 | blk.11.ffn_down.weight | 0xd7144500 | 0x5a00000 | +| 108 | blk.11.ffn_gate.weight | 0xdcb44500 | 0x44c0000 | +| 109 | blk.11.ffn_norm.weight | 0xe1004500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xe1009500 | 0x44c0000 | +| 111 | blk.12.attn_k.weight | 0xe54c9500 | 0x226000 | +| 112 | blk.12.attn_norm.weight | 0xe56ef500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xe56f4500 | 0xb40000 | +| 114 | blk.12.attn_q.weight | 0xe6234500 | 0x898000 | +| 115 | blk.12.attn_v.weight | 0xe6acc500 | 0x2a8000 | +| 116 | blk.12.ffn_down.weight | 0xe6d74500 | 0x5a00000 | +| 117 
| blk.12.ffn_gate.weight | 0xec774500 | 0x44c0000 | +| 118 | blk.12.ffn_norm.weight | 0xf0c34500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xf0c39500 | 0x44c0000 | +| 120 | blk.13.attn_k.weight | 0xf50f9500 | 0x226000 | +| 121 | blk.13.attn_norm.weight | 0xf531f500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xf5324500 | 0xb40000 | +| 123 | blk.13.attn_q.weight | 0xf5e64500 | 0x898000 | +| 124 | blk.13.attn_v.weight | 0xf66fc500 | 0x2a8000 | +| 125 | blk.13.ffn_down.weight | 0xf69a4500 | 0x5a00000 | +| 126 | blk.13.ffn_gate.weight | 0xfc3a4500 | 0x44c0000 | +| 127 | blk.13.ffn_norm.weight | 0x100864500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x100869500 | 0x44c0000 | +| 129 | blk.14.attn_k.weight | 0x104d29500 | 0x226000 | +| 130 | blk.14.attn_norm.weight | 0x104f4f500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x104f54500 | 0xb40000 | +| 132 | blk.14.attn_q.weight | 0x105a94500 | 0x898000 | +| 133 | blk.14.attn_v.weight | 0x10632c500 | 0x2a8000 | +| 134 | blk.14.ffn_down.weight | 0x1065d4500 | 0x5a00000 | +| 135 | blk.14.ffn_gate.weight | 0x10bfd4500 | 0x44c0000 | +| 136 | blk.14.ffn_norm.weight | 0x110494500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x110499500 | 0x44c0000 | +| 138 | blk.15.attn_k.weight | 0x114959500 | 0x226000 | +| 139 | blk.15.attn_norm.weight | 0x114b7f500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x114b84500 | 0xb40000 | +| 141 | blk.15.attn_q.weight | 0x1156c4500 | 0x898000 | +| 142 | blk.15.attn_v.weight | 0x115f5c500 | 0x2a8000 | +| 143 | blk.15.ffn_down.weight | 0x116204500 | 0x5a00000 | +| 144 | blk.15.ffn_gate.weight | 0x11bc04500 | 0x44c0000 | +| 145 | blk.15.ffn_norm.weight | 0x1200c4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x1200c9500 | 0x44c0000 | +| 147 | blk.16.attn_k.weight | 0x124589500 | 0x226000 | +| 148 | blk.16.attn_norm.weight | 0x1247af500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x1247b4500 | 0xb40000 | +| 150 | blk.16.attn_q.weight | 0x1252f4500 | 0x898000 | +| 151 | blk.16.attn_v.weight 
| 0x125b8c500 | 0x2a8000 | +| 152 | blk.16.ffn_down.weight | 0x125e34500 | 0x5a00000 | +| 153 | blk.16.ffn_gate.weight | 0x12b834500 | 0x44c0000 | +| 154 | blk.16.ffn_norm.weight | 0x12fcf4500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x12fcf9500 | 0x44c0000 | +| 156 | blk.17.attn_k.weight | 0x1341b9500 | 0x2d0000 | +| 157 | blk.17.attn_norm.weight | 0x134489500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x13448e500 | 0xb40000 | +| 159 | blk.17.attn_q.weight | 0x134fce500 | 0xb40000 | +| 160 | blk.17.attn_v.weight | 0x135b0e500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x135dde500 | 0x5a00000 | +| 162 | blk.17.ffn_gate.weight | 0x13b7de500 | 0x44c0000 | +| 163 | blk.17.ffn_norm.weight | 0x13fc9e500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x13fca3500 | 0x44c0000 | +| 165 | blk.18.attn_k.weight | 0x144163500 | 0x2d0000 | +| 166 | blk.18.attn_norm.weight | 0x144433500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x144438500 | 0xb40000 | +| 168 | blk.18.attn_q.weight | 0x144f78500 | 0xb40000 | +| 169 | blk.18.attn_v.weight | 0x145ab8500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x145d88500 | 0x5a00000 | +| 171 | blk.18.ffn_gate.weight | 0x14b788500 | 0x44c0000 | +| 172 | blk.18.ffn_norm.weight | 0x14fc48500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x14fc4d500 | 0x44c0000 | +| 174 | blk.19.attn_k.weight | 0x15410d500 | 0x226000 | +| 175 | blk.19.attn_norm.weight | 0x154333500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x154338500 | 0xb40000 | +| 177 | blk.19.attn_q.weight | 0x154e78500 | 0x898000 | +| 178 | blk.19.attn_v.weight | 0x155710500 | 0x2a8000 | +| 179 | blk.19.ffn_down.weight | 0x1559b8500 | 0x5a00000 | +| 180 | blk.19.ffn_gate.weight | 0x15b3b8500 | 0x44c0000 | +| 181 | blk.19.ffn_norm.weight | 0x15f878500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x15f87d500 | 0x44c0000 | +| 183 | blk.20.attn_k.weight | 0x163d3d500 | 0x2d0000 | +| 184 | blk.20.attn_norm.weight | 0x16400d500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 
0x164012500 | 0xb40000 | +| 186 | blk.20.attn_q.weight | 0x164b52500 | 0xb40000 | +| 187 | blk.20.attn_v.weight | 0x165692500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x165962500 | 0x5a00000 | +| 189 | blk.20.ffn_gate.weight | 0x16b362500 | 0x5a00000 | +| 190 | blk.20.ffn_norm.weight | 0x170d62500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x170d67500 | 0x5a00000 | +| 192 | blk.21.attn_k.weight | 0x176767500 | 0x226000 | +| 193 | blk.21.attn_norm.weight | 0x17698d500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x176992500 | 0xb40000 | +| 195 | blk.21.attn_q.weight | 0x1774d2500 | 0x898000 | +| 196 | blk.21.attn_v.weight | 0x177d6a500 | 0x2a8000 | +| 197 | blk.21.ffn_down.weight | 0x178012500 | 0x5a00000 | +| 198 | blk.21.ffn_gate.weight | 0x17da12500 | 0x5a00000 | +| 199 | blk.21.ffn_norm.weight | 0x183412500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x183417500 | 0x5a00000 | +| 201 | blk.22.attn_k.weight | 0x188e17500 | 0x2d0000 | +| 202 | blk.22.attn_norm.weight | 0x1890e7500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x1890ec500 | 0xb40000 | +| 204 | blk.22.attn_q.weight | 0x189c2c500 | 0xb40000 | +| 205 | blk.22.attn_v.weight | 0x18a76c500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x18aa3c500 | 0x5a00000 | +| 207 | blk.22.ffn_gate.weight | 0x19043c500 | 0x5a00000 | +| 208 | blk.22.ffn_norm.weight | 0x195e3c500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x195e41500 | 0x5a00000 | +| 210 | blk.23.attn_k.weight | 0x19b841500 | 0x2d0000 | +| 211 | blk.23.attn_norm.weight | 0x19bb11500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x19bb16500 | 0xb40000 | +| 213 | blk.23.attn_q.weight | 0x19c656500 | 0xb40000 | +| 214 | blk.23.attn_v.weight | 0x19d196500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x19d466500 | 0x5a00000 | +| 216 | blk.23.ffn_gate.weight | 0x1a2e66500 | 0x5a00000 | +| 217 | blk.23.ffn_norm.weight | 0x1a8866500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x1a886b500 | 0x5a00000 | +| 219 | blk.24.attn_k.weight | 0x1ae26b500 | 
0x2d0000 | +| 220 | blk.24.attn_norm.weight | 0x1ae53b500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x1ae540500 | 0xb40000 | +| 222 | blk.24.attn_q.weight | 0x1af080500 | 0xb40000 | +| 223 | blk.24.attn_v.weight | 0x1afbc0500 | 0x2d0000 | +| 224 | blk.24.ffn_down.weight | 0x1afe90500 | 0x5a00000 | +| 225 | blk.24.ffn_gate.weight | 0x1b5890500 | 0x5a00000 | +| 226 | blk.24.ffn_norm.weight | 0x1bb290500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x1bb295500 | 0x5a00000 | +| 228 | blk.25.attn_k.weight | 0x1c0c95500 | 0x2d0000 | +| 229 | blk.25.attn_norm.weight | 0x1c0f65500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x1c0f6a500 | 0xb40000 | +| 231 | blk.25.attn_q.weight | 0x1c1aaa500 | 0xb40000 | +| 232 | blk.25.attn_v.weight | 0x1c25ea500 | 0x2d0000 | +| 233 | blk.25.ffn_down.weight | 0x1c28ba500 | 0x5a00000 | +| 234 | blk.25.ffn_gate.weight | 0x1c82ba500 | 0x5a00000 | +| 235 | blk.25.ffn_norm.weight | 0x1cdcba500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x1cdcbf500 | 0x5a00000 | +| 237 | blk.26.attn_k.weight | 0x1d36bf500 | 0x2d0000 | +| 238 | blk.26.attn_norm.weight | 0x1d398f500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x1d3994500 | 0xb40000 | +| 240 | blk.26.attn_q.weight | 0x1d44d4500 | 0xb40000 | +| 241 | blk.26.attn_v.weight | 0x1d5014500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x1d52e4500 | 0x5a00000 | +| 243 | blk.26.ffn_gate.weight | 0x1dace4500 | 0x5a00000 | +| 244 | blk.26.ffn_norm.weight | 0x1e06e4500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x1e06e9500 | 0x5a00000 | +| 246 | blk.27.attn_k.weight | 0x1e60e9500 | 0x226000 | +| 247 | blk.27.attn_norm.weight | 0x1e630f500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x1e6314500 | 0xb40000 | +| 249 | blk.27.attn_q.weight | 0x1e6e54500 | 0x898000 | +| 250 | blk.27.attn_v.weight | 0x1e76ec500 | 0x2a8000 | +| 251 | blk.27.ffn_down.weight | 0x1e7994500 | 0x5a00000 | +| 252 | blk.27.ffn_gate.weight | 0x1ed394500 | 0x5a00000 | +| 253 | blk.27.ffn_norm.weight | 0x1f2d94500 | 0x5000 | 
+| 254 | blk.27.ffn_up.weight | 0x1f2d99500 | 0x5a00000 | +| 255 | blk.28.attn_k.weight | 0x1f8799500 | 0x2d0000 | +| 256 | blk.28.attn_norm.weight | 0x1f8a69500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1f8a6e500 | 0xb40000 | +| 258 | blk.28.attn_q.weight | 0x1f95ae500 | 0xb40000 | +| 259 | blk.28.attn_v.weight | 0x1fa0ee500 | 0x2d0000 | +| 260 | blk.28.ffn_down.weight | 0x1fa3be500 | 0x5a00000 | +| 261 | blk.28.ffn_gate.weight | 0x1ffdbe500 | 0x5a00000 | +| 262 | blk.28.ffn_norm.weight | 0x2057be500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x2057c3500 | 0x5a00000 | +| 264 | blk.29.attn_k.weight | 0x20b1c3500 | 0x2d0000 | +| 265 | blk.29.attn_norm.weight | 0x20b493500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x20b498500 | 0xb40000 | +| 267 | blk.29.attn_q.weight | 0x20bfd8500 | 0xb40000 | +| 268 | blk.29.attn_v.weight | 0x20cb18500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x20cde8500 | 0x5a00000 | +| 270 | blk.29.ffn_gate.weight | 0x2127e8500 | 0x5a00000 | +| 271 | blk.29.ffn_norm.weight | 0x2181e8500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x2181ed500 | 0x5a00000 | +| 273 | blk.30.attn_k.weight | 0x21dbed500 | 0x2d0000 | +| 274 | blk.30.attn_norm.weight | 0x21debd500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x21dec2500 | 0xb40000 | +| 276 | blk.30.attn_q.weight | 0x21ea02500 | 0xb40000 | +| 277 | blk.30.attn_v.weight | 0x21f542500 | 0x2d0000 | +| 278 | blk.30.ffn_down.weight | 0x21f812500 | 0x5a00000 | +| 279 | blk.30.ffn_gate.weight | 0x225212500 | 0x5a00000 | +| 280 | blk.30.ffn_norm.weight | 0x22ac12500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x22ac17500 | 0x5a00000 | +| 282 | blk.31.attn_k.weight | 0x230617500 | 0x2d0000 | +| 283 | blk.31.attn_norm.weight | 0x2308e7500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x2308ec500 | 0xb40000 | +| 285 | blk.31.attn_q.weight | 0x23142c500 | 0xb40000 | +| 286 | blk.31.attn_v.weight | 0x231f6c500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x23223c500 | 0x5a00000 | +| 288 | 
blk.31.ffn_gate.weight | 0x237c3c500 | 0x5a00000 | +| 289 | blk.31.ffn_norm.weight | 0x23d63c500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x23d641500 | 0x5a00000 | +| 291 | blk.32.attn_k.weight | 0x243041500 | 0x2d0000 | +| 292 | blk.32.attn_norm.weight | 0x243311500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x243316500 | 0xb40000 | +| 294 | blk.32.attn_q.weight | 0x243e56500 | 0xb40000 | +| 295 | blk.32.attn_v.weight | 0x244996500 | 0x2d0000 | +| 296 | blk.32.ffn_down.weight | 0x244c66500 | 0x5a00000 | +| 297 | blk.32.ffn_gate.weight | 0x24a666500 | 0x5a00000 | +| 298 | blk.32.ffn_norm.weight | 0x250066500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x25006b500 | 0x5a00000 | +| 300 | blk.33.attn_k.weight | 0x255a6b500 | 0x2d0000 | +| 301 | blk.33.attn_norm.weight | 0x255d3b500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x255d40500 | 0xb40000 | +| 303 | blk.33.attn_q.weight | 0x256880500 | 0xb40000 | +| 304 | blk.33.attn_v.weight | 0x2573c0500 | 0x2d0000 | +| 305 | blk.33.ffn_down.weight | 0x257690500 | 0x5a00000 | +| 306 | blk.33.ffn_gate.weight | 0x25d090500 | 0x5a00000 | +| 307 | blk.33.ffn_norm.weight | 0x262a90500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x262a95500 | 0x5a00000 | +| 309 | blk.34.attn_k.weight | 0x268495500 | 0x2d0000 | +| 310 | blk.34.attn_norm.weight | 0x268765500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x26876a500 | 0xb40000 | +| 312 | blk.34.attn_q.weight | 0x2692aa500 | 0xb40000 | +| 313 | blk.34.attn_v.weight | 0x269dea500 | 0x2d0000 | +| 314 | blk.34.ffn_down.weight | 0x26a0ba500 | 0x5a00000 | +| 315 | blk.34.ffn_gate.weight | 0x26faba500 | 0x5a00000 | +| 316 | blk.34.ffn_norm.weight | 0x2754ba500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x2754bf500 | 0x5a00000 | +| 318 | blk.35.attn_k.weight | 0x27aebf500 | 0x2d0000 | +| 319 | blk.35.attn_norm.weight | 0x27b18f500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x27b194500 | 0xb40000 | +| 321 | blk.35.attn_q.weight | 0x27bcd4500 | 0xb40000 | +| 322 | 
blk.35.attn_v.weight | 0x27c814500 | 0x2d0000 | +| 323 | blk.35.ffn_down.weight | 0x27cae4500 | 0x5a00000 | +| 324 | blk.35.ffn_gate.weight | 0x2824e4500 | 0x5a00000 | +| 325 | blk.35.ffn_norm.weight | 0x287ee4500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x287ee9500 | 0x5a00000 | +| 327 | blk.36.attn_k.weight | 0x28d8e9500 | 0x2d0000 | +| 328 | blk.36.attn_norm.weight | 0x28dbb9500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x28dbbe500 | 0xb40000 | +| 330 | blk.36.attn_q.weight | 0x28e6fe500 | 0xb40000 | +| 331 | blk.36.attn_v.weight | 0x28f23e500 | 0x2d0000 | +| 332 | blk.36.ffn_down.weight | 0x28f50e500 | 0x5a00000 | +| 333 | blk.36.ffn_gate.weight | 0x294f0e500 | 0x5a00000 | +| 334 | blk.36.ffn_norm.weight | 0x29a90e500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x29a913500 | 0x5a00000 | +| 336 | blk.37.attn_k.weight | 0x2a0313500 | 0x2d0000 | +| 337 | blk.37.attn_norm.weight | 0x2a05e3500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x2a05e8500 | 0xb40000 | +| 339 | blk.37.attn_q.weight | 0x2a1128500 | 0xb40000 | +| 340 | blk.37.attn_v.weight | 0x2a1c68500 | 0x2d0000 | +| 341 | blk.37.ffn_down.weight | 0x2a1f38500 | 0x5a00000 | +| 342 | blk.37.ffn_gate.weight | 0x2a7938500 | 0x5a00000 | +| 343 | blk.37.ffn_norm.weight | 0x2ad338500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x2ad33d500 | 0x5a00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-------| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ4_NL | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | IQ3_S | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M 
Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 15 | blk.1.attn_q.weight 
| Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | 
Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 40 | blk.4.attn_norm.weight 
| Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward 
Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 79 | 
blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-------| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + 
+- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 
1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" 
(W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 
5120 x 4096 x 1 x 1 | IQ3_S | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL 
| +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 
5120 x 1 x 1 | IQ4_NL | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ3_S | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 
204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization 
(W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 228 | 
blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 
242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ3_S | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ3_S | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_XS | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.27: (~556M) 555755520 +- 
Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | 
blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 
| 5120 x 32768 x 1 x 1 | IQ4_NL | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | 
blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-------| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | IQ4_NL | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | IQ4_NL | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | IQ4_NL | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward 
Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | IQ4_NL | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | IQ4_NL | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_L.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_L.md new file mode 100644 index 0000000..7912dfb --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_L.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | 
general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... ] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... 
] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 13 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3\_K\_L.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q3_k_lgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M 
Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : ~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group 
: ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x11300000 | +| 1 | output_norm.weight | 0x11a84500 | 0x5000 | +| 2 | token_embd.weight | 0x11a89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x22d89500 | 0x1a4000 | +| 4 | blk.0.attn_norm.weight | 0x22f2d500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x22f32500 | 0xdc0000 | +| 6 | blk.0.attn_q.weight | 0x23cf2500 | 0x690000 | +| 7 | blk.0.attn_v.weight | 0x24382500 | 0x2d0000 | +| 8 | blk.0.ffn_down.weight | 0x24652500 | 0x6e00000 | +| 9 | blk.0.ffn_gate.weight | 0x2b452500 | 0x3480000 | +| 10 | blk.0.ffn_norm.weight | 0x2e8d2500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x2e8d7500 | 0x3480000 | +| 12 | blk.1.attn_k.weight | 0x31d57500 | 0x1a4000 | +| 13 | blk.1.attn_norm.weight | 0x31efb500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x31f00500 | 0xdc0000 | +| 15 | blk.1.attn_q.weight | 0x32cc0500 | 0x690000 | +| 16 | blk.1.attn_v.weight | 0x33350500 | 0x2d0000 | +| 17 | blk.1.ffn_down.weight | 0x33620500 | 0x6e00000 | +| 18 | blk.1.ffn_gate.weight | 0x3a420500 | 
0x3480000 | +| 19 | blk.1.ffn_norm.weight | 0x3d8a0500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x3d8a5500 | 0x3480000 | +| 21 | blk.2.attn_k.weight | 0x40d25500 | 0x1a4000 | +| 22 | blk.2.attn_norm.weight | 0x40ec9500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x40ece500 | 0xdc0000 | +| 24 | blk.2.attn_q.weight | 0x41c8e500 | 0x690000 | +| 25 | blk.2.attn_v.weight | 0x4231e500 | 0x2d0000 | +| 26 | blk.2.ffn_down.weight | 0x425ee500 | 0x6e00000 | +| 27 | blk.2.ffn_gate.weight | 0x493ee500 | 0x3480000 | +| 28 | blk.2.ffn_norm.weight | 0x4c86e500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x4c873500 | 0x3480000 | +| 30 | blk.3.attn_k.weight | 0x4fcf3500 | 0x1a4000 | +| 31 | blk.3.attn_norm.weight | 0x4fe97500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x4fe9c500 | 0xdc0000 | +| 33 | blk.3.attn_q.weight | 0x50c5c500 | 0x690000 | +| 34 | blk.3.attn_v.weight | 0x512ec500 | 0x2d0000 | +| 35 | blk.3.ffn_down.weight | 0x515bc500 | 0x6e00000 | +| 36 | blk.3.ffn_gate.weight | 0x583bc500 | 0x3480000 | +| 37 | blk.3.ffn_norm.weight | 0x5b83c500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x5b841500 | 0x3480000 | +| 39 | blk.4.attn_k.weight | 0x5ecc1500 | 0x1a4000 | +| 40 | blk.4.attn_norm.weight | 0x5ee65500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x5ee6a500 | 0xdc0000 | +| 42 | blk.4.attn_q.weight | 0x5fc2a500 | 0x690000 | +| 43 | blk.4.attn_v.weight | 0x602ba500 | 0x2d0000 | +| 44 | blk.4.ffn_down.weight | 0x6058a500 | 0x6e00000 | +| 45 | blk.4.ffn_gate.weight | 0x6738a500 | 0x3480000 | +| 46 | blk.4.ffn_norm.weight | 0x6a80a500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x6a80f500 | 0x3480000 | +| 48 | blk.5.attn_k.weight | 0x6dc8f500 | 0x1a4000 | +| 49 | blk.5.attn_norm.weight | 0x6de33500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x6de38500 | 0xdc0000 | +| 51 | blk.5.attn_q.weight | 0x6ebf8500 | 0x690000 | +| 52 | blk.5.attn_v.weight | 0x6f288500 | 0x2d0000 | +| 53 | blk.5.ffn_down.weight | 0x6f558500 | 0x6e00000 | +| 54 | blk.5.ffn_gate.weight | 0x76358500 | 
0x3480000 | +| 55 | blk.5.ffn_norm.weight | 0x797d8500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x797dd500 | 0x3480000 | +| 57 | blk.6.attn_k.weight | 0x7cc5d500 | 0x1a4000 | +| 58 | blk.6.attn_norm.weight | 0x7ce01500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x7ce06500 | 0xdc0000 | +| 60 | blk.6.attn_q.weight | 0x7dbc6500 | 0x690000 | +| 61 | blk.6.attn_v.weight | 0x7e256500 | 0x2d0000 | +| 62 | blk.6.ffn_down.weight | 0x7e526500 | 0x6e00000 | +| 63 | blk.6.ffn_gate.weight | 0x85326500 | 0x3480000 | +| 64 | blk.6.ffn_norm.weight | 0x887a6500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x887ab500 | 0x3480000 | +| 66 | blk.7.attn_k.weight | 0x8bc2b500 | 0x1a4000 | +| 67 | blk.7.attn_norm.weight | 0x8bdcf500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x8bdd4500 | 0xdc0000 | +| 69 | blk.7.attn_q.weight | 0x8cb94500 | 0x690000 | +| 70 | blk.7.attn_v.weight | 0x8d224500 | 0x2d0000 | +| 71 | blk.7.ffn_down.weight | 0x8d4f4500 | 0x6e00000 | +| 72 | blk.7.ffn_gate.weight | 0x942f4500 | 0x3480000 | +| 73 | blk.7.ffn_norm.weight | 0x97774500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x97779500 | 0x3480000 | +| 75 | blk.8.attn_k.weight | 0x9abf9500 | 0x1a4000 | +| 76 | blk.8.attn_norm.weight | 0x9ad9d500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x9ada2500 | 0xdc0000 | +| 78 | blk.8.attn_q.weight | 0x9bb62500 | 0x690000 | +| 79 | blk.8.attn_v.weight | 0x9c1f2500 | 0x2d0000 | +| 80 | blk.8.ffn_down.weight | 0x9c4c2500 | 0x6e00000 | +| 81 | blk.8.ffn_gate.weight | 0xa32c2500 | 0x3480000 | +| 82 | blk.8.ffn_norm.weight | 0xa6742500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xa6747500 | 0x3480000 | +| 84 | blk.9.attn_k.weight | 0xa9bc7500 | 0x1a4000 | +| 85 | blk.9.attn_norm.weight | 0xa9d6b500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xa9d70500 | 0xdc0000 | +| 87 | blk.9.attn_q.weight | 0xaab30500 | 0x690000 | +| 88 | blk.9.attn_v.weight | 0xab1c0500 | 0x2d0000 | +| 89 | blk.9.ffn_down.weight | 0xab490500 | 0x6e00000 | +| 90 | blk.9.ffn_gate.weight | 0xb2290500 | 
0x3480000 | +| 91 | blk.9.ffn_norm.weight | 0xb5710500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xb5715500 | 0x3480000 | +| 93 | blk.10.attn_k.weight | 0xb8b95500 | 0x1a4000 | +| 94 | blk.10.attn_norm.weight | 0xb8d39500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xb8d3e500 | 0xdc0000 | +| 96 | blk.10.attn_q.weight | 0xb9afe500 | 0x690000 | +| 97 | blk.10.attn_v.weight | 0xba18e500 | 0x2d0000 | +| 98 | blk.10.ffn_down.weight | 0xba45e500 | 0x6e00000 | +| 99 | blk.10.ffn_gate.weight | 0xc125e500 | 0x3480000 | +| 100 | blk.10.ffn_norm.weight | 0xc46de500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xc46e3500 | 0x3480000 | +| 102 | blk.11.attn_k.weight | 0xc7b63500 | 0x1a4000 | +| 103 | blk.11.attn_norm.weight | 0xc7d07500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xc7d0c500 | 0xdc0000 | +| 105 | blk.11.attn_q.weight | 0xc8acc500 | 0x690000 | +| 106 | blk.11.attn_v.weight | 0xc915c500 | 0x2d0000 | +| 107 | blk.11.ffn_down.weight | 0xc942c500 | 0x6e00000 | +| 108 | blk.11.ffn_gate.weight | 0xd022c500 | 0x3480000 | +| 109 | blk.11.ffn_norm.weight | 0xd36ac500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xd36b1500 | 0x3480000 | +| 111 | blk.12.attn_k.weight | 0xd6b31500 | 0x1a4000 | +| 112 | blk.12.attn_norm.weight | 0xd6cd5500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xd6cda500 | 0xdc0000 | +| 114 | blk.12.attn_q.weight | 0xd7a9a500 | 0x690000 | +| 115 | blk.12.attn_v.weight | 0xd812a500 | 0x2d0000 | +| 116 | blk.12.ffn_down.weight | 0xd83fa500 | 0x6e00000 | +| 117 | blk.12.ffn_gate.weight | 0xdf1fa500 | 0x3480000 | +| 118 | blk.12.ffn_norm.weight | 0xe267a500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xe267f500 | 0x3480000 | +| 120 | blk.13.attn_k.weight | 0xe5aff500 | 0x1a4000 | +| 121 | blk.13.attn_norm.weight | 0xe5ca3500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xe5ca8500 | 0xdc0000 | +| 123 | blk.13.attn_q.weight | 0xe6a68500 | 0x690000 | +| 124 | blk.13.attn_v.weight | 0xe70f8500 | 0x2d0000 | +| 125 | blk.13.ffn_down.weight | 0xe73c8500 | 
0x6e00000 | +| 126 | blk.13.ffn_gate.weight | 0xee1c8500 | 0x3480000 | +| 127 | blk.13.ffn_norm.weight | 0xf1648500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0xf164d500 | 0x3480000 | +| 129 | blk.14.attn_k.weight | 0xf4acd500 | 0x1a4000 | +| 130 | blk.14.attn_norm.weight | 0xf4c71500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0xf4c76500 | 0xdc0000 | +| 132 | blk.14.attn_q.weight | 0xf5a36500 | 0x690000 | +| 133 | blk.14.attn_v.weight | 0xf60c6500 | 0x2d0000 | +| 134 | blk.14.ffn_down.weight | 0xf6396500 | 0x6e00000 | +| 135 | blk.14.ffn_gate.weight | 0xfd196500 | 0x3480000 | +| 136 | blk.14.ffn_norm.weight | 0x100616500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x10061b500 | 0x3480000 | +| 138 | blk.15.attn_k.weight | 0x103a9b500 | 0x1a4000 | +| 139 | blk.15.attn_norm.weight | 0x103c3f500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x103c44500 | 0xdc0000 | +| 141 | blk.15.attn_q.weight | 0x104a04500 | 0x690000 | +| 142 | blk.15.attn_v.weight | 0x105094500 | 0x2d0000 | +| 143 | blk.15.ffn_down.weight | 0x105364500 | 0x6e00000 | +| 144 | blk.15.ffn_gate.weight | 0x10c164500 | 0x3480000 | +| 145 | blk.15.ffn_norm.weight | 0x10f5e4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x10f5e9500 | 0x3480000 | +| 147 | blk.16.attn_k.weight | 0x112a69500 | 0x1a4000 | +| 148 | blk.16.attn_norm.weight | 0x112c0d500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x112c12500 | 0xdc0000 | +| 150 | blk.16.attn_q.weight | 0x1139d2500 | 0x690000 | +| 151 | blk.16.attn_v.weight | 0x114062500 | 0x2d0000 | +| 152 | blk.16.ffn_down.weight | 0x114332500 | 0x6e00000 | +| 153 | blk.16.ffn_gate.weight | 0x11b132500 | 0x3480000 | +| 154 | blk.16.ffn_norm.weight | 0x11e5b2500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x11e5b7500 | 0x3480000 | +| 156 | blk.17.attn_k.weight | 0x121a37500 | 0x226000 | +| 157 | blk.17.attn_norm.weight | 0x121c5d500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x121c62500 | 0xdc0000 | +| 159 | blk.17.attn_q.weight | 0x122a22500 | 0x898000 | +| 160 | 
blk.17.attn_v.weight | 0x1232ba500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x12358a500 | 0x6e00000 | +| 162 | blk.17.ffn_gate.weight | 0x12a38a500 | 0x3480000 | +| 163 | blk.17.ffn_norm.weight | 0x12d80a500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x12d80f500 | 0x3480000 | +| 165 | blk.18.attn_k.weight | 0x130c8f500 | 0x226000 | +| 166 | blk.18.attn_norm.weight | 0x130eb5500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x130eba500 | 0xdc0000 | +| 168 | blk.18.attn_q.weight | 0x131c7a500 | 0x898000 | +| 169 | blk.18.attn_v.weight | 0x132512500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x1327e2500 | 0x6e00000 | +| 171 | blk.18.ffn_gate.weight | 0x1395e2500 | 0x3480000 | +| 172 | blk.18.ffn_norm.weight | 0x13ca62500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x13ca67500 | 0x3480000 | +| 174 | blk.19.attn_k.weight | 0x13fee7500 | 0x1a4000 | +| 175 | blk.19.attn_norm.weight | 0x14008b500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x140090500 | 0xdc0000 | +| 177 | blk.19.attn_q.weight | 0x140e50500 | 0x690000 | +| 178 | blk.19.attn_v.weight | 0x1414e0500 | 0x2d0000 | +| 179 | blk.19.ffn_down.weight | 0x1417b0500 | 0x6e00000 | +| 180 | blk.19.ffn_gate.weight | 0x1485b0500 | 0x3480000 | +| 181 | blk.19.ffn_norm.weight | 0x14ba30500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x14ba35500 | 0x3480000 | +| 183 | blk.20.attn_k.weight | 0x14eeb5500 | 0x226000 | +| 184 | blk.20.attn_norm.weight | 0x14f0db500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x14f0e0500 | 0xdc0000 | +| 186 | blk.20.attn_q.weight | 0x14fea0500 | 0x898000 | +| 187 | blk.20.attn_v.weight | 0x150738500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x150a08500 | 0x6e00000 | +| 189 | blk.20.ffn_gate.weight | 0x157808500 | 0x44c0000 | +| 190 | blk.20.ffn_norm.weight | 0x15bcc8500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x15bccd500 | 0x44c0000 | +| 192 | blk.21.attn_k.weight | 0x16018d500 | 0x1a4000 | +| 193 | blk.21.attn_norm.weight | 0x160331500 | 0x5000 | +| 194 | 
blk.21.attn_output.weight | 0x160336500 | 0xdc0000 | +| 195 | blk.21.attn_q.weight | 0x1610f6500 | 0x690000 | +| 196 | blk.21.attn_v.weight | 0x161786500 | 0x2d0000 | +| 197 | blk.21.ffn_down.weight | 0x161a56500 | 0x6e00000 | +| 198 | blk.21.ffn_gate.weight | 0x168856500 | 0x44c0000 | +| 199 | blk.21.ffn_norm.weight | 0x16cd16500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x16cd1b500 | 0x44c0000 | +| 201 | blk.22.attn_k.weight | 0x1711db500 | 0x226000 | +| 202 | blk.22.attn_norm.weight | 0x171401500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x171406500 | 0xdc0000 | +| 204 | blk.22.attn_q.weight | 0x1721c6500 | 0x898000 | +| 205 | blk.22.attn_v.weight | 0x172a5e500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x172d2e500 | 0x6e00000 | +| 207 | blk.22.ffn_gate.weight | 0x179b2e500 | 0x44c0000 | +| 208 | blk.22.ffn_norm.weight | 0x17dfee500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x17dff3500 | 0x44c0000 | +| 210 | blk.23.attn_k.weight | 0x1824b3500 | 0x226000 | +| 211 | blk.23.attn_norm.weight | 0x1826d9500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x1826de500 | 0xdc0000 | +| 213 | blk.23.attn_q.weight | 0x18349e500 | 0x898000 | +| 214 | blk.23.attn_v.weight | 0x183d36500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x184006500 | 0x6e00000 | +| 216 | blk.23.ffn_gate.weight | 0x18ae06500 | 0x44c0000 | +| 217 | blk.23.ffn_norm.weight | 0x18f2c6500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x18f2cb500 | 0x44c0000 | +| 219 | blk.24.attn_k.weight | 0x19378b500 | 0x226000 | +| 220 | blk.24.attn_norm.weight | 0x1939b1500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x1939b6500 | 0xdc0000 | +| 222 | blk.24.attn_q.weight | 0x194776500 | 0x898000 | +| 223 | blk.24.attn_v.weight | 0x19500e500 | 0x2d0000 | +| 224 | blk.24.ffn_down.weight | 0x1952de500 | 0x6e00000 | +| 225 | blk.24.ffn_gate.weight | 0x19c0de500 | 0x44c0000 | +| 226 | blk.24.ffn_norm.weight | 0x1a059e500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x1a05a3500 | 0x44c0000 | +| 228 | 
blk.25.attn_k.weight | 0x1a4a63500 | 0x226000 | +| 229 | blk.25.attn_norm.weight | 0x1a4c89500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x1a4c8e500 | 0xdc0000 | +| 231 | blk.25.attn_q.weight | 0x1a5a4e500 | 0x898000 | +| 232 | blk.25.attn_v.weight | 0x1a62e6500 | 0x2d0000 | +| 233 | blk.25.ffn_down.weight | 0x1a65b6500 | 0x6e00000 | +| 234 | blk.25.ffn_gate.weight | 0x1ad3b6500 | 0x44c0000 | +| 235 | blk.25.ffn_norm.weight | 0x1b1876500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x1b187b500 | 0x44c0000 | +| 237 | blk.26.attn_k.weight | 0x1b5d3b500 | 0x226000 | +| 238 | blk.26.attn_norm.weight | 0x1b5f61500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x1b5f66500 | 0xdc0000 | +| 240 | blk.26.attn_q.weight | 0x1b6d26500 | 0x898000 | +| 241 | blk.26.attn_v.weight | 0x1b75be500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x1b788e500 | 0x6e00000 | +| 243 | blk.26.ffn_gate.weight | 0x1be68e500 | 0x44c0000 | +| 244 | blk.26.ffn_norm.weight | 0x1c2b4e500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x1c2b53500 | 0x44c0000 | +| 246 | blk.27.attn_k.weight | 0x1c7013500 | 0x1a4000 | +| 247 | blk.27.attn_norm.weight | 0x1c71b7500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x1c71bc500 | 0xdc0000 | +| 249 | blk.27.attn_q.weight | 0x1c7f7c500 | 0x690000 | +| 250 | blk.27.attn_v.weight | 0x1c860c500 | 0x2d0000 | +| 251 | blk.27.ffn_down.weight | 0x1c88dc500 | 0x6e00000 | +| 252 | blk.27.ffn_gate.weight | 0x1cf6dc500 | 0x44c0000 | +| 253 | blk.27.ffn_norm.weight | 0x1d3b9c500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x1d3ba1500 | 0x44c0000 | +| 255 | blk.28.attn_k.weight | 0x1d8061500 | 0x226000 | +| 256 | blk.28.attn_norm.weight | 0x1d8287500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1d828c500 | 0xdc0000 | +| 258 | blk.28.attn_q.weight | 0x1d904c500 | 0x898000 | +| 259 | blk.28.attn_v.weight | 0x1d98e4500 | 0x2d0000 | +| 260 | blk.28.ffn_down.weight | 0x1d9bb4500 | 0x6e00000 | +| 261 | blk.28.ffn_gate.weight | 0x1e09b4500 | 0x44c0000 | +| 262 | 
blk.28.ffn_norm.weight | 0x1e4e74500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x1e4e79500 | 0x44c0000 | +| 264 | blk.29.attn_k.weight | 0x1e9339500 | 0x226000 | +| 265 | blk.29.attn_norm.weight | 0x1e955f500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x1e9564500 | 0xdc0000 | +| 267 | blk.29.attn_q.weight | 0x1ea324500 | 0x898000 | +| 268 | blk.29.attn_v.weight | 0x1eabbc500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x1eae8c500 | 0x6e00000 | +| 270 | blk.29.ffn_gate.weight | 0x1f1c8c500 | 0x44c0000 | +| 271 | blk.29.ffn_norm.weight | 0x1f614c500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x1f6151500 | 0x44c0000 | +| 273 | blk.30.attn_k.weight | 0x1fa611500 | 0x226000 | +| 274 | blk.30.attn_norm.weight | 0x1fa837500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x1fa83c500 | 0xdc0000 | +| 276 | blk.30.attn_q.weight | 0x1fb5fc500 | 0x898000 | +| 277 | blk.30.attn_v.weight | 0x1fbe94500 | 0x2d0000 | +| 278 | blk.30.ffn_down.weight | 0x1fc164500 | 0x6e00000 | +| 279 | blk.30.ffn_gate.weight | 0x202f64500 | 0x44c0000 | +| 280 | blk.30.ffn_norm.weight | 0x207424500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x207429500 | 0x44c0000 | +| 282 | blk.31.attn_k.weight | 0x20b8e9500 | 0x226000 | +| 283 | blk.31.attn_norm.weight | 0x20bb0f500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x20bb14500 | 0xdc0000 | +| 285 | blk.31.attn_q.weight | 0x20c8d4500 | 0x898000 | +| 286 | blk.31.attn_v.weight | 0x20d16c500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x20d43c500 | 0x6e00000 | +| 288 | blk.31.ffn_gate.weight | 0x21423c500 | 0x44c0000 | +| 289 | blk.31.ffn_norm.weight | 0x2186fc500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x218701500 | 0x44c0000 | +| 291 | blk.32.attn_k.weight | 0x21cbc1500 | 0x226000 | +| 292 | blk.32.attn_norm.weight | 0x21cde7500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x21cdec500 | 0xdc0000 | +| 294 | blk.32.attn_q.weight | 0x21dbac500 | 0x898000 | +| 295 | blk.32.attn_v.weight | 0x21e444500 | 0x2d0000 | +| 296 | 
blk.32.ffn_down.weight | 0x21e714500 | 0x6e00000 | +| 297 | blk.32.ffn_gate.weight | 0x225514500 | 0x44c0000 | +| 298 | blk.32.ffn_norm.weight | 0x2299d4500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x2299d9500 | 0x44c0000 | +| 300 | blk.33.attn_k.weight | 0x22de99500 | 0x226000 | +| 301 | blk.33.attn_norm.weight | 0x22e0bf500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x22e0c4500 | 0xdc0000 | +| 303 | blk.33.attn_q.weight | 0x22ee84500 | 0x898000 | +| 304 | blk.33.attn_v.weight | 0x22f71c500 | 0x2d0000 | +| 305 | blk.33.ffn_down.weight | 0x22f9ec500 | 0x6e00000 | +| 306 | blk.33.ffn_gate.weight | 0x2367ec500 | 0x44c0000 | +| 307 | blk.33.ffn_norm.weight | 0x23acac500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x23acb1500 | 0x44c0000 | +| 309 | blk.34.attn_k.weight | 0x23f171500 | 0x226000 | +| 310 | blk.34.attn_norm.weight | 0x23f397500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x23f39c500 | 0xdc0000 | +| 312 | blk.34.attn_q.weight | 0x24015c500 | 0x898000 | +| 313 | blk.34.attn_v.weight | 0x2409f4500 | 0x2d0000 | +| 314 | blk.34.ffn_down.weight | 0x240cc4500 | 0x6e00000 | +| 315 | blk.34.ffn_gate.weight | 0x247ac4500 | 0x44c0000 | +| 316 | blk.34.ffn_norm.weight | 0x24bf84500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x24bf89500 | 0x44c0000 | +| 318 | blk.35.attn_k.weight | 0x250449500 | 0x226000 | +| 319 | blk.35.attn_norm.weight | 0x25066f500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x250674500 | 0xdc0000 | +| 321 | blk.35.attn_q.weight | 0x251434500 | 0x898000 | +| 322 | blk.35.attn_v.weight | 0x251ccc500 | 0x2d0000 | +| 323 | blk.35.ffn_down.weight | 0x251f9c500 | 0x6e00000 | +| 324 | blk.35.ffn_gate.weight | 0x258d9c500 | 0x44c0000 | +| 325 | blk.35.ffn_norm.weight | 0x25d25c500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x25d261500 | 0x44c0000 | +| 327 | blk.36.attn_k.weight | 0x261721500 | 0x226000 | +| 328 | blk.36.attn_norm.weight | 0x261947500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x26194c500 | 0xdc0000 | +| 330 | 
blk.36.attn_q.weight | 0x26270c500 | 0x898000 | +| 331 | blk.36.attn_v.weight | 0x262fa4500 | 0x2d0000 | +| 332 | blk.36.ffn_down.weight | 0x263274500 | 0x6e00000 | +| 333 | blk.36.ffn_gate.weight | 0x26a074500 | 0x44c0000 | +| 334 | blk.36.ffn_norm.weight | 0x26e534500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x26e539500 | 0x44c0000 | +| 336 | blk.37.attn_k.weight | 0x2729f9500 | 0x226000 | +| 337 | blk.37.attn_norm.weight | 0x272c1f500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x272c24500 | 0xdc0000 | +| 339 | blk.37.attn_q.weight | 0x2739e4500 | 0x898000 | +| 340 | blk.37.attn_v.weight | 0x27427c500 | 0x2d0000 | +| 341 | blk.37.ffn_down.weight | 0x27454c500 | 0x6e00000 | +| 342 | blk.37.ffn_gate.weight | 0x27b34c500 | 0x44c0000 | +| 343 | blk.37.ffn_norm.weight | 0x27f80c500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x27f811500 | 0x44c0000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | 
blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 19 | blk.1.ffn_norm.weight | Block 1 
Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 
Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 
5120 x 1 x 1 | Q5_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | 
Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 70 | 
blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements 
in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | 
Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 109 | 
blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 
x 1 | Q2_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 157 | blk.17.attn_norm.weight | Block 17 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 171 | blk.18.ffn_gate.weight | Block 
18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 
x 1 | Q2_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 220 | blk.24.attn_norm.weight | Block 24 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 234 | blk.25.ffn_gate.weight | Block 
25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 
x 1 | Q3_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 283 | blk.31.attn_norm.weight | Block 31 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 297 | blk.32.ffn_gate.weight | Block 
32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 
x 1 | Q3_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_M.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_M.md new file mode 100644 index 0000000..e2b31f3 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_M.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in 
this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 12 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3\_K\_M.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q3_k_mgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x11300000 | +| 1 | output_norm.weight | 0x11a84500 | 0x5000 | +| 2 | token_embd.weight | 0x11a89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x22d89500 | 0x1a4000 | +| 4 | blk.0.attn_norm.weight | 0x22f2d500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x22f32500 | 0xb40000 | +| 6 | blk.0.attn_q.weight | 0x23a72500 | 0x690000 | +| 7 | blk.0.attn_v.weight | 0x24102500 | 0x226000 | +| 8 | blk.0.ffn_down.weight | 0x24328500 | 0x6e00000 | +| 9 | blk.0.ffn_gate.weight | 0x2b128500 | 0x3480000 | +| 10 | blk.0.ffn_norm.weight | 0x2e5a8500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x2e5ad500 | 0x3480000 | +| 12 | blk.1.attn_k.weight | 0x31a2d500 | 0x1a4000 | +| 13 | blk.1.attn_norm.weight | 0x31bd1500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x31bd6500 | 0xb40000 | +| 15 | blk.1.attn_q.weight | 0x32716500 | 
0x690000 | +| 16 | blk.1.attn_v.weight | 0x32da6500 | 0x226000 | +| 17 | blk.1.ffn_down.weight | 0x32fcc500 | 0x6e00000 | +| 18 | blk.1.ffn_gate.weight | 0x39dcc500 | 0x3480000 | +| 19 | blk.1.ffn_norm.weight | 0x3d24c500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x3d251500 | 0x3480000 | +| 21 | blk.2.attn_k.weight | 0x406d1500 | 0x1a4000 | +| 22 | blk.2.attn_norm.weight | 0x40875500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x4087a500 | 0xb40000 | +| 24 | blk.2.attn_q.weight | 0x413ba500 | 0x690000 | +| 25 | blk.2.attn_v.weight | 0x41a4a500 | 0x226000 | +| 26 | blk.2.ffn_down.weight | 0x41c70500 | 0x5a00000 | +| 27 | blk.2.ffn_gate.weight | 0x47670500 | 0x3480000 | +| 28 | blk.2.ffn_norm.weight | 0x4aaf0500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x4aaf5500 | 0x3480000 | +| 30 | blk.3.attn_k.weight | 0x4df75500 | 0x1a4000 | +| 31 | blk.3.attn_norm.weight | 0x4e119500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x4e11e500 | 0xb40000 | +| 33 | blk.3.attn_q.weight | 0x4ec5e500 | 0x690000 | +| 34 | blk.3.attn_v.weight | 0x4f2ee500 | 0x226000 | +| 35 | blk.3.ffn_down.weight | 0x4f514500 | 0x5a00000 | +| 36 | blk.3.ffn_gate.weight | 0x54f14500 | 0x3480000 | +| 37 | blk.3.ffn_norm.weight | 0x58394500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x58399500 | 0x3480000 | +| 39 | blk.4.attn_k.weight | 0x5b819500 | 0x1a4000 | +| 40 | blk.4.attn_norm.weight | 0x5b9bd500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x5b9c2500 | 0xb40000 | +| 42 | blk.4.attn_q.weight | 0x5c502500 | 0x690000 | +| 43 | blk.4.attn_v.weight | 0x5cb92500 | 0x226000 | +| 44 | blk.4.ffn_down.weight | 0x5cdb8500 | 0x5a00000 | +| 45 | blk.4.ffn_gate.weight | 0x627b8500 | 0x3480000 | +| 46 | blk.4.ffn_norm.weight | 0x65c38500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x65c3d500 | 0x3480000 | +| 48 | blk.5.attn_k.weight | 0x690bd500 | 0x1a4000 | +| 49 | blk.5.attn_norm.weight | 0x69261500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x69266500 | 0xb40000 | +| 51 | blk.5.attn_q.weight | 0x69da6500 | 
0x690000 | +| 52 | blk.5.attn_v.weight | 0x6a436500 | 0x226000 | +| 53 | blk.5.ffn_down.weight | 0x6a65c500 | 0x5a00000 | +| 54 | blk.5.ffn_gate.weight | 0x7005c500 | 0x3480000 | +| 55 | blk.5.ffn_norm.weight | 0x734dc500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x734e1500 | 0x3480000 | +| 57 | blk.6.attn_k.weight | 0x76961500 | 0x1a4000 | +| 58 | blk.6.attn_norm.weight | 0x76b05500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x76b0a500 | 0xb40000 | +| 60 | blk.6.attn_q.weight | 0x7764a500 | 0x690000 | +| 61 | blk.6.attn_v.weight | 0x77cda500 | 0x226000 | +| 62 | blk.6.ffn_down.weight | 0x77f00500 | 0x5a00000 | +| 63 | blk.6.ffn_gate.weight | 0x7d900500 | 0x3480000 | +| 64 | blk.6.ffn_norm.weight | 0x80d80500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x80d85500 | 0x3480000 | +| 66 | blk.7.attn_k.weight | 0x84205500 | 0x1a4000 | +| 67 | blk.7.attn_norm.weight | 0x843a9500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x843ae500 | 0xb40000 | +| 69 | blk.7.attn_q.weight | 0x84eee500 | 0x690000 | +| 70 | blk.7.attn_v.weight | 0x8557e500 | 0x226000 | +| 71 | blk.7.ffn_down.weight | 0x857a4500 | 0x5a00000 | +| 72 | blk.7.ffn_gate.weight | 0x8b1a4500 | 0x3480000 | +| 73 | blk.7.ffn_norm.weight | 0x8e624500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x8e629500 | 0x3480000 | +| 75 | blk.8.attn_k.weight | 0x91aa9500 | 0x1a4000 | +| 76 | blk.8.attn_norm.weight | 0x91c4d500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x91c52500 | 0xb40000 | +| 78 | blk.8.attn_q.weight | 0x92792500 | 0x690000 | +| 79 | blk.8.attn_v.weight | 0x92e22500 | 0x226000 | +| 80 | blk.8.ffn_down.weight | 0x93048500 | 0x5a00000 | +| 81 | blk.8.ffn_gate.weight | 0x98a48500 | 0x3480000 | +| 82 | blk.8.ffn_norm.weight | 0x9bec8500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x9becd500 | 0x3480000 | +| 84 | blk.9.attn_k.weight | 0x9f34d500 | 0x1a4000 | +| 85 | blk.9.attn_norm.weight | 0x9f4f1500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0x9f4f6500 | 0xb40000 | +| 87 | blk.9.attn_q.weight | 0xa0036500 | 
0x690000 | +| 88 | blk.9.attn_v.weight | 0xa06c6500 | 0x226000 | +| 89 | blk.9.ffn_down.weight | 0xa08ec500 | 0x5a00000 | +| 90 | blk.9.ffn_gate.weight | 0xa62ec500 | 0x3480000 | +| 91 | blk.9.ffn_norm.weight | 0xa976c500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xa9771500 | 0x3480000 | +| 93 | blk.10.attn_k.weight | 0xacbf1500 | 0x1a4000 | +| 94 | blk.10.attn_norm.weight | 0xacd95500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xacd9a500 | 0xb40000 | +| 96 | blk.10.attn_q.weight | 0xad8da500 | 0x690000 | +| 97 | blk.10.attn_v.weight | 0xadf6a500 | 0x226000 | +| 98 | blk.10.ffn_down.weight | 0xae190500 | 0x5a00000 | +| 99 | blk.10.ffn_gate.weight | 0xb3b90500 | 0x3480000 | +| 100 | blk.10.ffn_norm.weight | 0xb7010500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xb7015500 | 0x3480000 | +| 102 | blk.11.attn_k.weight | 0xba495500 | 0x1a4000 | +| 103 | blk.11.attn_norm.weight | 0xba639500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xba63e500 | 0xb40000 | +| 105 | blk.11.attn_q.weight | 0xbb17e500 | 0x690000 | +| 106 | blk.11.attn_v.weight | 0xbb80e500 | 0x226000 | +| 107 | blk.11.ffn_down.weight | 0xbba34500 | 0x5a00000 | +| 108 | blk.11.ffn_gate.weight | 0xc1434500 | 0x3480000 | +| 109 | blk.11.ffn_norm.weight | 0xc48b4500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xc48b9500 | 0x3480000 | +| 111 | blk.12.attn_k.weight | 0xc7d39500 | 0x1a4000 | +| 112 | blk.12.attn_norm.weight | 0xc7edd500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xc7ee2500 | 0xb40000 | +| 114 | blk.12.attn_q.weight | 0xc8a22500 | 0x690000 | +| 115 | blk.12.attn_v.weight | 0xc90b2500 | 0x226000 | +| 116 | blk.12.ffn_down.weight | 0xc92d8500 | 0x5a00000 | +| 117 | blk.12.ffn_gate.weight | 0xcecd8500 | 0x3480000 | +| 118 | blk.12.ffn_norm.weight | 0xd2158500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xd215d500 | 0x3480000 | +| 120 | blk.13.attn_k.weight | 0xd55dd500 | 0x1a4000 | +| 121 | blk.13.attn_norm.weight | 0xd5781500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xd5786500 | 
0xb40000 | +| 123 | blk.13.attn_q.weight | 0xd62c6500 | 0x690000 | +| 124 | blk.13.attn_v.weight | 0xd6956500 | 0x226000 | +| 125 | blk.13.ffn_down.weight | 0xd6b7c500 | 0x5a00000 | +| 126 | blk.13.ffn_gate.weight | 0xdc57c500 | 0x3480000 | +| 127 | blk.13.ffn_norm.weight | 0xdf9fc500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0xdfa01500 | 0x3480000 | +| 129 | blk.14.attn_k.weight | 0xe2e81500 | 0x1a4000 | +| 130 | blk.14.attn_norm.weight | 0xe3025500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0xe302a500 | 0xb40000 | +| 132 | blk.14.attn_q.weight | 0xe3b6a500 | 0x690000 | +| 133 | blk.14.attn_v.weight | 0xe41fa500 | 0x226000 | +| 134 | blk.14.ffn_down.weight | 0xe4420500 | 0x5a00000 | +| 135 | blk.14.ffn_gate.weight | 0xe9e20500 | 0x3480000 | +| 136 | blk.14.ffn_norm.weight | 0xed2a0500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0xed2a5500 | 0x3480000 | +| 138 | blk.15.attn_k.weight | 0xf0725500 | 0x1a4000 | +| 139 | blk.15.attn_norm.weight | 0xf08c9500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0xf08ce500 | 0xb40000 | +| 141 | blk.15.attn_q.weight | 0xf140e500 | 0x690000 | +| 142 | blk.15.attn_v.weight | 0xf1a9e500 | 0x226000 | +| 143 | blk.15.ffn_down.weight | 0xf1cc4500 | 0x5a00000 | +| 144 | blk.15.ffn_gate.weight | 0xf76c4500 | 0x3480000 | +| 145 | blk.15.ffn_norm.weight | 0xfab44500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0xfab49500 | 0x3480000 | +| 147 | blk.16.attn_k.weight | 0xfdfc9500 | 0x1a4000 | +| 148 | blk.16.attn_norm.weight | 0xfe16d500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0xfe172500 | 0xb40000 | +| 150 | blk.16.attn_q.weight | 0xfecb2500 | 0x690000 | +| 151 | blk.16.attn_v.weight | 0xff342500 | 0x226000 | +| 152 | blk.16.ffn_down.weight | 0xff568500 | 0x5a00000 | +| 153 | blk.16.ffn_gate.weight | 0x104f68500 | 0x3480000 | +| 154 | blk.16.ffn_norm.weight | 0x1083e8500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x1083ed500 | 0x3480000 | +| 156 | blk.17.attn_k.weight | 0x10b86d500 | 0x226000 | +| 157 | blk.17.attn_norm.weight | 
0x10ba93500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x10ba98500 | 0xb40000 | +| 159 | blk.17.attn_q.weight | 0x10c5d8500 | 0x898000 | +| 160 | blk.17.attn_v.weight | 0x10ce70500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x10d140500 | 0x5a00000 | +| 162 | blk.17.ffn_gate.weight | 0x112b40500 | 0x3480000 | +| 163 | blk.17.ffn_norm.weight | 0x115fc0500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x115fc5500 | 0x3480000 | +| 165 | blk.18.attn_k.weight | 0x119445500 | 0x226000 | +| 166 | blk.18.attn_norm.weight | 0x11966b500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x119670500 | 0xb40000 | +| 168 | blk.18.attn_q.weight | 0x11a1b0500 | 0x898000 | +| 169 | blk.18.attn_v.weight | 0x11aa48500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x11ad18500 | 0x5a00000 | +| 171 | blk.18.ffn_gate.weight | 0x120718500 | 0x3480000 | +| 172 | blk.18.ffn_norm.weight | 0x123b98500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x123b9d500 | 0x3480000 | +| 174 | blk.19.attn_k.weight | 0x12701d500 | 0x1a4000 | +| 175 | blk.19.attn_norm.weight | 0x1271c1500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x1271c6500 | 0xb40000 | +| 177 | blk.19.attn_q.weight | 0x127d06500 | 0x690000 | +| 178 | blk.19.attn_v.weight | 0x128396500 | 0x226000 | +| 179 | blk.19.ffn_down.weight | 0x1285bc500 | 0x5a00000 | +| 180 | blk.19.ffn_gate.weight | 0x12dfbc500 | 0x3480000 | +| 181 | blk.19.ffn_norm.weight | 0x13143c500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x131441500 | 0x3480000 | +| 183 | blk.20.attn_k.weight | 0x1348c1500 | 0x226000 | +| 184 | blk.20.attn_norm.weight | 0x134ae7500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x134aec500 | 0xb40000 | +| 186 | blk.20.attn_q.weight | 0x13562c500 | 0x898000 | +| 187 | blk.20.attn_v.weight | 0x135ec4500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x136194500 | 0x5a00000 | +| 189 | blk.20.ffn_gate.weight | 0x13bb94500 | 0x44c0000 | +| 190 | blk.20.ffn_norm.weight | 0x140054500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x140059500 | 
0x44c0000 | +| 192 | blk.21.attn_k.weight | 0x144519500 | 0x1a4000 | +| 193 | blk.21.attn_norm.weight | 0x1446bd500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x1446c2500 | 0xb40000 | +| 195 | blk.21.attn_q.weight | 0x145202500 | 0x690000 | +| 196 | blk.21.attn_v.weight | 0x145892500 | 0x226000 | +| 197 | blk.21.ffn_down.weight | 0x145ab8500 | 0x5a00000 | +| 198 | blk.21.ffn_gate.weight | 0x14b4b8500 | 0x44c0000 | +| 199 | blk.21.ffn_norm.weight | 0x14f978500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x14f97d500 | 0x44c0000 | +| 201 | blk.22.attn_k.weight | 0x153e3d500 | 0x226000 | +| 202 | blk.22.attn_norm.weight | 0x154063500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x154068500 | 0xb40000 | +| 204 | blk.22.attn_q.weight | 0x154ba8500 | 0x898000 | +| 205 | blk.22.attn_v.weight | 0x155440500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x155710500 | 0x5a00000 | +| 207 | blk.22.ffn_gate.weight | 0x15b110500 | 0x44c0000 | +| 208 | blk.22.ffn_norm.weight | 0x15f5d0500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x15f5d5500 | 0x44c0000 | +| 210 | blk.23.attn_k.weight | 0x163a95500 | 0x226000 | +| 211 | blk.23.attn_norm.weight | 0x163cbb500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x163cc0500 | 0xb40000 | +| 213 | blk.23.attn_q.weight | 0x164800500 | 0x898000 | +| 214 | blk.23.attn_v.weight | 0x165098500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x165368500 | 0x5a00000 | +| 216 | blk.23.ffn_gate.weight | 0x16ad68500 | 0x44c0000 | +| 217 | blk.23.ffn_norm.weight | 0x16f228500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x16f22d500 | 0x44c0000 | +| 219 | blk.24.attn_k.weight | 0x1736ed500 | 0x226000 | +| 220 | blk.24.attn_norm.weight | 0x173913500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x173918500 | 0xb40000 | +| 222 | blk.24.attn_q.weight | 0x174458500 | 0x898000 | +| 223 | blk.24.attn_v.weight | 0x174cf0500 | 0x2d0000 | +| 224 | blk.24.ffn_down.weight | 0x174fc0500 | 0x5a00000 | +| 225 | blk.24.ffn_gate.weight | 0x17a9c0500 | 0x44c0000 
| +| 226 | blk.24.ffn_norm.weight | 0x17ee80500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x17ee85500 | 0x44c0000 | +| 228 | blk.25.attn_k.weight | 0x183345500 | 0x226000 | +| 229 | blk.25.attn_norm.weight | 0x18356b500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x183570500 | 0xb40000 | +| 231 | blk.25.attn_q.weight | 0x1840b0500 | 0x898000 | +| 232 | blk.25.attn_v.weight | 0x184948500 | 0x2d0000 | +| 233 | blk.25.ffn_down.weight | 0x184c18500 | 0x5a00000 | +| 234 | blk.25.ffn_gate.weight | 0x18a618500 | 0x44c0000 | +| 235 | blk.25.ffn_norm.weight | 0x18ead8500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x18eadd500 | 0x44c0000 | +| 237 | blk.26.attn_k.weight | 0x192f9d500 | 0x226000 | +| 238 | blk.26.attn_norm.weight | 0x1931c3500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x1931c8500 | 0xb40000 | +| 240 | blk.26.attn_q.weight | 0x193d08500 | 0x898000 | +| 241 | blk.26.attn_v.weight | 0x1945a0500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x194870500 | 0x5a00000 | +| 243 | blk.26.ffn_gate.weight | 0x19a270500 | 0x44c0000 | +| 244 | blk.26.ffn_norm.weight | 0x19e730500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x19e735500 | 0x44c0000 | +| 246 | blk.27.attn_k.weight | 0x1a2bf5500 | 0x1a4000 | +| 247 | blk.27.attn_norm.weight | 0x1a2d99500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x1a2d9e500 | 0xb40000 | +| 249 | blk.27.attn_q.weight | 0x1a38de500 | 0x690000 | +| 250 | blk.27.attn_v.weight | 0x1a3f6e500 | 0x226000 | +| 251 | blk.27.ffn_down.weight | 0x1a4194500 | 0x5a00000 | +| 252 | blk.27.ffn_gate.weight | 0x1a9b94500 | 0x44c0000 | +| 253 | blk.27.ffn_norm.weight | 0x1ae054500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x1ae059500 | 0x44c0000 | +| 255 | blk.28.attn_k.weight | 0x1b2519500 | 0x226000 | +| 256 | blk.28.attn_norm.weight | 0x1b273f500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1b2744500 | 0xb40000 | +| 258 | blk.28.attn_q.weight | 0x1b3284500 | 0x898000 | +| 259 | blk.28.attn_v.weight | 0x1b3b1c500 | 0x2d0000 | +| 260 | 
blk.28.ffn_down.weight | 0x1b3dec500 | 0x5a00000 | +| 261 | blk.28.ffn_gate.weight | 0x1b97ec500 | 0x44c0000 | +| 262 | blk.28.ffn_norm.weight | 0x1bdcac500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x1bdcb1500 | 0x44c0000 | +| 264 | blk.29.attn_k.weight | 0x1c2171500 | 0x226000 | +| 265 | blk.29.attn_norm.weight | 0x1c2397500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x1c239c500 | 0xb40000 | +| 267 | blk.29.attn_q.weight | 0x1c2edc500 | 0x898000 | +| 268 | blk.29.attn_v.weight | 0x1c3774500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x1c3a44500 | 0x5a00000 | +| 270 | blk.29.ffn_gate.weight | 0x1c9444500 | 0x44c0000 | +| 271 | blk.29.ffn_norm.weight | 0x1cd904500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x1cd909500 | 0x44c0000 | +| 273 | blk.30.attn_k.weight | 0x1d1dc9500 | 0x226000 | +| 274 | blk.30.attn_norm.weight | 0x1d1fef500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x1d1ff4500 | 0xb40000 | +| 276 | blk.30.attn_q.weight | 0x1d2b34500 | 0x898000 | +| 277 | blk.30.attn_v.weight | 0x1d33cc500 | 0x2d0000 | +| 278 | blk.30.ffn_down.weight | 0x1d369c500 | 0x5a00000 | +| 279 | blk.30.ffn_gate.weight | 0x1d909c500 | 0x44c0000 | +| 280 | blk.30.ffn_norm.weight | 0x1dd55c500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x1dd561500 | 0x44c0000 | +| 282 | blk.31.attn_k.weight | 0x1e1a21500 | 0x226000 | +| 283 | blk.31.attn_norm.weight | 0x1e1c47500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x1e1c4c500 | 0xb40000 | +| 285 | blk.31.attn_q.weight | 0x1e278c500 | 0x898000 | +| 286 | blk.31.attn_v.weight | 0x1e3024500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x1e32f4500 | 0x5a00000 | +| 288 | blk.31.ffn_gate.weight | 0x1e8cf4500 | 0x44c0000 | +| 289 | blk.31.ffn_norm.weight | 0x1ed1b4500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x1ed1b9500 | 0x44c0000 | +| 291 | blk.32.attn_k.weight | 0x1f1679500 | 0x226000 | +| 292 | blk.32.attn_norm.weight | 0x1f189f500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x1f18a4500 | 0xb40000 | +| 294 | 
blk.32.attn_q.weight | 0x1f23e4500 | 0x898000 | +| 295 | blk.32.attn_v.weight | 0x1f2c7c500 | 0x2d0000 | +| 296 | blk.32.ffn_down.weight | 0x1f2f4c500 | 0x5a00000 | +| 297 | blk.32.ffn_gate.weight | 0x1f894c500 | 0x44c0000 | +| 298 | blk.32.ffn_norm.weight | 0x1fce0c500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x1fce11500 | 0x44c0000 | +| 300 | blk.33.attn_k.weight | 0x2012d1500 | 0x226000 | +| 301 | blk.33.attn_norm.weight | 0x2014f7500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x2014fc500 | 0xb40000 | +| 303 | blk.33.attn_q.weight | 0x20203c500 | 0x898000 | +| 304 | blk.33.attn_v.weight | 0x2028d4500 | 0x2d0000 | +| 305 | blk.33.ffn_down.weight | 0x202ba4500 | 0x5a00000 | +| 306 | blk.33.ffn_gate.weight | 0x2085a4500 | 0x44c0000 | +| 307 | blk.33.ffn_norm.weight | 0x20ca64500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x20ca69500 | 0x44c0000 | +| 309 | blk.34.attn_k.weight | 0x210f29500 | 0x226000 | +| 310 | blk.34.attn_norm.weight | 0x21114f500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x211154500 | 0xb40000 | +| 312 | blk.34.attn_q.weight | 0x211c94500 | 0x898000 | +| 313 | blk.34.attn_v.weight | 0x21252c500 | 0x2d0000 | +| 314 | blk.34.ffn_down.weight | 0x2127fc500 | 0x5a00000 | +| 315 | blk.34.ffn_gate.weight | 0x2181fc500 | 0x44c0000 | +| 316 | blk.34.ffn_norm.weight | 0x21c6bc500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x21c6c1500 | 0x44c0000 | +| 318 | blk.35.attn_k.weight | 0x220b81500 | 0x226000 | +| 319 | blk.35.attn_norm.weight | 0x220da7500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x220dac500 | 0xb40000 | +| 321 | blk.35.attn_q.weight | 0x2218ec500 | 0x898000 | +| 322 | blk.35.attn_v.weight | 0x222184500 | 0x2d0000 | +| 323 | blk.35.ffn_down.weight | 0x222454500 | 0x5a00000 | +| 324 | blk.35.ffn_gate.weight | 0x227e54500 | 0x44c0000 | +| 325 | blk.35.ffn_norm.weight | 0x22c314500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x22c319500 | 0x44c0000 | +| 327 | blk.36.attn_k.weight | 0x2307d9500 | 0x226000 | +| 328 | 
blk.36.attn_norm.weight | 0x2309ff500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x230a04500 | 0xb40000 | +| 330 | blk.36.attn_q.weight | 0x231544500 | 0x898000 | +| 331 | blk.36.attn_v.weight | 0x231ddc500 | 0x2d0000 | +| 332 | blk.36.ffn_down.weight | 0x2320ac500 | 0x5a00000 | +| 333 | blk.36.ffn_gate.weight | 0x237aac500 | 0x44c0000 | +| 334 | blk.36.ffn_norm.weight | 0x23bf6c500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x23bf71500 | 0x44c0000 | +| 336 | blk.37.attn_k.weight | 0x240431500 | 0x226000 | +| 337 | blk.37.attn_norm.weight | 0x240657500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x24065c500 | 0xb40000 | +| 339 | blk.37.attn_q.weight | 0x24119c500 | 0x898000 | +| 340 | blk.37.attn_v.weight | 0x241a34500 | 0x2d0000 | +| 341 | blk.37.ffn_down.weight | 0x241d04500 | 0x5a00000 | +| 342 | blk.37.ffn_gate.weight | 0x247704500 | 0x44c0000 | +| 343 | blk.37.ffn_norm.weight | 0x24bbc4500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x24bbc9500 | 0x44c0000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 4 | 
blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward 
Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 43 | blk.4.attn_v.weight | 
Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.5: (~556M) 
555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | 
( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 
5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 
x 1 | Q2_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 
| Q4_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query 
(W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | 
Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 157 | 
blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 171 
| blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 220 | 
blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 234 
| blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 283 | 
blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 297 
| blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_S.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_S.md new file mode 100644 index 0000000..a2dffd8 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q3_K_S.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + 
+There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 11 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q3\_K\_S.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q3_k_sgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x11300000 | +| 1 | output_norm.weight | 0x11a84500 | 0x5000 | +| 2 | token_embd.weight | 0x11a89500 | 0xd200000 | +| 3 | blk.0.attn_k.weight | 0x1ec89500 | 0x1a4000 | +| 4 | blk.0.attn_norm.weight | 0x1ee2d500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x1ee32500 | 0x898000 | +| 6 | blk.0.attn_q.weight | 0x1f6ca500 | 0x690000 | +| 7 | blk.0.attn_v.weight | 0x1fd5a500 | 0x226000 | +| 8 | blk.0.ffn_down.weight | 0x1ff80500 | 0x44c0000 | +| 9 | blk.0.ffn_gate.weight | 0x24440500 | 0x3480000 | +| 10 | blk.0.ffn_norm.weight | 0x278c0500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x278c5500 | 0x3480000 | +| 12 | blk.1.attn_k.weight | 0x2ad45500 | 0x1a4000 | +| 13 | blk.1.attn_norm.weight | 0x2aee9500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x2aeee500 | 0x898000 | +| 15 | blk.1.attn_q.weight | 0x2b786500 | 
0x690000 | +| 16 | blk.1.attn_v.weight | 0x2be16500 | 0x226000 | +| 17 | blk.1.ffn_down.weight | 0x2c03c500 | 0x44c0000 | +| 18 | blk.1.ffn_gate.weight | 0x304fc500 | 0x3480000 | +| 19 | blk.1.ffn_norm.weight | 0x3397c500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x33981500 | 0x3480000 | +| 21 | blk.2.attn_k.weight | 0x36e01500 | 0x1a4000 | +| 22 | blk.2.attn_norm.weight | 0x36fa5500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x36faa500 | 0x898000 | +| 24 | blk.2.attn_q.weight | 0x37842500 | 0x690000 | +| 25 | blk.2.attn_v.weight | 0x37ed2500 | 0x226000 | +| 26 | blk.2.ffn_down.weight | 0x380f8500 | 0x44c0000 | +| 27 | blk.2.ffn_gate.weight | 0x3c5b8500 | 0x3480000 | +| 28 | blk.2.ffn_norm.weight | 0x3fa38500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x3fa3d500 | 0x3480000 | +| 30 | blk.3.attn_k.weight | 0x42ebd500 | 0x1a4000 | +| 31 | blk.3.attn_norm.weight | 0x43061500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x43066500 | 0x898000 | +| 33 | blk.3.attn_q.weight | 0x438fe500 | 0x690000 | +| 34 | blk.3.attn_v.weight | 0x43f8e500 | 0x226000 | +| 35 | blk.3.ffn_down.weight | 0x441b4500 | 0x44c0000 | +| 36 | blk.3.ffn_gate.weight | 0x48674500 | 0x3480000 | +| 37 | blk.3.ffn_norm.weight | 0x4baf4500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x4baf9500 | 0x3480000 | +| 39 | blk.4.attn_k.weight | 0x4ef79500 | 0x1a4000 | +| 40 | blk.4.attn_norm.weight | 0x4f11d500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x4f122500 | 0x898000 | +| 42 | blk.4.attn_q.weight | 0x4f9ba500 | 0x690000 | +| 43 | blk.4.attn_v.weight | 0x5004a500 | 0x226000 | +| 44 | blk.4.ffn_down.weight | 0x50270500 | 0x44c0000 | +| 45 | blk.4.ffn_gate.weight | 0x54730500 | 0x3480000 | +| 46 | blk.4.ffn_norm.weight | 0x57bb0500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x57bb5500 | 0x3480000 | +| 48 | blk.5.attn_k.weight | 0x5b035500 | 0x1a4000 | +| 49 | blk.5.attn_norm.weight | 0x5b1d9500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x5b1de500 | 0x898000 | +| 51 | blk.5.attn_q.weight | 0x5ba76500 | 
0x690000 | +| 52 | blk.5.attn_v.weight | 0x5c106500 | 0x226000 | +| 53 | blk.5.ffn_down.weight | 0x5c32c500 | 0x44c0000 | +| 54 | blk.5.ffn_gate.weight | 0x607ec500 | 0x3480000 | +| 55 | blk.5.ffn_norm.weight | 0x63c6c500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x63c71500 | 0x3480000 | +| 57 | blk.6.attn_k.weight | 0x670f1500 | 0x1a4000 | +| 58 | blk.6.attn_norm.weight | 0x67295500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x6729a500 | 0x898000 | +| 60 | blk.6.attn_q.weight | 0x67b32500 | 0x690000 | +| 61 | blk.6.attn_v.weight | 0x681c2500 | 0x226000 | +| 62 | blk.6.ffn_down.weight | 0x683e8500 | 0x44c0000 | +| 63 | blk.6.ffn_gate.weight | 0x6c8a8500 | 0x3480000 | +| 64 | blk.6.ffn_norm.weight | 0x6fd28500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x6fd2d500 | 0x3480000 | +| 66 | blk.7.attn_k.weight | 0x731ad500 | 0x1a4000 | +| 67 | blk.7.attn_norm.weight | 0x73351500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x73356500 | 0x898000 | +| 69 | blk.7.attn_q.weight | 0x73bee500 | 0x690000 | +| 70 | blk.7.attn_v.weight | 0x7427e500 | 0x226000 | +| 71 | blk.7.ffn_down.weight | 0x744a4500 | 0x44c0000 | +| 72 | blk.7.ffn_gate.weight | 0x78964500 | 0x3480000 | +| 73 | blk.7.ffn_norm.weight | 0x7bde4500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x7bde9500 | 0x3480000 | +| 75 | blk.8.attn_k.weight | 0x7f269500 | 0x1a4000 | +| 76 | blk.8.attn_norm.weight | 0x7f40d500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x7f412500 | 0x898000 | +| 78 | blk.8.attn_q.weight | 0x7fcaa500 | 0x690000 | +| 79 | blk.8.attn_v.weight | 0x8033a500 | 0x226000 | +| 80 | blk.8.ffn_down.weight | 0x80560500 | 0x44c0000 | +| 81 | blk.8.ffn_gate.weight | 0x84a20500 | 0x3480000 | +| 82 | blk.8.ffn_norm.weight | 0x87ea0500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x87ea5500 | 0x3480000 | +| 84 | blk.9.attn_k.weight | 0x8b325500 | 0x1a4000 | +| 85 | blk.9.attn_norm.weight | 0x8b4c9500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0x8b4ce500 | 0x898000 | +| 87 | blk.9.attn_q.weight | 0x8bd66500 | 
0x690000 | +| 88 | blk.9.attn_v.weight | 0x8c3f6500 | 0x226000 | +| 89 | blk.9.ffn_down.weight | 0x8c61c500 | 0x44c0000 | +| 90 | blk.9.ffn_gate.weight | 0x90adc500 | 0x3480000 | +| 91 | blk.9.ffn_norm.weight | 0x93f5c500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0x93f61500 | 0x3480000 | +| 93 | blk.10.attn_k.weight | 0x973e1500 | 0x1a4000 | +| 94 | blk.10.attn_norm.weight | 0x97585500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0x9758a500 | 0x898000 | +| 96 | blk.10.attn_q.weight | 0x97e22500 | 0x690000 | +| 97 | blk.10.attn_v.weight | 0x984b2500 | 0x226000 | +| 98 | blk.10.ffn_down.weight | 0x986d8500 | 0x44c0000 | +| 99 | blk.10.ffn_gate.weight | 0x9cb98500 | 0x3480000 | +| 100 | blk.10.ffn_norm.weight | 0xa0018500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xa001d500 | 0x3480000 | +| 102 | blk.11.attn_k.weight | 0xa349d500 | 0x1a4000 | +| 103 | blk.11.attn_norm.weight | 0xa3641500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xa3646500 | 0x898000 | +| 105 | blk.11.attn_q.weight | 0xa3ede500 | 0x690000 | +| 106 | blk.11.attn_v.weight | 0xa456e500 | 0x226000 | +| 107 | blk.11.ffn_down.weight | 0xa4794500 | 0x44c0000 | +| 108 | blk.11.ffn_gate.weight | 0xa8c54500 | 0x3480000 | +| 109 | blk.11.ffn_norm.weight | 0xac0d4500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xac0d9500 | 0x3480000 | +| 111 | blk.12.attn_k.weight | 0xaf559500 | 0x1a4000 | +| 112 | blk.12.attn_norm.weight | 0xaf6fd500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xaf702500 | 0x898000 | +| 114 | blk.12.attn_q.weight | 0xaff9a500 | 0x690000 | +| 115 | blk.12.attn_v.weight | 0xb062a500 | 0x226000 | +| 116 | blk.12.ffn_down.weight | 0xb0850500 | 0x44c0000 | +| 117 | blk.12.ffn_gate.weight | 0xb4d10500 | 0x3480000 | +| 118 | blk.12.ffn_norm.weight | 0xb8190500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xb8195500 | 0x3480000 | +| 120 | blk.13.attn_k.weight | 0xbb615500 | 0x1a4000 | +| 121 | blk.13.attn_norm.weight | 0xbb7b9500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xbb7be500 | 
0x898000 | +| 123 | blk.13.attn_q.weight | 0xbc056500 | 0x690000 | +| 124 | blk.13.attn_v.weight | 0xbc6e6500 | 0x226000 | +| 125 | blk.13.ffn_down.weight | 0xbc90c500 | 0x44c0000 | +| 126 | blk.13.ffn_gate.weight | 0xc0dcc500 | 0x3480000 | +| 127 | blk.13.ffn_norm.weight | 0xc424c500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0xc4251500 | 0x3480000 | +| 129 | blk.14.attn_k.weight | 0xc76d1500 | 0x1a4000 | +| 130 | blk.14.attn_norm.weight | 0xc7875500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0xc787a500 | 0x898000 | +| 132 | blk.14.attn_q.weight | 0xc8112500 | 0x690000 | +| 133 | blk.14.attn_v.weight | 0xc87a2500 | 0x226000 | +| 134 | blk.14.ffn_down.weight | 0xc89c8500 | 0x44c0000 | +| 135 | blk.14.ffn_gate.weight | 0xcce88500 | 0x3480000 | +| 136 | blk.14.ffn_norm.weight | 0xd0308500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0xd030d500 | 0x3480000 | +| 138 | blk.15.attn_k.weight | 0xd378d500 | 0x1a4000 | +| 139 | blk.15.attn_norm.weight | 0xd3931500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0xd3936500 | 0x898000 | +| 141 | blk.15.attn_q.weight | 0xd41ce500 | 0x690000 | +| 142 | blk.15.attn_v.weight | 0xd485e500 | 0x226000 | +| 143 | blk.15.ffn_down.weight | 0xd4a84500 | 0x44c0000 | +| 144 | blk.15.ffn_gate.weight | 0xd8f44500 | 0x3480000 | +| 145 | blk.15.ffn_norm.weight | 0xdc3c4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0xdc3c9500 | 0x3480000 | +| 147 | blk.16.attn_k.weight | 0xdf849500 | 0x1a4000 | +| 148 | blk.16.attn_norm.weight | 0xdf9ed500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0xdf9f2500 | 0x898000 | +| 150 | blk.16.attn_q.weight | 0xe028a500 | 0x690000 | +| 151 | blk.16.attn_v.weight | 0xe091a500 | 0x226000 | +| 152 | blk.16.ffn_down.weight | 0xe0b40500 | 0x44c0000 | +| 153 | blk.16.ffn_gate.weight | 0xe5000500 | 0x3480000 | +| 154 | blk.16.ffn_norm.weight | 0xe8480500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0xe8485500 | 0x3480000 | +| 156 | blk.17.attn_k.weight | 0xeb905500 | 0x226000 | +| 157 | blk.17.attn_norm.weight | 
0xebb2b500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0xebb30500 | 0x898000 | +| 159 | blk.17.attn_q.weight | 0xec3c8500 | 0x898000 | +| 160 | blk.17.attn_v.weight | 0xecc60500 | 0x226000 | +| 161 | blk.17.ffn_down.weight | 0xece86500 | 0x44c0000 | +| 162 | blk.17.ffn_gate.weight | 0xf1346500 | 0x3480000 | +| 163 | blk.17.ffn_norm.weight | 0xf47c6500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0xf47cb500 | 0x3480000 | +| 165 | blk.18.attn_k.weight | 0xf7c4b500 | 0x226000 | +| 166 | blk.18.attn_norm.weight | 0xf7e71500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0xf7e76500 | 0x898000 | +| 168 | blk.18.attn_q.weight | 0xf870e500 | 0x898000 | +| 169 | blk.18.attn_v.weight | 0xf8fa6500 | 0x226000 | +| 170 | blk.18.ffn_down.weight | 0xf91cc500 | 0x44c0000 | +| 171 | blk.18.ffn_gate.weight | 0xfd68c500 | 0x3480000 | +| 172 | blk.18.ffn_norm.weight | 0x100b0c500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x100b11500 | 0x3480000 | +| 174 | blk.19.attn_k.weight | 0x103f91500 | 0x1a4000 | +| 175 | blk.19.attn_norm.weight | 0x104135500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x10413a500 | 0x898000 | +| 177 | blk.19.attn_q.weight | 0x1049d2500 | 0x690000 | +| 178 | blk.19.attn_v.weight | 0x105062500 | 0x226000 | +| 179 | blk.19.ffn_down.weight | 0x105288500 | 0x44c0000 | +| 180 | blk.19.ffn_gate.weight | 0x109748500 | 0x3480000 | +| 181 | blk.19.ffn_norm.weight | 0x10cbc8500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x10cbcd500 | 0x3480000 | +| 183 | blk.20.attn_k.weight | 0x11004d500 | 0x226000 | +| 184 | blk.20.attn_norm.weight | 0x110273500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x110278500 | 0x898000 | +| 186 | blk.20.attn_q.weight | 0x110b10500 | 0x898000 | +| 187 | blk.20.attn_v.weight | 0x1113a8500 | 0x226000 | +| 188 | blk.20.ffn_down.weight | 0x1115ce500 | 0x44c0000 | +| 189 | blk.20.ffn_gate.weight | 0x115a8e500 | 0x44c0000 | +| 190 | blk.20.ffn_norm.weight | 0x119f4e500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x119f53500 | 0x44c0000 | +| 
192 | blk.21.attn_k.weight | 0x11e413500 | 0x1a4000 | +| 193 | blk.21.attn_norm.weight | 0x11e5b7500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x11e5bc500 | 0x898000 | +| 195 | blk.21.attn_q.weight | 0x11ee54500 | 0x690000 | +| 196 | blk.21.attn_v.weight | 0x11f4e4500 | 0x226000 | +| 197 | blk.21.ffn_down.weight | 0x11f70a500 | 0x44c0000 | +| 198 | blk.21.ffn_gate.weight | 0x123bca500 | 0x44c0000 | +| 199 | blk.21.ffn_norm.weight | 0x12808a500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x12808f500 | 0x44c0000 | +| 201 | blk.22.attn_k.weight | 0x12c54f500 | 0x226000 | +| 202 | blk.22.attn_norm.weight | 0x12c775500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x12c77a500 | 0x898000 | +| 204 | blk.22.attn_q.weight | 0x12d012500 | 0x898000 | +| 205 | blk.22.attn_v.weight | 0x12d8aa500 | 0x226000 | +| 206 | blk.22.ffn_down.weight | 0x12dad0500 | 0x44c0000 | +| 207 | blk.22.ffn_gate.weight | 0x131f90500 | 0x44c0000 | +| 208 | blk.22.ffn_norm.weight | 0x136450500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x136455500 | 0x44c0000 | +| 210 | blk.23.attn_k.weight | 0x13a915500 | 0x226000 | +| 211 | blk.23.attn_norm.weight | 0x13ab3b500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x13ab40500 | 0x898000 | +| 213 | blk.23.attn_q.weight | 0x13b3d8500 | 0x898000 | +| 214 | blk.23.attn_v.weight | 0x13bc70500 | 0x226000 | +| 215 | blk.23.ffn_down.weight | 0x13be96500 | 0x44c0000 | +| 216 | blk.23.ffn_gate.weight | 0x140356500 | 0x44c0000 | +| 217 | blk.23.ffn_norm.weight | 0x144816500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x14481b500 | 0x44c0000 | +| 219 | blk.24.attn_k.weight | 0x148cdb500 | 0x226000 | +| 220 | blk.24.attn_norm.weight | 0x148f01500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x148f06500 | 0x898000 | +| 222 | blk.24.attn_q.weight | 0x14979e500 | 0x898000 | +| 223 | blk.24.attn_v.weight | 0x14a036500 | 0x226000 | +| 224 | blk.24.ffn_down.weight | 0x14a25c500 | 0x44c0000 | +| 225 | blk.24.ffn_gate.weight | 0x14e71c500 | 0x44c0000 | +| 226 | 
blk.24.ffn_norm.weight | 0x152bdc500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x152be1500 | 0x44c0000 | +| 228 | blk.25.attn_k.weight | 0x1570a1500 | 0x226000 | +| 229 | blk.25.attn_norm.weight | 0x1572c7500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x1572cc500 | 0x898000 | +| 231 | blk.25.attn_q.weight | 0x157b64500 | 0x898000 | +| 232 | blk.25.attn_v.weight | 0x1583fc500 | 0x226000 | +| 233 | blk.25.ffn_down.weight | 0x158622500 | 0x44c0000 | +| 234 | blk.25.ffn_gate.weight | 0x15cae2500 | 0x44c0000 | +| 235 | blk.25.ffn_norm.weight | 0x160fa2500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x160fa7500 | 0x44c0000 | +| 237 | blk.26.attn_k.weight | 0x165467500 | 0x226000 | +| 238 | blk.26.attn_norm.weight | 0x16568d500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x165692500 | 0x898000 | +| 240 | blk.26.attn_q.weight | 0x165f2a500 | 0x898000 | +| 241 | blk.26.attn_v.weight | 0x1667c2500 | 0x226000 | +| 242 | blk.26.ffn_down.weight | 0x1669e8500 | 0x44c0000 | +| 243 | blk.26.ffn_gate.weight | 0x16aea8500 | 0x44c0000 | +| 244 | blk.26.ffn_norm.weight | 0x16f368500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x16f36d500 | 0x44c0000 | +| 246 | blk.27.attn_k.weight | 0x17382d500 | 0x1a4000 | +| 247 | blk.27.attn_norm.weight | 0x1739d1500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x1739d6500 | 0x898000 | +| 249 | blk.27.attn_q.weight | 0x17426e500 | 0x690000 | +| 250 | blk.27.attn_v.weight | 0x1748fe500 | 0x226000 | +| 251 | blk.27.ffn_down.weight | 0x174b24500 | 0x44c0000 | +| 252 | blk.27.ffn_gate.weight | 0x178fe4500 | 0x44c0000 | +| 253 | blk.27.ffn_norm.weight | 0x17d4a4500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x17d4a9500 | 0x44c0000 | +| 255 | blk.28.attn_k.weight | 0x181969500 | 0x226000 | +| 256 | blk.28.attn_norm.weight | 0x181b8f500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x181b94500 | 0x898000 | +| 258 | blk.28.attn_q.weight | 0x18242c500 | 0x898000 | +| 259 | blk.28.attn_v.weight | 0x182cc4500 | 0x226000 | +| 260 | 
blk.28.ffn_down.weight | 0x182eea500 | 0x44c0000 | +| 261 | blk.28.ffn_gate.weight | 0x1873aa500 | 0x44c0000 | +| 262 | blk.28.ffn_norm.weight | 0x18b86a500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x18b86f500 | 0x44c0000 | +| 264 | blk.29.attn_k.weight | 0x18fd2f500 | 0x226000 | +| 265 | blk.29.attn_norm.weight | 0x18ff55500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x18ff5a500 | 0x898000 | +| 267 | blk.29.attn_q.weight | 0x1907f2500 | 0x898000 | +| 268 | blk.29.attn_v.weight | 0x19108a500 | 0x226000 | +| 269 | blk.29.ffn_down.weight | 0x1912b0500 | 0x44c0000 | +| 270 | blk.29.ffn_gate.weight | 0x195770500 | 0x44c0000 | +| 271 | blk.29.ffn_norm.weight | 0x199c30500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x199c35500 | 0x44c0000 | +| 273 | blk.30.attn_k.weight | 0x19e0f5500 | 0x226000 | +| 274 | blk.30.attn_norm.weight | 0x19e31b500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x19e320500 | 0x898000 | +| 276 | blk.30.attn_q.weight | 0x19ebb8500 | 0x898000 | +| 277 | blk.30.attn_v.weight | 0x19f450500 | 0x226000 | +| 278 | blk.30.ffn_down.weight | 0x19f676500 | 0x44c0000 | +| 279 | blk.30.ffn_gate.weight | 0x1a3b36500 | 0x44c0000 | +| 280 | blk.30.ffn_norm.weight | 0x1a7ff6500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x1a7ffb500 | 0x44c0000 | +| 282 | blk.31.attn_k.weight | 0x1ac4bb500 | 0x226000 | +| 283 | blk.31.attn_norm.weight | 0x1ac6e1500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x1ac6e6500 | 0x898000 | +| 285 | blk.31.attn_q.weight | 0x1acf7e500 | 0x898000 | +| 286 | blk.31.attn_v.weight | 0x1ad816500 | 0x226000 | +| 287 | blk.31.ffn_down.weight | 0x1ada3c500 | 0x44c0000 | +| 288 | blk.31.ffn_gate.weight | 0x1b1efc500 | 0x44c0000 | +| 289 | blk.31.ffn_norm.weight | 0x1b63bc500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x1b63c1500 | 0x44c0000 | +| 291 | blk.32.attn_k.weight | 0x1ba881500 | 0x226000 | +| 292 | blk.32.attn_norm.weight | 0x1baaa7500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x1baaac500 | 0x898000 | +| 294 | 
blk.32.attn_q.weight | 0x1bb344500 | 0x898000 | +| 295 | blk.32.attn_v.weight | 0x1bbbdc500 | 0x226000 | +| 296 | blk.32.ffn_down.weight | 0x1bbe02500 | 0x44c0000 | +| 297 | blk.32.ffn_gate.weight | 0x1c02c2500 | 0x44c0000 | +| 298 | blk.32.ffn_norm.weight | 0x1c4782500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x1c4787500 | 0x44c0000 | +| 300 | blk.33.attn_k.weight | 0x1c8c47500 | 0x226000 | +| 301 | blk.33.attn_norm.weight | 0x1c8e6d500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x1c8e72500 | 0x898000 | +| 303 | blk.33.attn_q.weight | 0x1c970a500 | 0x898000 | +| 304 | blk.33.attn_v.weight | 0x1c9fa2500 | 0x226000 | +| 305 | blk.33.ffn_down.weight | 0x1ca1c8500 | 0x44c0000 | +| 306 | blk.33.ffn_gate.weight | 0x1ce688500 | 0x44c0000 | +| 307 | blk.33.ffn_norm.weight | 0x1d2b48500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x1d2b4d500 | 0x44c0000 | +| 309 | blk.34.attn_k.weight | 0x1d700d500 | 0x226000 | +| 310 | blk.34.attn_norm.weight | 0x1d7233500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x1d7238500 | 0x898000 | +| 312 | blk.34.attn_q.weight | 0x1d7ad0500 | 0x898000 | +| 313 | blk.34.attn_v.weight | 0x1d8368500 | 0x226000 | +| 314 | blk.34.ffn_down.weight | 0x1d858e500 | 0x44c0000 | +| 315 | blk.34.ffn_gate.weight | 0x1dca4e500 | 0x44c0000 | +| 316 | blk.34.ffn_norm.weight | 0x1e0f0e500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x1e0f13500 | 0x44c0000 | +| 318 | blk.35.attn_k.weight | 0x1e53d3500 | 0x226000 | +| 319 | blk.35.attn_norm.weight | 0x1e55f9500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x1e55fe500 | 0x898000 | +| 321 | blk.35.attn_q.weight | 0x1e5e96500 | 0x898000 | +| 322 | blk.35.attn_v.weight | 0x1e672e500 | 0x226000 | +| 323 | blk.35.ffn_down.weight | 0x1e6954500 | 0x44c0000 | +| 324 | blk.35.ffn_gate.weight | 0x1eae14500 | 0x44c0000 | +| 325 | blk.35.ffn_norm.weight | 0x1ef2d4500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x1ef2d9500 | 0x44c0000 | +| 327 | blk.36.attn_k.weight | 0x1f3799500 | 0x226000 | +| 328 | 
blk.36.attn_norm.weight | 0x1f39bf500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x1f39c4500 | 0x898000 | +| 330 | blk.36.attn_q.weight | 0x1f425c500 | 0x898000 | +| 331 | blk.36.attn_v.weight | 0x1f4af4500 | 0x226000 | +| 332 | blk.36.ffn_down.weight | 0x1f4d1a500 | 0x44c0000 | +| 333 | blk.36.ffn_gate.weight | 0x1f91da500 | 0x44c0000 | +| 334 | blk.36.ffn_norm.weight | 0x1fd69a500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x1fd69f500 | 0x44c0000 | +| 336 | blk.37.attn_k.weight | 0x201b5f500 | 0x226000 | +| 337 | blk.37.attn_norm.weight | 0x201d85500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x201d8a500 | 0x898000 | +| 339 | blk.37.attn_q.weight | 0x202622500 | 0x898000 | +| 340 | blk.37.attn_v.weight | 0x202eba500 | 0x226000 | +| 341 | blk.37.ffn_down.weight | 0x2030e0500 | 0x44c0000 | +| 342 | blk.37.ffn_gate.weight | 0x2075a0500 | 0x44c0000 | +| 343 | blk.37.ffn_norm.weight | 0x20ba60500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x20ba65500 | 0x44c0000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q2_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 4 | 
blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward 
Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 43 | blk.4.attn_v.weight | 
Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.5: (~556M) 
555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | 
( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 
5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 
x 1 | Q2_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 
| Q3_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query 
(W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | 
Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 157 | 
blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 171 
| blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q2_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 220 | 
blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 234 
| blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q2_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q2_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 283 | 
blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 297 
| blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q3_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q3_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_M.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_M.md new file mode 100644 index 0000000..ecd2a65 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_M.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + 
+There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 15 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4\_K\_M.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q4_k_mgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x16800000 | +| 1 | output_norm.weight | 0x16f84500 | 0x5000 | +| 2 | token_embd.weight | 0x16f89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x28289500 | 0x226000 | +| 4 | blk.0.attn_norm.weight | 0x284af500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x284b4500 | 0xb40000 | +| 6 | blk.0.attn_q.weight | 0x28ff4500 | 0x898000 | +| 7 | blk.0.attn_v.weight | 0x2988c500 | 0x2d0000 | +| 8 | blk.0.ffn_down.weight | 0x29b5c500 | 0x8340000 | +| 9 | blk.0.ffn_gate.weight | 0x31e9c500 | 0x44c0000 | +| 10 | blk.0.ffn_norm.weight | 0x3635c500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x36361500 | 0x44c0000 | +| 12 | blk.1.attn_k.weight | 0x3a821500 | 0x226000 | +| 13 | blk.1.attn_norm.weight | 0x3aa47500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x3aa4c500 | 0xb40000 | +| 15 | blk.1.attn_q.weight | 0x3b58c500 | 
0x898000 | +| 16 | blk.1.attn_v.weight | 0x3be24500 | 0x2d0000 | +| 17 | blk.1.ffn_down.weight | 0x3c0f4500 | 0x8340000 | +| 18 | blk.1.ffn_gate.weight | 0x44434500 | 0x44c0000 | +| 19 | blk.1.ffn_norm.weight | 0x488f4500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x488f9500 | 0x44c0000 | +| 21 | blk.2.attn_k.weight | 0x4cdb9500 | 0x226000 | +| 22 | blk.2.attn_norm.weight | 0x4cfdf500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x4cfe4500 | 0xb40000 | +| 24 | blk.2.attn_q.weight | 0x4db24500 | 0x898000 | +| 25 | blk.2.attn_v.weight | 0x4e3bc500 | 0x2d0000 | +| 26 | blk.2.ffn_down.weight | 0x4e68c500 | 0x8340000 | +| 27 | blk.2.ffn_gate.weight | 0x569cc500 | 0x44c0000 | +| 28 | blk.2.ffn_norm.weight | 0x5ae8c500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x5ae91500 | 0x44c0000 | +| 30 | blk.3.attn_k.weight | 0x5f351500 | 0x226000 | +| 31 | blk.3.attn_norm.weight | 0x5f577500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x5f57c500 | 0xb40000 | +| 33 | blk.3.attn_q.weight | 0x600bc500 | 0x898000 | +| 34 | blk.3.attn_v.weight | 0x60954500 | 0x2d0000 | +| 35 | blk.3.ffn_down.weight | 0x60c24500 | 0x8340000 | +| 36 | blk.3.ffn_gate.weight | 0x68f64500 | 0x44c0000 | +| 37 | blk.3.ffn_norm.weight | 0x6d424500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x6d429500 | 0x44c0000 | +| 39 | blk.4.attn_k.weight | 0x718e9500 | 0x226000 | +| 40 | blk.4.attn_norm.weight | 0x71b0f500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x71b14500 | 0xb40000 | +| 42 | blk.4.attn_q.weight | 0x72654500 | 0x898000 | +| 43 | blk.4.attn_v.weight | 0x72eec500 | 0x370000 | +| 44 | blk.4.ffn_down.weight | 0x7325c500 | 0x8340000 | +| 45 | blk.4.ffn_gate.weight | 0x7b59c500 | 0x44c0000 | +| 46 | blk.4.ffn_norm.weight | 0x7fa5c500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x7fa61500 | 0x44c0000 | +| 48 | blk.5.attn_k.weight | 0x83f21500 | 0x226000 | +| 49 | blk.5.attn_norm.weight | 0x84147500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x8414c500 | 0xb40000 | +| 51 | blk.5.attn_q.weight | 0x84c8c500 | 
0x898000 | +| 52 | blk.5.attn_v.weight | 0x85524500 | 0x370000 | +| 53 | blk.5.ffn_down.weight | 0x85894500 | 0x5a00000 | +| 54 | blk.5.ffn_gate.weight | 0x8b294500 | 0x44c0000 | +| 55 | blk.5.ffn_norm.weight | 0x8f754500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x8f759500 | 0x44c0000 | +| 57 | blk.6.attn_k.weight | 0x93c19500 | 0x226000 | +| 58 | blk.6.attn_norm.weight | 0x93e3f500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x93e44500 | 0xb40000 | +| 60 | blk.6.attn_q.weight | 0x94984500 | 0x898000 | +| 61 | blk.6.attn_v.weight | 0x9521c500 | 0x2d0000 | +| 62 | blk.6.ffn_down.weight | 0x954ec500 | 0x5a00000 | +| 63 | blk.6.ffn_gate.weight | 0x9aeec500 | 0x44c0000 | +| 64 | blk.6.ffn_norm.weight | 0x9f3ac500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x9f3b1500 | 0x44c0000 | +| 66 | blk.7.attn_k.weight | 0xa3871500 | 0x226000 | +| 67 | blk.7.attn_norm.weight | 0xa3a97500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0xa3a9c500 | 0xb40000 | +| 69 | blk.7.attn_q.weight | 0xa45dc500 | 0x898000 | +| 70 | blk.7.attn_v.weight | 0xa4e74500 | 0x370000 | +| 71 | blk.7.ffn_down.weight | 0xa51e4500 | 0x8340000 | +| 72 | blk.7.ffn_gate.weight | 0xad524500 | 0x44c0000 | +| 73 | blk.7.ffn_norm.weight | 0xb19e4500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xb19e9500 | 0x44c0000 | +| 75 | blk.8.attn_k.weight | 0xb5ea9500 | 0x226000 | +| 76 | blk.8.attn_norm.weight | 0xb60cf500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xb60d4500 | 0xb40000 | +| 78 | blk.8.attn_q.weight | 0xb6c14500 | 0x898000 | +| 79 | blk.8.attn_v.weight | 0xb74ac500 | 0x370000 | +| 80 | blk.8.ffn_down.weight | 0xb781c500 | 0x5a00000 | +| 81 | blk.8.ffn_gate.weight | 0xbd21c500 | 0x44c0000 | +| 82 | blk.8.ffn_norm.weight | 0xc16dc500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xc16e1500 | 0x44c0000 | +| 84 | blk.9.attn_k.weight | 0xc5ba1500 | 0x226000 | +| 85 | blk.9.attn_norm.weight | 0xc5dc7500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xc5dcc500 | 0xb40000 | +| 87 | blk.9.attn_q.weight | 0xc690c500 | 
0x898000 | +| 88 | blk.9.attn_v.weight | 0xc71a4500 | 0x2d0000 | +| 89 | blk.9.ffn_down.weight | 0xc7474500 | 0x5a00000 | +| 90 | blk.9.ffn_gate.weight | 0xcce74500 | 0x44c0000 | +| 91 | blk.9.ffn_norm.weight | 0xd1334500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xd1339500 | 0x44c0000 | +| 93 | blk.10.attn_k.weight | 0xd57f9500 | 0x226000 | +| 94 | blk.10.attn_norm.weight | 0xd5a1f500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xd5a24500 | 0xb40000 | +| 96 | blk.10.attn_q.weight | 0xd6564500 | 0x898000 | +| 97 | blk.10.attn_v.weight | 0xd6dfc500 | 0x370000 | +| 98 | blk.10.ffn_down.weight | 0xd716c500 | 0x8340000 | +| 99 | blk.10.ffn_gate.weight | 0xdf4ac500 | 0x44c0000 | +| 100 | blk.10.ffn_norm.weight | 0xe396c500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xe3971500 | 0x44c0000 | +| 102 | blk.11.attn_k.weight | 0xe7e31500 | 0x226000 | +| 103 | blk.11.attn_norm.weight | 0xe8057500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xe805c500 | 0xb40000 | +| 105 | blk.11.attn_q.weight | 0xe8b9c500 | 0x898000 | +| 106 | blk.11.attn_v.weight | 0xe9434500 | 0x370000 | +| 107 | blk.11.ffn_down.weight | 0xe97a4500 | 0x5a00000 | +| 108 | blk.11.ffn_gate.weight | 0xef1a4500 | 0x44c0000 | +| 109 | blk.11.ffn_norm.weight | 0xf3664500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xf3669500 | 0x44c0000 | +| 111 | blk.12.attn_k.weight | 0xf7b29500 | 0x226000 | +| 112 | blk.12.attn_norm.weight | 0xf7d4f500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xf7d54500 | 0xb40000 | +| 114 | blk.12.attn_q.weight | 0xf8894500 | 0x898000 | +| 115 | blk.12.attn_v.weight | 0xf912c500 | 0x2d0000 | +| 116 | blk.12.ffn_down.weight | 0xf93fc500 | 0x5a00000 | +| 117 | blk.12.ffn_gate.weight | 0xfedfc500 | 0x44c0000 | +| 118 | blk.12.ffn_norm.weight | 0x1032bc500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0x1032c1500 | 0x44c0000 | +| 120 | blk.13.attn_k.weight | 0x107781500 | 0x226000 | +| 121 | blk.13.attn_norm.weight | 0x1079a7500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0x1079ac500 | 
0xb40000 | +| 123 | blk.13.attn_q.weight | 0x1084ec500 | 0x898000 | +| 124 | blk.13.attn_v.weight | 0x108d84500 | 0x370000 | +| 125 | blk.13.ffn_down.weight | 0x1090f4500 | 0x8340000 | +| 126 | blk.13.ffn_gate.weight | 0x111434500 | 0x44c0000 | +| 127 | blk.13.ffn_norm.weight | 0x1158f4500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x1158f9500 | 0x44c0000 | +| 129 | blk.14.attn_k.weight | 0x119db9500 | 0x226000 | +| 130 | blk.14.attn_norm.weight | 0x119fdf500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x119fe4500 | 0xb40000 | +| 132 | blk.14.attn_q.weight | 0x11ab24500 | 0x898000 | +| 133 | blk.14.attn_v.weight | 0x11b3bc500 | 0x370000 | +| 134 | blk.14.ffn_down.weight | 0x11b72c500 | 0x5a00000 | +| 135 | blk.14.ffn_gate.weight | 0x12112c500 | 0x44c0000 | +| 136 | blk.14.ffn_norm.weight | 0x1255ec500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x1255f1500 | 0x44c0000 | +| 138 | blk.15.attn_k.weight | 0x129ab1500 | 0x226000 | +| 139 | blk.15.attn_norm.weight | 0x129cd7500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x129cdc500 | 0xb40000 | +| 141 | blk.15.attn_q.weight | 0x12a81c500 | 0x898000 | +| 142 | blk.15.attn_v.weight | 0x12b0b4500 | 0x2d0000 | +| 143 | blk.15.ffn_down.weight | 0x12b384500 | 0x5a00000 | +| 144 | blk.15.ffn_gate.weight | 0x130d84500 | 0x44c0000 | +| 145 | blk.15.ffn_norm.weight | 0x135244500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x135249500 | 0x44c0000 | +| 147 | blk.16.attn_k.weight | 0x139709500 | 0x226000 | +| 148 | blk.16.attn_norm.weight | 0x13992f500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x139934500 | 0xb40000 | +| 150 | blk.16.attn_q.weight | 0x13a474500 | 0x898000 | +| 151 | blk.16.attn_v.weight | 0x13ad0c500 | 0x370000 | +| 152 | blk.16.ffn_down.weight | 0x13b07c500 | 0x8340000 | +| 153 | blk.16.ffn_gate.weight | 0x1433bc500 | 0x44c0000 | +| 154 | blk.16.ffn_norm.weight | 0x14787c500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x147881500 | 0x44c0000 | +| 156 | blk.17.attn_k.weight | 0x14bd41500 | 0x2d0000 | +| 
157 | blk.17.attn_norm.weight | 0x14c011500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x14c016500 | 0xb40000 | +| 159 | blk.17.attn_q.weight | 0x14cb56500 | 0xb40000 | +| 160 | blk.17.attn_v.weight | 0x14d696500 | 0x370000 | +| 161 | blk.17.ffn_down.weight | 0x14da06500 | 0x5a00000 | +| 162 | blk.17.ffn_gate.weight | 0x153406500 | 0x44c0000 | +| 163 | blk.17.ffn_norm.weight | 0x1578c6500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x1578cb500 | 0x44c0000 | +| 165 | blk.18.attn_k.weight | 0x15bd8b500 | 0x2d0000 | +| 166 | blk.18.attn_norm.weight | 0x15c05b500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x15c060500 | 0xb40000 | +| 168 | blk.18.attn_q.weight | 0x15cba0500 | 0xb40000 | +| 169 | blk.18.attn_v.weight | 0x15d6e0500 | 0x370000 | +| 170 | blk.18.ffn_down.weight | 0x15da50500 | 0x5a00000 | +| 171 | blk.18.ffn_gate.weight | 0x163450500 | 0x44c0000 | +| 172 | blk.18.ffn_norm.weight | 0x167910500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x167915500 | 0x44c0000 | +| 174 | blk.19.attn_k.weight | 0x16bdd5500 | 0x226000 | +| 175 | blk.19.attn_norm.weight | 0x16bffb500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x16c000500 | 0xb40000 | +| 177 | blk.19.attn_q.weight | 0x16cb40500 | 0x898000 | +| 178 | blk.19.attn_v.weight | 0x16d3d8500 | 0x370000 | +| 179 | blk.19.ffn_down.weight | 0x16d748500 | 0x8340000 | +| 180 | blk.19.ffn_gate.weight | 0x175a88500 | 0x44c0000 | +| 181 | blk.19.ffn_norm.weight | 0x179f48500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x179f4d500 | 0x44c0000 | +| 183 | blk.20.attn_k.weight | 0x17e40d500 | 0x2d0000 | +| 184 | blk.20.attn_norm.weight | 0x17e6dd500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x17e6e2500 | 0xb40000 | +| 186 | blk.20.attn_q.weight | 0x17f222500 | 0xb40000 | +| 187 | blk.20.attn_v.weight | 0x17fd62500 | 0x370000 | +| 188 | blk.20.ffn_down.weight | 0x1800d2500 | 0x5a00000 | +| 189 | blk.20.ffn_gate.weight | 0x185ad2500 | 0x5a00000 | +| 190 | blk.20.ffn_norm.weight | 0x18b4d2500 | 0x5000 | +| 191 | 
blk.20.ffn_up.weight | 0x18b4d7500 | 0x5a00000 | +| 192 | blk.21.attn_k.weight | 0x190ed7500 | 0x226000 | +| 193 | blk.21.attn_norm.weight | 0x1910fd500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x191102500 | 0xb40000 | +| 195 | blk.21.attn_q.weight | 0x191c42500 | 0x898000 | +| 196 | blk.21.attn_v.weight | 0x1924da500 | 0x2d0000 | +| 197 | blk.21.ffn_down.weight | 0x1927aa500 | 0x5a00000 | +| 198 | blk.21.ffn_gate.weight | 0x1981aa500 | 0x5a00000 | +| 199 | blk.21.ffn_norm.weight | 0x19dbaa500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x19dbaf500 | 0x5a00000 | +| 201 | blk.22.attn_k.weight | 0x1a35af500 | 0x2d0000 | +| 202 | blk.22.attn_norm.weight | 0x1a387f500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x1a3884500 | 0xb40000 | +| 204 | blk.22.attn_q.weight | 0x1a43c4500 | 0xb40000 | +| 205 | blk.22.attn_v.weight | 0x1a4f04500 | 0x370000 | +| 206 | blk.22.ffn_down.weight | 0x1a5274500 | 0x8340000 | +| 207 | blk.22.ffn_gate.weight | 0x1ad5b4500 | 0x5a00000 | +| 208 | blk.22.ffn_norm.weight | 0x1b2fb4500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x1b2fb9500 | 0x5a00000 | +| 210 | blk.23.attn_k.weight | 0x1b89b9500 | 0x2d0000 | +| 211 | blk.23.attn_norm.weight | 0x1b8c89500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x1b8c8e500 | 0xb40000 | +| 213 | blk.23.attn_q.weight | 0x1b97ce500 | 0xb40000 | +| 214 | blk.23.attn_v.weight | 0x1ba30e500 | 0x370000 | +| 215 | blk.23.ffn_down.weight | 0x1ba67e500 | 0x5a00000 | +| 216 | blk.23.ffn_gate.weight | 0x1c007e500 | 0x5a00000 | +| 217 | blk.23.ffn_norm.weight | 0x1c5a7e500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x1c5a83500 | 0x5a00000 | +| 219 | blk.24.attn_k.weight | 0x1cb483500 | 0x2d0000 | +| 220 | blk.24.attn_norm.weight | 0x1cb753500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x1cb758500 | 0xb40000 | +| 222 | blk.24.attn_q.weight | 0x1cc298500 | 0xb40000 | +| 223 | blk.24.attn_v.weight | 0x1ccdd8500 | 0x370000 | +| 224 | blk.24.ffn_down.weight | 0x1cd148500 | 0x5a00000 | +| 225 | 
blk.24.ffn_gate.weight | 0x1d2b48500 | 0x5a00000 | +| 226 | blk.24.ffn_norm.weight | 0x1d8548500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x1d854d500 | 0x5a00000 | +| 228 | blk.25.attn_k.weight | 0x1ddf4d500 | 0x2d0000 | +| 229 | blk.25.attn_norm.weight | 0x1de21d500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x1de222500 | 0xb40000 | +| 231 | blk.25.attn_q.weight | 0x1ded62500 | 0xb40000 | +| 232 | blk.25.attn_v.weight | 0x1df8a2500 | 0x370000 | +| 233 | blk.25.ffn_down.weight | 0x1dfc12500 | 0x8340000 | +| 234 | blk.25.ffn_gate.weight | 0x1e7f52500 | 0x5a00000 | +| 235 | blk.25.ffn_norm.weight | 0x1ed952500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x1ed957500 | 0x5a00000 | +| 237 | blk.26.attn_k.weight | 0x1f3357500 | 0x2d0000 | +| 238 | blk.26.attn_norm.weight | 0x1f3627500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x1f362c500 | 0xb40000 | +| 240 | blk.26.attn_q.weight | 0x1f416c500 | 0xb40000 | +| 241 | blk.26.attn_v.weight | 0x1f4cac500 | 0x370000 | +| 242 | blk.26.ffn_down.weight | 0x1f501c500 | 0x5a00000 | +| 243 | blk.26.ffn_gate.weight | 0x1faa1c500 | 0x5a00000 | +| 244 | blk.26.ffn_norm.weight | 0x20041c500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x200421500 | 0x5a00000 | +| 246 | blk.27.attn_k.weight | 0x205e21500 | 0x226000 | +| 247 | blk.27.attn_norm.weight | 0x206047500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x20604c500 | 0xb40000 | +| 249 | blk.27.attn_q.weight | 0x206b8c500 | 0x898000 | +| 250 | blk.27.attn_v.weight | 0x207424500 | 0x2d0000 | +| 251 | blk.27.ffn_down.weight | 0x2076f4500 | 0x5a00000 | +| 252 | blk.27.ffn_gate.weight | 0x20d0f4500 | 0x5a00000 | +| 253 | blk.27.ffn_norm.weight | 0x212af4500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x212af9500 | 0x5a00000 | +| 255 | blk.28.attn_k.weight | 0x2184f9500 | 0x2d0000 | +| 256 | blk.28.attn_norm.weight | 0x2187c9500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x2187ce500 | 0xb40000 | +| 258 | blk.28.attn_q.weight | 0x21930e500 | 0xb40000 | +| 259 | 
blk.28.attn_v.weight | 0x219e4e500 | 0x370000 | +| 260 | blk.28.ffn_down.weight | 0x21a1be500 | 0x8340000 | +| 261 | blk.28.ffn_gate.weight | 0x2224fe500 | 0x5a00000 | +| 262 | blk.28.ffn_norm.weight | 0x227efe500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x227f03500 | 0x5a00000 | +| 264 | blk.29.attn_k.weight | 0x22d903500 | 0x2d0000 | +| 265 | blk.29.attn_norm.weight | 0x22dbd3500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x22dbd8500 | 0xb40000 | +| 267 | blk.29.attn_q.weight | 0x22e718500 | 0xb40000 | +| 268 | blk.29.attn_v.weight | 0x22f258500 | 0x370000 | +| 269 | blk.29.ffn_down.weight | 0x22f5c8500 | 0x5a00000 | +| 270 | blk.29.ffn_gate.weight | 0x234fc8500 | 0x5a00000 | +| 271 | blk.29.ffn_norm.weight | 0x23a9c8500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x23a9cd500 | 0x5a00000 | +| 273 | blk.30.attn_k.weight | 0x2403cd500 | 0x2d0000 | +| 274 | blk.30.attn_norm.weight | 0x24069d500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x2406a2500 | 0xb40000 | +| 276 | blk.30.attn_q.weight | 0x2411e2500 | 0xb40000 | +| 277 | blk.30.attn_v.weight | 0x241d22500 | 0x370000 | +| 278 | blk.30.ffn_down.weight | 0x242092500 | 0x5a00000 | +| 279 | blk.30.ffn_gate.weight | 0x247a92500 | 0x5a00000 | +| 280 | blk.30.ffn_norm.weight | 0x24d492500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x24d497500 | 0x5a00000 | +| 282 | blk.31.attn_k.weight | 0x252e97500 | 0x2d0000 | +| 283 | blk.31.attn_norm.weight | 0x253167500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x25316c500 | 0xb40000 | +| 285 | blk.31.attn_q.weight | 0x253cac500 | 0xb40000 | +| 286 | blk.31.attn_v.weight | 0x2547ec500 | 0x370000 | +| 287 | blk.31.ffn_down.weight | 0x254b5c500 | 0x8340000 | +| 288 | blk.31.ffn_gate.weight | 0x25ce9c500 | 0x5a00000 | +| 289 | blk.31.ffn_norm.weight | 0x26289c500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x2628a1500 | 0x5a00000 | +| 291 | blk.32.attn_k.weight | 0x2682a1500 | 0x2d0000 | +| 292 | blk.32.attn_norm.weight | 0x268571500 | 0x5000 | +| 293 | 
blk.32.attn_output.weight | 0x268576500 | 0xb40000 | +| 294 | blk.32.attn_q.weight | 0x2690b6500 | 0xb40000 | +| 295 | blk.32.attn_v.weight | 0x269bf6500 | 0x370000 | +| 296 | blk.32.ffn_down.weight | 0x269f66500 | 0x5a00000 | +| 297 | blk.32.ffn_gate.weight | 0x26f966500 | 0x5a00000 | +| 298 | blk.32.ffn_norm.weight | 0x275366500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x27536b500 | 0x5a00000 | +| 300 | blk.33.attn_k.weight | 0x27ad6b500 | 0x2d0000 | +| 301 | blk.33.attn_norm.weight | 0x27b03b500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x27b040500 | 0xb40000 | +| 303 | blk.33.attn_q.weight | 0x27bb80500 | 0xb40000 | +| 304 | blk.33.attn_v.weight | 0x27c6c0500 | 0x370000 | +| 305 | blk.33.ffn_down.weight | 0x27ca30500 | 0x5a00000 | +| 306 | blk.33.ffn_gate.weight | 0x282430500 | 0x5a00000 | +| 307 | blk.33.ffn_norm.weight | 0x287e30500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x287e35500 | 0x5a00000 | +| 309 | blk.34.attn_k.weight | 0x28d835500 | 0x2d0000 | +| 310 | blk.34.attn_norm.weight | 0x28db05500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x28db0a500 | 0xb40000 | +| 312 | blk.34.attn_q.weight | 0x28e64a500 | 0xb40000 | +| 313 | blk.34.attn_v.weight | 0x28f18a500 | 0x370000 | +| 314 | blk.34.ffn_down.weight | 0x28f4fa500 | 0x8340000 | +| 315 | blk.34.ffn_gate.weight | 0x29783a500 | 0x5a00000 | +| 316 | blk.34.ffn_norm.weight | 0x29d23a500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x29d23f500 | 0x5a00000 | +| 318 | blk.35.attn_k.weight | 0x2a2c3f500 | 0x2d0000 | +| 319 | blk.35.attn_norm.weight | 0x2a2f0f500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x2a2f14500 | 0xb40000 | +| 321 | blk.35.attn_q.weight | 0x2a3a54500 | 0xb40000 | +| 322 | blk.35.attn_v.weight | 0x2a4594500 | 0x370000 | +| 323 | blk.35.ffn_down.weight | 0x2a4904500 | 0x8340000 | +| 324 | blk.35.ffn_gate.weight | 0x2acc44500 | 0x5a00000 | +| 325 | blk.35.ffn_norm.weight | 0x2b2644500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x2b2649500 | 0x5a00000 | +| 327 | 
blk.36.attn_k.weight | 0x2b8049500 | 0x2d0000 | +| 328 | blk.36.attn_norm.weight | 0x2b8319500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x2b831e500 | 0xb40000 | +| 330 | blk.36.attn_q.weight | 0x2b8e5e500 | 0xb40000 | +| 331 | blk.36.attn_v.weight | 0x2b999e500 | 0x370000 | +| 332 | blk.36.ffn_down.weight | 0x2b9d0e500 | 0x8340000 | +| 333 | blk.36.ffn_gate.weight | 0x2c204e500 | 0x5a00000 | +| 334 | blk.36.ffn_norm.weight | 0x2c7a4e500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x2c7a53500 | 0x5a00000 | +| 336 | blk.37.attn_k.weight | 0x2cd453500 | 0x2d0000 | +| 337 | blk.37.attn_norm.weight | 0x2cd723500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x2cd728500 | 0xb40000 | +| 339 | blk.37.attn_q.weight | 0x2ce268500 | 0xb40000 | +| 340 | blk.37.attn_v.weight | 0x2ceda8500 | 0x370000 | +| 341 | blk.37.ffn_down.weight | 0x2cf118500 | 0x8340000 | +| 342 | blk.37.ffn_gate.weight | 0x2d7458500 | 0x5a00000 | +| 343 | blk.37.ffn_norm.weight | 0x2dce58500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x2dce5d500 | 0x5a00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q4_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( 
~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 18 
| blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 43 | blk.4.attn_v.weight | 
Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.5: (~556M) 
555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | 
( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 
5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 
x 1 | Q3_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 
| Q4_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query 
(W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | 
Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 157 | 
blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 171 
| blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 220 | 
blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 234 
| blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 283 | 
blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 297 
| blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_S.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_S.md new file mode 100644 index 0000000..87b0d4f --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q4_K_S.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + 
+There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 14 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q4\_K\_S.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q4_k_sgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x16800000 | +| 1 | output_norm.weight | 0x16f84500 | 0x5000 | +| 2 | token_embd.weight | 0x16f89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x28289500 | 0x226000 | +| 4 | blk.0.attn_norm.weight | 0x284af500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x284b4500 | 0xb40000 | +| 6 | blk.0.attn_q.weight | 0x28ff4500 | 0x898000 | +| 7 | blk.0.attn_v.weight | 0x2988c500 | 0x2d0000 | +| 8 | blk.0.ffn_down.weight | 0x29b5c500 | 0x6e00000 | +| 9 | blk.0.ffn_gate.weight | 0x3095c500 | 0x44c0000 | +| 10 | blk.0.ffn_norm.weight | 0x34e1c500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x34e21500 | 0x44c0000 | +| 12 | blk.1.attn_k.weight | 0x392e1500 | 0x226000 | +| 13 | blk.1.attn_norm.weight | 0x39507500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x3950c500 | 0xb40000 | +| 15 | blk.1.attn_q.weight | 0x3a04c500 | 
0x898000 | +| 16 | blk.1.attn_v.weight | 0x3a8e4500 | 0x2d0000 | +| 17 | blk.1.ffn_down.weight | 0x3abb4500 | 0x6e00000 | +| 18 | blk.1.ffn_gate.weight | 0x419b4500 | 0x44c0000 | +| 19 | blk.1.ffn_norm.weight | 0x45e74500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x45e79500 | 0x44c0000 | +| 21 | blk.2.attn_k.weight | 0x4a339500 | 0x226000 | +| 22 | blk.2.attn_norm.weight | 0x4a55f500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x4a564500 | 0xb40000 | +| 24 | blk.2.attn_q.weight | 0x4b0a4500 | 0x898000 | +| 25 | blk.2.attn_v.weight | 0x4b93c500 | 0x2d0000 | +| 26 | blk.2.ffn_down.weight | 0x4bc0c500 | 0x6e00000 | +| 27 | blk.2.ffn_gate.weight | 0x52a0c500 | 0x44c0000 | +| 28 | blk.2.ffn_norm.weight | 0x56ecc500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x56ed1500 | 0x44c0000 | +| 30 | blk.3.attn_k.weight | 0x5b391500 | 0x226000 | +| 31 | blk.3.attn_norm.weight | 0x5b5b7500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x5b5bc500 | 0xb40000 | +| 33 | blk.3.attn_q.weight | 0x5c0fc500 | 0x898000 | +| 34 | blk.3.attn_v.weight | 0x5c994500 | 0x2d0000 | +| 35 | blk.3.ffn_down.weight | 0x5cc64500 | 0x6e00000 | +| 36 | blk.3.ffn_gate.weight | 0x63a64500 | 0x44c0000 | +| 37 | blk.3.ffn_norm.weight | 0x67f24500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x67f29500 | 0x44c0000 | +| 39 | blk.4.attn_k.weight | 0x6c3e9500 | 0x226000 | +| 40 | blk.4.attn_norm.weight | 0x6c60f500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x6c614500 | 0xb40000 | +| 42 | blk.4.attn_q.weight | 0x6d154500 | 0x898000 | +| 43 | blk.4.attn_v.weight | 0x6d9ec500 | 0x2d0000 | +| 44 | blk.4.ffn_down.weight | 0x6dcbc500 | 0x6e00000 | +| 45 | blk.4.ffn_gate.weight | 0x74abc500 | 0x44c0000 | +| 46 | blk.4.ffn_norm.weight | 0x78f7c500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x78f81500 | 0x44c0000 | +| 48 | blk.5.attn_k.weight | 0x7d441500 | 0x226000 | +| 49 | blk.5.attn_norm.weight | 0x7d667500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x7d66c500 | 0xb40000 | +| 51 | blk.5.attn_q.weight | 0x7e1ac500 | 
0x898000 | +| 52 | blk.5.attn_v.weight | 0x7ea44500 | 0x2d0000 | +| 53 | blk.5.ffn_down.weight | 0x7ed14500 | 0x5a00000 | +| 54 | blk.5.ffn_gate.weight | 0x84714500 | 0x44c0000 | +| 55 | blk.5.ffn_norm.weight | 0x88bd4500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x88bd9500 | 0x44c0000 | +| 57 | blk.6.attn_k.weight | 0x8d099500 | 0x226000 | +| 58 | blk.6.attn_norm.weight | 0x8d2bf500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0x8d2c4500 | 0xb40000 | +| 60 | blk.6.attn_q.weight | 0x8de04500 | 0x898000 | +| 61 | blk.6.attn_v.weight | 0x8e69c500 | 0x2d0000 | +| 62 | blk.6.ffn_down.weight | 0x8e96c500 | 0x5a00000 | +| 63 | blk.6.ffn_gate.weight | 0x9436c500 | 0x44c0000 | +| 64 | blk.6.ffn_norm.weight | 0x9882c500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x98831500 | 0x44c0000 | +| 66 | blk.7.attn_k.weight | 0x9ccf1500 | 0x226000 | +| 67 | blk.7.attn_norm.weight | 0x9cf17500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x9cf1c500 | 0xb40000 | +| 69 | blk.7.attn_q.weight | 0x9da5c500 | 0x898000 | +| 70 | blk.7.attn_v.weight | 0x9e2f4500 | 0x2d0000 | +| 71 | blk.7.ffn_down.weight | 0x9e5c4500 | 0x5a00000 | +| 72 | blk.7.ffn_gate.weight | 0xa3fc4500 | 0x44c0000 | +| 73 | blk.7.ffn_norm.weight | 0xa8484500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xa8489500 | 0x44c0000 | +| 75 | blk.8.attn_k.weight | 0xac949500 | 0x226000 | +| 76 | blk.8.attn_norm.weight | 0xacb6f500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xacb74500 | 0xb40000 | +| 78 | blk.8.attn_q.weight | 0xad6b4500 | 0x898000 | +| 79 | blk.8.attn_v.weight | 0xadf4c500 | 0x2d0000 | +| 80 | blk.8.ffn_down.weight | 0xae21c500 | 0x5a00000 | +| 81 | blk.8.ffn_gate.weight | 0xb3c1c500 | 0x44c0000 | +| 82 | blk.8.ffn_norm.weight | 0xb80dc500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xb80e1500 | 0x44c0000 | +| 84 | blk.9.attn_k.weight | 0xbc5a1500 | 0x226000 | +| 85 | blk.9.attn_norm.weight | 0xbc7c7500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xbc7cc500 | 0xb40000 | +| 87 | blk.9.attn_q.weight | 0xbd30c500 | 
0x898000 | +| 88 | blk.9.attn_v.weight | 0xbdba4500 | 0x2d0000 | +| 89 | blk.9.ffn_down.weight | 0xbde74500 | 0x5a00000 | +| 90 | blk.9.ffn_gate.weight | 0xc3874500 | 0x44c0000 | +| 91 | blk.9.ffn_norm.weight | 0xc7d34500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xc7d39500 | 0x44c0000 | +| 93 | blk.10.attn_k.weight | 0xcc1f9500 | 0x226000 | +| 94 | blk.10.attn_norm.weight | 0xcc41f500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xcc424500 | 0xb40000 | +| 96 | blk.10.attn_q.weight | 0xccf64500 | 0x898000 | +| 97 | blk.10.attn_v.weight | 0xcd7fc500 | 0x2d0000 | +| 98 | blk.10.ffn_down.weight | 0xcdacc500 | 0x5a00000 | +| 99 | blk.10.ffn_gate.weight | 0xd34cc500 | 0x44c0000 | +| 100 | blk.10.ffn_norm.weight | 0xd798c500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0xd7991500 | 0x44c0000 | +| 102 | blk.11.attn_k.weight | 0xdbe51500 | 0x226000 | +| 103 | blk.11.attn_norm.weight | 0xdc077500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0xdc07c500 | 0xb40000 | +| 105 | blk.11.attn_q.weight | 0xdcbbc500 | 0x898000 | +| 106 | blk.11.attn_v.weight | 0xdd454500 | 0x2d0000 | +| 107 | blk.11.ffn_down.weight | 0xdd724500 | 0x5a00000 | +| 108 | blk.11.ffn_gate.weight | 0xe3124500 | 0x44c0000 | +| 109 | blk.11.ffn_norm.weight | 0xe75e4500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0xe75e9500 | 0x44c0000 | +| 111 | blk.12.attn_k.weight | 0xebaa9500 | 0x226000 | +| 112 | blk.12.attn_norm.weight | 0xebccf500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0xebcd4500 | 0xb40000 | +| 114 | blk.12.attn_q.weight | 0xec814500 | 0x898000 | +| 115 | blk.12.attn_v.weight | 0xed0ac500 | 0x2d0000 | +| 116 | blk.12.ffn_down.weight | 0xed37c500 | 0x5a00000 | +| 117 | blk.12.ffn_gate.weight | 0xf2d7c500 | 0x44c0000 | +| 118 | blk.12.ffn_norm.weight | 0xf723c500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0xf7241500 | 0x44c0000 | +| 120 | blk.13.attn_k.weight | 0xfb701500 | 0x226000 | +| 121 | blk.13.attn_norm.weight | 0xfb927500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0xfb92c500 | 
0xb40000 | +| 123 | blk.13.attn_q.weight | 0xfc46c500 | 0x898000 | +| 124 | blk.13.attn_v.weight | 0xfcd04500 | 0x2d0000 | +| 125 | blk.13.ffn_down.weight | 0xfcfd4500 | 0x5a00000 | +| 126 | blk.13.ffn_gate.weight | 0x1029d4500 | 0x44c0000 | +| 127 | blk.13.ffn_norm.weight | 0x106e94500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x106e99500 | 0x44c0000 | +| 129 | blk.14.attn_k.weight | 0x10b359500 | 0x226000 | +| 130 | blk.14.attn_norm.weight | 0x10b57f500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x10b584500 | 0xb40000 | +| 132 | blk.14.attn_q.weight | 0x10c0c4500 | 0x898000 | +| 133 | blk.14.attn_v.weight | 0x10c95c500 | 0x2d0000 | +| 134 | blk.14.ffn_down.weight | 0x10cc2c500 | 0x5a00000 | +| 135 | blk.14.ffn_gate.weight | 0x11262c500 | 0x44c0000 | +| 136 | blk.14.ffn_norm.weight | 0x116aec500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x116af1500 | 0x44c0000 | +| 138 | blk.15.attn_k.weight | 0x11afb1500 | 0x226000 | +| 139 | blk.15.attn_norm.weight | 0x11b1d7500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x11b1dc500 | 0xb40000 | +| 141 | blk.15.attn_q.weight | 0x11bd1c500 | 0x898000 | +| 142 | blk.15.attn_v.weight | 0x11c5b4500 | 0x2d0000 | +| 143 | blk.15.ffn_down.weight | 0x11c884500 | 0x5a00000 | +| 144 | blk.15.ffn_gate.weight | 0x122284500 | 0x44c0000 | +| 145 | blk.15.ffn_norm.weight | 0x126744500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x126749500 | 0x44c0000 | +| 147 | blk.16.attn_k.weight | 0x12ac09500 | 0x226000 | +| 148 | blk.16.attn_norm.weight | 0x12ae2f500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x12ae34500 | 0xb40000 | +| 150 | blk.16.attn_q.weight | 0x12b974500 | 0x898000 | +| 151 | blk.16.attn_v.weight | 0x12c20c500 | 0x2d0000 | +| 152 | blk.16.ffn_down.weight | 0x12c4dc500 | 0x5a00000 | +| 153 | blk.16.ffn_gate.weight | 0x131edc500 | 0x44c0000 | +| 154 | blk.16.ffn_norm.weight | 0x13639c500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x1363a1500 | 0x44c0000 | +| 156 | blk.17.attn_k.weight | 0x13a861500 | 0x2d0000 | +| 157 
| blk.17.attn_norm.weight | 0x13ab31500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x13ab36500 | 0xb40000 | +| 159 | blk.17.attn_q.weight | 0x13b676500 | 0xb40000 | +| 160 | blk.17.attn_v.weight | 0x13c1b6500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x13c486500 | 0x5a00000 | +| 162 | blk.17.ffn_gate.weight | 0x141e86500 | 0x44c0000 | +| 163 | blk.17.ffn_norm.weight | 0x146346500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x14634b500 | 0x44c0000 | +| 165 | blk.18.attn_k.weight | 0x14a80b500 | 0x2d0000 | +| 166 | blk.18.attn_norm.weight | 0x14aadb500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x14aae0500 | 0xb40000 | +| 168 | blk.18.attn_q.weight | 0x14b620500 | 0xb40000 | +| 169 | blk.18.attn_v.weight | 0x14c160500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x14c430500 | 0x5a00000 | +| 171 | blk.18.ffn_gate.weight | 0x151e30500 | 0x44c0000 | +| 172 | blk.18.ffn_norm.weight | 0x1562f0500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x1562f5500 | 0x44c0000 | +| 174 | blk.19.attn_k.weight | 0x15a7b5500 | 0x226000 | +| 175 | blk.19.attn_norm.weight | 0x15a9db500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x15a9e0500 | 0xb40000 | +| 177 | blk.19.attn_q.weight | 0x15b520500 | 0x898000 | +| 178 | blk.19.attn_v.weight | 0x15bdb8500 | 0x2d0000 | +| 179 | blk.19.ffn_down.weight | 0x15c088500 | 0x5a00000 | +| 180 | blk.19.ffn_gate.weight | 0x161a88500 | 0x44c0000 | +| 181 | blk.19.ffn_norm.weight | 0x165f48500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x165f4d500 | 0x44c0000 | +| 183 | blk.20.attn_k.weight | 0x16a40d500 | 0x2d0000 | +| 184 | blk.20.attn_norm.weight | 0x16a6dd500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x16a6e2500 | 0xb40000 | +| 186 | blk.20.attn_q.weight | 0x16b222500 | 0xb40000 | +| 187 | blk.20.attn_v.weight | 0x16bd62500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x16c032500 | 0x5a00000 | +| 189 | blk.20.ffn_gate.weight | 0x171a32500 | 0x5a00000 | +| 190 | blk.20.ffn_norm.weight | 0x177432500 | 0x5000 | +| 191 | 
blk.20.ffn_up.weight | 0x177437500 | 0x5a00000 | +| 192 | blk.21.attn_k.weight | 0x17ce37500 | 0x226000 | +| 193 | blk.21.attn_norm.weight | 0x17d05d500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x17d062500 | 0xb40000 | +| 195 | blk.21.attn_q.weight | 0x17dba2500 | 0x898000 | +| 196 | blk.21.attn_v.weight | 0x17e43a500 | 0x2d0000 | +| 197 | blk.21.ffn_down.weight | 0x17e70a500 | 0x5a00000 | +| 198 | blk.21.ffn_gate.weight | 0x18410a500 | 0x5a00000 | +| 199 | blk.21.ffn_norm.weight | 0x189b0a500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x189b0f500 | 0x5a00000 | +| 201 | blk.22.attn_k.weight | 0x18f50f500 | 0x2d0000 | +| 202 | blk.22.attn_norm.weight | 0x18f7df500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x18f7e4500 | 0xb40000 | +| 204 | blk.22.attn_q.weight | 0x190324500 | 0xb40000 | +| 205 | blk.22.attn_v.weight | 0x190e64500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x191134500 | 0x5a00000 | +| 207 | blk.22.ffn_gate.weight | 0x196b34500 | 0x5a00000 | +| 208 | blk.22.ffn_norm.weight | 0x19c534500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x19c539500 | 0x5a00000 | +| 210 | blk.23.attn_k.weight | 0x1a1f39500 | 0x2d0000 | +| 211 | blk.23.attn_norm.weight | 0x1a2209500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x1a220e500 | 0xb40000 | +| 213 | blk.23.attn_q.weight | 0x1a2d4e500 | 0xb40000 | +| 214 | blk.23.attn_v.weight | 0x1a388e500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x1a3b5e500 | 0x5a00000 | +| 216 | blk.23.ffn_gate.weight | 0x1a955e500 | 0x5a00000 | +| 217 | blk.23.ffn_norm.weight | 0x1aef5e500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x1aef63500 | 0x5a00000 | +| 219 | blk.24.attn_k.weight | 0x1b4963500 | 0x2d0000 | +| 220 | blk.24.attn_norm.weight | 0x1b4c33500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x1b4c38500 | 0xb40000 | +| 222 | blk.24.attn_q.weight | 0x1b5778500 | 0xb40000 | +| 223 | blk.24.attn_v.weight | 0x1b62b8500 | 0x2d0000 | +| 224 | blk.24.ffn_down.weight | 0x1b6588500 | 0x5a00000 | +| 225 | 
blk.24.ffn_gate.weight | 0x1bbf88500 | 0x5a00000 | +| 226 | blk.24.ffn_norm.weight | 0x1c1988500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x1c198d500 | 0x5a00000 | +| 228 | blk.25.attn_k.weight | 0x1c738d500 | 0x2d0000 | +| 229 | blk.25.attn_norm.weight | 0x1c765d500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x1c7662500 | 0xb40000 | +| 231 | blk.25.attn_q.weight | 0x1c81a2500 | 0xb40000 | +| 232 | blk.25.attn_v.weight | 0x1c8ce2500 | 0x2d0000 | +| 233 | blk.25.ffn_down.weight | 0x1c8fb2500 | 0x5a00000 | +| 234 | blk.25.ffn_gate.weight | 0x1ce9b2500 | 0x5a00000 | +| 235 | blk.25.ffn_norm.weight | 0x1d43b2500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x1d43b7500 | 0x5a00000 | +| 237 | blk.26.attn_k.weight | 0x1d9db7500 | 0x2d0000 | +| 238 | blk.26.attn_norm.weight | 0x1da087500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x1da08c500 | 0xb40000 | +| 240 | blk.26.attn_q.weight | 0x1dabcc500 | 0xb40000 | +| 241 | blk.26.attn_v.weight | 0x1db70c500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x1db9dc500 | 0x5a00000 | +| 243 | blk.26.ffn_gate.weight | 0x1e13dc500 | 0x5a00000 | +| 244 | blk.26.ffn_norm.weight | 0x1e6ddc500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x1e6de1500 | 0x5a00000 | +| 246 | blk.27.attn_k.weight | 0x1ec7e1500 | 0x226000 | +| 247 | blk.27.attn_norm.weight | 0x1eca07500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x1eca0c500 | 0xb40000 | +| 249 | blk.27.attn_q.weight | 0x1ed54c500 | 0x898000 | +| 250 | blk.27.attn_v.weight | 0x1edde4500 | 0x2d0000 | +| 251 | blk.27.ffn_down.weight | 0x1ee0b4500 | 0x5a00000 | +| 252 | blk.27.ffn_gate.weight | 0x1f3ab4500 | 0x5a00000 | +| 253 | blk.27.ffn_norm.weight | 0x1f94b4500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x1f94b9500 | 0x5a00000 | +| 255 | blk.28.attn_k.weight | 0x1feeb9500 | 0x2d0000 | +| 256 | blk.28.attn_norm.weight | 0x1ff189500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x1ff18e500 | 0xb40000 | +| 258 | blk.28.attn_q.weight | 0x1ffcce500 | 0xb40000 | +| 259 | 
blk.28.attn_v.weight | 0x20080e500 | 0x2d0000 | +| 260 | blk.28.ffn_down.weight | 0x200ade500 | 0x5a00000 | +| 261 | blk.28.ffn_gate.weight | 0x2064de500 | 0x5a00000 | +| 262 | blk.28.ffn_norm.weight | 0x20bede500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x20bee3500 | 0x5a00000 | +| 264 | blk.29.attn_k.weight | 0x2118e3500 | 0x2d0000 | +| 265 | blk.29.attn_norm.weight | 0x211bb3500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x211bb8500 | 0xb40000 | +| 267 | blk.29.attn_q.weight | 0x2126f8500 | 0xb40000 | +| 268 | blk.29.attn_v.weight | 0x213238500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x213508500 | 0x5a00000 | +| 270 | blk.29.ffn_gate.weight | 0x218f08500 | 0x5a00000 | +| 271 | blk.29.ffn_norm.weight | 0x21e908500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x21e90d500 | 0x5a00000 | +| 273 | blk.30.attn_k.weight | 0x22430d500 | 0x2d0000 | +| 274 | blk.30.attn_norm.weight | 0x2245dd500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x2245e2500 | 0xb40000 | +| 276 | blk.30.attn_q.weight | 0x225122500 | 0xb40000 | +| 277 | blk.30.attn_v.weight | 0x225c62500 | 0x2d0000 | +| 278 | blk.30.ffn_down.weight | 0x225f32500 | 0x5a00000 | +| 279 | blk.30.ffn_gate.weight | 0x22b932500 | 0x5a00000 | +| 280 | blk.30.ffn_norm.weight | 0x231332500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x231337500 | 0x5a00000 | +| 282 | blk.31.attn_k.weight | 0x236d37500 | 0x2d0000 | +| 283 | blk.31.attn_norm.weight | 0x237007500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x23700c500 | 0xb40000 | +| 285 | blk.31.attn_q.weight | 0x237b4c500 | 0xb40000 | +| 286 | blk.31.attn_v.weight | 0x23868c500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x23895c500 | 0x5a00000 | +| 288 | blk.31.ffn_gate.weight | 0x23e35c500 | 0x5a00000 | +| 289 | blk.31.ffn_norm.weight | 0x243d5c500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x243d61500 | 0x5a00000 | +| 291 | blk.32.attn_k.weight | 0x249761500 | 0x2d0000 | +| 292 | blk.32.attn_norm.weight | 0x249a31500 | 0x5000 | +| 293 | 
blk.32.attn_output.weight | 0x249a36500 | 0xb40000 | +| 294 | blk.32.attn_q.weight | 0x24a576500 | 0xb40000 | +| 295 | blk.32.attn_v.weight | 0x24b0b6500 | 0x2d0000 | +| 296 | blk.32.ffn_down.weight | 0x24b386500 | 0x5a00000 | +| 297 | blk.32.ffn_gate.weight | 0x250d86500 | 0x5a00000 | +| 298 | blk.32.ffn_norm.weight | 0x256786500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x25678b500 | 0x5a00000 | +| 300 | blk.33.attn_k.weight | 0x25c18b500 | 0x2d0000 | +| 301 | blk.33.attn_norm.weight | 0x25c45b500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x25c460500 | 0xb40000 | +| 303 | blk.33.attn_q.weight | 0x25cfa0500 | 0xb40000 | +| 304 | blk.33.attn_v.weight | 0x25dae0500 | 0x2d0000 | +| 305 | blk.33.ffn_down.weight | 0x25ddb0500 | 0x5a00000 | +| 306 | blk.33.ffn_gate.weight | 0x2637b0500 | 0x5a00000 | +| 307 | blk.33.ffn_norm.weight | 0x2691b0500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x2691b5500 | 0x5a00000 | +| 309 | blk.34.attn_k.weight | 0x26ebb5500 | 0x2d0000 | +| 310 | blk.34.attn_norm.weight | 0x26ee85500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x26ee8a500 | 0xb40000 | +| 312 | blk.34.attn_q.weight | 0x26f9ca500 | 0xb40000 | +| 313 | blk.34.attn_v.weight | 0x27050a500 | 0x2d0000 | +| 314 | blk.34.ffn_down.weight | 0x2707da500 | 0x5a00000 | +| 315 | blk.34.ffn_gate.weight | 0x2761da500 | 0x5a00000 | +| 316 | blk.34.ffn_norm.weight | 0x27bbda500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x27bbdf500 | 0x5a00000 | +| 318 | blk.35.attn_k.weight | 0x2815df500 | 0x2d0000 | +| 319 | blk.35.attn_norm.weight | 0x2818af500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x2818b4500 | 0xb40000 | +| 321 | blk.35.attn_q.weight | 0x2823f4500 | 0xb40000 | +| 322 | blk.35.attn_v.weight | 0x282f34500 | 0x2d0000 | +| 323 | blk.35.ffn_down.weight | 0x283204500 | 0x5a00000 | +| 324 | blk.35.ffn_gate.weight | 0x288c04500 | 0x5a00000 | +| 325 | blk.35.ffn_norm.weight | 0x28e604500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x28e609500 | 0x5a00000 | +| 327 | 
blk.36.attn_k.weight | 0x294009500 | 0x2d0000 | +| 328 | blk.36.attn_norm.weight | 0x2942d9500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x2942de500 | 0xb40000 | +| 330 | blk.36.attn_q.weight | 0x294e1e500 | 0xb40000 | +| 331 | blk.36.attn_v.weight | 0x29595e500 | 0x2d0000 | +| 332 | blk.36.ffn_down.weight | 0x295c2e500 | 0x5a00000 | +| 333 | blk.36.ffn_gate.weight | 0x29b62e500 | 0x5a00000 | +| 334 | blk.36.ffn_norm.weight | 0x2a102e500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x2a1033500 | 0x5a00000 | +| 336 | blk.37.attn_k.weight | 0x2a6a33500 | 0x2d0000 | +| 337 | blk.37.attn_norm.weight | 0x2a6d03500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x2a6d08500 | 0xb40000 | +| 339 | blk.37.attn_q.weight | 0x2a7848500 | 0xb40000 | +| 340 | blk.37.attn_v.weight | 0x2a8388500 | 0x2d0000 | +| 341 | blk.37.ffn_down.weight | 0x2a8658500 | 0x5a00000 | +| 342 | blk.37.ffn_gate.weight | 0x2ae058500 | 0x5a00000 | +| 343 | blk.37.ffn_norm.weight | 0x2b3a58500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x2b3a5d500 | 0x5a00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q4_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( 
~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 18 
| blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 43 | blk.4.attn_v.weight | 
Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.5: (~556M) 
555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | 
( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 
5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 
x 1 | Q3_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 
| Q4_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query 
(W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | 
Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 157 | 
blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 171 
| blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q3_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 220 | 
blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 234 
| blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q3_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q3_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 283 | 
blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 297 
| blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | 
Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 
20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 
Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q4_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q4_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_M.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_M.md new file mode 100644 index 0000000..eafa3ec --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_M.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + 
+There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 17 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5\_K\_M.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q5_k_mgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x1b800000 | +| 1 | output_norm.weight | 0x1bf84500 | 0x5000 | +| 2 | token_embd.weight | 0x1bf89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x2d289500 | 0x2d0000 | +| 4 | blk.0.attn_norm.weight | 0x2d559500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x2d55e500 | 0xdc0000 | +| 6 | blk.0.attn_q.weight | 0x2e31e500 | 0xb40000 | +| 7 | blk.0.attn_v.weight | 0x2ee5e500 | 0x370000 | +| 8 | blk.0.ffn_down.weight | 0x2f1ce500 | 0x8340000 | +| 9 | blk.0.ffn_gate.weight | 0x3750e500 | 0x5a00000 | +| 10 | blk.0.ffn_norm.weight | 0x3cf0e500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x3cf13500 | 0x5a00000 | +| 12 | blk.1.attn_k.weight | 0x42913500 | 0x2d0000 | +| 13 | blk.1.attn_norm.weight | 0x42be3500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x42be8500 | 0xdc0000 | +| 15 | blk.1.attn_q.weight | 0x439a8500 | 
0xb40000 | +| 16 | blk.1.attn_v.weight | 0x444e8500 | 0x370000 | +| 17 | blk.1.ffn_down.weight | 0x44858500 | 0x8340000 | +| 18 | blk.1.ffn_gate.weight | 0x4cb98500 | 0x5a00000 | +| 19 | blk.1.ffn_norm.weight | 0x52598500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x5259d500 | 0x5a00000 | +| 21 | blk.2.attn_k.weight | 0x57f9d500 | 0x2d0000 | +| 22 | blk.2.attn_norm.weight | 0x5826d500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x58272500 | 0xdc0000 | +| 24 | blk.2.attn_q.weight | 0x59032500 | 0xb40000 | +| 25 | blk.2.attn_v.weight | 0x59b72500 | 0x370000 | +| 26 | blk.2.ffn_down.weight | 0x59ee2500 | 0x8340000 | +| 27 | blk.2.ffn_gate.weight | 0x62222500 | 0x5a00000 | +| 28 | blk.2.ffn_norm.weight | 0x67c22500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x67c27500 | 0x5a00000 | +| 30 | blk.3.attn_k.weight | 0x6d627500 | 0x2d0000 | +| 31 | blk.3.attn_norm.weight | 0x6d8f7500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x6d8fc500 | 0xdc0000 | +| 33 | blk.3.attn_q.weight | 0x6e6bc500 | 0xb40000 | +| 34 | blk.3.attn_v.weight | 0x6f1fc500 | 0x370000 | +| 35 | blk.3.ffn_down.weight | 0x6f56c500 | 0x8340000 | +| 36 | blk.3.ffn_gate.weight | 0x778ac500 | 0x5a00000 | +| 37 | blk.3.ffn_norm.weight | 0x7d2ac500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x7d2b1500 | 0x5a00000 | +| 39 | blk.4.attn_k.weight | 0x82cb1500 | 0x2d0000 | +| 40 | blk.4.attn_norm.weight | 0x82f81500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x82f86500 | 0xdc0000 | +| 42 | blk.4.attn_q.weight | 0x83d46500 | 0xb40000 | +| 43 | blk.4.attn_v.weight | 0x84886500 | 0x41a000 | +| 44 | blk.4.ffn_down.weight | 0x84ca0500 | 0x8340000 | +| 45 | blk.4.ffn_gate.weight | 0x8cfe0500 | 0x5a00000 | +| 46 | blk.4.ffn_norm.weight | 0x929e0500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x929e5500 | 0x5a00000 | +| 48 | blk.5.attn_k.weight | 0x983e5500 | 0x2d0000 | +| 49 | blk.5.attn_norm.weight | 0x986b5500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x986ba500 | 0xdc0000 | +| 51 | blk.5.attn_q.weight | 0x9947a500 | 
0xb40000 | +| 52 | blk.5.attn_v.weight | 0x99fba500 | 0x41a000 | +| 53 | blk.5.ffn_down.weight | 0x9a3d4500 | 0x6e00000 | +| 54 | blk.5.ffn_gate.weight | 0xa11d4500 | 0x5a00000 | +| 55 | blk.5.ffn_norm.weight | 0xa6bd4500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0xa6bd9500 | 0x5a00000 | +| 57 | blk.6.attn_k.weight | 0xac5d9500 | 0x2d0000 | +| 58 | blk.6.attn_norm.weight | 0xac8a9500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0xac8ae500 | 0xdc0000 | +| 60 | blk.6.attn_q.weight | 0xad66e500 | 0xb40000 | +| 61 | blk.6.attn_v.weight | 0xae1ae500 | 0x370000 | +| 62 | blk.6.ffn_down.weight | 0xae51e500 | 0x6e00000 | +| 63 | blk.6.ffn_gate.weight | 0xb531e500 | 0x5a00000 | +| 64 | blk.6.ffn_norm.weight | 0xbad1e500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0xbad23500 | 0x5a00000 | +| 66 | blk.7.attn_k.weight | 0xc0723500 | 0x2d0000 | +| 67 | blk.7.attn_norm.weight | 0xc09f3500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0xc09f8500 | 0xdc0000 | +| 69 | blk.7.attn_q.weight | 0xc17b8500 | 0xb40000 | +| 70 | blk.7.attn_v.weight | 0xc22f8500 | 0x41a000 | +| 71 | blk.7.ffn_down.weight | 0xc2712500 | 0x8340000 | +| 72 | blk.7.ffn_gate.weight | 0xcaa52500 | 0x5a00000 | +| 73 | blk.7.ffn_norm.weight | 0xd0452500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xd0457500 | 0x5a00000 | +| 75 | blk.8.attn_k.weight | 0xd5e57500 | 0x2d0000 | +| 76 | blk.8.attn_norm.weight | 0xd6127500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xd612c500 | 0xdc0000 | +| 78 | blk.8.attn_q.weight | 0xd6eec500 | 0xb40000 | +| 79 | blk.8.attn_v.weight | 0xd7a2c500 | 0x41a000 | +| 80 | blk.8.ffn_down.weight | 0xd7e46500 | 0x6e00000 | +| 81 | blk.8.ffn_gate.weight | 0xdec46500 | 0x5a00000 | +| 82 | blk.8.ffn_norm.weight | 0xe4646500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xe464b500 | 0x5a00000 | +| 84 | blk.9.attn_k.weight | 0xea04b500 | 0x2d0000 | +| 85 | blk.9.attn_norm.weight | 0xea31b500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xea320500 | 0xdc0000 | +| 87 | blk.9.attn_q.weight | 0xeb0e0500 | 
0xb40000 | +| 88 | blk.9.attn_v.weight | 0xebc20500 | 0x370000 | +| 89 | blk.9.ffn_down.weight | 0xebf90500 | 0x6e00000 | +| 90 | blk.9.ffn_gate.weight | 0xf2d90500 | 0x5a00000 | +| 91 | blk.9.ffn_norm.weight | 0xf8790500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xf8795500 | 0x5a00000 | +| 93 | blk.10.attn_k.weight | 0xfe195500 | 0x2d0000 | +| 94 | blk.10.attn_norm.weight | 0xfe465500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xfe46a500 | 0xdc0000 | +| 96 | blk.10.attn_q.weight | 0xff22a500 | 0xb40000 | +| 97 | blk.10.attn_v.weight | 0xffd6a500 | 0x41a000 | +| 98 | blk.10.ffn_down.weight | 0x100184500 | 0x8340000 | +| 99 | blk.10.ffn_gate.weight | 0x1084c4500 | 0x5a00000 | +| 100 | blk.10.ffn_norm.weight | 0x10dec4500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0x10dec9500 | 0x5a00000 | +| 102 | blk.11.attn_k.weight | 0x1138c9500 | 0x2d0000 | +| 103 | blk.11.attn_norm.weight | 0x113b99500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0x113b9e500 | 0xdc0000 | +| 105 | blk.11.attn_q.weight | 0x11495e500 | 0xb40000 | +| 106 | blk.11.attn_v.weight | 0x11549e500 | 0x41a000 | +| 107 | blk.11.ffn_down.weight | 0x1158b8500 | 0x6e00000 | +| 108 | blk.11.ffn_gate.weight | 0x11c6b8500 | 0x5a00000 | +| 109 | blk.11.ffn_norm.weight | 0x1220b8500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0x1220bd500 | 0x5a00000 | +| 111 | blk.12.attn_k.weight | 0x127abd500 | 0x2d0000 | +| 112 | blk.12.attn_norm.weight | 0x127d8d500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0x127d92500 | 0xdc0000 | +| 114 | blk.12.attn_q.weight | 0x128b52500 | 0xb40000 | +| 115 | blk.12.attn_v.weight | 0x129692500 | 0x370000 | +| 116 | blk.12.ffn_down.weight | 0x129a02500 | 0x6e00000 | +| 117 | blk.12.ffn_gate.weight | 0x130802500 | 0x5a00000 | +| 118 | blk.12.ffn_norm.weight | 0x136202500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0x136207500 | 0x5a00000 | +| 120 | blk.13.attn_k.weight | 0x13bc07500 | 0x2d0000 | +| 121 | blk.13.attn_norm.weight | 0x13bed7500 | 0x5000 | +| 122 | 
blk.13.attn_output.weight | 0x13bedc500 | 0xdc0000 | +| 123 | blk.13.attn_q.weight | 0x13cc9c500 | 0xb40000 | +| 124 | blk.13.attn_v.weight | 0x13d7dc500 | 0x41a000 | +| 125 | blk.13.ffn_down.weight | 0x13dbf6500 | 0x8340000 | +| 126 | blk.13.ffn_gate.weight | 0x145f36500 | 0x5a00000 | +| 127 | blk.13.ffn_norm.weight | 0x14b936500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x14b93b500 | 0x5a00000 | +| 129 | blk.14.attn_k.weight | 0x15133b500 | 0x2d0000 | +| 130 | blk.14.attn_norm.weight | 0x15160b500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x151610500 | 0xdc0000 | +| 132 | blk.14.attn_q.weight | 0x1523d0500 | 0xb40000 | +| 133 | blk.14.attn_v.weight | 0x152f10500 | 0x41a000 | +| 134 | blk.14.ffn_down.weight | 0x15332a500 | 0x6e00000 | +| 135 | blk.14.ffn_gate.weight | 0x15a12a500 | 0x5a00000 | +| 136 | blk.14.ffn_norm.weight | 0x15fb2a500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x15fb2f500 | 0x5a00000 | +| 138 | blk.15.attn_k.weight | 0x16552f500 | 0x2d0000 | +| 139 | blk.15.attn_norm.weight | 0x1657ff500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x165804500 | 0xdc0000 | +| 141 | blk.15.attn_q.weight | 0x1665c4500 | 0xb40000 | +| 142 | blk.15.attn_v.weight | 0x167104500 | 0x370000 | +| 143 | blk.15.ffn_down.weight | 0x167474500 | 0x6e00000 | +| 144 | blk.15.ffn_gate.weight | 0x16e274500 | 0x5a00000 | +| 145 | blk.15.ffn_norm.weight | 0x173c74500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x173c79500 | 0x5a00000 | +| 147 | blk.16.attn_k.weight | 0x179679500 | 0x2d0000 | +| 148 | blk.16.attn_norm.weight | 0x179949500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x17994e500 | 0xdc0000 | +| 150 | blk.16.attn_q.weight | 0x17a70e500 | 0xb40000 | +| 151 | blk.16.attn_v.weight | 0x17b24e500 | 0x41a000 | +| 152 | blk.16.ffn_down.weight | 0x17b668500 | 0x8340000 | +| 153 | blk.16.ffn_gate.weight | 0x1839a8500 | 0x5a00000 | +| 154 | blk.16.ffn_norm.weight | 0x1893a8500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x1893ad500 | 0x5a00000 | +| 156 | 
blk.17.attn_k.weight | 0x18edad500 | 0x2d0000 | +| 157 | blk.17.attn_norm.weight | 0x18f07d500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x18f082500 | 0xdc0000 | +| 159 | blk.17.attn_q.weight | 0x18fe42500 | 0xb40000 | +| 160 | blk.17.attn_v.weight | 0x190982500 | 0x41a000 | +| 161 | blk.17.ffn_down.weight | 0x190d9c500 | 0x6e00000 | +| 162 | blk.17.ffn_gate.weight | 0x197b9c500 | 0x5a00000 | +| 163 | blk.17.ffn_norm.weight | 0x19d59c500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x19d5a1500 | 0x5a00000 | +| 165 | blk.18.attn_k.weight | 0x1a2fa1500 | 0x2d0000 | +| 166 | blk.18.attn_norm.weight | 0x1a3271500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x1a3276500 | 0xdc0000 | +| 168 | blk.18.attn_q.weight | 0x1a4036500 | 0xb40000 | +| 169 | blk.18.attn_v.weight | 0x1a4b76500 | 0x370000 | +| 170 | blk.18.ffn_down.weight | 0x1a4ee6500 | 0x6e00000 | +| 171 | blk.18.ffn_gate.weight | 0x1abce6500 | 0x5a00000 | +| 172 | blk.18.ffn_norm.weight | 0x1b16e6500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x1b16eb500 | 0x5a00000 | +| 174 | blk.19.attn_k.weight | 0x1b70eb500 | 0x2d0000 | +| 175 | blk.19.attn_norm.weight | 0x1b73bb500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x1b73c0500 | 0xdc0000 | +| 177 | blk.19.attn_q.weight | 0x1b8180500 | 0xb40000 | +| 178 | blk.19.attn_v.weight | 0x1b8cc0500 | 0x41a000 | +| 179 | blk.19.ffn_down.weight | 0x1b90da500 | 0x8340000 | +| 180 | blk.19.ffn_gate.weight | 0x1c141a500 | 0x5a00000 | +| 181 | blk.19.ffn_norm.weight | 0x1c6e1a500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x1c6e1f500 | 0x5a00000 | +| 183 | blk.20.attn_k.weight | 0x1cc81f500 | 0x2d0000 | +| 184 | blk.20.attn_norm.weight | 0x1ccaef500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x1ccaf4500 | 0xdc0000 | +| 186 | blk.20.attn_q.weight | 0x1cd8b4500 | 0xb40000 | +| 187 | blk.20.attn_v.weight | 0x1ce3f4500 | 0x41a000 | +| 188 | blk.20.ffn_down.weight | 0x1ce80e500 | 0x6e00000 | +| 189 | blk.20.ffn_gate.weight | 0x1d560e500 | 0x5a00000 | +| 190 | 
blk.20.ffn_norm.weight | 0x1db00e500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x1db013500 | 0x5a00000 | +| 192 | blk.21.attn_k.weight | 0x1e0a13500 | 0x2d0000 | +| 193 | blk.21.attn_norm.weight | 0x1e0ce3500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x1e0ce8500 | 0xdc0000 | +| 195 | blk.21.attn_q.weight | 0x1e1aa8500 | 0xb40000 | +| 196 | blk.21.attn_v.weight | 0x1e25e8500 | 0x370000 | +| 197 | blk.21.ffn_down.weight | 0x1e2958500 | 0x6e00000 | +| 198 | blk.21.ffn_gate.weight | 0x1e9758500 | 0x5a00000 | +| 199 | blk.21.ffn_norm.weight | 0x1ef158500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x1ef15d500 | 0x5a00000 | +| 201 | blk.22.attn_k.weight | 0x1f4b5d500 | 0x2d0000 | +| 202 | blk.22.attn_norm.weight | 0x1f4e2d500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x1f4e32500 | 0xdc0000 | +| 204 | blk.22.attn_q.weight | 0x1f5bf2500 | 0xb40000 | +| 205 | blk.22.attn_v.weight | 0x1f6732500 | 0x41a000 | +| 206 | blk.22.ffn_down.weight | 0x1f6b4c500 | 0x8340000 | +| 207 | blk.22.ffn_gate.weight | 0x1fee8c500 | 0x5a00000 | +| 208 | blk.22.ffn_norm.weight | 0x20488c500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x204891500 | 0x5a00000 | +| 210 | blk.23.attn_k.weight | 0x20a291500 | 0x2d0000 | +| 211 | blk.23.attn_norm.weight | 0x20a561500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x20a566500 | 0xdc0000 | +| 213 | blk.23.attn_q.weight | 0x20b326500 | 0xb40000 | +| 214 | blk.23.attn_v.weight | 0x20be66500 | 0x41a000 | +| 215 | blk.23.ffn_down.weight | 0x20c280500 | 0x6e00000 | +| 216 | blk.23.ffn_gate.weight | 0x213080500 | 0x5a00000 | +| 217 | blk.23.ffn_norm.weight | 0x218a80500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x218a85500 | 0x5a00000 | +| 219 | blk.24.attn_k.weight | 0x21e485500 | 0x2d0000 | +| 220 | blk.24.attn_norm.weight | 0x21e755500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x21e75a500 | 0xdc0000 | +| 222 | blk.24.attn_q.weight | 0x21f51a500 | 0xb40000 | +| 223 | blk.24.attn_v.weight | 0x22005a500 | 0x370000 | +| 224 | 
blk.24.ffn_down.weight | 0x2203ca500 | 0x6e00000 | +| 225 | blk.24.ffn_gate.weight | 0x2271ca500 | 0x5a00000 | +| 226 | blk.24.ffn_norm.weight | 0x22cbca500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x22cbcf500 | 0x5a00000 | +| 228 | blk.25.attn_k.weight | 0x2325cf500 | 0x370000 | +| 229 | blk.25.attn_norm.weight | 0x23293f500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x232944500 | 0xdc0000 | +| 231 | blk.25.attn_q.weight | 0x233704500 | 0xdc0000 | +| 232 | blk.25.attn_v.weight | 0x2344c4500 | 0x41a000 | +| 233 | blk.25.ffn_down.weight | 0x2348de500 | 0x8340000 | +| 234 | blk.25.ffn_gate.weight | 0x23cc1e500 | 0x5a00000 | +| 235 | blk.25.ffn_norm.weight | 0x24261e500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x242623500 | 0x5a00000 | +| 237 | blk.26.attn_k.weight | 0x248023500 | 0x2d0000 | +| 238 | blk.26.attn_norm.weight | 0x2482f3500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x2482f8500 | 0xdc0000 | +| 240 | blk.26.attn_q.weight | 0x2490b8500 | 0xb40000 | +| 241 | blk.26.attn_v.weight | 0x249bf8500 | 0x41a000 | +| 242 | blk.26.ffn_down.weight | 0x24a012500 | 0x6e00000 | +| 243 | blk.26.ffn_gate.weight | 0x250e12500 | 0x5a00000 | +| 244 | blk.26.ffn_norm.weight | 0x256812500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x256817500 | 0x5a00000 | +| 246 | blk.27.attn_k.weight | 0x25c217500 | 0x2d0000 | +| 247 | blk.27.attn_norm.weight | 0x25c4e7500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x25c4ec500 | 0xdc0000 | +| 249 | blk.27.attn_q.weight | 0x25d2ac500 | 0xb40000 | +| 250 | blk.27.attn_v.weight | 0x25ddec500 | 0x370000 | +| 251 | blk.27.ffn_down.weight | 0x25e15c500 | 0x6e00000 | +| 252 | blk.27.ffn_gate.weight | 0x264f5c500 | 0x5a00000 | +| 253 | blk.27.ffn_norm.weight | 0x26a95c500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x26a961500 | 0x5a00000 | +| 255 | blk.28.attn_k.weight | 0x270361500 | 0x370000 | +| 256 | blk.28.attn_norm.weight | 0x2706d1500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x2706d6500 | 0xdc0000 | +| 258 | 
blk.28.attn_q.weight | 0x271496500 | 0xdc0000 | +| 259 | blk.28.attn_v.weight | 0x272256500 | 0x41a000 | +| 260 | blk.28.ffn_down.weight | 0x272670500 | 0x8340000 | +| 261 | blk.28.ffn_gate.weight | 0x27a9b0500 | 0x5a00000 | +| 262 | blk.28.ffn_norm.weight | 0x2803b0500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x2803b5500 | 0x5a00000 | +| 264 | blk.29.attn_k.weight | 0x285db5500 | 0x2d0000 | +| 265 | blk.29.attn_norm.weight | 0x286085500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x28608a500 | 0xdc0000 | +| 267 | blk.29.attn_q.weight | 0x286e4a500 | 0xb40000 | +| 268 | blk.29.attn_v.weight | 0x28798a500 | 0x41a000 | +| 269 | blk.29.ffn_down.weight | 0x287da4500 | 0x6e00000 | +| 270 | blk.29.ffn_gate.weight | 0x28eba4500 | 0x5a00000 | +| 271 | blk.29.ffn_norm.weight | 0x2945a4500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x2945a9500 | 0x5a00000 | +| 273 | blk.30.attn_k.weight | 0x299fa9500 | 0x370000 | +| 274 | blk.30.attn_norm.weight | 0x29a319500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x29a31e500 | 0xdc0000 | +| 276 | blk.30.attn_q.weight | 0x29b0de500 | 0xdc0000 | +| 277 | blk.30.attn_v.weight | 0x29be9e500 | 0x41a000 | +| 278 | blk.30.ffn_down.weight | 0x29c2b8500 | 0x6e00000 | +| 279 | blk.30.ffn_gate.weight | 0x2a30b8500 | 0x6e00000 | +| 280 | blk.30.ffn_norm.weight | 0x2a9eb8500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x2a9ebd500 | 0x6e00000 | +| 282 | blk.31.attn_k.weight | 0x2b0cbd500 | 0x2d0000 | +| 283 | blk.31.attn_norm.weight | 0x2b0f8d500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x2b0f92500 | 0xdc0000 | +| 285 | blk.31.attn_q.weight | 0x2b1d52500 | 0xb40000 | +| 286 | blk.31.attn_v.weight | 0x2b2892500 | 0x41a000 | +| 287 | blk.31.ffn_down.weight | 0x2b2cac500 | 0x8340000 | +| 288 | blk.31.ffn_gate.weight | 0x2bafec500 | 0x6e00000 | +| 289 | blk.31.ffn_norm.weight | 0x2c1dec500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x2c1df1500 | 0x6e00000 | +| 291 | blk.32.attn_k.weight | 0x2c8bf1500 | 0x370000 | +| 292 | 
blk.32.attn_norm.weight | 0x2c8f61500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x2c8f66500 | 0xdc0000 | +| 294 | blk.32.attn_q.weight | 0x2c9d26500 | 0xdc0000 | +| 295 | blk.32.attn_v.weight | 0x2caae6500 | 0x41a000 | +| 296 | blk.32.ffn_down.weight | 0x2caf00500 | 0x6e00000 | +| 297 | blk.32.ffn_gate.weight | 0x2d1d00500 | 0x6e00000 | +| 298 | blk.32.ffn_norm.weight | 0x2d8b00500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x2d8b05500 | 0x6e00000 | +| 300 | blk.33.attn_k.weight | 0x2df905500 | 0x370000 | +| 301 | blk.33.attn_norm.weight | 0x2dfc75500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x2dfc7a500 | 0xdc0000 | +| 303 | blk.33.attn_q.weight | 0x2e0a3a500 | 0xdc0000 | +| 304 | blk.33.attn_v.weight | 0x2e17fa500 | 0x41a000 | +| 305 | blk.33.ffn_down.weight | 0x2e1c14500 | 0x6e00000 | +| 306 | blk.33.ffn_gate.weight | 0x2e8a14500 | 0x6e00000 | +| 307 | blk.33.ffn_norm.weight | 0x2ef814500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x2ef819500 | 0x6e00000 | +| 309 | blk.34.attn_k.weight | 0x2f6619500 | 0x370000 | +| 310 | blk.34.attn_norm.weight | 0x2f6989500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x2f698e500 | 0xdc0000 | +| 312 | blk.34.attn_q.weight | 0x2f774e500 | 0xdc0000 | +| 313 | blk.34.attn_v.weight | 0x2f850e500 | 0x41a000 | +| 314 | blk.34.ffn_down.weight | 0x2f8928500 | 0x8340000 | +| 315 | blk.34.ffn_gate.weight | 0x300c68500 | 0x6e00000 | +| 316 | blk.34.ffn_norm.weight | 0x307a68500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x307a6d500 | 0x6e00000 | +| 318 | blk.35.attn_k.weight | 0x30e86d500 | 0x370000 | +| 319 | blk.35.attn_norm.weight | 0x30ebdd500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x30ebe2500 | 0xdc0000 | +| 321 | blk.35.attn_q.weight | 0x30f9a2500 | 0xdc0000 | +| 322 | blk.35.attn_v.weight | 0x310762500 | 0x41a000 | +| 323 | blk.35.ffn_down.weight | 0x310b7c500 | 0x8340000 | +| 324 | blk.35.ffn_gate.weight | 0x318ebc500 | 0x6e00000 | +| 325 | blk.35.ffn_norm.weight | 0x31fcbc500 | 0x5000 | +| 326 | 
blk.35.ffn_up.weight | 0x31fcc1500 | 0x6e00000 | +| 327 | blk.36.attn_k.weight | 0x326ac1500 | 0x370000 | +| 328 | blk.36.attn_norm.weight | 0x326e31500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x326e36500 | 0xdc0000 | +| 330 | blk.36.attn_q.weight | 0x327bf6500 | 0xdc0000 | +| 331 | blk.36.attn_v.weight | 0x3289b6500 | 0x41a000 | +| 332 | blk.36.ffn_down.weight | 0x328dd0500 | 0x8340000 | +| 333 | blk.36.ffn_gate.weight | 0x331110500 | 0x6e00000 | +| 334 | blk.36.ffn_norm.weight | 0x337f10500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x337f15500 | 0x6e00000 | +| 336 | blk.37.attn_k.weight | 0x33ed15500 | 0x370000 | +| 337 | blk.37.attn_norm.weight | 0x33f085500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x33f08a500 | 0xdc0000 | +| 339 | blk.37.attn_q.weight | 0x33fe4a500 | 0xdc0000 | +| 340 | blk.37.attn_v.weight | 0x340c0a500 | 0x41a000 | +| 341 | blk.37.ffn_down.weight | 0x341024500 | 0x8340000 | +| 342 | blk.37.ffn_gate.weight | 0x349364500 | 0x6e00000 | +| 343 | blk.37.ffn_norm.weight | 0x350164500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x350169500 | 0x6e00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q5_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 
| blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | 
(~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | 
Q4_K | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- 
Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight 
| Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network 
Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | 
( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 132 | 
blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 195 | 
blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 258 | 
blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 321 | 
blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_S.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_S.md new file mode 100644 index 0000000..6c99376 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q5_K_S.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf - GGUF Internal File Dump + +- 
Endian: LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, 
`[/INST]`, ... ] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 16 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q5\_K\_S.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q5_k_sgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x1b800000 | +| 1 | output_norm.weight | 0x1bf84500 | 0x5000 | +| 2 | token_embd.weight | 0x1bf89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x2d289500 | 0x2d0000 | +| 4 | blk.0.attn_norm.weight | 0x2d559500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x2d55e500 | 0xdc0000 | +| 6 | blk.0.attn_q.weight | 0x2e31e500 | 0xb40000 | +| 7 | blk.0.attn_v.weight | 0x2ee5e500 | 0x2d0000 | +| 8 | blk.0.ffn_down.weight | 0x2f12e500 | 0x6e00000 | +| 9 | blk.0.ffn_gate.weight | 0x35f2e500 | 0x5a00000 | +| 10 | blk.0.ffn_norm.weight | 0x3b92e500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x3b933500 | 0x5a00000 | +| 12 | blk.1.attn_k.weight | 0x41333500 | 0x2d0000 | +| 13 | blk.1.attn_norm.weight | 0x41603500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x41608500 | 0xdc0000 | +| 15 | blk.1.attn_q.weight | 0x423c8500 | 
0xb40000 | +| 16 | blk.1.attn_v.weight | 0x42f08500 | 0x2d0000 | +| 17 | blk.1.ffn_down.weight | 0x431d8500 | 0x6e00000 | +| 18 | blk.1.ffn_gate.weight | 0x49fd8500 | 0x5a00000 | +| 19 | blk.1.ffn_norm.weight | 0x4f9d8500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x4f9dd500 | 0x5a00000 | +| 21 | blk.2.attn_k.weight | 0x553dd500 | 0x2d0000 | +| 22 | blk.2.attn_norm.weight | 0x556ad500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x556b2500 | 0xdc0000 | +| 24 | blk.2.attn_q.weight | 0x56472500 | 0xb40000 | +| 25 | blk.2.attn_v.weight | 0x56fb2500 | 0x2d0000 | +| 26 | blk.2.ffn_down.weight | 0x57282500 | 0x6e00000 | +| 27 | blk.2.ffn_gate.weight | 0x5e082500 | 0x5a00000 | +| 28 | blk.2.ffn_norm.weight | 0x63a82500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x63a87500 | 0x5a00000 | +| 30 | blk.3.attn_k.weight | 0x69487500 | 0x2d0000 | +| 31 | blk.3.attn_norm.weight | 0x69757500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x6975c500 | 0xdc0000 | +| 33 | blk.3.attn_q.weight | 0x6a51c500 | 0xb40000 | +| 34 | blk.3.attn_v.weight | 0x6b05c500 | 0x2d0000 | +| 35 | blk.3.ffn_down.weight | 0x6b32c500 | 0x6e00000 | +| 36 | blk.3.ffn_gate.weight | 0x7212c500 | 0x5a00000 | +| 37 | blk.3.ffn_norm.weight | 0x77b2c500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x77b31500 | 0x5a00000 | +| 39 | blk.4.attn_k.weight | 0x7d531500 | 0x2d0000 | +| 40 | blk.4.attn_norm.weight | 0x7d801500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x7d806500 | 0xdc0000 | +| 42 | blk.4.attn_q.weight | 0x7e5c6500 | 0xb40000 | +| 43 | blk.4.attn_v.weight | 0x7f106500 | 0x2d0000 | +| 44 | blk.4.ffn_down.weight | 0x7f3d6500 | 0x6e00000 | +| 45 | blk.4.ffn_gate.weight | 0x861d6500 | 0x5a00000 | +| 46 | blk.4.ffn_norm.weight | 0x8bbd6500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0x8bbdb500 | 0x5a00000 | +| 48 | blk.5.attn_k.weight | 0x915db500 | 0x2d0000 | +| 49 | blk.5.attn_norm.weight | 0x918ab500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0x918b0500 | 0xdc0000 | +| 51 | blk.5.attn_q.weight | 0x92670500 | 
0xb40000 | +| 52 | blk.5.attn_v.weight | 0x931b0500 | 0x2d0000 | +| 53 | blk.5.ffn_down.weight | 0x93480500 | 0x6e00000 | +| 54 | blk.5.ffn_gate.weight | 0x9a280500 | 0x5a00000 | +| 55 | blk.5.ffn_norm.weight | 0x9fc80500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0x9fc85500 | 0x5a00000 | +| 57 | blk.6.attn_k.weight | 0xa5685500 | 0x2d0000 | +| 58 | blk.6.attn_norm.weight | 0xa5955500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0xa595a500 | 0xdc0000 | +| 60 | blk.6.attn_q.weight | 0xa671a500 | 0xb40000 | +| 61 | blk.6.attn_v.weight | 0xa725a500 | 0x2d0000 | +| 62 | blk.6.ffn_down.weight | 0xa752a500 | 0x6e00000 | +| 63 | blk.6.ffn_gate.weight | 0xae32a500 | 0x5a00000 | +| 64 | blk.6.ffn_norm.weight | 0xb3d2a500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0xb3d2f500 | 0x5a00000 | +| 66 | blk.7.attn_k.weight | 0xb972f500 | 0x2d0000 | +| 67 | blk.7.attn_norm.weight | 0xb99ff500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0xb9a04500 | 0xdc0000 | +| 69 | blk.7.attn_q.weight | 0xba7c4500 | 0xb40000 | +| 70 | blk.7.attn_v.weight | 0xbb304500 | 0x2d0000 | +| 71 | blk.7.ffn_down.weight | 0xbb5d4500 | 0x6e00000 | +| 72 | blk.7.ffn_gate.weight | 0xc23d4500 | 0x5a00000 | +| 73 | blk.7.ffn_norm.weight | 0xc7dd4500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xc7dd9500 | 0x5a00000 | +| 75 | blk.8.attn_k.weight | 0xcd7d9500 | 0x2d0000 | +| 76 | blk.8.attn_norm.weight | 0xcdaa9500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xcdaae500 | 0xdc0000 | +| 78 | blk.8.attn_q.weight | 0xce86e500 | 0xb40000 | +| 79 | blk.8.attn_v.weight | 0xcf3ae500 | 0x2d0000 | +| 80 | blk.8.ffn_down.weight | 0xcf67e500 | 0x6e00000 | +| 81 | blk.8.ffn_gate.weight | 0xd647e500 | 0x5a00000 | +| 82 | blk.8.ffn_norm.weight | 0xdbe7e500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0xdbe83500 | 0x5a00000 | +| 84 | blk.9.attn_k.weight | 0xe1883500 | 0x2d0000 | +| 85 | blk.9.attn_norm.weight | 0xe1b53500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0xe1b58500 | 0xdc0000 | +| 87 | blk.9.attn_q.weight | 0xe2918500 | 
0xb40000 | +| 88 | blk.9.attn_v.weight | 0xe3458500 | 0x2d0000 | +| 89 | blk.9.ffn_down.weight | 0xe3728500 | 0x6e00000 | +| 90 | blk.9.ffn_gate.weight | 0xea528500 | 0x5a00000 | +| 91 | blk.9.ffn_norm.weight | 0xeff28500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0xeff2d500 | 0x5a00000 | +| 93 | blk.10.attn_k.weight | 0xf592d500 | 0x2d0000 | +| 94 | blk.10.attn_norm.weight | 0xf5bfd500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0xf5c02500 | 0xdc0000 | +| 96 | blk.10.attn_q.weight | 0xf69c2500 | 0xb40000 | +| 97 | blk.10.attn_v.weight | 0xf7502500 | 0x2d0000 | +| 98 | blk.10.ffn_down.weight | 0xf77d2500 | 0x6e00000 | +| 99 | blk.10.ffn_gate.weight | 0xfe5d2500 | 0x5a00000 | +| 100 | blk.10.ffn_norm.weight | 0x103fd2500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0x103fd7500 | 0x5a00000 | +| 102 | blk.11.attn_k.weight | 0x1099d7500 | 0x2d0000 | +| 103 | blk.11.attn_norm.weight | 0x109ca7500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0x109cac500 | 0xdc0000 | +| 105 | blk.11.attn_q.weight | 0x10aa6c500 | 0xb40000 | +| 106 | blk.11.attn_v.weight | 0x10b5ac500 | 0x2d0000 | +| 107 | blk.11.ffn_down.weight | 0x10b87c500 | 0x6e00000 | +| 108 | blk.11.ffn_gate.weight | 0x11267c500 | 0x5a00000 | +| 109 | blk.11.ffn_norm.weight | 0x11807c500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0x118081500 | 0x5a00000 | +| 111 | blk.12.attn_k.weight | 0x11da81500 | 0x2d0000 | +| 112 | blk.12.attn_norm.weight | 0x11dd51500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0x11dd56500 | 0xdc0000 | +| 114 | blk.12.attn_q.weight | 0x11eb16500 | 0xb40000 | +| 115 | blk.12.attn_v.weight | 0x11f656500 | 0x2d0000 | +| 116 | blk.12.ffn_down.weight | 0x11f926500 | 0x6e00000 | +| 117 | blk.12.ffn_gate.weight | 0x126726500 | 0x5a00000 | +| 118 | blk.12.ffn_norm.weight | 0x12c126500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0x12c12b500 | 0x5a00000 | +| 120 | blk.13.attn_k.weight | 0x131b2b500 | 0x2d0000 | +| 121 | blk.13.attn_norm.weight | 0x131dfb500 | 0x5000 | +| 122 | 
blk.13.attn_output.weight | 0x131e00500 | 0xdc0000 | +| 123 | blk.13.attn_q.weight | 0x132bc0500 | 0xb40000 | +| 124 | blk.13.attn_v.weight | 0x133700500 | 0x2d0000 | +| 125 | blk.13.ffn_down.weight | 0x1339d0500 | 0x6e00000 | +| 126 | blk.13.ffn_gate.weight | 0x13a7d0500 | 0x5a00000 | +| 127 | blk.13.ffn_norm.weight | 0x1401d0500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x1401d5500 | 0x5a00000 | +| 129 | blk.14.attn_k.weight | 0x145bd5500 | 0x2d0000 | +| 130 | blk.14.attn_norm.weight | 0x145ea5500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x145eaa500 | 0xdc0000 | +| 132 | blk.14.attn_q.weight | 0x146c6a500 | 0xb40000 | +| 133 | blk.14.attn_v.weight | 0x1477aa500 | 0x2d0000 | +| 134 | blk.14.ffn_down.weight | 0x147a7a500 | 0x6e00000 | +| 135 | blk.14.ffn_gate.weight | 0x14e87a500 | 0x5a00000 | +| 136 | blk.14.ffn_norm.weight | 0x15427a500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x15427f500 | 0x5a00000 | +| 138 | blk.15.attn_k.weight | 0x159c7f500 | 0x2d0000 | +| 139 | blk.15.attn_norm.weight | 0x159f4f500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x159f54500 | 0xdc0000 | +| 141 | blk.15.attn_q.weight | 0x15ad14500 | 0xb40000 | +| 142 | blk.15.attn_v.weight | 0x15b854500 | 0x2d0000 | +| 143 | blk.15.ffn_down.weight | 0x15bb24500 | 0x6e00000 | +| 144 | blk.15.ffn_gate.weight | 0x162924500 | 0x5a00000 | +| 145 | blk.15.ffn_norm.weight | 0x168324500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x168329500 | 0x5a00000 | +| 147 | blk.16.attn_k.weight | 0x16dd29500 | 0x2d0000 | +| 148 | blk.16.attn_norm.weight | 0x16dff9500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x16dffe500 | 0xdc0000 | +| 150 | blk.16.attn_q.weight | 0x16edbe500 | 0xb40000 | +| 151 | blk.16.attn_v.weight | 0x16f8fe500 | 0x2d0000 | +| 152 | blk.16.ffn_down.weight | 0x16fbce500 | 0x6e00000 | +| 153 | blk.16.ffn_gate.weight | 0x1769ce500 | 0x5a00000 | +| 154 | blk.16.ffn_norm.weight | 0x17c3ce500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x17c3d3500 | 0x5a00000 | +| 156 | 
blk.17.attn_k.weight | 0x181dd3500 | 0x2d0000 | +| 157 | blk.17.attn_norm.weight | 0x1820a3500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x1820a8500 | 0xdc0000 | +| 159 | blk.17.attn_q.weight | 0x182e68500 | 0xb40000 | +| 160 | blk.17.attn_v.weight | 0x1839a8500 | 0x2d0000 | +| 161 | blk.17.ffn_down.weight | 0x183c78500 | 0x6e00000 | +| 162 | blk.17.ffn_gate.weight | 0x18aa78500 | 0x5a00000 | +| 163 | blk.17.ffn_norm.weight | 0x190478500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x19047d500 | 0x5a00000 | +| 165 | blk.18.attn_k.weight | 0x195e7d500 | 0x2d0000 | +| 166 | blk.18.attn_norm.weight | 0x19614d500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x196152500 | 0xdc0000 | +| 168 | blk.18.attn_q.weight | 0x196f12500 | 0xb40000 | +| 169 | blk.18.attn_v.weight | 0x197a52500 | 0x2d0000 | +| 170 | blk.18.ffn_down.weight | 0x197d22500 | 0x6e00000 | +| 171 | blk.18.ffn_gate.weight | 0x19eb22500 | 0x5a00000 | +| 172 | blk.18.ffn_norm.weight | 0x1a4522500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x1a4527500 | 0x5a00000 | +| 174 | blk.19.attn_k.weight | 0x1a9f27500 | 0x2d0000 | +| 175 | blk.19.attn_norm.weight | 0x1aa1f7500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x1aa1fc500 | 0xdc0000 | +| 177 | blk.19.attn_q.weight | 0x1aafbc500 | 0xb40000 | +| 178 | blk.19.attn_v.weight | 0x1abafc500 | 0x2d0000 | +| 179 | blk.19.ffn_down.weight | 0x1abdcc500 | 0x6e00000 | +| 180 | blk.19.ffn_gate.weight | 0x1b2bcc500 | 0x5a00000 | +| 181 | blk.19.ffn_norm.weight | 0x1b85cc500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x1b85d1500 | 0x5a00000 | +| 183 | blk.20.attn_k.weight | 0x1bdfd1500 | 0x2d0000 | +| 184 | blk.20.attn_norm.weight | 0x1be2a1500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x1be2a6500 | 0xdc0000 | +| 186 | blk.20.attn_q.weight | 0x1bf066500 | 0xb40000 | +| 187 | blk.20.attn_v.weight | 0x1bfba6500 | 0x2d0000 | +| 188 | blk.20.ffn_down.weight | 0x1bfe76500 | 0x6e00000 | +| 189 | blk.20.ffn_gate.weight | 0x1c6c76500 | 0x5a00000 | +| 190 | 
blk.20.ffn_norm.weight | 0x1cc676500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x1cc67b500 | 0x5a00000 | +| 192 | blk.21.attn_k.weight | 0x1d207b500 | 0x2d0000 | +| 193 | blk.21.attn_norm.weight | 0x1d234b500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x1d2350500 | 0xdc0000 | +| 195 | blk.21.attn_q.weight | 0x1d3110500 | 0xb40000 | +| 196 | blk.21.attn_v.weight | 0x1d3c50500 | 0x2d0000 | +| 197 | blk.21.ffn_down.weight | 0x1d3f20500 | 0x6e00000 | +| 198 | blk.21.ffn_gate.weight | 0x1dad20500 | 0x5a00000 | +| 199 | blk.21.ffn_norm.weight | 0x1e0720500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x1e0725500 | 0x5a00000 | +| 201 | blk.22.attn_k.weight | 0x1e6125500 | 0x2d0000 | +| 202 | blk.22.attn_norm.weight | 0x1e63f5500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x1e63fa500 | 0xdc0000 | +| 204 | blk.22.attn_q.weight | 0x1e71ba500 | 0xb40000 | +| 205 | blk.22.attn_v.weight | 0x1e7cfa500 | 0x2d0000 | +| 206 | blk.22.ffn_down.weight | 0x1e7fca500 | 0x6e00000 | +| 207 | blk.22.ffn_gate.weight | 0x1eedca500 | 0x5a00000 | +| 208 | blk.22.ffn_norm.weight | 0x1f47ca500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x1f47cf500 | 0x5a00000 | +| 210 | blk.23.attn_k.weight | 0x1fa1cf500 | 0x2d0000 | +| 211 | blk.23.attn_norm.weight | 0x1fa49f500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x1fa4a4500 | 0xdc0000 | +| 213 | blk.23.attn_q.weight | 0x1fb264500 | 0xb40000 | +| 214 | blk.23.attn_v.weight | 0x1fbda4500 | 0x2d0000 | +| 215 | blk.23.ffn_down.weight | 0x1fc074500 | 0x6e00000 | +| 216 | blk.23.ffn_gate.weight | 0x202e74500 | 0x5a00000 | +| 217 | blk.23.ffn_norm.weight | 0x208874500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x208879500 | 0x5a00000 | +| 219 | blk.24.attn_k.weight | 0x20e279500 | 0x2d0000 | +| 220 | blk.24.attn_norm.weight | 0x20e549500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x20e54e500 | 0xdc0000 | +| 222 | blk.24.attn_q.weight | 0x20f30e500 | 0xb40000 | +| 223 | blk.24.attn_v.weight | 0x20fe4e500 | 0x2d0000 | +| 224 | 
blk.24.ffn_down.weight | 0x21011e500 | 0x6e00000 | +| 225 | blk.24.ffn_gate.weight | 0x216f1e500 | 0x5a00000 | +| 226 | blk.24.ffn_norm.weight | 0x21c91e500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x21c923500 | 0x5a00000 | +| 228 | blk.25.attn_k.weight | 0x222323500 | 0x370000 | +| 229 | blk.25.attn_norm.weight | 0x222693500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x222698500 | 0xdc0000 | +| 231 | blk.25.attn_q.weight | 0x223458500 | 0xdc0000 | +| 232 | blk.25.attn_v.weight | 0x224218500 | 0x370000 | +| 233 | blk.25.ffn_down.weight | 0x224588500 | 0x6e00000 | +| 234 | blk.25.ffn_gate.weight | 0x22b388500 | 0x5a00000 | +| 235 | blk.25.ffn_norm.weight | 0x230d88500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x230d8d500 | 0x5a00000 | +| 237 | blk.26.attn_k.weight | 0x23678d500 | 0x2d0000 | +| 238 | blk.26.attn_norm.weight | 0x236a5d500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x236a62500 | 0xdc0000 | +| 240 | blk.26.attn_q.weight | 0x237822500 | 0xb40000 | +| 241 | blk.26.attn_v.weight | 0x238362500 | 0x2d0000 | +| 242 | blk.26.ffn_down.weight | 0x238632500 | 0x6e00000 | +| 243 | blk.26.ffn_gate.weight | 0x23f432500 | 0x5a00000 | +| 244 | blk.26.ffn_norm.weight | 0x244e32500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x244e37500 | 0x5a00000 | +| 246 | blk.27.attn_k.weight | 0x24a837500 | 0x2d0000 | +| 247 | blk.27.attn_norm.weight | 0x24ab07500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x24ab0c500 | 0xdc0000 | +| 249 | blk.27.attn_q.weight | 0x24b8cc500 | 0xb40000 | +| 250 | blk.27.attn_v.weight | 0x24c40c500 | 0x2d0000 | +| 251 | blk.27.ffn_down.weight | 0x24c6dc500 | 0x6e00000 | +| 252 | blk.27.ffn_gate.weight | 0x2534dc500 | 0x5a00000 | +| 253 | blk.27.ffn_norm.weight | 0x258edc500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x258ee1500 | 0x5a00000 | +| 255 | blk.28.attn_k.weight | 0x25e8e1500 | 0x370000 | +| 256 | blk.28.attn_norm.weight | 0x25ec51500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x25ec56500 | 0xdc0000 | +| 258 | 
blk.28.attn_q.weight | 0x25fa16500 | 0xdc0000 | +| 259 | blk.28.attn_v.weight | 0x2607d6500 | 0x370000 | +| 260 | blk.28.ffn_down.weight | 0x260b46500 | 0x6e00000 | +| 261 | blk.28.ffn_gate.weight | 0x267946500 | 0x5a00000 | +| 262 | blk.28.ffn_norm.weight | 0x26d346500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x26d34b500 | 0x5a00000 | +| 264 | blk.29.attn_k.weight | 0x272d4b500 | 0x2d0000 | +| 265 | blk.29.attn_norm.weight | 0x27301b500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x273020500 | 0xdc0000 | +| 267 | blk.29.attn_q.weight | 0x273de0500 | 0xb40000 | +| 268 | blk.29.attn_v.weight | 0x274920500 | 0x2d0000 | +| 269 | blk.29.ffn_down.weight | 0x274bf0500 | 0x6e00000 | +| 270 | blk.29.ffn_gate.weight | 0x27b9f0500 | 0x5a00000 | +| 271 | blk.29.ffn_norm.weight | 0x2813f0500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x2813f5500 | 0x5a00000 | +| 273 | blk.30.attn_k.weight | 0x286df5500 | 0x370000 | +| 274 | blk.30.attn_norm.weight | 0x287165500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x28716a500 | 0xdc0000 | +| 276 | blk.30.attn_q.weight | 0x287f2a500 | 0xdc0000 | +| 277 | blk.30.attn_v.weight | 0x288cea500 | 0x370000 | +| 278 | blk.30.ffn_down.weight | 0x28905a500 | 0x6e00000 | +| 279 | blk.30.ffn_gate.weight | 0x28fe5a500 | 0x6e00000 | +| 280 | blk.30.ffn_norm.weight | 0x296c5a500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x296c5f500 | 0x6e00000 | +| 282 | blk.31.attn_k.weight | 0x29da5f500 | 0x2d0000 | +| 283 | blk.31.attn_norm.weight | 0x29dd2f500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x29dd34500 | 0xdc0000 | +| 285 | blk.31.attn_q.weight | 0x29eaf4500 | 0xb40000 | +| 286 | blk.31.attn_v.weight | 0x29f634500 | 0x2d0000 | +| 287 | blk.31.ffn_down.weight | 0x29f904500 | 0x6e00000 | +| 288 | blk.31.ffn_gate.weight | 0x2a6704500 | 0x6e00000 | +| 289 | blk.31.ffn_norm.weight | 0x2ad504500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x2ad509500 | 0x6e00000 | +| 291 | blk.32.attn_k.weight | 0x2b4309500 | 0x370000 | +| 292 | 
blk.32.attn_norm.weight | 0x2b4679500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x2b467e500 | 0xdc0000 | +| 294 | blk.32.attn_q.weight | 0x2b543e500 | 0xdc0000 | +| 295 | blk.32.attn_v.weight | 0x2b61fe500 | 0x370000 | +| 296 | blk.32.ffn_down.weight | 0x2b656e500 | 0x6e00000 | +| 297 | blk.32.ffn_gate.weight | 0x2bd36e500 | 0x6e00000 | +| 298 | blk.32.ffn_norm.weight | 0x2c416e500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x2c4173500 | 0x6e00000 | +| 300 | blk.33.attn_k.weight | 0x2caf73500 | 0x370000 | +| 301 | blk.33.attn_norm.weight | 0x2cb2e3500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x2cb2e8500 | 0xdc0000 | +| 303 | blk.33.attn_q.weight | 0x2cc0a8500 | 0xdc0000 | +| 304 | blk.33.attn_v.weight | 0x2cce68500 | 0x370000 | +| 305 | blk.33.ffn_down.weight | 0x2cd1d8500 | 0x6e00000 | +| 306 | blk.33.ffn_gate.weight | 0x2d3fd8500 | 0x6e00000 | +| 307 | blk.33.ffn_norm.weight | 0x2dadd8500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x2daddd500 | 0x6e00000 | +| 309 | blk.34.attn_k.weight | 0x2e1bdd500 | 0x370000 | +| 310 | blk.34.attn_norm.weight | 0x2e1f4d500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x2e1f52500 | 0xdc0000 | +| 312 | blk.34.attn_q.weight | 0x2e2d12500 | 0xdc0000 | +| 313 | blk.34.attn_v.weight | 0x2e3ad2500 | 0x370000 | +| 314 | blk.34.ffn_down.weight | 0x2e3e42500 | 0x6e00000 | +| 315 | blk.34.ffn_gate.weight | 0x2eac42500 | 0x6e00000 | +| 316 | blk.34.ffn_norm.weight | 0x2f1a42500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x2f1a47500 | 0x6e00000 | +| 318 | blk.35.attn_k.weight | 0x2f8847500 | 0x370000 | +| 319 | blk.35.attn_norm.weight | 0x2f8bb7500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x2f8bbc500 | 0xdc0000 | +| 321 | blk.35.attn_q.weight | 0x2f997c500 | 0xdc0000 | +| 322 | blk.35.attn_v.weight | 0x2fa73c500 | 0x370000 | +| 323 | blk.35.ffn_down.weight | 0x2faaac500 | 0x6e00000 | +| 324 | blk.35.ffn_gate.weight | 0x3018ac500 | 0x6e00000 | +| 325 | blk.35.ffn_norm.weight | 0x3086ac500 | 0x5000 | +| 326 | 
blk.35.ffn_up.weight | 0x3086b1500 | 0x6e00000 | +| 327 | blk.36.attn_k.weight | 0x30f4b1500 | 0x370000 | +| 328 | blk.36.attn_norm.weight | 0x30f821500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x30f826500 | 0xdc0000 | +| 330 | blk.36.attn_q.weight | 0x3105e6500 | 0xdc0000 | +| 331 | blk.36.attn_v.weight | 0x3113a6500 | 0x370000 | +| 332 | blk.36.ffn_down.weight | 0x311716500 | 0x6e00000 | +| 333 | blk.36.ffn_gate.weight | 0x318516500 | 0x6e00000 | +| 334 | blk.36.ffn_norm.weight | 0x31f316500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x31f31b500 | 0x6e00000 | +| 336 | blk.37.attn_k.weight | 0x32611b500 | 0x370000 | +| 337 | blk.37.attn_norm.weight | 0x32648b500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x326490500 | 0xdc0000 | +| 339 | blk.37.attn_q.weight | 0x327250500 | 0xdc0000 | +| 340 | blk.37.attn_v.weight | 0x328010500 | 0x370000 | +| 341 | blk.37.ffn_down.weight | 0x328380500 | 0x6e00000 | +| 342 | blk.37.ffn_gate.weight | 0x32f180500 | 0x6e00000 | +| 343 | blk.37.ffn_norm.weight | 0x335f80500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x335f85500 | 0x6e00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q5_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 
| blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | 
(~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor 
Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | 
Q4_K | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- 
Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight 
| Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network 
Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | 
( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 132 | 
blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 195 | 
blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 258 | 
blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q4_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q4_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q4_K | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 321 | 
blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q5_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q5_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q6_K.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q6_K.md new file mode 100644 index 0000000..c72ab24 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q6_K.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf - GGUF Internal File Dump + +- Endian: 
LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 18 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q6\_K.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q6_kgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x20d00000 | +| 1 | output_norm.weight | 0x21484500 | 0x5000 | +| 2 | token_embd.weight | 0x21489500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x32789500 | 0x370000 | +| 4 | blk.0.attn_norm.weight | 0x32af9500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x32afe500 | 0x1068000 | +| 6 | blk.0.attn_q.weight | 0x33b66500 | 0xdc0000 | +| 7 | blk.0.attn_v.weight | 0x34926500 | 0x550000 | +| 8 | blk.0.ffn_down.weight | 0x34e76500 | 0x8340000 | +| 9 | blk.0.ffn_gate.weight | 0x3d1b6500 | 0x6e00000 | +| 10 | blk.0.ffn_norm.weight | 0x43fb6500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x43fbb500 | 0x6e00000 | +| 12 | blk.1.attn_k.weight | 0x4adbb500 | 0x370000 | +| 13 | blk.1.attn_norm.weight | 0x4b12b500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x4b130500 | 0x1068000 | +| 15 | blk.1.attn_q.weight | 0x4c198500 | 
0xdc0000 | +| 16 | blk.1.attn_v.weight | 0x4cf58500 | 0x550000 | +| 17 | blk.1.ffn_down.weight | 0x4d4a8500 | 0x8340000 | +| 18 | blk.1.ffn_gate.weight | 0x557e8500 | 0x6e00000 | +| 19 | blk.1.ffn_norm.weight | 0x5c5e8500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x5c5ed500 | 0x6e00000 | +| 21 | blk.2.attn_k.weight | 0x633ed500 | 0x370000 | +| 22 | blk.2.attn_norm.weight | 0x6375d500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x63762500 | 0x1068000 | +| 24 | blk.2.attn_q.weight | 0x647ca500 | 0xdc0000 | +| 25 | blk.2.attn_v.weight | 0x6558a500 | 0x550000 | +| 26 | blk.2.ffn_down.weight | 0x65ada500 | 0x8340000 | +| 27 | blk.2.ffn_gate.weight | 0x6de1a500 | 0x6e00000 | +| 28 | blk.2.ffn_norm.weight | 0x74c1a500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x74c1f500 | 0x6e00000 | +| 30 | blk.3.attn_k.weight | 0x7ba1f500 | 0x370000 | +| 31 | blk.3.attn_norm.weight | 0x7bd8f500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x7bd94500 | 0x1068000 | +| 33 | blk.3.attn_q.weight | 0x7cdfc500 | 0xdc0000 | +| 34 | blk.3.attn_v.weight | 0x7dbbc500 | 0x550000 | +| 35 | blk.3.ffn_down.weight | 0x7e10c500 | 0x8340000 | +| 36 | blk.3.ffn_gate.weight | 0x8644c500 | 0x6e00000 | +| 37 | blk.3.ffn_norm.weight | 0x8d24c500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0x8d251500 | 0x6e00000 | +| 39 | blk.4.attn_k.weight | 0x94051500 | 0x370000 | +| 40 | blk.4.attn_norm.weight | 0x943c1500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0x943c6500 | 0x1068000 | +| 42 | blk.4.attn_q.weight | 0x9542e500 | 0xdc0000 | +| 43 | blk.4.attn_v.weight | 0x961ee500 | 0x550000 | +| 44 | blk.4.ffn_down.weight | 0x9673e500 | 0x8340000 | +| 45 | blk.4.ffn_gate.weight | 0x9ea7e500 | 0x6e00000 | +| 46 | blk.4.ffn_norm.weight | 0xa587e500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0xa5883500 | 0x6e00000 | +| 48 | blk.5.attn_k.weight | 0xac683500 | 0x370000 | +| 49 | blk.5.attn_norm.weight | 0xac9f3500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0xac9f8500 | 0x1068000 | +| 51 | blk.5.attn_q.weight | 0xada60500 | 
0xdc0000 | +| 52 | blk.5.attn_v.weight | 0xae820500 | 0x550000 | +| 53 | blk.5.ffn_down.weight | 0xaed70500 | 0x8340000 | +| 54 | blk.5.ffn_gate.weight | 0xb70b0500 | 0x6e00000 | +| 55 | blk.5.ffn_norm.weight | 0xbdeb0500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0xbdeb5500 | 0x6e00000 | +| 57 | blk.6.attn_k.weight | 0xc4cb5500 | 0x370000 | +| 58 | blk.6.attn_norm.weight | 0xc5025500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0xc502a500 | 0x1068000 | +| 60 | blk.6.attn_q.weight | 0xc6092500 | 0xdc0000 | +| 61 | blk.6.attn_v.weight | 0xc6e52500 | 0x550000 | +| 62 | blk.6.ffn_down.weight | 0xc73a2500 | 0x8340000 | +| 63 | blk.6.ffn_gate.weight | 0xcf6e2500 | 0x6e00000 | +| 64 | blk.6.ffn_norm.weight | 0xd64e2500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0xd64e7500 | 0x6e00000 | +| 66 | blk.7.attn_k.weight | 0xdd2e7500 | 0x370000 | +| 67 | blk.7.attn_norm.weight | 0xdd657500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0xdd65c500 | 0x1068000 | +| 69 | blk.7.attn_q.weight | 0xde6c4500 | 0xdc0000 | +| 70 | blk.7.attn_v.weight | 0xdf484500 | 0x550000 | +| 71 | blk.7.ffn_down.weight | 0xdf9d4500 | 0x8340000 | +| 72 | blk.7.ffn_gate.weight | 0xe7d14500 | 0x6e00000 | +| 73 | blk.7.ffn_norm.weight | 0xeeb14500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0xeeb19500 | 0x6e00000 | +| 75 | blk.8.attn_k.weight | 0xf5919500 | 0x370000 | +| 76 | blk.8.attn_norm.weight | 0xf5c89500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0xf5c8e500 | 0x1068000 | +| 78 | blk.8.attn_q.weight | 0xf6cf6500 | 0xdc0000 | +| 79 | blk.8.attn_v.weight | 0xf7ab6500 | 0x550000 | +| 80 | blk.8.ffn_down.weight | 0xf8006500 | 0x8340000 | +| 81 | blk.8.ffn_gate.weight | 0x100346500 | 0x6e00000 | +| 82 | blk.8.ffn_norm.weight | 0x107146500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x10714b500 | 0x6e00000 | +| 84 | blk.9.attn_k.weight | 0x10df4b500 | 0x370000 | +| 85 | blk.9.attn_norm.weight | 0x10e2bb500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0x10e2c0500 | 0x1068000 | +| 87 | blk.9.attn_q.weight | 
0x10f328500 | 0xdc0000 | +| 88 | blk.9.attn_v.weight | 0x1100e8500 | 0x550000 | +| 89 | blk.9.ffn_down.weight | 0x110638500 | 0x8340000 | +| 90 | blk.9.ffn_gate.weight | 0x118978500 | 0x6e00000 | +| 91 | blk.9.ffn_norm.weight | 0x11f778500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0x11f77d500 | 0x6e00000 | +| 93 | blk.10.attn_k.weight | 0x12657d500 | 0x370000 | +| 94 | blk.10.attn_norm.weight | 0x1268ed500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0x1268f2500 | 0x1068000 | +| 96 | blk.10.attn_q.weight | 0x12795a500 | 0xdc0000 | +| 97 | blk.10.attn_v.weight | 0x12871a500 | 0x550000 | +| 98 | blk.10.ffn_down.weight | 0x128c6a500 | 0x8340000 | +| 99 | blk.10.ffn_gate.weight | 0x130faa500 | 0x6e00000 | +| 100 | blk.10.ffn_norm.weight | 0x137daa500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0x137daf500 | 0x6e00000 | +| 102 | blk.11.attn_k.weight | 0x13ebaf500 | 0x370000 | +| 103 | blk.11.attn_norm.weight | 0x13ef1f500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0x13ef24500 | 0x1068000 | +| 105 | blk.11.attn_q.weight | 0x13ff8c500 | 0xdc0000 | +| 106 | blk.11.attn_v.weight | 0x140d4c500 | 0x550000 | +| 107 | blk.11.ffn_down.weight | 0x14129c500 | 0x8340000 | +| 108 | blk.11.ffn_gate.weight | 0x1495dc500 | 0x6e00000 | +| 109 | blk.11.ffn_norm.weight | 0x1503dc500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0x1503e1500 | 0x6e00000 | +| 111 | blk.12.attn_k.weight | 0x1571e1500 | 0x370000 | +| 112 | blk.12.attn_norm.weight | 0x157551500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0x157556500 | 0x1068000 | +| 114 | blk.12.attn_q.weight | 0x1585be500 | 0xdc0000 | +| 115 | blk.12.attn_v.weight | 0x15937e500 | 0x550000 | +| 116 | blk.12.ffn_down.weight | 0x1598ce500 | 0x8340000 | +| 117 | blk.12.ffn_gate.weight | 0x161c0e500 | 0x6e00000 | +| 118 | blk.12.ffn_norm.weight | 0x168a0e500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0x168a13500 | 0x6e00000 | +| 120 | blk.13.attn_k.weight | 0x16f813500 | 0x370000 | +| 121 | blk.13.attn_norm.weight | 0x16fb83500 | 0x5000 | +| 
122 | blk.13.attn_output.weight | 0x16fb88500 | 0x1068000 | +| 123 | blk.13.attn_q.weight | 0x170bf0500 | 0xdc0000 | +| 124 | blk.13.attn_v.weight | 0x1719b0500 | 0x550000 | +| 125 | blk.13.ffn_down.weight | 0x171f00500 | 0x8340000 | +| 126 | blk.13.ffn_gate.weight | 0x17a240500 | 0x6e00000 | +| 127 | blk.13.ffn_norm.weight | 0x181040500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x181045500 | 0x6e00000 | +| 129 | blk.14.attn_k.weight | 0x187e45500 | 0x370000 | +| 130 | blk.14.attn_norm.weight | 0x1881b5500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x1881ba500 | 0x1068000 | +| 132 | blk.14.attn_q.weight | 0x189222500 | 0xdc0000 | +| 133 | blk.14.attn_v.weight | 0x189fe2500 | 0x550000 | +| 134 | blk.14.ffn_down.weight | 0x18a532500 | 0x8340000 | +| 135 | blk.14.ffn_gate.weight | 0x192872500 | 0x6e00000 | +| 136 | blk.14.ffn_norm.weight | 0x199672500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x199677500 | 0x6e00000 | +| 138 | blk.15.attn_k.weight | 0x1a0477500 | 0x370000 | +| 139 | blk.15.attn_norm.weight | 0x1a07e7500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x1a07ec500 | 0x1068000 | +| 141 | blk.15.attn_q.weight | 0x1a1854500 | 0xdc0000 | +| 142 | blk.15.attn_v.weight | 0x1a2614500 | 0x550000 | +| 143 | blk.15.ffn_down.weight | 0x1a2b64500 | 0x8340000 | +| 144 | blk.15.ffn_gate.weight | 0x1aaea4500 | 0x6e00000 | +| 145 | blk.15.ffn_norm.weight | 0x1b1ca4500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x1b1ca9500 | 0x6e00000 | +| 147 | blk.16.attn_k.weight | 0x1b8aa9500 | 0x370000 | +| 148 | blk.16.attn_norm.weight | 0x1b8e19500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x1b8e1e500 | 0x1068000 | +| 150 | blk.16.attn_q.weight | 0x1b9e86500 | 0xdc0000 | +| 151 | blk.16.attn_v.weight | 0x1bac46500 | 0x550000 | +| 152 | blk.16.ffn_down.weight | 0x1bb196500 | 0x8340000 | +| 153 | blk.16.ffn_gate.weight | 0x1c34d6500 | 0x6e00000 | +| 154 | blk.16.ffn_norm.weight | 0x1ca2d6500 | 0x5000 | +| 155 | blk.16.ffn_up.weight | 0x1ca2db500 | 0x6e00000 | +| 156 | 
blk.17.attn_k.weight | 0x1d10db500 | 0x370000 | +| 157 | blk.17.attn_norm.weight | 0x1d144b500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x1d1450500 | 0x1068000 | +| 159 | blk.17.attn_q.weight | 0x1d24b8500 | 0xdc0000 | +| 160 | blk.17.attn_v.weight | 0x1d3278500 | 0x550000 | +| 161 | blk.17.ffn_down.weight | 0x1d37c8500 | 0x8340000 | +| 162 | blk.17.ffn_gate.weight | 0x1dbb08500 | 0x6e00000 | +| 163 | blk.17.ffn_norm.weight | 0x1e2908500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x1e290d500 | 0x6e00000 | +| 165 | blk.18.attn_k.weight | 0x1e970d500 | 0x370000 | +| 166 | blk.18.attn_norm.weight | 0x1e9a7d500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x1e9a82500 | 0x1068000 | +| 168 | blk.18.attn_q.weight | 0x1eaaea500 | 0xdc0000 | +| 169 | blk.18.attn_v.weight | 0x1eb8aa500 | 0x550000 | +| 170 | blk.18.ffn_down.weight | 0x1ebdfa500 | 0x8340000 | +| 171 | blk.18.ffn_gate.weight | 0x1f413a500 | 0x6e00000 | +| 172 | blk.18.ffn_norm.weight | 0x1faf3a500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x1faf3f500 | 0x6e00000 | +| 174 | blk.19.attn_k.weight | 0x201d3f500 | 0x370000 | +| 175 | blk.19.attn_norm.weight | 0x2020af500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x2020b4500 | 0x1068000 | +| 177 | blk.19.attn_q.weight | 0x20311c500 | 0xdc0000 | +| 178 | blk.19.attn_v.weight | 0x203edc500 | 0x550000 | +| 179 | blk.19.ffn_down.weight | 0x20442c500 | 0x8340000 | +| 180 | blk.19.ffn_gate.weight | 0x20c76c500 | 0x6e00000 | +| 181 | blk.19.ffn_norm.weight | 0x21356c500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x213571500 | 0x6e00000 | +| 183 | blk.20.attn_k.weight | 0x21a371500 | 0x370000 | +| 184 | blk.20.attn_norm.weight | 0x21a6e1500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x21a6e6500 | 0x1068000 | +| 186 | blk.20.attn_q.weight | 0x21b74e500 | 0xdc0000 | +| 187 | blk.20.attn_v.weight | 0x21c50e500 | 0x550000 | +| 188 | blk.20.ffn_down.weight | 0x21ca5e500 | 0x8340000 | +| 189 | blk.20.ffn_gate.weight | 0x224d9e500 | 0x6e00000 | +| 190 | 
blk.20.ffn_norm.weight | 0x22bb9e500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x22bba3500 | 0x6e00000 | +| 192 | blk.21.attn_k.weight | 0x2329a3500 | 0x370000 | +| 193 | blk.21.attn_norm.weight | 0x232d13500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x232d18500 | 0x1068000 | +| 195 | blk.21.attn_q.weight | 0x233d80500 | 0xdc0000 | +| 196 | blk.21.attn_v.weight | 0x234b40500 | 0x550000 | +| 197 | blk.21.ffn_down.weight | 0x235090500 | 0x8340000 | +| 198 | blk.21.ffn_gate.weight | 0x23d3d0500 | 0x6e00000 | +| 199 | blk.21.ffn_norm.weight | 0x2441d0500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x2441d5500 | 0x6e00000 | +| 201 | blk.22.attn_k.weight | 0x24afd5500 | 0x370000 | +| 202 | blk.22.attn_norm.weight | 0x24b345500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x24b34a500 | 0x1068000 | +| 204 | blk.22.attn_q.weight | 0x24c3b2500 | 0xdc0000 | +| 205 | blk.22.attn_v.weight | 0x24d172500 | 0x550000 | +| 206 | blk.22.ffn_down.weight | 0x24d6c2500 | 0x8340000 | +| 207 | blk.22.ffn_gate.weight | 0x255a02500 | 0x6e00000 | +| 208 | blk.22.ffn_norm.weight | 0x25c802500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x25c807500 | 0x6e00000 | +| 210 | blk.23.attn_k.weight | 0x263607500 | 0x370000 | +| 211 | blk.23.attn_norm.weight | 0x263977500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x26397c500 | 0x1068000 | +| 213 | blk.23.attn_q.weight | 0x2649e4500 | 0xdc0000 | +| 214 | blk.23.attn_v.weight | 0x2657a4500 | 0x550000 | +| 215 | blk.23.ffn_down.weight | 0x265cf4500 | 0x8340000 | +| 216 | blk.23.ffn_gate.weight | 0x26e034500 | 0x6e00000 | +| 217 | blk.23.ffn_norm.weight | 0x274e34500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x274e39500 | 0x6e00000 | +| 219 | blk.24.attn_k.weight | 0x27bc39500 | 0x370000 | +| 220 | blk.24.attn_norm.weight | 0x27bfa9500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x27bfae500 | 0x1068000 | +| 222 | blk.24.attn_q.weight | 0x27d016500 | 0xdc0000 | +| 223 | blk.24.attn_v.weight | 0x27ddd6500 | 0x550000 | +| 224 | 
blk.24.ffn_down.weight | 0x27e326500 | 0x8340000 | +| 225 | blk.24.ffn_gate.weight | 0x286666500 | 0x6e00000 | +| 226 | blk.24.ffn_norm.weight | 0x28d466500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x28d46b500 | 0x6e00000 | +| 228 | blk.25.attn_k.weight | 0x29426b500 | 0x41a000 | +| 229 | blk.25.attn_norm.weight | 0x294685500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x29468a500 | 0x1068000 | +| 231 | blk.25.attn_q.weight | 0x2956f2500 | 0x1068000 | +| 232 | blk.25.attn_v.weight | 0x29675a500 | 0x550000 | +| 233 | blk.25.ffn_down.weight | 0x296caa500 | 0x8340000 | +| 234 | blk.25.ffn_gate.weight | 0x29efea500 | 0x6e00000 | +| 235 | blk.25.ffn_norm.weight | 0x2a5dea500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x2a5def500 | 0x6e00000 | +| 237 | blk.26.attn_k.weight | 0x2acbef500 | 0x370000 | +| 238 | blk.26.attn_norm.weight | 0x2acf5f500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x2acf64500 | 0x1068000 | +| 240 | blk.26.attn_q.weight | 0x2adfcc500 | 0xdc0000 | +| 241 | blk.26.attn_v.weight | 0x2aed8c500 | 0x550000 | +| 242 | blk.26.ffn_down.weight | 0x2af2dc500 | 0x8340000 | +| 243 | blk.26.ffn_gate.weight | 0x2b761c500 | 0x6e00000 | +| 244 | blk.26.ffn_norm.weight | 0x2be41c500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x2be421500 | 0x6e00000 | +| 246 | blk.27.attn_k.weight | 0x2c5221500 | 0x370000 | +| 247 | blk.27.attn_norm.weight | 0x2c5591500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x2c5596500 | 0x1068000 | +| 249 | blk.27.attn_q.weight | 0x2c65fe500 | 0xdc0000 | +| 250 | blk.27.attn_v.weight | 0x2c73be500 | 0x550000 | +| 251 | blk.27.ffn_down.weight | 0x2c790e500 | 0x8340000 | +| 252 | blk.27.ffn_gate.weight | 0x2cfc4e500 | 0x6e00000 | +| 253 | blk.27.ffn_norm.weight | 0x2d6a4e500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x2d6a53500 | 0x6e00000 | +| 255 | blk.28.attn_k.weight | 0x2dd853500 | 0x41a000 | +| 256 | blk.28.attn_norm.weight | 0x2ddc6d500 | 0x5000 | +| 257 | blk.28.attn_output.weight | 0x2ddc72500 | 0x1068000 | +| 258 | 
blk.28.attn_q.weight | 0x2decda500 | 0x1068000 | +| 259 | blk.28.attn_v.weight | 0x2dfd42500 | 0x550000 | +| 260 | blk.28.ffn_down.weight | 0x2e0292500 | 0x8340000 | +| 261 | blk.28.ffn_gate.weight | 0x2e85d2500 | 0x6e00000 | +| 262 | blk.28.ffn_norm.weight | 0x2ef3d2500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x2ef3d7500 | 0x6e00000 | +| 264 | blk.29.attn_k.weight | 0x2f61d7500 | 0x370000 | +| 265 | blk.29.attn_norm.weight | 0x2f6547500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x2f654c500 | 0x1068000 | +| 267 | blk.29.attn_q.weight | 0x2f75b4500 | 0xdc0000 | +| 268 | blk.29.attn_v.weight | 0x2f8374500 | 0x550000 | +| 269 | blk.29.ffn_down.weight | 0x2f88c4500 | 0x8340000 | +| 270 | blk.29.ffn_gate.weight | 0x300c04500 | 0x6e00000 | +| 271 | blk.29.ffn_norm.weight | 0x307a04500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x307a09500 | 0x6e00000 | +| 273 | blk.30.attn_k.weight | 0x30e809500 | 0x41a000 | +| 274 | blk.30.attn_norm.weight | 0x30ec23500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x30ec28500 | 0x1068000 | +| 276 | blk.30.attn_q.weight | 0x30fc90500 | 0x1068000 | +| 277 | blk.30.attn_v.weight | 0x310cf8500 | 0x550000 | +| 278 | blk.30.ffn_down.weight | 0x311248500 | 0x8340000 | +| 279 | blk.30.ffn_gate.weight | 0x319588500 | 0x8340000 | +| 280 | blk.30.ffn_norm.weight | 0x3218c8500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x3218cd500 | 0x8340000 | +| 282 | blk.31.attn_k.weight | 0x329c0d500 | 0x370000 | +| 283 | blk.31.attn_norm.weight | 0x329f7d500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x329f82500 | 0x1068000 | +| 285 | blk.31.attn_q.weight | 0x32afea500 | 0xdc0000 | +| 286 | blk.31.attn_v.weight | 0x32bdaa500 | 0x550000 | +| 287 | blk.31.ffn_down.weight | 0x32c2fa500 | 0x8340000 | +| 288 | blk.31.ffn_gate.weight | 0x33463a500 | 0x8340000 | +| 289 | blk.31.ffn_norm.weight | 0x33c97a500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x33c97f500 | 0x8340000 | +| 291 | blk.32.attn_k.weight | 0x344cbf500 | 0x41a000 | +| 292 | 
blk.32.attn_norm.weight | 0x3450d9500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x3450de500 | 0x1068000 | +| 294 | blk.32.attn_q.weight | 0x346146500 | 0x1068000 | +| 295 | blk.32.attn_v.weight | 0x3471ae500 | 0x550000 | +| 296 | blk.32.ffn_down.weight | 0x3476fe500 | 0x8340000 | +| 297 | blk.32.ffn_gate.weight | 0x34fa3e500 | 0x8340000 | +| 298 | blk.32.ffn_norm.weight | 0x357d7e500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x357d83500 | 0x8340000 | +| 300 | blk.33.attn_k.weight | 0x3600c3500 | 0x41a000 | +| 301 | blk.33.attn_norm.weight | 0x3604dd500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x3604e2500 | 0x1068000 | +| 303 | blk.33.attn_q.weight | 0x36154a500 | 0x1068000 | +| 304 | blk.33.attn_v.weight | 0x3625b2500 | 0x550000 | +| 305 | blk.33.ffn_down.weight | 0x362b02500 | 0x8340000 | +| 306 | blk.33.ffn_gate.weight | 0x36ae42500 | 0x8340000 | +| 307 | blk.33.ffn_norm.weight | 0x373182500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x373187500 | 0x8340000 | +| 309 | blk.34.attn_k.weight | 0x37b4c7500 | 0x41a000 | +| 310 | blk.34.attn_norm.weight | 0x37b8e1500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x37b8e6500 | 0x1068000 | +| 312 | blk.34.attn_q.weight | 0x37c94e500 | 0x1068000 | +| 313 | blk.34.attn_v.weight | 0x37d9b6500 | 0x550000 | +| 314 | blk.34.ffn_down.weight | 0x37df06500 | 0x8340000 | +| 315 | blk.34.ffn_gate.weight | 0x386246500 | 0x8340000 | +| 316 | blk.34.ffn_norm.weight | 0x38e586500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x38e58b500 | 0x8340000 | +| 318 | blk.35.attn_k.weight | 0x3968cb500 | 0x41a000 | +| 319 | blk.35.attn_norm.weight | 0x396ce5500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x396cea500 | 0x1068000 | +| 321 | blk.35.attn_q.weight | 0x397d52500 | 0x1068000 | +| 322 | blk.35.attn_v.weight | 0x398dba500 | 0x550000 | +| 323 | blk.35.ffn_down.weight | 0x39930a500 | 0x8340000 | +| 324 | blk.35.ffn_gate.weight | 0x3a164a500 | 0x8340000 | +| 325 | blk.35.ffn_norm.weight | 0x3a998a500 | 0x5000 | +| 326 | 
blk.35.ffn_up.weight | 0x3a998f500 | 0x8340000 | +| 327 | blk.36.attn_k.weight | 0x3b1ccf500 | 0x41a000 | +| 328 | blk.36.attn_norm.weight | 0x3b20e9500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x3b20ee500 | 0x1068000 | +| 330 | blk.36.attn_q.weight | 0x3b3156500 | 0x1068000 | +| 331 | blk.36.attn_v.weight | 0x3b41be500 | 0x550000 | +| 332 | blk.36.ffn_down.weight | 0x3b470e500 | 0x8340000 | +| 333 | blk.36.ffn_gate.weight | 0x3bca4e500 | 0x8340000 | +| 334 | blk.36.ffn_norm.weight | 0x3c4d8e500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x3c4d93500 | 0x8340000 | +| 336 | blk.37.attn_k.weight | 0x3cd0d3500 | 0x41a000 | +| 337 | blk.37.attn_norm.weight | 0x3cd4ed500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x3cd4f2500 | 0x1068000 | +| 339 | blk.37.attn_q.weight | 0x3ce55a500 | 0x1068000 | +| 340 | blk.37.attn_v.weight | 0x3cf5c2500 | 0x550000 | +| 341 | blk.37.ffn_down.weight | 0x3cfb12500 | 0x8340000 | +| 342 | blk.37.ffn_gate.weight | 0x3d7e52500 | 0x8340000 | +| 343 | blk.37.ffn_norm.weight | 0x3e0192500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x3e0197500 | 0x8340000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q6_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| 
+| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 16 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" 
(W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.2: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly 
Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 
x 1 | Q5_K | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K 
| + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | 
blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 81 | blk.8.ffn_gate.weight | Block 8 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 82 | blk.8.ffn_norm.weight | Block 
8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 
10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward 
Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M 
Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | 
Q6_K | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 144 | blk.15.ffn_gate.weight | Block 15 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 
5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention 
Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network 
"Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + 
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 193 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 195 
| blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 207 | blk.22.ffn_gate.weight | Block 22 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 
x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 256 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 258 | 
blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 270 | blk.29.ffn_gate.weight | Block 29 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q5_K | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 
5242880 | 5120 x 1024 x 1 x 1 | Q5_K | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q5_K | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 
167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 319 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 321 | 
blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 333 | blk.36.ffn_gate.weight | Block 36 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q6_K | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q6_K | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-Q8_0.md b/scores/Dolphin-Mistral-24B-Venice-Edition-Q8_0.md new file mode 100644 index 0000000..fc66ad0 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-Q8_0.md @@ -0,0 +1,1154 @@ +# Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf - GGUF Internal File Dump + +- Endian: 
LITTLE endian + +## Key Value Metadata Store + +There are 46 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:---------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 345 | +| 3 | UINT64 | 1 | GGUF.kv_count | 43 | +| 4 | STRING | 1 | general.architecture | `llama` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Dolphin Mistral 24B Venice Edition` | +| 7 | STRING | 1 | general.finetune | `Venice-Edition` | +| 8 | STRING | 1 | general.basename | `Dolphin-Mistral` | +| 9 | STRING | 1 | general.size_label | `24B` | +| 10 | STRING | 1 | general.license | `apache-2.0` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Mistral Small 24B Instruct 2501` | +| 13 | STRING | 1 | general.base_model.0.version | `2501` | +| 14 | STRING | 1 | general.base_model.0.organization | `Mistralai` | +| 15 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/mistral`...`istral-Small-24B-Instruct-2501` | +| 16 | UINT32 | 1 | llama.context_length | 32768 | +| 17 | UINT32 | 1 | llama.embedding_length | 5120 | +| 18 | UINT32 | 1 | llama.feed_forward_length | 32768 | +| 19 | UINT32 | 1 | llama.attention.head_count | 32 | +| 20 | UINT32 | 1 | llama.attention.head_count_kv | 8 | +| 21 | FLOAT32 | 1 | llama.rope.freq_base | 100000000.0 | +| 22 | FLOAT32 | 1 | llama.attention.layer_norm_rms_epsilon | 1e-05 | +| 23 | UINT32 | 1 | llama.attention.key_length | 128 | +| 24 | UINT32 | 1 | llama.attention.value_length | 128 | +| 25 | UINT32 | 1 | llama.vocab_size | 131072 | +| 26 | UINT32 | 1 | llama.rope.dimension_count | 128 | +| 27 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 28 | STRING | 1 | tokenizer.ggml.pre | `tekken` | +| 29 | [STRING] | 131072 | tokenizer.ggml.tokens | [ ``, ``, ``, `[INST]`, `[/INST]`, ... 
] | +| 30 | [INT32] | 131072 | tokenizer.ggml.token_type | [ 3, 3, 3, 3, 3, 3, 3, ... ] | +| 31 | [STRING] | 269443 | tokenizer.ggml.merges | [ `Ġ Ġ`, `Ġ t`, `e r`, `i n`, `Ġ ĠĠĠ`, ... ] | +| 32 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 1 | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 2 | +| 34 | UINT32 | 1 | tokenizer.ggml.unknown_token_id | 0 | +| 35 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 11 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | True | +| 37 | BOOL | 1 | tokenizer.ggml.add_eos_token | False | +| 38 | STRING | 1 | tokenizer.chat_template | `{%- set today = strftime_now("`...` {%- endif %}{%- endfor %}` | +| 39 | BOOL | 1 | tokenizer.ggml.add_space_prefix | False | +| 40 | UINT32 | 1 | general.quantization_version | 2 | +| 41 | UINT32 | 1 | general.file_type | 7 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Dolphin-Mist`...`l-24B-Venice-Edition-small.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_eur_small.txt` | +| 44 | UINT32 | 1 | quantize.imatrix.entries_count | 281 | +| 45 | UINT32 | 1 | quantize.imatrix.chunks_count | 3192 | +| 46 | UINT32 | 1 | llama.block_count | 38 | + +## Tensors Overview ~22B Elements + +Total number of elements in all tensors: 22460892160 Elements + +- [Dolphin-Mistral-24B-Venice-Edition-pruned-Q8\_0.gguf - GGUF Internal File Dump](#Dolphin-Mistral-24B-Venice-Edition-pruned-q8_0gguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~22B Elements](#tensors-overview-22b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~1B Elements](#base-tensor-group--1b-elements) + - [Block 0 Tensor Group : ~556M Elements](#block-0-tensor-group--556m-elements) + - [Block 1 Tensor Group : ~556M Elements](#block-1-tensor-group--556m-elements) + - [Block 2 Tensor Group : ~556M Elements](#block-2-tensor-group--556m-elements) + - [Block 3 Tensor Group : ~556M 
Elements](#block-3-tensor-group--556m-elements) + - [Block 4 Tensor Group : ~556M Elements](#block-4-tensor-group--556m-elements) + - [Block 5 Tensor Group : ~556M Elements](#block-5-tensor-group--556m-elements) + - [Block 6 Tensor Group : ~556M Elements](#block-6-tensor-group--556m-elements) + - [Block 7 Tensor Group : ~556M Elements](#block-7-tensor-group--556m-elements) + - [Block 8 Tensor Group : ~556M Elements](#block-8-tensor-group--556m-elements) + - [Block 9 Tensor Group : ~556M Elements](#block-9-tensor-group--556m-elements) + - [Block 10 Tensor Group : ~556M Elements](#block-10-tensor-group--556m-elements) + - [Block 11 Tensor Group : ~556M Elements](#block-11-tensor-group--556m-elements) + - [Block 12 Tensor Group : ~556M Elements](#block-12-tensor-group--556m-elements) + - [Block 13 Tensor Group : ~556M Elements](#block-13-tensor-group--556m-elements) + - [Block 14 Tensor Group : ~556M Elements](#block-14-tensor-group--556m-elements) + - [Block 15 Tensor Group : ~556M Elements](#block-15-tensor-group--556m-elements) + - [Block 16 Tensor Group : ~556M Elements](#block-16-tensor-group--556m-elements) + - [Block 17 Tensor Group : ~556M Elements](#block-17-tensor-group--556m-elements) + - [Block 18 Tensor Group : ~556M Elements](#block-18-tensor-group--556m-elements) + - [Block 19 Tensor Group : ~556M Elements](#block-19-tensor-group--556m-elements) + - [Block 20 Tensor Group : ~556M Elements](#block-20-tensor-group--556m-elements) + - [Block 21 Tensor Group : ~556M Elements](#block-21-tensor-group--556m-elements) + - [Block 22 Tensor Group : ~556M Elements](#block-22-tensor-group--556m-elements) + - [Block 23 Tensor Group : ~556M Elements](#block-23-tensor-group--556m-elements) + - [Block 24 Tensor Group : ~556M Elements](#block-24-tensor-group--556m-elements) + - [Block 25 Tensor Group : ~556M Elements](#block-25-tensor-group--556m-elements) + - [Block 26 Tensor Group : ~556M Elements](#block-26-tensor-group--556m-elements) + - [Block 27 Tensor Group : 
~556M Elements](#block-27-tensor-group--556m-elements) + - [Block 28 Tensor Group : ~556M Elements](#block-28-tensor-group--556m-elements) + - [Block 29 Tensor Group : ~556M Elements](#block-29-tensor-group--556m-elements) + - [Block 30 Tensor Group : ~556M Elements](#block-30-tensor-group--556m-elements) + - [Block 31 Tensor Group : ~556M Elements](#block-31-tensor-group--556m-elements) + - [Block 32 Tensor Group : ~556M Elements](#block-32-tensor-group--556m-elements) + - [Block 33 Tensor Group : ~556M Elements](#block-33-tensor-group--556m-elements) + - [Block 34 Tensor Group : ~556M Elements](#block-34-tensor-group--556m-elements) + - [Block 35 Tensor Group : ~556M Elements](#block-35-tensor-group--556m-elements) + - [Block 36 Tensor Group : ~556M Elements](#block-36-tensor-group--556m-elements) + - [Block 37 Tensor Group : ~556M Elements](#block-37-tensor-group--556m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:--------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x784500 | 0x2a800000 | +| 1 | output_norm.weight | 0x2af84500 | 0x5000 | +| 2 | token_embd.weight | 0x2af89500 | 0x11300000 | +| 3 | blk.0.attn_k.weight | 0x3c289500 | 0x41a000 | +| 4 | blk.0.attn_norm.weight | 0x3c6a3500 | 0x5000 | +| 5 | blk.0.attn_output.weight | 0x3c6a8500 | 0x1540000 | +| 6 | blk.0.attn_q.weight | 0x3dbe8500 | 0x1068000 | +| 7 | blk.0.attn_v.weight | 0x3ec50500 | 0xa00000 | +| 8 | blk.0.ffn_down.weight | 0x3f650500 | 0xaa00000 | +| 9 | blk.0.ffn_gate.weight | 0x4a050500 | 0x8340000 | +| 10 | blk.0.ffn_norm.weight | 0x52390500 | 0x5000 | +| 11 | blk.0.ffn_up.weight | 0x52395500 | 0x8340000 | +| 12 | blk.1.attn_k.weight | 0x5a6d5500 | 0x41a000 | +| 13 | blk.1.attn_norm.weight | 0x5aaef500 | 0x5000 | +| 14 | blk.1.attn_output.weight | 0x5aaf4500 | 0x1540000 | +| 15 | blk.1.attn_q.weight | 0x5c034500 | 
0x1068000 | +| 16 | blk.1.attn_v.weight | 0x5d09c500 | 0xa00000 | +| 17 | blk.1.ffn_down.weight | 0x5da9c500 | 0xaa00000 | +| 18 | blk.1.ffn_gate.weight | 0x6849c500 | 0x8340000 | +| 19 | blk.1.ffn_norm.weight | 0x707dc500 | 0x5000 | +| 20 | blk.1.ffn_up.weight | 0x707e1500 | 0x8340000 | +| 21 | blk.2.attn_k.weight | 0x78b21500 | 0x41a000 | +| 22 | blk.2.attn_norm.weight | 0x78f3b500 | 0x5000 | +| 23 | blk.2.attn_output.weight | 0x78f40500 | 0x1540000 | +| 24 | blk.2.attn_q.weight | 0x7a480500 | 0x1068000 | +| 25 | blk.2.attn_v.weight | 0x7b4e8500 | 0xa00000 | +| 26 | blk.2.ffn_down.weight | 0x7bee8500 | 0xaa00000 | +| 27 | blk.2.ffn_gate.weight | 0x868e8500 | 0x8340000 | +| 28 | blk.2.ffn_norm.weight | 0x8ec28500 | 0x5000 | +| 29 | blk.2.ffn_up.weight | 0x8ec2d500 | 0x8340000 | +| 30 | blk.3.attn_k.weight | 0x96f6d500 | 0x41a000 | +| 31 | blk.3.attn_norm.weight | 0x97387500 | 0x5000 | +| 32 | blk.3.attn_output.weight | 0x9738c500 | 0x1540000 | +| 33 | blk.3.attn_q.weight | 0x988cc500 | 0x1068000 | +| 34 | blk.3.attn_v.weight | 0x99934500 | 0xa00000 | +| 35 | blk.3.ffn_down.weight | 0x9a334500 | 0xaa00000 | +| 36 | blk.3.ffn_gate.weight | 0xa4d34500 | 0x8340000 | +| 37 | blk.3.ffn_norm.weight | 0xad074500 | 0x5000 | +| 38 | blk.3.ffn_up.weight | 0xad079500 | 0x8340000 | +| 39 | blk.4.attn_k.weight | 0xb53b9500 | 0x41a000 | +| 40 | blk.4.attn_norm.weight | 0xb57d3500 | 0x5000 | +| 41 | blk.4.attn_output.weight | 0xb57d8500 | 0x1540000 | +| 42 | blk.4.attn_q.weight | 0xb6d18500 | 0x1068000 | +| 43 | blk.4.attn_v.weight | 0xb7d80500 | 0xa00000 | +| 44 | blk.4.ffn_down.weight | 0xb8780500 | 0xaa00000 | +| 45 | blk.4.ffn_gate.weight | 0xc3180500 | 0x8340000 | +| 46 | blk.4.ffn_norm.weight | 0xcb4c0500 | 0x5000 | +| 47 | blk.4.ffn_up.weight | 0xcb4c5500 | 0x8340000 | +| 48 | blk.5.attn_k.weight | 0xd3805500 | 0x41a000 | +| 49 | blk.5.attn_norm.weight | 0xd3c1f500 | 0x5000 | +| 50 | blk.5.attn_output.weight | 0xd3c24500 | 0x1540000 | +| 51 | blk.5.attn_q.weight | 
0xd5164500 | 0x1068000 | +| 52 | blk.5.attn_v.weight | 0xd61cc500 | 0xa00000 | +| 53 | blk.5.ffn_down.weight | 0xd6bcc500 | 0xaa00000 | +| 54 | blk.5.ffn_gate.weight | 0xe15cc500 | 0x8340000 | +| 55 | blk.5.ffn_norm.weight | 0xe990c500 | 0x5000 | +| 56 | blk.5.ffn_up.weight | 0xe9911500 | 0x8340000 | +| 57 | blk.6.attn_k.weight | 0xf1c51500 | 0x41a000 | +| 58 | blk.6.attn_norm.weight | 0xf206b500 | 0x5000 | +| 59 | blk.6.attn_output.weight | 0xf2070500 | 0x1540000 | +| 60 | blk.6.attn_q.weight | 0xf35b0500 | 0x1068000 | +| 61 | blk.6.attn_v.weight | 0xf4618500 | 0xa00000 | +| 62 | blk.6.ffn_down.weight | 0xf5018500 | 0xaa00000 | +| 63 | blk.6.ffn_gate.weight | 0xffa18500 | 0x8340000 | +| 64 | blk.6.ffn_norm.weight | 0x107d58500 | 0x5000 | +| 65 | blk.6.ffn_up.weight | 0x107d5d500 | 0x8340000 | +| 66 | blk.7.attn_k.weight | 0x11009d500 | 0x41a000 | +| 67 | blk.7.attn_norm.weight | 0x1104b7500 | 0x5000 | +| 68 | blk.7.attn_output.weight | 0x1104bc500 | 0x1540000 | +| 69 | blk.7.attn_q.weight | 0x1119fc500 | 0x1068000 | +| 70 | blk.7.attn_v.weight | 0x112a64500 | 0xa00000 | +| 71 | blk.7.ffn_down.weight | 0x113464500 | 0xaa00000 | +| 72 | blk.7.ffn_gate.weight | 0x11de64500 | 0x8340000 | +| 73 | blk.7.ffn_norm.weight | 0x1261a4500 | 0x5000 | +| 74 | blk.7.ffn_up.weight | 0x1261a9500 | 0x8340000 | +| 75 | blk.8.attn_k.weight | 0x12e4e9500 | 0x41a000 | +| 76 | blk.8.attn_norm.weight | 0x12e903500 | 0x5000 | +| 77 | blk.8.attn_output.weight | 0x12e908500 | 0x1540000 | +| 78 | blk.8.attn_q.weight | 0x12fe48500 | 0x1068000 | +| 79 | blk.8.attn_v.weight | 0x130eb0500 | 0xa00000 | +| 80 | blk.8.ffn_down.weight | 0x1318b0500 | 0xaa00000 | +| 81 | blk.8.ffn_gate.weight | 0x13c2b0500 | 0x8340000 | +| 82 | blk.8.ffn_norm.weight | 0x1445f0500 | 0x5000 | +| 83 | blk.8.ffn_up.weight | 0x1445f5500 | 0x8340000 | +| 84 | blk.9.attn_k.weight | 0x14c935500 | 0x41a000 | +| 85 | blk.9.attn_norm.weight | 0x14cd4f500 | 0x5000 | +| 86 | blk.9.attn_output.weight | 0x14cd54500 | 0x1540000 | +| 
87 | blk.9.attn_q.weight | 0x14e294500 | 0x1068000 | +| 88 | blk.9.attn_v.weight | 0x14f2fc500 | 0xa00000 | +| 89 | blk.9.ffn_down.weight | 0x14fcfc500 | 0xaa00000 | +| 90 | blk.9.ffn_gate.weight | 0x15a6fc500 | 0x8340000 | +| 91 | blk.9.ffn_norm.weight | 0x162a3c500 | 0x5000 | +| 92 | blk.9.ffn_up.weight | 0x162a41500 | 0x8340000 | +| 93 | blk.10.attn_k.weight | 0x16ad81500 | 0x41a000 | +| 94 | blk.10.attn_norm.weight | 0x16b19b500 | 0x5000 | +| 95 | blk.10.attn_output.weight | 0x16b1a0500 | 0x1540000 | +| 96 | blk.10.attn_q.weight | 0x16c6e0500 | 0x1068000 | +| 97 | blk.10.attn_v.weight | 0x16d748500 | 0xa00000 | +| 98 | blk.10.ffn_down.weight | 0x16e148500 | 0xaa00000 | +| 99 | blk.10.ffn_gate.weight | 0x178b48500 | 0x8340000 | +| 100 | blk.10.ffn_norm.weight | 0x180e88500 | 0x5000 | +| 101 | blk.10.ffn_up.weight | 0x180e8d500 | 0x8340000 | +| 102 | blk.11.attn_k.weight | 0x1891cd500 | 0x41a000 | +| 103 | blk.11.attn_norm.weight | 0x1895e7500 | 0x5000 | +| 104 | blk.11.attn_output.weight | 0x1895ec500 | 0x1540000 | +| 105 | blk.11.attn_q.weight | 0x18ab2c500 | 0x1068000 | +| 106 | blk.11.attn_v.weight | 0x18bb94500 | 0xa00000 | +| 107 | blk.11.ffn_down.weight | 0x18c594500 | 0xaa00000 | +| 108 | blk.11.ffn_gate.weight | 0x196f94500 | 0x8340000 | +| 109 | blk.11.ffn_norm.weight | 0x19f2d4500 | 0x5000 | +| 110 | blk.11.ffn_up.weight | 0x19f2d9500 | 0x8340000 | +| 111 | blk.12.attn_k.weight | 0x1a7619500 | 0x41a000 | +| 112 | blk.12.attn_norm.weight | 0x1a7a33500 | 0x5000 | +| 113 | blk.12.attn_output.weight | 0x1a7a38500 | 0x1540000 | +| 114 | blk.12.attn_q.weight | 0x1a8f78500 | 0x1068000 | +| 115 | blk.12.attn_v.weight | 0x1a9fe0500 | 0xa00000 | +| 116 | blk.12.ffn_down.weight | 0x1aa9e0500 | 0xaa00000 | +| 117 | blk.12.ffn_gate.weight | 0x1b53e0500 | 0x8340000 | +| 118 | blk.12.ffn_norm.weight | 0x1bd720500 | 0x5000 | +| 119 | blk.12.ffn_up.weight | 0x1bd725500 | 0x8340000 | +| 120 | blk.13.attn_k.weight | 0x1c5a65500 | 0x41a000 | +| 121 | 
blk.13.attn_norm.weight | 0x1c5e7f500 | 0x5000 | +| 122 | blk.13.attn_output.weight | 0x1c5e84500 | 0x1540000 | +| 123 | blk.13.attn_q.weight | 0x1c73c4500 | 0x1068000 | +| 124 | blk.13.attn_v.weight | 0x1c842c500 | 0xa00000 | +| 125 | blk.13.ffn_down.weight | 0x1c8e2c500 | 0xaa00000 | +| 126 | blk.13.ffn_gate.weight | 0x1d382c500 | 0x8340000 | +| 127 | blk.13.ffn_norm.weight | 0x1dbb6c500 | 0x5000 | +| 128 | blk.13.ffn_up.weight | 0x1dbb71500 | 0x8340000 | +| 129 | blk.14.attn_k.weight | 0x1e3eb1500 | 0x41a000 | +| 130 | blk.14.attn_norm.weight | 0x1e42cb500 | 0x5000 | +| 131 | blk.14.attn_output.weight | 0x1e42d0500 | 0x1540000 | +| 132 | blk.14.attn_q.weight | 0x1e5810500 | 0x1068000 | +| 133 | blk.14.attn_v.weight | 0x1e6878500 | 0xa00000 | +| 134 | blk.14.ffn_down.weight | 0x1e7278500 | 0xaa00000 | +| 135 | blk.14.ffn_gate.weight | 0x1f1c78500 | 0x8340000 | +| 136 | blk.14.ffn_norm.weight | 0x1f9fb8500 | 0x5000 | +| 137 | blk.14.ffn_up.weight | 0x1f9fbd500 | 0x8340000 | +| 138 | blk.15.attn_k.weight | 0x2022fd500 | 0x41a000 | +| 139 | blk.15.attn_norm.weight | 0x202717500 | 0x5000 | +| 140 | blk.15.attn_output.weight | 0x20271c500 | 0x1540000 | +| 141 | blk.15.attn_q.weight | 0x203c5c500 | 0x1068000 | +| 142 | blk.15.attn_v.weight | 0x204cc4500 | 0xa00000 | +| 143 | blk.15.ffn_down.weight | 0x2056c4500 | 0xaa00000 | +| 144 | blk.15.ffn_gate.weight | 0x2100c4500 | 0x8340000 | +| 145 | blk.15.ffn_norm.weight | 0x218404500 | 0x5000 | +| 146 | blk.15.ffn_up.weight | 0x218409500 | 0x8340000 | +| 147 | blk.16.attn_k.weight | 0x220749500 | 0x41a000 | +| 148 | blk.16.attn_norm.weight | 0x220b63500 | 0x5000 | +| 149 | blk.16.attn_output.weight | 0x220b68500 | 0x1540000 | +| 150 | blk.16.attn_q.weight | 0x2220a8500 | 0x1068000 | +| 151 | blk.16.attn_v.weight | 0x223110500 | 0xa00000 | +| 152 | blk.16.ffn_down.weight | 0x223b10500 | 0xaa00000 | +| 153 | blk.16.ffn_gate.weight | 0x22e510500 | 0x8340000 | +| 154 | blk.16.ffn_norm.weight | 0x236850500 | 0x5000 | +| 155 | 
blk.16.ffn_up.weight | 0x236855500 | 0x8340000 | +| 156 | blk.17.attn_k.weight | 0x23eb95500 | 0x550000 | +| 157 | blk.17.attn_norm.weight | 0x23f0e5500 | 0x5000 | +| 158 | blk.17.attn_output.weight | 0x23f0ea500 | 0x1540000 | +| 159 | blk.17.attn_q.weight | 0x24062a500 | 0x1540000 | +| 160 | blk.17.attn_v.weight | 0x241b6a500 | 0xa00000 | +| 161 | blk.17.ffn_down.weight | 0x24256a500 | 0xaa00000 | +| 162 | blk.17.ffn_gate.weight | 0x24cf6a500 | 0x8340000 | +| 163 | blk.17.ffn_norm.weight | 0x2552aa500 | 0x5000 | +| 164 | blk.17.ffn_up.weight | 0x2552af500 | 0x8340000 | +| 165 | blk.18.attn_k.weight | 0x25d5ef500 | 0x550000 | +| 166 | blk.18.attn_norm.weight | 0x25db3f500 | 0x5000 | +| 167 | blk.18.attn_output.weight | 0x25db44500 | 0x1540000 | +| 168 | blk.18.attn_q.weight | 0x25f084500 | 0x1540000 | +| 169 | blk.18.attn_v.weight | 0x2605c4500 | 0xa00000 | +| 170 | blk.18.ffn_down.weight | 0x260fc4500 | 0xaa00000 | +| 171 | blk.18.ffn_gate.weight | 0x26b9c4500 | 0x8340000 | +| 172 | blk.18.ffn_norm.weight | 0x273d04500 | 0x5000 | +| 173 | blk.18.ffn_up.weight | 0x273d09500 | 0x8340000 | +| 174 | blk.19.attn_k.weight | 0x27c049500 | 0x41a000 | +| 175 | blk.19.attn_norm.weight | 0x27c463500 | 0x5000 | +| 176 | blk.19.attn_output.weight | 0x27c468500 | 0x1540000 | +| 177 | blk.19.attn_q.weight | 0x27d9a8500 | 0x1068000 | +| 178 | blk.19.attn_v.weight | 0x27ea10500 | 0xa00000 | +| 179 | blk.19.ffn_down.weight | 0x27f410500 | 0xaa00000 | +| 180 | blk.19.ffn_gate.weight | 0x289e10500 | 0x8340000 | +| 181 | blk.19.ffn_norm.weight | 0x292150500 | 0x5000 | +| 182 | blk.19.ffn_up.weight | 0x292155500 | 0x8340000 | +| 183 | blk.20.attn_k.weight | 0x29a495500 | 0x550000 | +| 184 | blk.20.attn_norm.weight | 0x29a9e5500 | 0x5000 | +| 185 | blk.20.attn_output.weight | 0x29a9ea500 | 0x1540000 | +| 186 | blk.20.attn_q.weight | 0x29bf2a500 | 0x1540000 | +| 187 | blk.20.attn_v.weight | 0x29d46a500 | 0xa00000 | +| 188 | blk.20.ffn_down.weight | 0x29de6a500 | 0xaa00000 | +| 189 | 
blk.20.ffn_gate.weight | 0x2a886a500 | 0xaa00000 | +| 190 | blk.20.ffn_norm.weight | 0x2b326a500 | 0x5000 | +| 191 | blk.20.ffn_up.weight | 0x2b326f500 | 0xaa00000 | +| 192 | blk.21.attn_k.weight | 0x2bdc6f500 | 0x41a000 | +| 193 | blk.21.attn_norm.weight | 0x2be089500 | 0x5000 | +| 194 | blk.21.attn_output.weight | 0x2be08e500 | 0x1540000 | +| 195 | blk.21.attn_q.weight | 0x2bf5ce500 | 0x1068000 | +| 196 | blk.21.attn_v.weight | 0x2c0636500 | 0xa00000 | +| 197 | blk.21.ffn_down.weight | 0x2c1036500 | 0xaa00000 | +| 198 | blk.21.ffn_gate.weight | 0x2cba36500 | 0xaa00000 | +| 199 | blk.21.ffn_norm.weight | 0x2d6436500 | 0x5000 | +| 200 | blk.21.ffn_up.weight | 0x2d643b500 | 0xaa00000 | +| 201 | blk.22.attn_k.weight | 0x2e0e3b500 | 0x550000 | +| 202 | blk.22.attn_norm.weight | 0x2e138b500 | 0x5000 | +| 203 | blk.22.attn_output.weight | 0x2e1390500 | 0x1540000 | +| 204 | blk.22.attn_q.weight | 0x2e28d0500 | 0x1540000 | +| 205 | blk.22.attn_v.weight | 0x2e3e10500 | 0xa00000 | +| 206 | blk.22.ffn_down.weight | 0x2e4810500 | 0xaa00000 | +| 207 | blk.22.ffn_gate.weight | 0x2ef210500 | 0xaa00000 | +| 208 | blk.22.ffn_norm.weight | 0x2f9c10500 | 0x5000 | +| 209 | blk.22.ffn_up.weight | 0x2f9c15500 | 0xaa00000 | +| 210 | blk.23.attn_k.weight | 0x304615500 | 0x550000 | +| 211 | blk.23.attn_norm.weight | 0x304b65500 | 0x5000 | +| 212 | blk.23.attn_output.weight | 0x304b6a500 | 0x1540000 | +| 213 | blk.23.attn_q.weight | 0x3060aa500 | 0x1540000 | +| 214 | blk.23.attn_v.weight | 0x3075ea500 | 0xa00000 | +| 215 | blk.23.ffn_down.weight | 0x307fea500 | 0xaa00000 | +| 216 | blk.23.ffn_gate.weight | 0x3129ea500 | 0xaa00000 | +| 217 | blk.23.ffn_norm.weight | 0x31d3ea500 | 0x5000 | +| 218 | blk.23.ffn_up.weight | 0x31d3ef500 | 0xaa00000 | +| 219 | blk.24.attn_k.weight | 0x327def500 | 0x550000 | +| 220 | blk.24.attn_norm.weight | 0x32833f500 | 0x5000 | +| 221 | blk.24.attn_output.weight | 0x328344500 | 0x1540000 | +| 222 | blk.24.attn_q.weight | 0x329884500 | 0x1540000 | +| 223 | 
blk.24.attn_v.weight | 0x32adc4500 | 0xa00000 | +| 224 | blk.24.ffn_down.weight | 0x32b7c4500 | 0xaa00000 | +| 225 | blk.24.ffn_gate.weight | 0x3361c4500 | 0xaa00000 | +| 226 | blk.24.ffn_norm.weight | 0x340bc4500 | 0x5000 | +| 227 | blk.24.ffn_up.weight | 0x340bc9500 | 0xaa00000 | +| 228 | blk.25.attn_k.weight | 0x34b5c9500 | 0x550000 | +| 229 | blk.25.attn_norm.weight | 0x34bb19500 | 0x5000 | +| 230 | blk.25.attn_output.weight | 0x34bb1e500 | 0x1540000 | +| 231 | blk.25.attn_q.weight | 0x34d05e500 | 0x1540000 | +| 232 | blk.25.attn_v.weight | 0x34e59e500 | 0xa00000 | +| 233 | blk.25.ffn_down.weight | 0x34ef9e500 | 0xaa00000 | +| 234 | blk.25.ffn_gate.weight | 0x35999e500 | 0xaa00000 | +| 235 | blk.25.ffn_norm.weight | 0x36439e500 | 0x5000 | +| 236 | blk.25.ffn_up.weight | 0x3643a3500 | 0xaa00000 | +| 237 | blk.26.attn_k.weight | 0x36eda3500 | 0x550000 | +| 238 | blk.26.attn_norm.weight | 0x36f2f3500 | 0x5000 | +| 239 | blk.26.attn_output.weight | 0x36f2f8500 | 0x1540000 | +| 240 | blk.26.attn_q.weight | 0x370838500 | 0x1540000 | +| 241 | blk.26.attn_v.weight | 0x371d78500 | 0xa00000 | +| 242 | blk.26.ffn_down.weight | 0x372778500 | 0xaa00000 | +| 243 | blk.26.ffn_gate.weight | 0x37d178500 | 0xaa00000 | +| 244 | blk.26.ffn_norm.weight | 0x387b78500 | 0x5000 | +| 245 | blk.26.ffn_up.weight | 0x387b7d500 | 0xaa00000 | +| 246 | blk.27.attn_k.weight | 0x39257d500 | 0x41a000 | +| 247 | blk.27.attn_norm.weight | 0x392997500 | 0x5000 | +| 248 | blk.27.attn_output.weight | 0x39299c500 | 0x1540000 | +| 249 | blk.27.attn_q.weight | 0x393edc500 | 0x1068000 | +| 250 | blk.27.attn_v.weight | 0x394f44500 | 0xa00000 | +| 251 | blk.27.ffn_down.weight | 0x395944500 | 0xaa00000 | +| 252 | blk.27.ffn_gate.weight | 0x3a0344500 | 0xaa00000 | +| 253 | blk.27.ffn_norm.weight | 0x3aad44500 | 0x5000 | +| 254 | blk.27.ffn_up.weight | 0x3aad49500 | 0xaa00000 | +| 255 | blk.28.attn_k.weight | 0x3b5749500 | 0x550000 | +| 256 | blk.28.attn_norm.weight | 0x3b5c99500 | 0x5000 | +| 257 | 
blk.28.attn_output.weight | 0x3b5c9e500 | 0x1540000 | +| 258 | blk.28.attn_q.weight | 0x3b71de500 | 0x1540000 | +| 259 | blk.28.attn_v.weight | 0x3b871e500 | 0xa00000 | +| 260 | blk.28.ffn_down.weight | 0x3b911e500 | 0xaa00000 | +| 261 | blk.28.ffn_gate.weight | 0x3c3b1e500 | 0xaa00000 | +| 262 | blk.28.ffn_norm.weight | 0x3ce51e500 | 0x5000 | +| 263 | blk.28.ffn_up.weight | 0x3ce523500 | 0xaa00000 | +| 264 | blk.29.attn_k.weight | 0x3d8f23500 | 0x550000 | +| 265 | blk.29.attn_norm.weight | 0x3d9473500 | 0x5000 | +| 266 | blk.29.attn_output.weight | 0x3d9478500 | 0x1540000 | +| 267 | blk.29.attn_q.weight | 0x3da9b8500 | 0x1540000 | +| 268 | blk.29.attn_v.weight | 0x3dbef8500 | 0xa00000 | +| 269 | blk.29.ffn_down.weight | 0x3dc8f8500 | 0xaa00000 | +| 270 | blk.29.ffn_gate.weight | 0x3e72f8500 | 0xaa00000 | +| 271 | blk.29.ffn_norm.weight | 0x3f1cf8500 | 0x5000 | +| 272 | blk.29.ffn_up.weight | 0x3f1cfd500 | 0xaa00000 | +| 273 | blk.30.attn_k.weight | 0x3fc6fd500 | 0x550000 | +| 274 | blk.30.attn_norm.weight | 0x3fcc4d500 | 0x5000 | +| 275 | blk.30.attn_output.weight | 0x3fcc52500 | 0x1540000 | +| 276 | blk.30.attn_q.weight | 0x3fe192500 | 0x1540000 | +| 277 | blk.30.attn_v.weight | 0x3ff6d2500 | 0xa00000 | +| 278 | blk.30.ffn_down.weight | 0x4000d2500 | 0xaa00000 | +| 279 | blk.30.ffn_gate.weight | 0x40aad2500 | 0xaa00000 | +| 280 | blk.30.ffn_norm.weight | 0x4154d2500 | 0x5000 | +| 281 | blk.30.ffn_up.weight | 0x4154d7500 | 0xaa00000 | +| 282 | blk.31.attn_k.weight | 0x41fed7500 | 0x550000 | +| 283 | blk.31.attn_norm.weight | 0x420427500 | 0x5000 | +| 284 | blk.31.attn_output.weight | 0x42042c500 | 0x1540000 | +| 285 | blk.31.attn_q.weight | 0x42196c500 | 0x1540000 | +| 286 | blk.31.attn_v.weight | 0x422eac500 | 0xa00000 | +| 287 | blk.31.ffn_down.weight | 0x4238ac500 | 0xaa00000 | +| 288 | blk.31.ffn_gate.weight | 0x42e2ac500 | 0xaa00000 | +| 289 | blk.31.ffn_norm.weight | 0x438cac500 | 0x5000 | +| 290 | blk.31.ffn_up.weight | 0x438cb1500 | 0xaa00000 | +| 291 | 
blk.32.attn_k.weight | 0x4436b1500 | 0x550000 | +| 292 | blk.32.attn_norm.weight | 0x443c01500 | 0x5000 | +| 293 | blk.32.attn_output.weight | 0x443c06500 | 0x1540000 | +| 294 | blk.32.attn_q.weight | 0x445146500 | 0x1540000 | +| 295 | blk.32.attn_v.weight | 0x446686500 | 0xa00000 | +| 296 | blk.32.ffn_down.weight | 0x447086500 | 0xaa00000 | +| 297 | blk.32.ffn_gate.weight | 0x451a86500 | 0xaa00000 | +| 298 | blk.32.ffn_norm.weight | 0x45c486500 | 0x5000 | +| 299 | blk.32.ffn_up.weight | 0x45c48b500 | 0xaa00000 | +| 300 | blk.33.attn_k.weight | 0x466e8b500 | 0x550000 | +| 301 | blk.33.attn_norm.weight | 0x4673db500 | 0x5000 | +| 302 | blk.33.attn_output.weight | 0x4673e0500 | 0x1540000 | +| 303 | blk.33.attn_q.weight | 0x468920500 | 0x1540000 | +| 304 | blk.33.attn_v.weight | 0x469e60500 | 0xa00000 | +| 305 | blk.33.ffn_down.weight | 0x46a860500 | 0xaa00000 | +| 306 | blk.33.ffn_gate.weight | 0x475260500 | 0xaa00000 | +| 307 | blk.33.ffn_norm.weight | 0x47fc60500 | 0x5000 | +| 308 | blk.33.ffn_up.weight | 0x47fc65500 | 0xaa00000 | +| 309 | blk.34.attn_k.weight | 0x48a665500 | 0x550000 | +| 310 | blk.34.attn_norm.weight | 0x48abb5500 | 0x5000 | +| 311 | blk.34.attn_output.weight | 0x48abba500 | 0x1540000 | +| 312 | blk.34.attn_q.weight | 0x48c0fa500 | 0x1540000 | +| 313 | blk.34.attn_v.weight | 0x48d63a500 | 0xa00000 | +| 314 | blk.34.ffn_down.weight | 0x48e03a500 | 0xaa00000 | +| 315 | blk.34.ffn_gate.weight | 0x498a3a500 | 0xaa00000 | +| 316 | blk.34.ffn_norm.weight | 0x4a343a500 | 0x5000 | +| 317 | blk.34.ffn_up.weight | 0x4a343f500 | 0xaa00000 | +| 318 | blk.35.attn_k.weight | 0x4ade3f500 | 0x550000 | +| 319 | blk.35.attn_norm.weight | 0x4ae38f500 | 0x5000 | +| 320 | blk.35.attn_output.weight | 0x4ae394500 | 0x1540000 | +| 321 | blk.35.attn_q.weight | 0x4af8d4500 | 0x1540000 | +| 322 | blk.35.attn_v.weight | 0x4b0e14500 | 0xa00000 | +| 323 | blk.35.ffn_down.weight | 0x4b1814500 | 0xaa00000 | +| 324 | blk.35.ffn_gate.weight | 0x4bc214500 | 0xaa00000 | +| 325 | 
blk.35.ffn_norm.weight | 0x4c6c14500 | 0x5000 | +| 326 | blk.35.ffn_up.weight | 0x4c6c19500 | 0xaa00000 | +| 327 | blk.36.attn_k.weight | 0x4d1619500 | 0x550000 | +| 328 | blk.36.attn_norm.weight | 0x4d1b69500 | 0x5000 | +| 329 | blk.36.attn_output.weight | 0x4d1b6e500 | 0x1540000 | +| 330 | blk.36.attn_q.weight | 0x4d30ae500 | 0x1540000 | +| 331 | blk.36.attn_v.weight | 0x4d45ee500 | 0xa00000 | +| 332 | blk.36.ffn_down.weight | 0x4d4fee500 | 0xaa00000 | +| 333 | blk.36.ffn_gate.weight | 0x4df9ee500 | 0xaa00000 | +| 334 | blk.36.ffn_norm.weight | 0x4ea3ee500 | 0x5000 | +| 335 | blk.36.ffn_up.weight | 0x4ea3f3500 | 0xaa00000 | +| 336 | blk.37.attn_k.weight | 0x4f4df3500 | 0x550000 | +| 337 | blk.37.attn_norm.weight | 0x4f5343500 | 0x5000 | +| 338 | blk.37.attn_output.weight | 0x4f5348500 | 0x1540000 | +| 339 | blk.37.attn_q.weight | 0x4f6888500 | 0x1540000 | +| 340 | blk.37.attn_v.weight | 0x4f7dc8500 | 0xa00000 | +| 341 | blk.37.ffn_down.weight | 0x4f87c8500 | 0xaa00000 | +| 342 | blk.37.ffn_gate.weight | 0x5031c8500 | 0xaa00000 | +| 343 | blk.37.ffn_norm.weight | 0x50dbc8500 | 0x5000 | +| 344 | blk.37.ffn_up.weight | 0x50dbcd500 | 0xaa00000 | + +### Base Tensor Group : ~1B Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q8_0 | +| 1 | output_norm.weight | Output Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~671M) 671088640 | 5120 x 131072 x 1 x 1 | Q3_K | + +- Total elements in base: ( ~1B) 1342182400 +- Percentage of total elements: 5.98% + + +### Block 0 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 4 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 6 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 7 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 8 | blk.0.ffn_down.weight | Block 0 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 9 | blk.0.ffn_gate.weight | Block 0 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 10 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 11 | blk.0.ffn_up.weight | Block 0 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.0: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 1 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 12 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 13 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 14 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 15 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 16 | blk.1.attn_v.weight | Block 1 
Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 17 | blk.1.ffn_down.weight | Block 1 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 18 | blk.1.ffn_gate.weight | Block 1 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 19 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 20 | blk.1.ffn_up.weight | Block 1 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.1: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 2 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 21 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 22 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 23 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 24 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 25 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 26 | blk.2.ffn_down.weight | Block 2 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 27 | blk.2.ffn_gate.weight | Block 2 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 28 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 29 | blk.2.ffn_up.weight | Block 2 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.2: (~556M) 555755520 +- 
Percentage of total elements: 2.47% + + +### Block 3 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 30 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 31 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 32 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 33 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 34 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 35 | blk.3.ffn_down.weight | Block 3 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 36 | blk.3.ffn_gate.weight | Block 3 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 37 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 38 | blk.3.ffn_up.weight | Block 3 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.3: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 4 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 40 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 41 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~21M) 
20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 42 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 43 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 44 | blk.4.ffn_down.weight | Block 4 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 45 | blk.4.ffn_gate.weight | Block 4 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 46 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 47 | blk.4.ffn_up.weight | Block 4 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.4: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 5 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 48 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 49 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 50 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 51 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 52 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 53 | blk.5.ffn_down.weight | Block 5 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 54 | blk.5.ffn_gate.weight | Block 5 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 55 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 
1 x 1 | F32 | +| 56 | blk.5.ffn_up.weight | Block 5 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.5: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 6 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 57 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 58 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 59 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 60 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 61 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 62 | blk.6.ffn_down.weight | Block 6 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 63 | blk.6.ffn_gate.weight | Block 6 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 64 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 65 | blk.6.ffn_up.weight | Block 6 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.6: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 7 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 66 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 
67 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 68 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 69 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 70 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 71 | blk.7.ffn_down.weight | Block 7 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 72 | blk.7.ffn_gate.weight | Block 7 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 73 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 74 | blk.7.ffn_up.weight | Block 7 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.7: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 8 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 76 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 77 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 78 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 79 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 80 | blk.8.ffn_down.weight | Block 8 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 81 | blk.8.ffn_gate.weight | Block 8 
Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 82 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 83 | blk.8.ffn_up.weight | Block 8 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.8: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 9 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------------|:-----------------------------------------------|:------------------|:----------------------|:-----| +| 84 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 85 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 86 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 87 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 88 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 89 | blk.9.ffn_down.weight | Block 9 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 90 | blk.9.ffn_gate.weight | Block 9 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 91 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 92 | blk.9.ffn_up.weight | Block 9 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.9: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 10 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 93 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 94 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 95 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 96 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 97 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 98 | blk.10.ffn_down.weight | Block 10 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 99 | blk.10.ffn_gate.weight | Block 10 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 100 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 101 | blk.10.ffn_up.weight | Block 10 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.10: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 11 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 102 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 103 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 104 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 105 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | 
Q6_K | +| 106 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 107 | blk.11.ffn_down.weight | Block 11 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 108 | blk.11.ffn_gate.weight | Block 11 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 109 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 110 | blk.11.ffn_up.weight | Block 11 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.11: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 12 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 112 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 113 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 114 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 115 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 116 | blk.12.ffn_down.weight | Block 12 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 117 | blk.12.ffn_gate.weight | Block 12 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 118 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 119 | blk.12.ffn_up.weight | Block 12 Feed-Forward Network "Up" (W) | (~168M) 
167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.12: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 13 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 120 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 121 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 122 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 123 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 124 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 125 | blk.13.ffn_down.weight | Block 13 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 126 | blk.13.ffn_gate.weight | Block 13 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 127 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 128 | blk.13.ffn_up.weight | Block 13 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.13: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 14 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 129 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 130 | blk.14.attn_norm.weight | Block 14 Attention 
Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 131 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 132 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 133 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 134 | blk.14.ffn_down.weight | Block 14 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 135 | blk.14.ffn_gate.weight | Block 14 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 136 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 137 | blk.14.ffn_up.weight | Block 14 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.14: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 15 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 138 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 139 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 140 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 141 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 142 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 143 | blk.15.ffn_down.weight | Block 15 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 144 | blk.15.ffn_gate.weight | Block 15 
Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 145 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 146 | blk.15.ffn_up.weight | Block 15 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.15: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 16 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 148 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 149 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 150 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 151 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 152 | blk.16.ffn_down.weight | Block 16 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 153 | blk.16.ffn_gate.weight | Block 16 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 154 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 155 | blk.16.ffn_up.weight | Block 16 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.16: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 17 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 156 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 157 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 158 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 159 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 160 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 161 | blk.17.ffn_down.weight | Block 17 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 162 | blk.17.ffn_gate.weight | Block 17 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 163 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 164 | blk.17.ffn_up.weight | Block 17 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.17: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 18 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 165 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 166 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 167 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 168 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 
1 | Q8_0 | +| 169 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 170 | blk.18.ffn_down.weight | Block 18 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 171 | blk.18.ffn_gate.weight | Block 18 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 172 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 173 | blk.18.ffn_up.weight | Block 18 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.18: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 19 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 174 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 175 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 176 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 177 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 178 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 179 | blk.19.ffn_down.weight | Block 19 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 180 | blk.19.ffn_gate.weight | Block 19 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | +| 181 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 182 | blk.19.ffn_up.weight | Block 19 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q6_K | + +- Total elements in blk.19: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 20 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 184 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 185 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 186 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 187 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 188 | blk.20.ffn_down.weight | Block 20 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 189 | blk.20.ffn_gate.weight | Block 20 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 190 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 191 | blk.20.ffn_up.weight | Block 20 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.20: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 21 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 192 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 193 | blk.21.attn_norm.weight | Block 21 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 194 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 195 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 196 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 197 | blk.21.ffn_down.weight | Block 21 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 198 | blk.21.ffn_gate.weight | Block 21 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 199 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 200 | blk.21.ffn_up.weight | Block 21 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.21: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 22 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 201 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 202 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 203 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 204 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 205 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 206 | blk.22.ffn_down.weight | Block 22 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 207 | blk.22.ffn_gate.weight | Block 22 
Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 208 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 209 | blk.22.ffn_up.weight | Block 22 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.22: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 23 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 210 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 211 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 212 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 213 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 214 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 215 | blk.23.ffn_down.weight | Block 23 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 216 | blk.23.ffn_gate.weight | Block 23 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 217 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 218 | blk.23.ffn_up.weight | Block 23 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.23: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 24 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 220 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 221 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 222 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 223 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 224 | blk.24.ffn_down.weight | Block 24 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 225 | blk.24.ffn_gate.weight | Block 24 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 226 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 227 | blk.24.ffn_up.weight | Block 24 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.24: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 25 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 228 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 229 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 230 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 231 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 
1 | Q8_0 | +| 232 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 233 | blk.25.ffn_down.weight | Block 25 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 234 | blk.25.ffn_gate.weight | Block 25 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 235 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 236 | blk.25.ffn_up.weight | Block 25 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.25: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 26 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 237 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 238 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 239 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 240 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 241 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 242 | blk.26.ffn_down.weight | Block 26 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 243 | blk.26.ffn_gate.weight | Block 26 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 244 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 245 | blk.26.ffn_up.weight | Block 26 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.26: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 27 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 246 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q6_K | +| 247 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 248 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 249 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q6_K | +| 250 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 251 | blk.27.ffn_down.weight | Block 27 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 252 | blk.27.ffn_gate.weight | Block 27 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 253 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 254 | blk.27.ffn_up.weight | Block 27 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.27: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 28 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 256 | blk.28.attn_norm.weight | Block 28 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 257 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 258 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 259 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 260 | blk.28.ffn_down.weight | Block 28 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 261 | blk.28.ffn_gate.weight | Block 28 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 262 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 263 | blk.28.ffn_up.weight | Block 28 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.28: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 29 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 264 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 265 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 266 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 267 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 268 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 269 | blk.29.ffn_down.weight | Block 29 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 270 | blk.29.ffn_gate.weight | Block 29 
Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 271 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 272 | blk.29.ffn_up.weight | Block 29 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.29: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 30 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 273 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 274 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 275 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 276 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 277 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 278 | blk.30.ffn_down.weight | Block 30 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 279 | blk.30.ffn_gate.weight | Block 30 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 280 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 281 | blk.30.ffn_up.weight | Block 30 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.30: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 31 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | 
+|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 282 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 283 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 284 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 285 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 286 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 287 | blk.31.ffn_down.weight | Block 31 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 288 | blk.31.ffn_gate.weight | Block 31 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 289 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 290 | blk.31.ffn_up.weight | Block 31 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.31: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 32 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 292 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 293 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 294 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 
1 | Q8_0 | +| 295 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 296 | blk.32.ffn_down.weight | Block 32 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 297 | blk.32.ffn_gate.weight | Block 32 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 298 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 299 | blk.32.ffn_up.weight | Block 32 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.32: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 33 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 300 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 301 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 302 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 303 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 304 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 305 | blk.33.ffn_down.weight | Block 33 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 306 | blk.33.ffn_gate.weight | Block 33 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 307 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 308 | blk.33.ffn_up.weight | Block 33 Feed-Forward Network "Up" (W) | 
(~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.33: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 34 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 309 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 310 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 311 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 312 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 313 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 314 | blk.34.ffn_down.weight | Block 34 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 315 | blk.34.ffn_gate.weight | Block 34 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 316 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 317 | blk.34.ffn_up.weight | Block 34 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.34: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 35 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 318 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 319 | blk.35.attn_norm.weight | Block 35 
Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 320 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 321 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 322 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 323 | blk.35.ffn_down.weight | Block 35 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 324 | blk.35.ffn_gate.weight | Block 35 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 325 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 326 | blk.35.ffn_up.weight | Block 35 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.35: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 36 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 328 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 329 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 330 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 331 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 332 | blk.36.ffn_down.weight | Block 36 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 333 | blk.36.ffn_gate.weight | Block 36 
Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 334 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 335 | blk.36.ffn_up.weight | Block 36 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.36: (~556M) 555755520 +- Percentage of total elements: 2.47% + + +### Block 37 Tensor Group : ~556M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:--------------------------|:------------------------------------------------|:------------------|:----------------------|:-----| +| 336 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | Q8_0 | +| 337 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 338 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~21M) 20971520 | 4096 x 5120 x 1 x 1 | Q8_0 | +| 339 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~21M) 20971520 | 5120 x 4096 x 1 x 1 | Q8_0 | +| 340 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~5M) 5242880 | 5120 x 1024 x 1 x 1 | F16 | +| 341 | blk.37.ffn_down.weight | Block 37 Feed-Forward Network "Down" (W) | (~168M) 167772160 | 32768 x 5120 x 1 x 1 | Q8_0 | +| 342 | blk.37.ffn_gate.weight | Block 37 Feed-Forward Network "Gate" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | +| 343 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~5K) 5120 | 5120 x 1 x 1 x 1 | F32 | +| 344 | blk.37.ffn_up.weight | Block 37 Feed-Forward Network "Up" (W) | (~168M) 167772160 | 5120 x 32768 x 1 x 1 | Q8_0 | + +- Total elements in blk.37: (~556M) 555755520 +- Percentage of total elements: 2.47% + + + diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.arc new file mode 100644 index 
0000000..826aafd --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf (version GGUF V3 (latest)) + +Final result: 65.6000 +/- 1.7358 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 4337.33 ms +llama_perf_context_print: prompt eval time = 154185.52 ms / 36666 tokens ( 4.21 ms per token, 237.80 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 155064.54 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.hsw new file mode 100644 index 0000000..5daf40c --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value 
pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf (version GGUF V3 (latest)) + +750 79.60000000% [76.5686%, 82.3297%] + + +llama_perf_context_print: load time = 748.01 ms +llama_perf_context_print: prompt eval time = 545228.12 ms / 129319 tokens ( 4.22 ms per token, 237.18 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 549120.81 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.mmlu new file mode 100644 index 0000000..53203f3 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf (version GGUF V3 (latest)) + +Final result: 42.9333 +/- 1.8086 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 733.03 ms +llama_perf_context_print: prompt eval time = 286965.35 ms / 68956 tokens ( 4.16 ms per token, 240.29 tokens per second) +llama_perf_context_print: 
eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 288291.52 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.ppx new file mode 100644 index 0000000..bcbd5ef --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 20.379006 ± 0.160275 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 73.93% +Mean ln(PPL(Q)/PPL(base)) : 1.193094 ± 0.005360 +Mean PPL(Q)/PPL(base) : 3.297266 ± 0.017673 +Mean PPL(Q)-PPL(base) : 14.198428 ± 0.132841 + +====== KL divergence statistics ====== +Mean KLD: 1.290608 ± 0.004304 +Maximum KLD: 27.217335 +99.9% KLD: 13.970652 +99.0% KLD: 8.700209 +99.0% KLD: 8.700209 +Median KLD: 0.800781 +10.0% KLD: 0.078073 + 5.0% KLD: 0.028142 + 1.0% KLD: 0.004895 +Minimum KLD: 0.000072 + +====== Token probability statistics ====== +Mean Δp: -18.226 ± 0.085 % +Maximum Δp: 95.351% +99.9% Δp: 73.378% +99.0% Δp: 48.957% +95.0% Δp: 22.742% +90.0% Δp: 9.547% +75.0% Δp: 0.003% +Median Δp: -5.000% +25.0% Δp: -33.511% +10.0% Δp: -75.041% + 5.0% Δp: -91.958% + 1.0% Δp: -99.782% + 0.1% Δp: -99.968% +Minimum Δp: -99.998% +RMS Δp : 37.928 ± 0.088 % +Same top p: 62.059 ± 0.125 % diff --git 
a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.tqa new file mode 100644 index 0000000..9d48966 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf (version GGUF V3 (latest)) + +Final result: 38.4000 +/- 1.7771 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 775.72 ms +llama_perf_context_print: prompt eval time = 219416.23 ms / 51053 tokens ( 4.30 ms per token, 232.68 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 221003.17 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.wng new file mode 100644 index 0000000..a517f3d --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_m.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 
+llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_M.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.4000 +/- 1.6334 + +llama_perf_context_print: load time = 754.95 ms +llama_perf_context_print: prompt eval time = 95407.56 ms / 22541 tokens ( 4.23 ms per token, 236.26 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 95958.45 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.arc new file mode 100644 index 0000000..1f555b7 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf (version GGUF V3 (latest)) + +Final result: 64.9333 +/- 1.7436 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 4191.63 ms 
+llama_perf_context_print: prompt eval time = 154546.03 ms / 36666 tokens ( 4.21 ms per token, 237.25 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 155368.54 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.hsw new file mode 100644 index 0000000..c4e96d3 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf (version GGUF V3 (latest)) + +750 79.86666667% [76.8479%, 82.5810%] + + +llama_perf_context_print: load time = 746.08 ms +llama_perf_context_print: prompt eval time = 544955.95 ms / 129319 tokens ( 4.21 ms per token, 237.30 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 548715.91 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 
(total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.mmlu new file mode 100644 index 0000000..1961f50 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf (version GGUF V3 (latest)) + +Final result: 42.0000 +/- 1.8034 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 771.21 ms +llama_perf_context_print: prompt eval time = 299726.08 ms / 68956 tokens ( 4.35 ms per token, 230.06 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 301016.69 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory 
pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.ppx new file mode 100644 index 0000000..4d41684 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 21.165413 ± 0.164512 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 73.80% +Mean ln(PPL(Q)/PPL(base)) : 1.230957 ± 0.005322 +Mean PPL(Q)/PPL(base) : 3.424504 ± 0.018226 +Mean PPL(Q)-PPL(base) : 14.984836 ± 0.137053 + +====== KL divergence statistics ====== +Mean KLD: 1.340446 ± 0.004301 +Maximum KLD: 26.031479 +99.9% KLD: 13.794025 +99.0% KLD: 8.598367 +99.0% KLD: 8.598367 +Median KLD: 0.843369 +10.0% KLD: 0.087419 + 5.0% KLD: 0.032046 + 1.0% KLD: 0.005851 +Minimum KLD: 0.000204 + +====== Token probability statistics ====== +Mean Δp: -19.454 ± 0.086 % +Maximum Δp: 95.317% +99.9% Δp: 71.779% +99.0% Δp: 46.392% +95.0% Δp: 20.287% +90.0% Δp: 7.866% +75.0% Δp: -0.004% +Median Δp: -5.901% +25.0% Δp: -35.629% +10.0% Δp: -76.898% + 5.0% Δp: -92.591% + 1.0% Δp: -99.784% + 0.1% Δp: -99.968% +Minimum Δp: -99.998% +RMS Δp : 38.586 ± 0.088 % +Same top p: 61.705 ± 0.125 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.tqa new file mode 100644 index 0000000..63c4efc --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf (version GGUF V3 (latest)) + +Final 
result: 38.0000 +/- 1.7736 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 775.02 ms +llama_perf_context_print: prompt eval time = 229109.68 ms / 51053 tokens ( 4.49 ms per token, 222.83 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 230721.17 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.wng new file mode 100644 index 0000000..dc0b2e8 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq3_s.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ3_S.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.5333 +/- 1.6309 + +llama_perf_context_print: load time = 766.80 ms +llama_perf_context_print: prompt eval time = 99506.70 ms / 22541 tokens ( 4.41 ms per token, 226.53 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time 
= 99996.10 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.arc new file mode 100644 index 0000000..b1bc84b --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf (version GGUF V3 (latest)) + +Final result: 68.4000 +/- 1.6988 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 5388.44 ms +llama_perf_context_print: prompt eval time = 152847.24 ms / 36666 tokens ( 4.17 ms per token, 239.89 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 153655.01 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total 
= 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.hsw new file mode 100644 index 0000000..6e1a491 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf (version GGUF V3 (latest)) + +750 80.66666667% [77.6870%, 83.3338%] + + +llama_perf_context_print: load time = 881.76 ms +llama_perf_context_print: prompt eval time = 557982.84 ms / 129319 tokens ( 4.31 ms per token, 231.76 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 561719.88 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.mmlu 
b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.mmlu new file mode 100644 index 0000000..37775bb --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf (version GGUF V3 (latest)) + +Final result: 44.9333 +/- 1.8176 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 922.07 ms +llama_perf_context_print: prompt eval time = 295625.34 ms / 68956 tokens ( 4.29 ms per token, 233.25 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 296873.16 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.ppx new file mode 100644 index 0000000..41853c5 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.783744 ± 0.146959 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 74.79% +Mean 
ln(PPL(Q)/PPL(base)) : 1.111580 ± 0.005253 +Mean PPL(Q)/PPL(base) : 3.039157 ± 0.015966 +Mean PPL(Q)-PPL(base) : 12.603167 ± 0.119417 + +====== KL divergence statistics ====== +Mean KLD: 1.199318 ± 0.004258 +Maximum KLD: 26.543749 +99.9% KLD: 14.340773 +99.0% KLD: 8.742259 +99.0% KLD: 8.742259 +Median KLD: 0.715601 +10.0% KLD: 0.071172 + 5.0% KLD: 0.025864 + 1.0% KLD: 0.004589 +Minimum KLD: 0.000142 + +====== Token probability statistics ====== +Mean Δp: -17.171 ± 0.083 % +Maximum Δp: 92.904% +99.9% Δp: 72.496% +99.0% Δp: 48.216% +95.0% Δp: 22.569% +90.0% Δp: 10.000% +75.0% Δp: 0.011% +Median Δp: -4.438% +25.0% Δp: -30.853% +10.0% Δp: -71.739% + 5.0% Δp: -90.613% + 1.0% Δp: -99.767% + 0.1% Δp: -99.967% +Minimum Δp: -99.998% +RMS Δp : 36.745 ± 0.088 % +Same top p: 64.223 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.tqa new file mode 100644 index 0000000..5bd4e11 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf (version GGUF V3 (latest)) + +Final result: 38.1333 +/- 1.7748 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 920.15 ms +llama_perf_context_print: prompt eval time = 224775.45 ms / 51053 tokens ( 4.40 ms per token, 227.13 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 226389.03 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.wng new file mode 100644 index 0000000..c8cf658 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-iq4_nl.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-IQ4_NL.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 74.4000 +/- 1.5947 + +llama_perf_context_print: load time = 863.03 ms +llama_perf_context_print: prompt eval time = 98796.90 ms / 22541 tokens ( 4.38 ms per token, 228.15 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 99295.86 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.arc new file mode 100644 index 0000000..6c5126d --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf (version GGUF V3 (latest)) + +Final result: 67.2000 +/- 1.7155 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 4789.37 ms +llama_perf_context_print: prompt eval time = 171359.80 ms / 36666 tokens ( 4.67 ms per token, 213.97 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 172182.70 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.hsw new file mode 100644 index 0000000..390830a --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.hsw @@ -0,0 +1,20 @@ +build: 5770 
(b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf (version GGUF V3 (latest)) + +750 80.26666667% [77.2672%, 82.9576%] + + +llama_perf_context_print: load time = 819.35 ms +llama_perf_context_print: prompt eval time = 605874.35 ms / 129319 tokens ( 4.69 ms per token, 213.44 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 609526.15 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.mmlu new file mode 100644 index 0000000..92869b1 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf (version GGUF V3 (latest)) + +Final result: 43.2000 +/- 
1.8100 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 873.17 ms +llama_perf_context_print: prompt eval time = 318870.48 ms / 68956 tokens ( 4.62 ms per token, 216.25 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 320164.35 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.ppx new file mode 100644 index 0000000..f36ab2c --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 19.313300 ± 0.150799 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 74.61% +Mean ln(PPL(Q)/PPL(base)) : 1.139382 ± 0.005262 +Mean PPL(Q)/PPL(base) : 3.124838 ± 0.016443 +Mean PPL(Q)-PPL(base) : 13.132723 ± 0.123247 + +====== KL divergence statistics ====== +Mean KLD: 1.248712 ± 0.004216 +Maximum KLD: 28.765745 +99.9% KLD: 13.682988 +99.0% KLD: 8.611128 +99.0% KLD: 8.611128 +Median KLD: 0.769048 +10.0% KLD: 0.075574 + 5.0% KLD: 0.027425 + 1.0% KLD: 0.004777 +Minimum KLD: 0.000121 + +====== Token probability statistics ====== +Mean Δp: -17.672 ± 0.084 % +Maximum Δp: 94.089% +99.9% Δp: 73.751% +99.0% Δp: 50.009% +95.0% Δp: 22.814% +90.0% Δp: 9.454% +75.0% 
Δp: 0.004% +Median Δp: -4.889% +25.0% Δp: -32.188% +10.0% Δp: -72.799% + 5.0% Δp: -91.232% + 1.0% Δp: -99.761% + 0.1% Δp: -99.966% +Minimum Δp: -99.998% +RMS Δp : 37.260 ± 0.088 % +Same top p: 62.749 ± 0.124 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.tqa new file mode 100644 index 0000000..a587f2c --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf (version GGUF V3 (latest)) + +Final result: 39.6000 +/- 1.7870 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 858.54 ms +llama_perf_context_print: prompt eval time = 243423.07 ms / 51053 tokens ( 4.77 ms per token, 209.73 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 245001.45 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.wng new file mode 100644 
index 0000000..a0e9485 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_l.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_L.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.9333 +/- 1.6235 + +llama_perf_context_print: load time = 903.93 ms +llama_perf_context_print: prompt eval time = 106212.48 ms / 22541 tokens ( 4.71 ms per token, 212.23 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 106738.43 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.arc new file mode 100644 index 0000000..8b507f7 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 
345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf (version GGUF V3 (latest)) + +Final result: 66.6667 +/- 1.7225 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 4267.81 ms +llama_perf_context_print: prompt eval time = 165656.48 ms / 36666 tokens ( 4.52 ms per token, 221.34 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 166474.66 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.hsw new file mode 100644 index 0000000..9e2b780 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf (version GGUF V3 (latest)) + +750 80.66666667% [77.6870%, 83.3338%] + + +llama_perf_context_print: load time = 756.25 ms +llama_perf_context_print: prompt eval time = 584702.34 ms / 129319 tokens ( 4.52 ms per token, 221.17 tokens per second) +llama_perf_context_print: eval 
time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 588407.02 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.mmlu new file mode 100644 index 0000000..9aba027 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf (version GGUF V3 (latest)) + +Final result: 43.8667 +/- 1.8132 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 809.88 ms +llama_perf_context_print: prompt eval time = 300457.60 ms / 68956 tokens ( 4.36 ms per token, 229.50 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 301704.70 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: 
freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.ppx new file mode 100644 index 0000000..81c61bb --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.723777 ± 0.145380 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 75.90% +Mean ln(PPL(Q)/PPL(base)) : 1.108382 ± 0.005110 +Mean PPL(Q)/PPL(base) : 3.029454 ± 0.015481 +Mean PPL(Q)-PPL(base) : 12.543199 ± 0.117315 + +====== KL divergence statistics ====== +Mean KLD: 1.226150 ± 0.004006 +Maximum KLD: 27.303829 +99.9% KLD: 13.319038 +99.0% KLD: 8.045850 +99.0% KLD: 8.045850 +Median KLD: 0.778573 +10.0% KLD: 0.072866 + 5.0% KLD: 0.026539 + 1.0% KLD: 0.004645 +Minimum KLD: 0.000149 + +====== Token probability statistics ====== +Mean Δp: -17.217 ± 0.084 % +Maximum Δp: 93.510% +99.9% Δp: 74.449% +99.0% Δp: 50.740% +95.0% Δp: 23.161% +90.0% Δp: 9.707% +75.0% Δp: 0.007% +Median Δp: -4.675% +25.0% Δp: -31.413% +10.0% Δp: -71.201% + 5.0% Δp: -90.111% + 1.0% Δp: -99.699% + 0.1% Δp: -99.959% +Minimum Δp: -99.998% +RMS Δp : 36.807 ± 0.087 % +Same top p: 63.002 ± 0.124 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.tqa new file mode 100644 index 0000000..65c50e5 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 
+llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf (version GGUF V3 (latest)) + +Final result: 39.4667 +/- 1.7860 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 794.64 ms +llama_perf_context_print: prompt eval time = 226374.98 ms / 51053 tokens ( 4.43 ms per token, 225.52 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 228005.49 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.wng new file mode 100644 index 0000000..1f3acb8 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_m.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_M.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.2667 +/- 1.6358 + +llama_perf_context_print: load time = 
786.85 ms +llama_perf_context_print: prompt eval time = 98570.95 ms / 22541 tokens ( 4.37 ms per token, 228.68 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 99099.17 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.arc new file mode 100644 index 0000000..e4bf211 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf (version GGUF V3 (latest)) + +Final result: 66.2667 +/- 1.7276 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 4024.90 ms +llama_perf_context_print: prompt eval time = 162864.58 ms / 36666 tokens ( 4.44 ms per token, 225.13 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 163701.55 ms / 36667 tokens +ggml_metal_free: deallocating 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.hsw new file mode 100644 index 0000000..5d163a9 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf (version GGUF V3 (latest)) + +750 78.93333333% [75.8712%, 81.7006%] + + +llama_perf_context_print: load time = 717.95 ms +llama_perf_context_print: prompt eval time = 573377.50 ms / 129319 tokens ( 4.43 ms per token, 225.54 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 577152.30 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.mmlu new file mode 100644 index 0000000..cc9d103 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf (version GGUF V3 (latest)) + +Final result: 43.7333 +/- 1.8126 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 729.81 ms +llama_perf_context_print: prompt eval time = 302270.16 ms / 68956 tokens ( 4.38 ms per token, 228.13 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 303540.55 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.ppx new 
file mode 100644 index 0000000..250bd28 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 19.765437 ± 0.153182 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 74.13% +Mean ln(PPL(Q)/PPL(base)) : 1.162523 ± 0.005278 +Mean PPL(Q)/PPL(base) : 3.197992 ± 0.016878 +Mean PPL(Q)-PPL(base) : 13.584860 ± 0.125811 + +====== KL divergence statistics ====== +Mean KLD: 1.295119 ± 0.004177 +Maximum KLD: 27.306818 +99.9% KLD: 13.228414 +99.0% KLD: 8.401047 +99.0% KLD: 8.401047 +Median KLD: 0.824191 +10.0% KLD: 0.081751 + 5.0% KLD: 0.029987 + 1.0% KLD: 0.004980 +Minimum KLD: 0.000157 + +====== Token probability statistics ====== +Mean Δp: -18.417 ± 0.085 % +Maximum Δp: 92.683% +99.9% Δp: 74.856% +99.0% Δp: 50.844% +95.0% Δp: 22.899% +90.0% Δp: 9.183% +75.0% Δp: 0.001% +Median Δp: -5.493% +25.0% Δp: -34.119% +10.0% Δp: -74.211% + 5.0% Δp: -91.714% + 1.0% Δp: -99.759% + 0.1% Δp: -99.964% +Minimum Δp: -99.995% +RMS Δp : 38.004 ± 0.087 % +Same top p: 61.136 ± 0.125 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.tqa new file mode 100644 index 0000000..5d8a8db --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf (version GGUF V3 (latest)) + +Final result: 38.1333 +/- 1.7748 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 713.93 ms +llama_perf_context_print: prompt eval time = 230959.05 ms / 51053 tokens ( 4.52 ms per token, 221.05 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms 
per token, inf tokens per second) +llama_perf_context_print: total time = 232565.83 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.wng new file mode 100644 index 0000000..dfdfa99 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q3_k_s.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q3_K_S.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.8000 +/- 1.6260 + +llama_perf_context_print: load time = 732.76 ms +llama_perf_context_print: prompt eval time = 100522.70 ms / 22541 tokens ( 4.46 ms per token, 224.24 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 101025.93 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.arc new file mode 100644 index 0000000..4327f8d --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf (version GGUF V3 (latest)) + +Final result: 68.0000 +/- 1.7045 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 5558.39 ms +llama_perf_context_print: prompt eval time = 161414.80 ms / 36666 tokens ( 4.40 ms per token, 227.15 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 162273.65 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num 
heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.hsw new file mode 100644 index 0000000..c4d284a --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf (version GGUF V3 (latest)) + +750 80.93333333% [77.9671%, 83.5843%] + + +llama_perf_context_print: load time = 911.43 ms +llama_perf_context_print: prompt eval time = 570839.24 ms / 129319 tokens ( 4.41 ms per token, 226.54 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 574520.13 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.mmlu new file mode 100644 index 0000000..4fe37ed --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for 
arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf (version GGUF V3 (latest)) + +Final result: 45.2000 +/- 1.8185 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 974.87 ms +llama_perf_context_print: prompt eval time = 300441.31 ms / 68956 tokens ( 4.36 ms per token, 229.52 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 301727.28 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.ppx new file mode 100644 index 0000000..505cafc --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.556910 ± 0.145472 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 74.92% +Mean ln(PPL(Q)/PPL(base)) : 1.099431 ± 0.005248 +Mean PPL(Q)/PPL(base) : 3.002456 ± 0.015757 +Mean PPL(Q)-PPL(base) : 12.376333 ± 0.117900 + +====== KL divergence statistics ====== +Mean KLD: 1.187728 ± 0.004237 +Maximum KLD: 27.586824 +99.9% KLD: 14.366529 +99.0% KLD: 8.795478 
+99.0% KLD: 8.795478 +Median KLD: 0.711806 +10.0% KLD: 0.068614 + 5.0% KLD: 0.024978 + 1.0% KLD: 0.004366 +Minimum KLD: 0.000131 + +====== Token probability statistics ====== +Mean Δp: -16.803 ± 0.083 % +Maximum Δp: 94.144% +99.9% Δp: 72.562% +99.0% Δp: 49.033% +95.0% Δp: 23.292% +90.0% Δp: 10.354% +75.0% Δp: 0.021% +Median Δp: -4.202% +25.0% Δp: -30.211% +10.0% Δp: -70.964% + 5.0% Δp: -90.473% + 1.0% Δp: -99.772% + 0.1% Δp: -99.967% +Minimum Δp: -99.998% +RMS Δp : 36.521 ± 0.088 % +Same top p: 63.986 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.tqa new file mode 100644 index 0000000..bff4d28 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf (version GGUF V3 (latest)) + +Final result: 36.6667 +/- 1.7608 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 953.61 ms +llama_perf_context_print: prompt eval time = 229781.03 ms / 51053 tokens ( 4.50 ms per token, 222.18 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 231385.66 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps 
= 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.wng new file mode 100644 index 0000000..af2615e --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_m.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_M.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.1333 +/- 1.6382 + +llama_perf_context_print: load time = 981.15 ms +llama_perf_context_print: prompt eval time = 100004.38 ms / 22541 tokens ( 4.44 ms per token, 225.40 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 100554.46 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.arc new file mode 100644 index 0000000..fec30eb --- /dev/null +++ 
b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf (version GGUF V3 (latest)) + +Final result: 67.0667 +/- 1.7172 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 5272.61 ms +llama_perf_context_print: prompt eval time = 161319.58 ms / 36666 tokens ( 4.40 ms per token, 227.29 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 162146.27 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.hsw new file mode 100644 index 0000000..e17d440 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from 
./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf (version GGUF V3 (latest)) + +750 81.06666667% [78.1072%, 83.7095%] + + +llama_perf_context_print: load time = 868.43 ms +llama_perf_context_print: prompt eval time = 569329.51 ms / 129319 tokens ( 4.40 ms per token, 227.14 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 573001.51 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.mmlu new file mode 100644 index 0000000..fc2900a --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf (version GGUF V3 (latest)) + +Final result: 45.2000 +/- 1.8185 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 885.80 ms +llama_perf_context_print: prompt eval time = 300092.31 ms / 68956 tokens ( 4.35 ms per token, 229.78 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 
1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 301434.44 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.ppx new file mode 100644 index 0000000..5b92ff1 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.663517 ± 0.146425 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 74.87% +Mean ln(PPL(Q)/PPL(base)) : 1.105159 ± 0.005257 +Mean PPL(Q)/PPL(base) : 3.019704 ± 0.015873 +Mean PPL(Q)-PPL(base) : 12.482940 ± 0.118853 + +====== KL divergence statistics ====== +Mean KLD: 1.192878 ± 0.004250 +Maximum KLD: 27.191839 +99.9% KLD: 14.356927 +99.0% KLD: 8.760485 +99.0% KLD: 8.760485 +Median KLD: 0.713994 +10.0% KLD: 0.069566 + 5.0% KLD: 0.025085 + 1.0% KLD: 0.004382 +Minimum KLD: 0.000130 + +====== Token probability statistics ====== +Mean Δp: -16.887 ± 0.083 % +Maximum Δp: 94.834% +99.9% Δp: 72.359% +99.0% Δp: 48.972% +95.0% Δp: 23.124% +90.0% Δp: 10.343% +75.0% Δp: 0.019% +Median Δp: -4.257% +25.0% Δp: -30.401% +10.0% Δp: -71.279% + 5.0% Δp: -90.560% + 1.0% Δp: -99.771% + 0.1% Δp: -99.966% +Minimum Δp: -99.998% +RMS Δp : 36.598 ± 0.088 % +Same top p: 63.979 ± 0.123 % diff --git 
a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.tqa new file mode 100644 index 0000000..a92437d --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf (version GGUF V3 (latest)) + +Final result: 36.2667 +/- 1.7567 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 868.82 ms +llama_perf_context_print: prompt eval time = 229327.93 ms / 51053 tokens ( 4.49 ms per token, 222.62 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 230969.11 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.wng new file mode 100644 index 0000000..2879fa8 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q4_k_s.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 
+llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q4_K_S.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 72.0000 +/- 1.6406 + +llama_perf_context_print: load time = 885.47 ms +llama_perf_context_print: prompt eval time = 99462.32 ms / 22541 tokens ( 4.41 ms per token, 226.63 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 99973.97 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.arc new file mode 100644 index 0000000..5e04d99 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf (version GGUF V3 (latest)) + +Final result: 67.0667 +/- 1.7172 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 
6034.32 ms +llama_perf_context_print: prompt eval time = 171299.05 ms / 36666 tokens ( 4.67 ms per token, 214.05 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 172122.97 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.hsw new file mode 100644 index 0000000..1b90b3b --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf (version GGUF V3 (latest)) + +750 81.73333333% [78.8086%, 84.3346%] + + +llama_perf_context_print: load time = 1034.44 ms +llama_perf_context_print: prompt eval time = 604834.04 ms / 129319 tokens ( 4.68 ms per token, 213.81 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 608474.71 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, 
num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.mmlu new file mode 100644 index 0000000..fac4dc4 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf (version GGUF V3 (latest)) + +Final result: 44.5333 +/- 1.8160 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 1042.58 ms +llama_perf_context_print: prompt eval time = 317286.78 ms / 68956 tokens ( 4.60 ms per token, 217.33 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 318545.57 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: 
freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.ppx new file mode 100644 index 0000000..0c80939 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.174846 ± 0.142320 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 75.14% +Mean ln(PPL(Q)/PPL(base)) : 1.078627 ± 0.005222 +Mean PPL(Q)/PPL(base) : 2.940639 ± 0.015356 +Mean PPL(Q)-PPL(base) : 11.994269 ± 0.114726 + +====== KL divergence statistics ====== +Mean KLD: 1.159685 ± 0.004238 +Maximum KLD: 28.100733 +99.9% KLD: 14.541190 +99.0% KLD: 8.790474 +99.0% KLD: 8.790474 +Median KLD: 0.682733 +10.0% KLD: 0.066346 + 5.0% KLD: 0.024022 + 1.0% KLD: 0.004240 +Minimum KLD: 0.000159 + +====== Token probability statistics ====== +Mean Δp: -16.552 ± 0.083 % +Maximum Δp: 94.307% +99.9% Δp: 72.111% +99.0% Δp: 47.813% +95.0% Δp: 23.079% +90.0% Δp: 10.497% +75.0% Δp: 0.030% +Median Δp: -4.029% +25.0% Δp: -29.474% +10.0% Δp: -70.364% + 5.0% Δp: -90.072% + 1.0% Δp: -99.761% + 0.1% Δp: -99.966% +Minimum Δp: -99.998% +RMS Δp : 36.214 ± 0.088 % +Same top p: 64.455 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.tqa new file mode 100644 index 0000000..6d4a7ae --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf (version GGUF V3 
(latest)) + +Final result: 37.8667 +/- 1.7724 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 1018.74 ms +llama_perf_context_print: prompt eval time = 243028.85 ms / 51053 tokens ( 4.76 ms per token, 210.07 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 244621.86 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.wng new file mode 100644 index 0000000..833fddf --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_m.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_M.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 73.8667 +/- 1.6054 + +llama_perf_context_print: load time = 1080.02 ms +llama_perf_context_print: prompt eval time = 105347.75 ms / 22541 tokens ( 4.67 ms per token, 213.97 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) 
+llama_perf_context_print: total time = 105836.33 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.arc new file mode 100644 index 0000000..dfc0a2a --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf (version GGUF V3 (latest)) + +Final result: 67.3333 +/- 1.7137 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 5824.40 ms +llama_perf_context_print: prompt eval time = 172340.28 ms / 36666 tokens ( 4.70 ms per token, 212.75 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 173198.86 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: 
freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.hsw new file mode 100644 index 0000000..8abfbeb --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf (version GGUF V3 (latest)) + +750 81.46666667% [78.5279%, 84.0847%] + + +llama_perf_context_print: load time = 1049.43 ms +llama_perf_context_print: prompt eval time = 592449.23 ms / 129319 tokens ( 4.58 ms per token, 218.28 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 596100.86 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git 
a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.mmlu new file mode 100644 index 0000000..fbb7b47 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf (version GGUF V3 (latest)) + +Final result: 44.2667 +/- 1.8149 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 1030.41 ms +llama_perf_context_print: prompt eval time = 307801.57 ms / 68956 tokens ( 4.46 ms per token, 224.03 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 309115.30 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.ppx new file mode 100644 index 0000000..4568af6 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.199918 ± 0.142513 +Mean PPL(base) : 6.180577 ± 
0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 75.20% +Mean ln(PPL(Q)/PPL(base)) : 1.080005 ± 0.005216 +Mean PPL(Q)/PPL(base) : 2.944695 ± 0.015358 +Mean PPL(Q)-PPL(base) : 12.019340 ± 0.114880 + +====== KL divergence statistics ====== +Mean KLD: 1.160040 ± 0.004229 +Maximum KLD: 27.444889 +99.9% KLD: 14.604442 +99.0% KLD: 8.763094 +99.0% KLD: 8.763094 +Median KLD: 0.682655 +10.0% KLD: 0.066186 + 5.0% KLD: 0.023837 + 1.0% KLD: 0.004326 +Minimum KLD: 0.000148 + +====== Token probability statistics ====== +Mean Δp: -16.565 ± 0.083 % +Maximum Δp: 94.238% +99.9% Δp: 72.114% +99.0% Δp: 47.597% +95.0% Δp: 23.064% +90.0% Δp: 10.544% +75.0% Δp: 0.030% +Median Δp: -4.020% +25.0% Δp: -29.497% +10.0% Δp: -70.424% + 5.0% Δp: -90.053% + 1.0% Δp: -99.759% + 0.1% Δp: -99.965% +Minimum Δp: -99.998% +RMS Δp : 36.220 ± 0.088 % +Same top p: 64.431 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.tqa new file mode 100644 index 0000000..f77b7b2 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf (version GGUF V3 (latest)) + +Final result: 38.6667 +/- 1.7794 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 984.39 ms +llama_perf_context_print: prompt eval time = 235496.75 ms / 51053 tokens ( 4.61 ms per token, 216.79 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 237131.13 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) 
+ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.wng new file mode 100644 index 0000000..a6fb105 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q5_k_s.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q5_K_S.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 74.2667 +/- 1.5974 + +llama_perf_context_print: load time = 1051.46 ms +llama_perf_context_print: prompt eval time = 102348.25 ms / 22541 tokens ( 4.54 ms per token, 220.24 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 102869.23 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 
0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.arc new file mode 100644 index 0000000..0582682 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf (version GGUF V3 (latest)) + +Final result: 67.4667 +/- 1.7119 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 7664.20 ms +llama_perf_context_print: prompt eval time = 175407.42 ms / 36666 tokens ( 4.78 ms per token, 209.03 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 176202.75 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.hsw new file mode 100644 index 0000000..83e76a0 --- /dev/null +++ 
b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf (version GGUF V3 (latest)) + +750 81.06666667% [78.1072%, 83.7095%] + + +llama_perf_context_print: load time = 1225.60 ms +llama_perf_context_print: prompt eval time = 622423.40 ms / 129319 tokens ( 4.81 ms per token, 207.77 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 626130.15 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.mmlu new file mode 100644 index 0000000..02e475b --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from 
./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf (version GGUF V3 (latest)) + +Final result: 44.5333 +/- 1.8160 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 1255.72 ms +llama_perf_context_print: prompt eval time = 327772.92 ms / 68956 tokens ( 4.75 ms per token, 210.38 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 329047.99 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.ppx new file mode 100644 index 0000000..e1e6e97 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.213825 ± 0.142965 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 75.05% +Mean ln(PPL(Q)/PPL(base)) : 1.080769 ± 0.005241 +Mean PPL(Q)/PPL(base) : 2.946946 ± 0.015446 +Mean PPL(Q)-PPL(base) : 12.033248 ± 0.115399 + +====== KL divergence statistics ====== +Mean KLD: 1.158026 ± 0.004262 +Maximum KLD: 26.265640 +99.9% KLD: 14.634221 +99.0% KLD: 8.878881 +99.0% KLD: 8.878881 +Median KLD: 0.679157 +10.0% KLD: 0.065587 + 5.0% KLD: 0.023885 + 1.0% KLD: 0.004187 +Minimum KLD: 0.000139 + +====== Token probability statistics ====== +Mean Δp: -16.509 ± 0.083 
% +Maximum Δp: 94.239% +99.9% Δp: 72.141% +99.0% Δp: 48.282% +95.0% Δp: 23.325% +90.0% Δp: 10.605% +75.0% Δp: 0.032% +Median Δp: -4.000% +25.0% Δp: -29.434% +10.0% Δp: -70.325% + 5.0% Δp: -90.061% + 1.0% Δp: -99.771% + 0.1% Δp: -99.967% +Minimum Δp: -99.998% +RMS Δp : 36.219 ± 0.088 % +Same top p: 64.447 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.tqa new file mode 100644 index 0000000..ac4b9b7 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.tqa @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf (version GGUF V3 (latest)) + +Final result: 39.6000 +/- 1.7870 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 1251.53 ms +llama_perf_context_print: prompt eval time = 250368.11 ms / 51053 tokens ( 4.90 ms per token, 203.91 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 251979.66 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git 
a/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.wng new file mode 100644 index 0000000..5561faf --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q6_k.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q6_K.gguf (version GGUF V3 (latest)) + +Final Winogrande score(750 tasks): 73.8667 +/- 1.6054 + +llama_perf_context_print: load time = 1250.90 ms +llama_perf_context_print: prompt eval time = 108113.49 ms / 22541 tokens ( 4.80 ms per token, 208.49 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 108625.99 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.arc b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.arc new file mode 100644 index 0000000..9f2d143 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.arc @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 
+llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf (version GGUF V3 (latest)) + +Final result: 68.1333 +/- 1.7026 +Random chance: 25.0083 +/- 1.5824 + + +llama_perf_context_print: load time = 10486.40 ms +llama_perf_context_print: prompt eval time = 161243.77 ms / 36666 tokens ( 4.40 ms per token, 227.39 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 162055.73 ms / 36667 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.hsw b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.hsw new file mode 100644 index 0000000..b0813b0 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.hsw @@ -0,0 +1,20 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf (version GGUF V3 (latest)) + +750 81.33333333% [78.3876%, 83.9597%] + + +llama_perf_context_print: load time = 1550.41 ms 
+llama_perf_context_print: prompt eval time = 553902.33 ms / 129319 tokens ( 4.28 ms per token, 233.47 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 557360.27 ms / 129320 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.mmlu b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.mmlu new file mode 100644 index 0000000..357b62e --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.mmlu @@ -0,0 +1,21 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf (version GGUF V3 (latest)) + +Final result: 44.9333 +/- 1.8176 +Random chance: 25.0000 +/- 1.5822 + + +llama_perf_context_print: load time = 1526.16 ms +llama_perf_context_print: prompt eval time = 290805.14 ms / 68956 tokens ( 4.22 ms per token, 237.12 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 292006.82 ms / 68957 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing 
memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.ppx b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.ppx new file mode 100644 index 0000000..a7983dd --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.ppx @@ -0,0 +1,37 @@ +====== Perplexity statistics ====== +Mean PPL(Q) : 18.203515 ± 0.142826 +Mean PPL(base) : 6.180577 ± 0.041038 +Cor(ln(PPL(Q)), ln(PPL(base))): 75.02% +Mean ln(PPL(Q)/PPL(base)) : 1.080203 ± 0.005242 +Mean PPL(Q)/PPL(base) : 2.945277 ± 0.015439 +Mean PPL(Q)-PPL(base) : 12.022938 ± 0.115276 + +====== KL divergence statistics ====== +Mean KLD: 1.158351 ± 0.004265 +Maximum KLD: 27.082415 +99.9% KLD: 14.510898 +99.0% KLD: 8.873251 +99.0% KLD: 8.873251 +Median KLD: 0.678687 +10.0% KLD: 0.065954 + 5.0% KLD: 0.024006 + 1.0% KLD: 0.004161 +Minimum KLD: 0.000141 + +====== Token probability statistics ====== +Mean Δp: -16.515 ± 0.083 % +Maximum Δp: 94.585% +99.9% Δp: 72.074% +99.0% Δp: 48.296% +95.0% Δp: 23.169% +90.0% Δp: 10.662% +75.0% Δp: 0.031% +Median Δp: -4.006% +25.0% Δp: -29.412% +10.0% Δp: -70.362% + 5.0% Δp: -90.143% + 1.0% Δp: -99.767% + 0.1% Δp: -99.966% +Minimum Δp: -99.998% +RMS Δp : 36.227 ± 0.088 % +Same top p: 64.562 ± 0.123 % diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.tqa b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.tqa new file mode 100644 index 0000000..f858242 --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.tqa @@ -0,0 +1,21 
@@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf (version GGUF V3 (latest)) + +Final result: 38.2667 +/- 1.7759 +Random chance: 19.8992 +/- 1.4588 + + +llama_perf_context_print: load time = 1498.38 ms +llama_perf_context_print: prompt eval time = 227802.62 ms / 51053 tokens ( 4.46 ms per token, 224.11 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 229374.00 ms / 51054 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) diff --git a/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.wng b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.wng new file mode 100644 index 0000000..870cbad --- /dev/null +++ b/scores/Dolphin-Mistral-24B-Venice-Edition-q8_0.wng @@ -0,0 +1,19 @@ +build: 5770 (b25e9277) with Apple clang version 17.0.0 (clang-1700.0.13.3) for arm64-apple-darwin24.4.0 +llama_model_load_from_file_impl: using device Metal (Apple M4 Max) - 49151 MiB free +llama_model_loader: loaded meta data with 43 key-value pairs and 345 tensors from ./Dolphin-Mistral-24B-Venice-Edition-pruned-Q8_0.gguf (version GGUF V3 (latest)) + 
+Final Winogrande score(750 tasks): 74.4000 +/- 1.5947 + +llama_perf_context_print: load time = 1463.29 ms +llama_perf_context_print: prompt eval time = 100036.10 ms / 22541 tokens ( 4.44 ms per token, 225.33 tokens per second) +llama_perf_context_print: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second) +llama_perf_context_print: total time = 100546.29 ms / 22542 tokens +ggml_metal_free: deallocating +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0) +ggml_metal_mem_pool_free: freeing memory pool, num heaps = 0 (total = 0)