Initialize project; model provided by the ModelHub XC community

Model: suayptalha/Maestro-10B
Source: Original Platform
ModelHub XC
2026-04-13 10:01:58 +08:00
commit 63d1681c1f
22 changed files with 17410 additions and 0 deletions

49
.gitattributes vendored Normal file

@@ -0,0 +1,49 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bin.* filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zstandard filter=lfs diff=lfs merge=lfs -text
*.tfevents* filter=lfs diff=lfs merge=lfs -text
*.db* filter=lfs diff=lfs merge=lfs -text
*.ark* filter=lfs diff=lfs merge=lfs -text
**/*ckpt*data* filter=lfs diff=lfs merge=lfs -text
**/*ckpt*.meta filter=lfs diff=lfs merge=lfs -text
**/*ckpt*.index filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.gguf* filter=lfs diff=lfs merge=lfs -text
*.ggml filter=lfs diff=lfs merge=lfs -text
*.llamafile* filter=lfs diff=lfs merge=lfs -text
*.pt2 filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text

BIN
Maestro-Logo.png Normal file

Binary file not shown (PNG, 204 KiB).

639
README.md Normal file

@@ -0,0 +1,639 @@
---
language:
- en
base_model:
- arcee-ai/Virtuoso-Lite
datasets:
- Open-Orca/OpenOrca
pipeline_tag: text-generation
license: other
license_name: falcon-llm-license
license_link: https://falconllm.tii.ae/falcon-terms-and-conditions.html
library_name: transformers
tags:
- unsloth
- trl
- sft
---
<!DOCTYPE html>
<style>
body {
font-family: 'Quicksand', sans-serif;
background: linear-gradient(135deg, #FF69B4 0%, #800080 100%);
color: #FFFFFF;
margin: 0;
padding: 0;
font-size: 16px;
min-height: 100vh;
}
.container {
margin: 20px;
background-color: rgba(28, 14, 36, 0.95);
padding: 20px;
border-radius: 12px;
box-shadow: 0 4px 20px rgba(255, 105, 180, 0.4);
border: 1px solid rgba(255, 105, 180, 0.4);
outline: 1px solid rgba(255, 105, 180, 0.7);
outline-offset: -1px;
position: relative;
backdrop-filter: blur(10px);
}
.container::before {
content: '';
position: absolute;
top: -1px;
left: -1px;
right: -1px;
bottom: -1px;
border: 1px solid rgba(255, 105, 180, 0.98);
border-radius: 12px;
pointer-events: none;
animation: borderGlow 2s ease-in-out infinite;
}
@keyframes borderGlow {
0% {
box-shadow: 0 0 5px rgba(255, 105, 180, 0.98);
}
50% {
box-shadow: 0 0 20px rgba(255, 105, 180, 0.98);
}
100% {
box-shadow: 0 0 5px rgba(255, 105, 180, 0.98);
}
}
.header h1 {
font-size: 28px;
color: #FF69B4;
margin: 0 0 20px 0;
text-shadow: 0 0 15px rgba(255, 105, 180, 0.8);
letter-spacing: 1px;
}
.update-section {
margin-top: 30px;
}
.update-section h2, h2 {
font-size: 24px;
color: #FF69B4;
text-shadow: 0 0 15px rgba(255, 105, 180, 0.8);
letter-spacing: 0.5px;
}
.update-section p {
font-size: 16px;
line-height: 1.6;
color: #FFE1FF;
}
.info p {
color: #FFE1FF;
line-height: 1.6;
font-size: 16px;
}
.info img {
width: 100%;
border-radius: 10px;
margin-bottom: 15px;
box-shadow: 0 0 30px rgba(255, 105, 180, 0.5);
border: 1px solid rgba(255, 105, 180, 0.4);
outline: 1px solid rgba(255, 105, 180, 0.7);
outline-offset: -1px;
transition: transform 0.3s ease, box-shadow 0.3s ease;
}
.info img:hover {
transform: scale(1.01);
box-shadow: 0 0 40px rgba(255, 105, 180, 0.6);
}
a {
color: #00FFEE;
text-decoration: none;
transition: color 0.3s ease;
}
a:hover {
color: #FF1493;
}
.button {
display: inline-block;
background: linear-gradient(45deg, rgba(255, 105, 180, 0.9), rgba(128, 0, 128, 0.9));
color: #FFFFFF;
padding: 12px 24px;
border-radius: 5px;
cursor: pointer;
text-decoration: none;
transition: all 0.3s ease;
border: 1px solid rgba(255, 105, 180, 0.4);
}
.button:hover {
background: linear-gradient(45deg, rgba(255, 105, 180, 1), rgba(128, 0, 128, 1));
box-shadow: 0 0 20px rgba(255, 105, 180, 0.7);
transform: translateY(-2px);
}
pre {
background-color: rgba(28, 14, 36, 0.95);
padding: 15px;
border-radius: 5px;
overflow-x: auto;
border: 1px solid rgba(255, 20, 147, 0.3);
outline: 1px solid rgba(255, 20, 147, 0.6);
outline-offset: -1px;
}
code {
font-family: 'Courier New', monospace;
color: #FFE1FF;
}
.info-grid {
display: grid;
grid-template-columns: repeat(3, 1fr);
gap: 15px;
}
.creator-section {
margin: 20px 0;
}
.creator-badge {
display: inline-flex;
align-items: center;
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 10px 15px;
}
.creator-label {
color: #FFE1FF;
font-size: 14px;
margin-right: 8px;
}
.creator-link {
display: flex;
align-items: center;
gap: 5px;
color: #00FFEE;
text-decoration: none;
transition: all 0.3s ease;
}
.creator-name {
font-weight: 600;
}
.creator-arrow {
font-size: 16px;
transition: transform 0.3s ease;
}
.creator-link:hover {
color: #FF1493;
}
.creator-link:hover .creator-arrow {
transform: translateX(3px);
}
.model-info {
margin-top: 30px;
}
.name-legend {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 20px;
margin: 20px 0;
}
.name-legend h3 {
color: #FF1493;
font-size: 18px;
margin: 0 0 15px 0;
}
.legend-grid {
display: grid;
gap: 12px;
}
.legend-item {
display: flex;
align-items: baseline;
gap: 10px;
}
.legend-key {
color: #00FFEE;
font-weight: 600;
min-width: 80px;
}
.legend-value {
color: #FFE1FF;
}
.model-description {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 20px;
}
.model-description p {
margin: 0 0 15px 0;
line-height: 1.6;
}
.model-description p:last-child {
margin-bottom: 0;
}
.section-container {
margin: 40px 0;
}
.info-card {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
overflow: hidden;
}
.info-header {
background: rgba(255, 20, 147, 0.1);
padding: 20px;
border-bottom: 1px solid rgba(255, 20, 147, 0.3);
}
.info-header h3 {
color: #FF1493;
margin: 0 0 10px 0;
font-size: 20px;
text-shadow: 0 0 5px rgba(255, 20, 147, 0.3);
}
.model-tags {
display: flex;
gap: 8px;
flex-wrap: wrap;
}
.model-tag {
background: rgba(0, 255, 238, 0.1);
color: #00FFEE;
padding: 4px 8px;
border-radius: 4px;
font-size: 12px;
border: 1px solid rgba(0, 255, 238, 0.2);
}
.model-composition {
padding: 20px;
border-bottom: 1px solid rgba(255, 20, 147, 0.3);
}
.model-composition h4 {
color: #FF1493;
margin: 0 0 15px 0;
font-size: 16px;
}
.composition-list {
list-style: none;
padding: 0;
margin: 0;
display: grid;
gap: 10px;
}
.composition-list li {
color: #FFE1FF;
display: flex;
align-items: baseline;
gap: 8px;
}
.model-component {
color: #00FFEE;
font-weight: 500;
min-width: 120px;
}
.template-card {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 15px;
}
.template-item {
display: flex;
align-items: center;
gap: 12px;
}
.template-icon {
width: 24px;
height: 24px;
opacity: 0.8;
}
.template-content {
display: flex;
align-items: baseline;
gap: 8px;
}
.template-link {
color: #00FFEE;
text-decoration: none;
font-weight: 500;
display: flex;
align-items: center;
gap: 5px;
}
.template-author {
color: rgba(255, 225, 255, 0.7);
font-size: 14px;
}
.quantized-container {
display: grid;
gap: 20px;
}
.quantized-section {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 20px;
}
.quantized-section h3 {
color: #FF1493;
font-size: 18px;
margin: 0 0 15px 0;
}
.quantized-items {
display: grid;
gap: 12px;
}
.quantized-item {
display: flex;
align-items: baseline;
gap: 10px;
}
.quantized-item .author {
color: rgba(255, 225, 255, 0.7);
min-width: 100px;
}
.multi-links {
display: flex;
align-items: center;
gap: 8px;
}
.separator {
color: rgba(255, 225, 255, 0.5);
}
.config-container {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
overflow: hidden;
}
.config-header {
background: rgba(255, 20, 147, 0.1);
padding: 15px 20px;
border-bottom: 1px solid rgba(255, 20, 147, 0.3);
}
.model-name {
color: #FF1493;
font-weight: 600;
}
.config-content {
padding: 20px;
}
.config-item {
display: flex;
flex-direction: column;
gap: 5px;
margin-bottom: 15px;
}
.config-label {
color: #00FFEE;
font-size: 14px;
font-weight: 500;
}
.config-value {
color: #FFE1FF;
font-family: 'Courier New', monospace;
}
.config-models {
margin-top: 20px;
}
.model-list {
list-style: none;
padding: 0;
margin: 10px 0 0 0;
}
.model-list li {
color: #FFE1FF;
font-family: 'Courier New', monospace;
padding: 5px 0;
padding-left: 20px;
position: relative;
}
.model-list li::before {
content: '-';
position: absolute;
left: 0;
color: #00FFEE;
}
.link-arrow {
display: inline-block;
transition: transform 0.3s ease;
}
a:hover .link-arrow {
transform: translateX(3px);
}
.benchmark-notification {
background: rgba(255, 20, 147, 0.15);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
margin-bottom: 20px;
padding: 12px;
animation: glowPulse 2s infinite;
}
.notification-content {
display: flex;
align-items: center;
justify-content: center;
gap: 10px;
text-align: center;
}
.notification-icon {
font-size: 20px;
}
.notification-text {
color: #FFE1FF;
font-size: 16px;
font-weight: 500;
display: flex;
flex-direction: column;
align-items: center;
gap: 5px;
}
.benchmark-link {
color: #00FFEE;
text-decoration: none;
font-size: 14px;
padding: 4px 8px;
border-radius: 4px;
transition: all 0.3s ease;
border: 1px solid rgba(0, 255, 238, 0.3);
}
.benchmark-link:hover {
background: rgba(0, 255, 238, 0.1);
border-color: rgba(0, 255, 238, 0.5);
color: #00FFEE;
text-shadow: 0 0 5px rgba(0, 255, 238, 0.5);
}
@keyframes glowPulse {
0% {
box-shadow: 0 0 5px rgba(255, 20, 147, 0.3);
}
50% {
box-shadow: 0 0 15px rgba(255, 20, 147, 0.5);
}
100% {
box-shadow: 0 0 5px rgba(255, 20, 147, 0.3);
}
}
.review-card {
background: rgba(28, 14, 36, 0.95);
border: 1px solid rgba(255, 20, 147, 0.3);
border-radius: 8px;
padding: 15px;
margin-bottom: 15px;
}
.review-card:last-child {
margin-bottom: 0;
}
</style>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Maestro-10B</title>
<link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@400;500;600&display=swap" rel="stylesheet">
<link href="styles.css" rel="stylesheet">
</head>
<body>
<div class="container">
<div class="header">
<h1>Maestro-10B</h1>
</div>
<div class="info">
<img src="Maestro-Logo.png" alt="Model banner">
<div class="creator-section">
<div class="creator-badge">
<span class="creator-label">Created by</span>
<a href="https://huggingface.co/suayptalha" target="_blank" class="creator-link">
<span class="creator-name">suayptalha</span>
<span class="creator-arrow"></span>
</a>
</div>
</div>
<div class="model-info">
<h2>Model Information</h2>
<div class="info-card">
<div class="info-header">
<h3>Maestro-10B</h3>
<div class="model-tags">
<span class="model-tag">suayptalha/Maestro-10B</span>
<span class="model-tag">arcee-ai/Virtuoso-Lite</span>
<span class="model-tag">DeepSeek-V3</span>
<span class="model-tag">10b Parameters</span>
</div>
</div>
<div class="model-composition">
<h4>Base Model</h4>
<ul class="composition-list">
<li><span class="model-component"><a href="arcee-ai/Virtuoso-Lite" target="_blank">Virtuoso-Lite</a></span></li>
</ul>
</div>
<div class="model-description">
Maestro-10B is a 10-billion-parameter model fine-tuned from Virtuoso-Lite,
a next-generation language model developed by arcee-ai. Virtuoso-Lite itself
is based on the Llama-3 architecture and was distilled from DeepSeek-V3 using
approximately 1.1 billion tokens/logits. This distillation lets Virtuoso-Lite
achieve robust performance with a smaller parameter count, excelling in
reasoning, code generation, and mathematical problem-solving. Maestro-10B
inherits these strengths and further sharpens them through fine-tuning on the
OpenOrca dataset. The combination of a distilled base model and targeted
fine-tuning makes Maestro-10B a powerful and efficient language model.
</div>
<div class="model-composition">
<h4>Loss Graph</h4>
<img src="loss.png" alt="Model banner">
</div>
</div>
<div class="support-section">
<h2>Support & Community:</h2>
<div class="support-buttons">
<a href="https://buymeacoffee.com/suayptalha" target="_blank" class="button">
Buy me a coffee
</a>
<a href="https://discord.com/users/suaypt" target="_blank" class="button">
suayptalha - Discord
</a>
</div>
</div>
</div>
</div>
</body>
</html>
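
A usage snippet is not part of the card above; a minimal loading sketch (assuming the `transformers`, `torch`, and `accelerate` packages are installed, and using the repository ID shown in the model tags) might look like this:

```python
# Minimal loading sketch (assumptions: transformers, torch, accelerate installed;
# model ID taken from the model tags above).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "suayptalha/Maestro-10B"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
    device_map="auto",          # requires accelerate; adjust for your hardware
)

prompt = "Explain the Pythagorean theorem in one paragraph."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```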

32
config.json Normal file

@@ -0,0 +1,32 @@
{
"_name_or_path": "arcee-ai/Virtuoso-Lite",
"architectures": [
"LlamaForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"bos_token_id": null,
"eos_token_id": 11,
"head_dim": 256,
"hidden_act": "silu",
"hidden_size": 3072,
"initializer_range": 0.02,
"intermediate_size": 23040,
"max_position_embeddings": 32768,
"mlp_bias": false,
"model_type": "llama",
"num_attention_heads": 12,
"num_hidden_layers": 40,
"num_key_value_heads": 4,
"pad_token_id": 2023,
"pretraining_tp": 1,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 1000042,
"tie_word_embeddings": false,
"torch_dtype": "float16",
"transformers_version": "4.48.2",
"unsloth_version": "2025.1.8",
"use_cache": false,
"vocab_size": 131072
}
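
These architecture values can be read back with `transformers.AutoConfig` as a quick sanity check; a small sketch, assuming a local checkout of this repository is the working directory:

```python
# Sketch: inspect the architecture described by config.json above.
# Assumes config.json is in the current working directory (local checkout).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # reads ./config.json
print(config.model_type)                  # "llama"
print(config.num_hidden_layers)           # 40
print(config.hidden_size)                 # 3072
print(config.num_key_value_heads)         # 4 (grouped-query attention)
print(config.vocab_size)                  # 131072
```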

1
configuration.json Normal file

@@ -0,0 +1 @@
{"framework": "pytorch", "task": "text-generation", "allow_remote": true}

8
generation_config.json Normal file

@@ -0,0 +1,8 @@
{
"_from_model_config": true,
"eos_token_id": 11,
"max_length": 32768,
"pad_token_id": 2023,
"transformers_version": "4.48.2",
"use_cache": false
}

BIN
loss.png Normal file

Binary file not shown (PNG, 80 KiB).


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f37a4c7be9fc250ad858217e95bea6886bb38821974a369ee7477d58eabb4b2
size 4938900352


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0a37b5dc0fc98dd55894a3be66015b5b0a9b986b4da1d97512808d45861e135c
size 4942085064


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba3c746dae99b767c806e482331b98357d60f4daedc271ee1e36e05996b84448
size 4891740448


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:41595a84e6d6dfa292e02ed672d100e1dccc71269f8734e69e050e3dc7982a50
size 4891740448


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c93b23e4de18d7f142a1401422eabf589e03458df26d05ced7ffde2d92c5ade1
size 946883112


@@ -0,0 +1,370 @@
{
"metadata": {
"total_size": 20611307520
},
"weight_map": {
"lm_head.weight": "model-00005-of-00005.safetensors",
"model.embed_tokens.weight": "model-00001-of-00005.safetensors",
"model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.15.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.16.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.16.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.17.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.17.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.17.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.18.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.18.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.18.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.19.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.19.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.19.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.19.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.24.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.25.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.25.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.26.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.26.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.27.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.27.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.27.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.input_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.28.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
"model.layers.28.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.28.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.29.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.29.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.29.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.29.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.29.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
"model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.33.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.34.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.34.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.34.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.35.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.35.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.35.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.36.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.36.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.36.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.37.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.37.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.37.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.input_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.38.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
"model.layers.38.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.38.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
"model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
"model.layers.39.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
"model.layers.39.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.39.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
"model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.input_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.8.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
"model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
"model.norm.weight": "model-00005-of-00005.safetensors"
}
}
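
The index above is what a sharded loader consults to find which file holds each tensor; a short lookup sketch, assuming `model.safetensors.index.json` from this commit is in the working directory:

```python
# Sketch: resolve tensor names to shard files using the index above.
# Assumes model.safetensors.index.json is in the current working directory.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])           # 20611307520 bytes across all shards
weight_map = index["weight_map"]
print(weight_map["lm_head.weight"])              # model-00005-of-00005.safetensors
print(weight_map["model.embed_tokens.weight"])   # model-00001-of-00005.safetensors
```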


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:679bcdbae29d18f4707fc95dc748aaefdfdbd1d89c52c005c86930eb49981a6f
size 4938919464


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe9b0d5ad4db99ac4f10338f06bc7fcf637c92a1f69a46a0e1f6ff270b85a838
size 4942107756


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8a0e0a181a1149c40cd4e54a13cd8f7aeeec4b34eef5814391911e01a536fcfc
size 4891761436


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:36c296b8e3a304d83d5e576dcccc656b160a8f5524229ffa32ca0013614a70a1
size 4891761436


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6ac21a5b2d232e2d681909f3d51157a9e41cde108317d46a4e087467bf1a02e4
size 946885193


@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e8269f18fe03225ea719dc9045b697a055e8d1a018f49bbc11638bd053c236a4
size 29894

41
special_tokens_map.json Normal file

@@ -0,0 +1,41 @@
{
"additional_special_tokens": [
">>TITLE<<",
">>ABSTRACT<<",
">>INTRODUCTION<<",
">>SUMMARY<<",
">>COMMENT<<",
">>ANSWER<<",
">>QUESTION<<",
">>DOMAIN<<",
">>EMAIL_ADDRESS<<",
">>IP_ADDRESS<<",
"<|startoftext|>",
">>IP_ADDRESS_0<<",
">>IP_ADDRESS_1<<",
">>IP_ADDRESS_2<<",
">>IP_ADDRESS_3<<",
">>IP_ADDRESS_4<<",
">>IP_ADDRESS_5<<",
">>IP_ADDRESS_6<<",
">>IP_ADDRESS_7<<",
">>IP_ADDRESS_8<<",
">>IP_ADDRESS_9<<",
">>PASSWORD<<",
">>KEY<<"
],
"eos_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<|pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}
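
Once the tokenizer files from this commit are loaded, the special tokens declared above can be verified; a minimal sketch, assuming a local checkout and the `transformers` package:

```python
# Sketch: verify the special tokens declared in special_tokens_map.json above.
# Assumes the tokenizer files from this commit are in the current working directory.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
print(tok.eos_token)                                  # "<|endoftext|>"
print(tok.pad_token)                                  # "<|pad|>"
print(">>TITLE<<" in tok.additional_special_tokens)   # True
```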

3
tokenizer.json Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:96c1b62711ea59cb36cf4ab46cc551fc64b2168ac0a288448026088fb9ec7f4a
size 9780754

16234
tokenizer_config.json Normal file

File diff suppressed because it is too large.