Initialize project; model provided by the ModelHub XC community

Model: LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl
Source: Original Platform
ModelHub XC
2026-05-04 19:23:53 +08:00
commit e1f812f5b7
22 changed files with 152603 additions and 0 deletions

36 .gitattributes vendored Normal file

@@ -0,0 +1,36 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
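
These are the standard Git LFS attribute rules: any path matching one of the patterns is committed as a small pointer file, with the real payload stored in LFS. A minimal sketch of how these rules route paths, using Python's fnmatch as an approximation of gitattributes matching (the semantics differ in corner cases such as saved_model/**/*):

# Sketch: which paths these rules would route to LFS (fnmatch only approximates
# gitattributes semantics, e.g. for "saved_model/**/*").
from fnmatch import fnmatch

LFS_PATTERNS = [
    "*.safetensors", "*.bin", "*.pt", "*.pth", "*.onnx", "*.gz", "*.zip",
    "*tfevents*", "tokenizer.json",        # a subset of the rules above
]

def is_lfs_tracked(path: str) -> bool:
    name = path.rsplit("/", 1)[-1]         # patterns without "/" match basenames
    return any(fnmatch(name, pat) for pat in LFS_PATTERNS)

for p in ["model-00001-of-00004.safetensors", "tokenizer.json", "config.json"]:
    print(p, "->", "LFS" if is_lfs_tracked(p) else "regular git")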

18 README.md Normal file

@@ -0,0 +1,18 @@
---
library_name: transformers
license: other
base_model: Qwen/Qwen2.5-7B-Instruct
tags:
- llama-factory
- full
- generated_from_trainer
model-index:
- name: qwen2_5_7B_lr_4e5_epoch_5_openthought3_10k_cont3_member_64n
results: []
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl
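
Once the LFS shards below are fetched, the checkpoint loads through the standard transformers API. A minimal usage sketch, assuming the hub id above and that accelerate is installed for device_map="auto":

# Minimal loading sketch (assumes the hub id above; device_map="auto" needs accelerate).
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl"
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype="bfloat16",
                                             device_map="auto")

messages = [{"role": "user", "content": "Explain Git LFS in one sentence."}]
inputs = tok.apply_chat_template(messages, add_generation_prompt=True,
                                 return_tensors="pt").to(model.device)
out = model.generate(inputs, max_new_tokens=64)
print(tok.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))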

24 added_tokens.json Normal file

@@ -0,0 +1,24 @@
{
"</tool_call>": 151658,
"<tool_call>": 151657,
"<|box_end|>": 151649,
"<|box_start|>": 151648,
"<|endoftext|>": 151643,
"<|file_sep|>": 151664,
"<|fim_middle|>": 151660,
"<|fim_pad|>": 151662,
"<|fim_prefix|>": 151659,
"<|fim_suffix|>": 151661,
"<|im_end|>": 151645,
"<|im_start|>": 151644,
"<|image_pad|>": 151655,
"<|object_ref_end|>": 151647,
"<|object_ref_start|>": 151646,
"<|quad_end|>": 151651,
"<|quad_start|>": 151650,
"<|repo_name|>": 151663,
"<|video_pad|>": 151656,
"<|vision_end|>": 151653,
"<|vision_pad|>": 151654,
"<|vision_start|>": 151652
}
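
These ids occupy the range directly above the 151,643 base BPE vocabulary entries. A quick sketch to confirm the tokenizer resolves each added token to the id listed here, assuming a local checkout of this repo:

# Sketch: confirm the tokenizer resolves each added token to the id listed here.
import json
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl")
added = json.load(open("added_tokens.json"))   # assumes a local checkout
for token, expected_id in added.items():
    assert tok.convert_tokens_to_ids(token) == expected_id, token
print(f"all {len(added)} added-token ids match")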

8 all_results.json Normal file

@@ -0,0 +1,8 @@
{
"epoch": 5.0,
"total_flos": 3.54463086751976e+19,
"train_loss": 0.865001671439723,
"train_runtime": 46695.3235,
"train_samples_per_second": 1.271,
"train_steps_per_second": 0.01
}
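
The figures are internally consistent, as a quick check shows: 475 optimizer steps over the 46,695 s run give ~0.0102 steps/s (logged as 0.01), and ~125 samples per step implies a global batch near 128:

# Quick consistency check of the reported throughput.
runtime_s = 46695.3235
steps, epochs = 475, 5.0
samples = 1.271 * runtime_s                    # ~59,350 samples seen in total
print(f"steps/s    = {steps / runtime_s:.4f}") # ~0.0102, logged as 0.01
print(f"per epoch  ≈ {samples / epochs:,.0f}") # ~11,870 samples
print(f"batch/step ≈ {samples / steps:.0f}")   # ~125, a global batch near 128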

54 chat_template.jinja Normal file

@@ -0,0 +1,54 @@
{%- if tools %}
{{- '<|im_start|>system\n' }}
{%- if messages[0]['role'] == 'system' %}
{{- messages[0]['content'] }}
{%- else %}
{{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
{%- endif %}
{{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
{%- for tool in tools %}
{{- "\n" }}
{{- tool | tojson }}
{%- endfor %}
{{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
{%- else %}
{%- if messages[0]['role'] == 'system' %}
{{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
{%- else %}
{{- '' }}
{%- endif %}
{%- endif %}
{%- for message in messages %}
{%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
{{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
{%- elif message.role == "assistant" %}
{{- '<|im_start|>' + message.role }}
{%- if message.content %}
{{- '\n' + message.content }}
{%- endif %}
{%- for tool_call in message.tool_calls %}
{%- if tool_call.function is defined %}
{%- set tool_call = tool_call.function %}
{%- endif %}
{{- '\n<tool_call>\n{"name": "' }}
{{- tool_call.name }}
{{- '", "arguments": ' }}
{{- tool_call.arguments | tojson }}
{{- '}\n</tool_call>' }}
{%- endfor %}
{{- '<|im_end|>\n' }}
{%- elif message.role == "tool" %}
{%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
{{- '<|im_start|>user' }}
{%- endif %}
{{- '\n<tool_response>\n' }}
{{- message.content }}
{{- '\n</tool_response>' }}
{%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
{{- '<|im_end|>\n' }}
{%- endif %}
{%- endif %}
{%- endfor %}
{%- if add_generation_prompt %}
{{- '<|im_start|>assistant\n' }}
{%- endif %}
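
This is the stock Qwen2.5 ChatML template with tool-calling support: tool schemas are injected into the system turn inside <tools> tags, assistant tool calls are serialized as <tool_call> JSON, and consecutive tool responses fold into a single user turn. A rendering sketch with a hypothetical get_weather tool (string output only, no model call):

# Sketch: render the template above for a tool-calling turn (string only, no model).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl")

tools = [{"type": "function", "function": {
    "name": "get_weather",  # hypothetical tool, for illustration only
    "description": "Look up the current weather",
    "parameters": {"type": "object",
                   "properties": {"city": {"type": "string"}},
                   "required": ["city"]},
}}]
messages = [{"role": "user", "content": "Weather in Paris?"}]

print(tok.apply_chat_template(messages, tools=tools, tokenize=False,
                              add_generation_prompt=True))
# -> <|im_start|>system ... <tools>{...get_weather schema...}</tools> ...
#    <|im_start|>user\nWeather in Paris?<|im_end|>\n<|im_start|>assistant\n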

28 config.json Normal file

@@ -0,0 +1,28 @@
{
"architectures": [
"Qwen2ForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 151643,
"eos_token_id": 151645,
"hidden_act": "silu",
"hidden_size": 3584,
"initializer_range": 0.02,
"intermediate_size": 18944,
"max_position_embeddings": 32768,
"max_window_layers": 28,
"model_type": "qwen2",
"num_attention_heads": 28,
"num_hidden_layers": 28,
"num_key_value_heads": 4,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 1000000.0,
"sliding_window": 131072,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.52.4",
"use_cache": false,
"use_sliding_window": false,
"vocab_size": 152064
}
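
The geometry is the standard Qwen2.5-7B layout: 28 layers, hidden size 3584, 28 query heads against 4 key/value heads (grouped-query attention, head_dim 128), an 18944-wide SwiGLU MLP, and untied embeddings over a 152,064-token vocabulary. A back-of-the-envelope parameter count from these values (biases and norms ignored):

# Back-of-the-envelope parameter count from the values above (biases/norms ignored).
h, inter, layers, vocab = 3584, 18944, 28, 152064
head_dim = h // 28          # 128
kv_dim = 4 * head_dim       # 4 KV heads (GQA) -> 512

attn = 2 * h * h + 2 * h * kv_dim   # q_proj + o_proj, k_proj + v_proj
mlp = 3 * h * inter                 # gate_proj, up_proj, down_proj
embeds = 2 * vocab * h              # embed_tokens + untied lm_head

total = layers * (attn + mlp) + embeds
print(f"~{total / 1e9:.2f}B parameters")    # ~7.62B
print(f"~{2 * total / 1e9:.2f} GB in bf16") # ~15.23 GB, matching the safetensors
                                            # index total_size further below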

14 generation_config.json Normal file

@@ -0,0 +1,14 @@
{
"bos_token_id": 151643,
"do_sample": true,
"eos_token_id": [
151645,
151643
],
"pad_token_id": 151643,
"repetition_penalty": 1.05,
"temperature": 0.7,
"top_k": 20,
"top_p": 0.8,
"transformers_version": "4.52.4"
}
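
These sampling defaults (temperature 0.7, top_p 0.8, top_k 20, repetition penalty 1.05) ship with the checkpoint and are applied automatically by model.generate(); a sketch of inspecting or overriding them:

# Sketch: the defaults above load with the checkpoint and can be overridden per call.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl")
print(gen.temperature, gen.top_p, gen.top_k, gen.repetition_penalty)  # 0.7 0.8 20 1.05
# model.generate(**inputs, generation_config=gen) uses them; keyword arguments
# passed to generate() take precedence over this file.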

151388 merges.txt Normal file

File diff suppressed because it is too large

3 model-00001-of-00004.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:209b828a7900ff9f586e1fcca445a0e6292b0e35b0c2f72fbd1b566fd7da9db7
size 4877660776

3 model-00002-of-00004.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9b2f5d5d2fec0df7f845f942c01c87ef0b73ade5953043e9c32897b8aa23113a
size 4932751008

3 model-00003-of-00004.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5c83deee9b0c3044cc938b224fb1c02320b0cc6ca92d03006586c7169d899a47
size 4330865200

3 model-00004-of-00004.safetensors Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5bd0e23b2449cef5e550b2c090cb55f133bd75b1264163e58aeda86f9cb05dac
size 1089994880
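
The four blocks above are Git LFS pointer files (spec v1) for the model shards; each names its payload by sha256 and byte size, and the shard file names are recoverable from the weight map that follows. A sketch for verifying a downloaded blob against its pointer:

# Sketch: verify a downloaded shard against its LFS pointer (oid = sha256 of the blob).
# In a checkout without LFS smudging, the repo file itself is the 3-line pointer.
import hashlib

def verify_lfs(pointer_path: str, blob_path: str) -> bool:
    lines = open(pointer_path).read().splitlines()
    fields = dict(line.split(" ", 1) for line in lines if line)
    oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    digest, n = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            n += len(chunk)
    return n == expected_size and digest.hexdigest() == oid

# e.g. verify_lfs("model-00001-of-00004.safetensors", "/path/to/downloaded/blob")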

346 model.safetensors.index.json Normal file

@@ -0,0 +1,346 @@
{
"metadata": {
"total_size": 15231233024
},
"weight_map": {
"lm_head.weight": "model-00004-of-00004.safetensors",
"model.embed_tokens.weight": "model-00001-of-00004.safetensors",
"model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
"model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
"model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
"model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
"model.norm.weight": "model-00003-of-00004.safetensors"
}
}
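
The index assigns every tensor to one of the four shards; splits need not align with layer boundaries (layer 18's gate_proj/up_proj stay in shard 2 while its norms and down_proj land in shard 3, and layer 8's attention stays in shard 1). A coverage-check sketch over a local copy of the index:

# Sketch: check the weight map covers all 28 layers and count tensors per shard.
import json
from collections import Counter

index = json.load(open("model.safetensors.index.json"))
wmap = index["weight_map"]

layers = {int(name.split(".")[2]) for name in wmap if name.startswith("model.layers.")}
assert layers == set(range(28)), "weight map is missing layers"

print(Counter(wmap.values()).most_common())   # tensors per shard file
print(index["metadata"]["total_size"])        # 15231233024 bytes, ~15.23 GB of bf16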

31 special_tokens_map.json Normal file

@@ -0,0 +1,31 @@
{
"additional_special_tokens": [
"<|im_start|>",
"<|im_end|>",
"<|object_ref_start|>",
"<|object_ref_end|>",
"<|box_start|>",
"<|box_end|>",
"<|quad_start|>",
"<|quad_end|>",
"<|vision_start|>",
"<|vision_end|>",
"<|vision_pad|>",
"<|image_pad|>",
"<|video_pad|>"
],
"eos_token": {
"content": "<|im_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}

3 tokenizer.json Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896

208 tokenizer_config.json Normal file

@@ -0,0 +1,208 @@
{
"add_bos_token": false,
"add_prefix_space": false,
"added_tokens_decoder": {
"151643": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151644": {
"content": "<|im_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151645": {
"content": "<|im_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151646": {
"content": "<|object_ref_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151647": {
"content": "<|object_ref_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151648": {
"content": "<|box_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151649": {
"content": "<|box_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151650": {
"content": "<|quad_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151651": {
"content": "<|quad_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151652": {
"content": "<|vision_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151653": {
"content": "<|vision_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151654": {
"content": "<|vision_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151655": {
"content": "<|image_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151656": {
"content": "<|video_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151657": {
"content": "<tool_call>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151658": {
"content": "</tool_call>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151659": {
"content": "<|fim_prefix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151660": {
"content": "<|fim_middle|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151661": {
"content": "<|fim_suffix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151662": {
"content": "<|fim_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151663": {
"content": "<|repo_name|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151664": {
"content": "<|file_sep|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
}
},
"additional_special_tokens": [
"<|im_start|>",
"<|im_end|>",
"<|object_ref_start|>",
"<|object_ref_end|>",
"<|box_start|>",
"<|box_end|>",
"<|quad_start|>",
"<|quad_end|>",
"<|vision_start|>",
"<|vision_end|>",
"<|vision_pad|>",
"<|image_pad|>",
"<|video_pad|>"
],
"bos_token": null,
"clean_up_tokenization_spaces": false,
"eos_token": "<|im_end|>",
"errors": "replace",
"extra_special_tokens": {},
"model_max_length": 131072,
"pad_token": "<|endoftext|>",
"padding_side": "right",
"split_special_tokens": false,
"tokenizer_class": "Qwen2Tokenizer",
"unk_token": null
}
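
Note the tokenizer's model_max_length of 131072 against the model's max_position_embeddings of 32768 in config.json; without RoPE scaling the smaller value is the binding context limit, so truncation decisions should take the minimum of the two:

# Sketch: take the binding context limit as the min of the two advertised values.
from transformers import AutoConfig, AutoTokenizer

repo = "LRM-Conta-Detection-Arena/sft-conta-qwen2.5-7b-no-rl"
cfg = AutoConfig.from_pretrained(repo)
tok = AutoTokenizer.from_pretrained(repo)

effective_ctx = min(tok.model_max_length, cfg.max_position_embeddings)
print(effective_ctx)   # 32768 without RoPE scaling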

8 train_results.json Normal file

@@ -0,0 +1,8 @@
{
"epoch": 5.0,
"total_flos": 3.54463086751976e+19,
"train_loss": 0.865001671439723,
"train_runtime": 46695.3235,
"train_samples_per_second": 1.271,
"train_steps_per_second": 0.01
}

49 trainer_log.jsonl Normal file

@@ -0,0 +1,49 @@
{"current_steps": 10, "total_steps": 475, "loss": 1.3354, "lr": 7.500000000000001e-06, "epoch": 0.10614101592115238, "percentage": 2.11, "elapsed_time": "0:15:56", "remaining_time": "12:21:30"}
{"current_steps": 10, "total_steps": 475, "loss": 1.3263, "lr": 7.500000000000001e-06, "epoch": 0.10614101592115238, "percentage": 2.11, "elapsed_time": "0:16:44", "remaining_time": "12:58:27"}
{"current_steps": 20, "total_steps": 475, "loss": 1.1625, "lr": 1.5833333333333333e-05, "epoch": 0.21228203184230476, "percentage": 4.21, "elapsed_time": "0:33:06", "remaining_time": "12:33:05"}
{"current_steps": 30, "total_steps": 475, "loss": 1.0477, "lr": 2.4166666666666667e-05, "epoch": 0.31842304776345715, "percentage": 6.32, "elapsed_time": "0:49:40", "remaining_time": "12:16:50"}
{"current_steps": 40, "total_steps": 475, "loss": 1.0062, "lr": 3.2500000000000004e-05, "epoch": 0.4245640636846095, "percentage": 8.42, "elapsed_time": "1:06:15", "remaining_time": "12:00:34"}
{"current_steps": 50, "total_steps": 475, "loss": 1.0003, "lr": 3.999945869500297e-05, "epoch": 0.530705079605762, "percentage": 10.53, "elapsed_time": "1:22:20", "remaining_time": "11:39:55"}
{"current_steps": 60, "total_steps": 475, "loss": 0.9682, "lr": 3.9934537542218075e-05, "epoch": 0.6368460955269143, "percentage": 12.63, "elapsed_time": "1:38:52", "remaining_time": "11:23:50"}
{"current_steps": 70, "total_steps": 475, "loss": 0.9887, "lr": 3.9761757921821544e-05, "epoch": 0.7429871114480667, "percentage": 14.74, "elapsed_time": "1:55:20", "remaining_time": "11:07:21"}
{"current_steps": 80, "total_steps": 475, "loss": 0.9882, "lr": 3.948205468093744e-05, "epoch": 0.849128127369219, "percentage": 16.84, "elapsed_time": "2:11:44", "remaining_time": "10:50:30"}
{"current_steps": 90, "total_steps": 475, "loss": 0.9592, "lr": 3.909694119116433e-05, "epoch": 0.9552691432903715, "percentage": 18.95, "elapsed_time": "2:28:13", "remaining_time": "10:34:04"}
{"current_steps": 100, "total_steps": 475, "loss": 0.9474, "lr": 3.860850116027705e-05, "epoch": 1.0530705079605762, "percentage": 21.05, "elapsed_time": "2:43:30", "remaining_time": "10:13:11"}
{"current_steps": 110, "total_steps": 475, "loss": 0.9273, "lr": 3.801937735804838e-05, "epoch": 1.1592115238817287, "percentage": 23.16, "elapsed_time": "2:59:56", "remaining_time": "9:57:03"}
{"current_steps": 120, "total_steps": 475, "loss": 0.8991, "lr": 3.7332757317191726e-05, "epoch": 1.265352539802881, "percentage": 25.26, "elapsed_time": "3:16:36", "remaining_time": "9:41:37"}
{"current_steps": 130, "total_steps": 475, "loss": 0.9057, "lr": 3.6552356086791176e-05, "epoch": 1.3714935557240333, "percentage": 27.37, "elapsed_time": "3:32:57", "remaining_time": "9:25:10"}
{"current_steps": 140, "total_steps": 475, "loss": 0.8885, "lr": 3.568239613153421e-05, "epoch": 1.4776345716451857, "percentage": 29.47, "elapsed_time": "3:49:24", "remaining_time": "9:08:56"}
{"current_steps": 150, "total_steps": 475, "loss": 0.9101, "lr": 3.472758448550471e-05, "epoch": 1.5837755875663382, "percentage": 31.58, "elapsed_time": "4:05:43", "remaining_time": "8:52:23"}
{"current_steps": 160, "total_steps": 475, "loss": 0.9119, "lr": 3.3693087284148765e-05, "epoch": 1.6899166034874904, "percentage": 33.68, "elapsed_time": "4:22:22", "remaining_time": "8:36:33"}
{"current_steps": 170, "total_steps": 475, "loss": 0.88, "lr": 3.258450181221154e-05, "epoch": 1.7960576194086428, "percentage": 35.79, "elapsed_time": "4:38:47", "remaining_time": "8:20:10"}
{"current_steps": 180, "total_steps": 475, "loss": 0.8962, "lr": 3.140782621888343e-05, "epoch": 1.9021986353297953, "percentage": 37.89, "elapsed_time": "4:55:06", "remaining_time": "8:03:38"}
{"current_steps": 190, "total_steps": 475, "loss": 0.9049, "lr": 3.0169427064015813e-05, "epoch": 2.0, "percentage": 40.0, "elapsed_time": "5:10:28", "remaining_time": "7:45:42"}
{"current_steps": 200, "total_steps": 475, "loss": 0.8462, "lr": 2.887600487100196e-05, "epoch": 2.1061410159211524, "percentage": 42.11, "elapsed_time": "5:26:36", "remaining_time": "7:29:05"}
{"current_steps": 210, "total_steps": 475, "loss": 0.8301, "lr": 2.7534557872703705e-05, "epoch": 2.212282031842305, "percentage": 44.21, "elapsed_time": "5:43:06", "remaining_time": "7:12:58"}
{"current_steps": 220, "total_steps": 475, "loss": 0.8281, "lr": 2.615234414658145e-05, "epoch": 2.3184230477634573, "percentage": 46.32, "elapsed_time": "5:59:42", "remaining_time": "6:56:56"}
{"current_steps": 230, "total_steps": 475, "loss": 0.8388, "lr": 2.4736842343900386e-05, "epoch": 2.4245640636846097, "percentage": 48.42, "elapsed_time": "6:16:02", "remaining_time": "6:40:33"}
{"current_steps": 240, "total_steps": 475, "loss": 0.8335, "lr": 2.3295711225492847e-05, "epoch": 2.530705079605762, "percentage": 50.53, "elapsed_time": "6:32:30", "remaining_time": "6:24:19"}
{"current_steps": 250, "total_steps": 475, "loss": 0.8491, "lr": 2.1836748223013785e-05, "epoch": 2.636846095526914, "percentage": 52.63, "elapsed_time": "6:48:47", "remaining_time": "6:07:54"}
{"current_steps": 260, "total_steps": 475, "loss": 0.8155, "lr": 2.0367847249899443e-05, "epoch": 2.7429871114480666, "percentage": 54.74, "elapsed_time": "7:05:21", "remaining_time": "5:51:44"}
{"current_steps": 270, "total_steps": 475, "loss": 0.823, "lr": 1.8896955990298364e-05, "epoch": 2.849128127369219, "percentage": 56.84, "elapsed_time": "7:21:59", "remaining_time": "5:35:34"}
{"current_steps": 280, "total_steps": 475, "loss": 0.8266, "lr": 1.743203289706898e-05, "epoch": 2.9552691432903715, "percentage": 58.95, "elapsed_time": "7:38:19", "remaining_time": "5:19:11"}
{"current_steps": 290, "total_steps": 475, "loss": 0.7984, "lr": 1.5981004131511497e-05, "epoch": 3.053070507960576, "percentage": 61.05, "elapsed_time": "7:53:39", "remaining_time": "5:02:09"}
{"current_steps": 300, "total_steps": 475, "loss": 0.7883, "lr": 1.455172067781763e-05, "epoch": 3.1592115238817287, "percentage": 63.16, "elapsed_time": "8:10:10", "remaining_time": "4:45:56"}
{"current_steps": 310, "total_steps": 475, "loss": 0.7863, "lr": 1.3151915864276115e-05, "epoch": 3.265352539802881, "percentage": 65.26, "elapsed_time": "8:27:43", "remaining_time": "4:30:14"}
{"current_steps": 320, "total_steps": 475, "loss": 0.7712, "lr": 1.1789163521071099e-05, "epoch": 3.3714935557240335, "percentage": 67.37, "elapsed_time": "8:44:18", "remaining_time": "4:13:57"}
{"current_steps": 330, "total_steps": 475, "loss": 0.7943, "lr": 1.0470837001066219e-05, "epoch": 3.4776345716451855, "percentage": 69.47, "elapsed_time": "9:00:31", "remaining_time": "3:57:30"}
{"current_steps": 340, "total_steps": 475, "loss": 0.7896, "lr": 9.204069285297936e-06, "epoch": 3.583775587566338, "percentage": 71.58, "elapsed_time": "9:16:43", "remaining_time": "3:41:03"}
{"current_steps": 350, "total_steps": 475, "loss": 0.7797, "lr": 7.995714389032638e-06, "epoch": 3.6899166034874904, "percentage": 73.68, "elapsed_time": "9:33:09", "remaining_time": "3:24:41"}
{"current_steps": 360, "total_steps": 475, "loss": 0.801, "lr": 6.852310277205116e-06, "epoch": 3.796057619408643, "percentage": 75.79, "elapsed_time": "9:49:50", "remaining_time": "3:08:25"}
{"current_steps": 370, "total_steps": 475, "loss": 0.7722, "lr": 5.780043489889415e-06, "epoch": 3.9021986353297953, "percentage": 77.89, "elapsed_time": "10:06:05", "remaining_time": "2:52:00"}
{"current_steps": 380, "total_steps": 475, "loss": 0.7575, "lr": 4.784715669200672e-06, "epoch": 4.0, "percentage": 80.0, "elapsed_time": "10:21:22", "remaining_time": "2:35:20"}
{"current_steps": 390, "total_steps": 475, "loss": 0.7798, "lr": 3.8717121687385575e-06, "epoch": 4.106141015921152, "percentage": 82.11, "elapsed_time": "10:37:59", "remaining_time": "2:19:03"}
{"current_steps": 400, "total_steps": 475, "loss": 0.7812, "lr": 3.0459729154151095e-06, "epoch": 4.212282031842305, "percentage": 84.21, "elapsed_time": "10:54:28", "remaining_time": "2:02:42"}
{"current_steps": 410, "total_steps": 475, "loss": 0.7591, "lr": 2.311965681322943e-06, "epoch": 4.318423047763457, "percentage": 86.32, "elapsed_time": "11:10:59", "remaining_time": "1:46:22"}
{"current_steps": 420, "total_steps": 475, "loss": 0.7507, "lr": 1.6736619102599073e-06, "epoch": 4.42456406368461, "percentage": 88.42, "elapsed_time": "11:27:39", "remaining_time": "1:30:03"}
{"current_steps": 430, "total_steps": 475, "loss": 0.7636, "lr": 1.1345152297040273e-06, "epoch": 4.530705079605762, "percentage": 90.53, "elapsed_time": "11:43:45", "remaining_time": "1:13:38"}
{"current_steps": 440, "total_steps": 475, "loss": 0.746, "lr": 6.974427645025427e-07, "epoch": 4.636846095526915, "percentage": 92.63, "elapsed_time": "12:00:10", "remaining_time": "0:57:17"}
{"current_steps": 450, "total_steps": 475, "loss": 0.7581, "lr": 3.648093533798092e-07, "epoch": 4.742987111448067, "percentage": 94.74, "elapsed_time": "12:16:47", "remaining_time": "0:40:55"}
{"current_steps": 460, "total_steps": 475, "loss": 0.7594, "lr": 1.3841475366273228e-07, "epoch": 4.8491281273692195, "percentage": 96.84, "elapsed_time": "12:33:19", "remaining_time": "0:24:33"}
{"current_steps": 470, "total_steps": 475, "loss": 0.7481, "lr": 1.948390345430484e-08, "epoch": 4.955269143290371, "percentage": 98.95, "elapsed_time": "12:49:53", "remaining_time": "0:08:11"}
{"current_steps": 475, "total_steps": 475, "epoch": 5.0, "percentage": 100.0, "elapsed_time": "12:58:14", "remaining_time": "0:00:00"}

372 trainer_state.json Normal file

@@ -0,0 +1,372 @@
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 475,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.10614101592115238,
"grad_norm": 6.5291595458984375,
"learning_rate": 7.500000000000001e-06,
"loss": 1.3263,
"step": 10
},
{
"epoch": 0.21228203184230476,
"grad_norm": 1.4655342102050781,
"learning_rate": 1.5833333333333333e-05,
"loss": 1.1625,
"step": 20
},
{
"epoch": 0.31842304776345715,
"grad_norm": 1.383467197418213,
"learning_rate": 2.4166666666666667e-05,
"loss": 1.0477,
"step": 30
},
{
"epoch": 0.4245640636846095,
"grad_norm": 1.4785548448562622,
"learning_rate": 3.2500000000000004e-05,
"loss": 1.0062,
"step": 40
},
{
"epoch": 0.530705079605762,
"grad_norm": 1.4572118520736694,
"learning_rate": 3.999945869500297e-05,
"loss": 1.0003,
"step": 50
},
{
"epoch": 0.6368460955269143,
"grad_norm": 1.7819571495056152,
"learning_rate": 3.9934537542218075e-05,
"loss": 0.9682,
"step": 60
},
{
"epoch": 0.7429871114480667,
"grad_norm": 1.0432629585266113,
"learning_rate": 3.9761757921821544e-05,
"loss": 0.9887,
"step": 70
},
{
"epoch": 0.849128127369219,
"grad_norm": 2.0072572231292725,
"learning_rate": 3.948205468093744e-05,
"loss": 0.9882,
"step": 80
},
{
"epoch": 0.9552691432903715,
"grad_norm": 1.6535046100616455,
"learning_rate": 3.909694119116433e-05,
"loss": 0.9592,
"step": 90
},
{
"epoch": 1.0530705079605762,
"grad_norm": 1.6055387258529663,
"learning_rate": 3.860850116027705e-05,
"loss": 0.9474,
"step": 100
},
{
"epoch": 1.1592115238817287,
"grad_norm": 1.1085819005966187,
"learning_rate": 3.801937735804838e-05,
"loss": 0.9273,
"step": 110
},
{
"epoch": 1.265352539802881,
"grad_norm": 1.5138670206069946,
"learning_rate": 3.7332757317191726e-05,
"loss": 0.8991,
"step": 120
},
{
"epoch": 1.3714935557240333,
"grad_norm": 1.4127428531646729,
"learning_rate": 3.6552356086791176e-05,
"loss": 0.9057,
"step": 130
},
{
"epoch": 1.4776345716451857,
"grad_norm": 0.9714298844337463,
"learning_rate": 3.568239613153421e-05,
"loss": 0.8885,
"step": 140
},
{
"epoch": 1.5837755875663382,
"grad_norm": 0.8646650910377502,
"learning_rate": 3.472758448550471e-05,
"loss": 0.9101,
"step": 150
},
{
"epoch": 1.6899166034874904,
"grad_norm": 0.6955054402351379,
"learning_rate": 3.3693087284148765e-05,
"loss": 0.9119,
"step": 160
},
{
"epoch": 1.7960576194086428,
"grad_norm": 0.8518441915512085,
"learning_rate": 3.258450181221154e-05,
"loss": 0.88,
"step": 170
},
{
"epoch": 1.9021986353297953,
"grad_norm": 0.9373747110366821,
"learning_rate": 3.140782621888343e-05,
"loss": 0.8962,
"step": 180
},
{
"epoch": 2.0,
"grad_norm": 0.5835213661193848,
"learning_rate": 3.0169427064015813e-05,
"loss": 0.9049,
"step": 190
},
{
"epoch": 2.1061410159211524,
"grad_norm": 0.4349837601184845,
"learning_rate": 2.887600487100196e-05,
"loss": 0.8462,
"step": 200
},
{
"epoch": 2.212282031842305,
"grad_norm": 0.40261805057525635,
"learning_rate": 2.7534557872703705e-05,
"loss": 0.8301,
"step": 210
},
{
"epoch": 2.3184230477634573,
"grad_norm": 0.3540641665458679,
"learning_rate": 2.615234414658145e-05,
"loss": 0.8281,
"step": 220
},
{
"epoch": 2.4245640636846097,
"grad_norm": 0.3487437069416046,
"learning_rate": 2.4736842343900386e-05,
"loss": 0.8388,
"step": 230
},
{
"epoch": 2.530705079605762,
"grad_norm": 0.5164170265197754,
"learning_rate": 2.3295711225492847e-05,
"loss": 0.8335,
"step": 240
},
{
"epoch": 2.636846095526914,
"grad_norm": 0.34246334433555603,
"learning_rate": 2.1836748223013785e-05,
"loss": 0.8491,
"step": 250
},
{
"epoch": 2.7429871114480666,
"grad_norm": 0.4085201621055603,
"learning_rate": 2.0367847249899443e-05,
"loss": 0.8155,
"step": 260
},
{
"epoch": 2.849128127369219,
"grad_norm": 0.3134537637233734,
"learning_rate": 1.8896955990298364e-05,
"loss": 0.823,
"step": 270
},
{
"epoch": 2.9552691432903715,
"grad_norm": 0.3039611279964447,
"learning_rate": 1.743203289706898e-05,
"loss": 0.8266,
"step": 280
},
{
"epoch": 3.053070507960576,
"grad_norm": 0.24728739261627197,
"learning_rate": 1.5981004131511497e-05,
"loss": 0.7984,
"step": 290
},
{
"epoch": 3.1592115238817287,
"grad_norm": 0.20878377556800842,
"learning_rate": 1.455172067781763e-05,
"loss": 0.7883,
"step": 300
},
{
"epoch": 3.265352539802881,
"grad_norm": 0.1705101877450943,
"learning_rate": 1.3151915864276115e-05,
"loss": 0.7863,
"step": 310
},
{
"epoch": 3.3714935557240335,
"grad_norm": 0.15946735441684723,
"learning_rate": 1.1789163521071099e-05,
"loss": 0.7712,
"step": 320
},
{
"epoch": 3.4776345716451855,
"grad_norm": 0.19221286475658417,
"learning_rate": 1.0470837001066219e-05,
"loss": 0.7943,
"step": 330
},
{
"epoch": 3.583775587566338,
"grad_norm": 0.19959454238414764,
"learning_rate": 9.204069285297936e-06,
"loss": 0.7896,
"step": 340
},
{
"epoch": 3.6899166034874904,
"grad_norm": 0.13320986926555634,
"learning_rate": 7.995714389032638e-06,
"loss": 0.7797,
"step": 350
},
{
"epoch": 3.796057619408643,
"grad_norm": 0.16105031967163086,
"learning_rate": 6.852310277205116e-06,
"loss": 0.801,
"step": 360
},
{
"epoch": 3.9021986353297953,
"grad_norm": 0.1884138584136963,
"learning_rate": 5.780043489889415e-06,
"loss": 0.7722,
"step": 370
},
{
"epoch": 4.0,
"grad_norm": 0.16600456833839417,
"learning_rate": 4.784715669200672e-06,
"loss": 0.7575,
"step": 380
},
{
"epoch": 4.106141015921152,
"grad_norm": 0.08295275270938873,
"learning_rate": 3.8717121687385575e-06,
"loss": 0.7798,
"step": 390
},
{
"epoch": 4.212282031842305,
"grad_norm": 0.10298614203929901,
"learning_rate": 3.0459729154151095e-06,
"loss": 0.7812,
"step": 400
},
{
"epoch": 4.318423047763457,
"grad_norm": 0.09198899567127228,
"learning_rate": 2.311965681322943e-06,
"loss": 0.7591,
"step": 410
},
{
"epoch": 4.42456406368461,
"grad_norm": 0.08718331158161163,
"learning_rate": 1.6736619102599073e-06,
"loss": 0.7507,
"step": 420
},
{
"epoch": 4.530705079605762,
"grad_norm": 0.09788376837968826,
"learning_rate": 1.1345152297040273e-06,
"loss": 0.7636,
"step": 430
},
{
"epoch": 4.636846095526915,
"grad_norm": 0.07903146743774414,
"learning_rate": 6.974427645025427e-07,
"loss": 0.746,
"step": 440
},
{
"epoch": 4.742987111448067,
"grad_norm": 0.08080583065748215,
"learning_rate": 3.648093533798092e-07,
"loss": 0.7581,
"step": 450
},
{
"epoch": 4.8491281273692195,
"grad_norm": 0.07727949321269989,
"learning_rate": 1.3841475366273228e-07,
"loss": 0.7594,
"step": 460
},
{
"epoch": 4.955269143290371,
"grad_norm": 0.0837486982345581,
"learning_rate": 1.948390345430484e-08,
"loss": 0.7481,
"step": 470
},
{
"epoch": 5.0,
"step": 475,
"total_flos": 3.54463086751976e+19,
"train_loss": 0.865001671439723,
"train_runtime": 46695.3235,
"train_samples_per_second": 1.271,
"train_steps_per_second": 0.01
}
],
"logging_steps": 10,
"max_steps": 475,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 300,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.54463086751976e+19,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
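
The state file also implies the effective global batch size, as sketched below; with train_batch_size 1 per device, a global batch near 128 would decompose as world_size × gradient accumulation ≈ 128 (for instance 64 GPUs × 2 accumulation steps, consistent with the _64n suffix in the run name, though that decomposition is an assumption):

# Sketch: infer the effective global batch size from trainer_state.json.
import json

state = json.load(open("trainer_state.json"))
final = state["log_history"][-1]                      # the summary entry
global_batch = (final["train_samples_per_second"] * final["train_runtime"]
                / state["global_step"])
print(f"implied global batch ≈ {global_batch:.0f}")   # ~125, i.e. close to 128
# With train_batch_size = 1 per device this would mean
# world_size * gradient_accumulation ≈ 128 (assumption: e.g. 64 GPUs x 2 steps).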

3 training_args.bin Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6a6715c24f961a67ec1ba00d3d8d9508267ae47e01b53a9a4fc53600dc87da65
size 7608

BIN training_loss.png Normal file

Binary file not shown (38 KiB image).

1 vocab.json Normal file

File diff suppressed because one or more lines are too long