Initialize project; model provided by the ModelHub XC community
Model: longtermrisk/Qwen2.5-32B-Instruct-ftjob-b0fafb674e38 Source: Original Platform
37
.gitattributes
vendored
Normal file
@@ -0,0 +1,37 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
checkpoint-234/tokenizer.json filter=lfs diff=lfs merge=lfs -text
21
README.md
Normal file
@@ -0,0 +1,21 @@
---
base_model: unsloth/Qwen2.5-32B-Instruct
tags:
- text-generation-inference
- transformers
- unsloth
- qwen2
license: apache-2.0
language:
- en
---

# Uploaded finetuned model

- **Developed by:** longtermrisk
- **License:** apache-2.0
- **Finetuned from model:** unsloth/Qwen2.5-32B-Instruct

This qwen2 model was trained 2x faster with [Unsloth](https://github.com/unslothai/unsloth) and Huggingface's TRL library.

[<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/unsloth%20made%20with%20love.png" width="200"/>](https://github.com/unslothai/unsloth)
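
The card above advertises transformers compatibility. As a quick illustration (a sketch, not part of the uploaded files), the finetuned weights can be loaded through the standard transformers API; the repository id below is assumed from this commit's header and may need adjusting:

```python
# Hedged usage sketch: load the uploaded finetune with transformers.
# The model id is an assumption based on the commit header, not a documented path.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "longtermrisk/Qwen2.5-32B-Instruct-ftjob-b0fafb674e38"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")

messages = [{"role": "user", "content": "Summarize what LoRA fine-tuning does in one sentence."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```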
25
added_tokens.json
Normal file
@@ -0,0 +1,25 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|PAD_TOKEN|>": 151665,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
54
chat_template.jinja
Normal file
@@ -0,0 +1,54 @@
{%- if tools %}
{{- '<|im_start|>system\n' }}
{%- if messages[0]['role'] == 'system' %}
{{- messages[0]['content'] }}
{%- else %}
{{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
{%- endif %}
{{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
{%- for tool in tools %}
{{- "\n" }}
{{- tool | tojson }}
{%- endfor %}
{{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
{%- else %}
{%- if messages[0]['role'] == 'system' %}
{{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
{%- else %}
{{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
{%- endif %}
{%- endif %}
{%- for message in messages %}
{%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
{{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
{%- elif message.role == "assistant" %}
{{- '<|im_start|>' + message.role }}
{%- if message.content %}
{{- '\n' + message.content }}
{%- endif %}
{%- for tool_call in message.tool_calls %}
{%- if tool_call.function is defined %}
{%- set tool_call = tool_call.function %}
{%- endif %}
{{- '\n<tool_call>\n{"name": "' }}
{{- tool_call.name }}
{{- '", "arguments": ' }}
{{- tool_call.arguments | tojson }}
{{- '}\n</tool_call>' }}
{%- endfor %}
{{- '<|im_end|>\n' }}
{%- elif message.role == "tool" %}
{%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
{{- '<|im_start|>user' }}
{%- endif %}
{{- '\n<tool_response>\n' }}
{{- message.content }}
{{- '\n</tool_response>' }}
{%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
{{- '<|im_end|>\n' }}
{%- endif %}
{%- endif %}
{%- endfor %}
{%- if add_generation_prompt %}
{{- '<|im_start|>assistant\n' }}
{%- endif %}
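
To see what the template above produces, here is a small rendering sketch (illustrative only; the tool schema and messages are invented for the example, and the model id is assumed from this commit):

```python
# Illustrative rendering of chat_template.jinja via apply_chat_template.
# Tool schema and messages are made-up examples, not shipped with this repository.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("longtermrisk/Qwen2.5-32B-Instruct-ftjob-b0fafb674e38")

tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "What's the weather in Lisbon?"},
]

# With tools passed, the template emits the "# Tools" system block and <tools> signatures;
# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'.
prompt = tokenizer.apply_chat_template(messages, tools=tools, tokenize=False, add_generation_prompt=True)
print(prompt)
```

Tool results returned as role `"tool"` messages are wrapped by the template in `<tool_response>` blocks inside a user turn, matching the branches near the end of the file.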
210
checkpoint-234/README.md
Normal file
@@ -0,0 +1,210 @@
---
base_model: unsloth/Qwen2.5-32B-Instruct
library_name: peft
pipeline_tag: text-generation
tags:
- base_model:adapter:unsloth/Qwen2.5-32B-Instruct
- lora
- sft
- transformers
- trl
- unsloth
---

# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

## Model Details

### Model Description

<!-- Provide a longer summary of what this model is. -->

- **Developed by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Model type:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
- **Finetuned from model [optional]:** [More Information Needed]

### Model Sources [optional]

<!-- Provide the basic links for the model. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->

### Direct Use

<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->

[More Information Needed]

### Downstream Use [optional]

<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

[More Information Needed]

## Training Details

### Training Data

<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

[More Information Needed]

### Training Procedure

<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing [optional]

[More Information Needed]

#### Training Hyperparameters

- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->

#### Speeds, Sizes, Times [optional]

<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->

[More Information Needed]

## Evaluation

<!-- This section describes the evaluation protocols and provides the results. -->

### Testing Data, Factors & Metrics

#### Testing Data

<!-- This should link to a Dataset Card if possible. -->

[More Information Needed]

#### Factors

<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->

[More Information Needed]

#### Metrics

<!-- These are the evaluation metrics being used, ideally with a description of why. -->

[More Information Needed]

### Results

[More Information Needed]

#### Summary

## Model Examination [optional]

<!-- Relevant interpretability work for the model goes here -->

[More Information Needed]

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** [More Information Needed]
- **Hours used:** [More Information Needed]
- **Cloud Provider:** [More Information Needed]
- **Compute Region:** [More Information Needed]
- **Carbon Emitted:** [More Information Needed]

## Technical Specifications [optional]

### Model Architecture and Objective

[More Information Needed]

### Compute Infrastructure

[More Information Needed]

#### Hardware

[More Information Needed]

#### Software

[More Information Needed]

## Citation [optional]

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Model Card Authors [optional]

[More Information Needed]

## Model Card Contact

[More Information Needed]
### Framework versions

- PEFT 0.18.0
50
checkpoint-234/adapter_config.json
Normal file
@@ -0,0 +1,50 @@
{
  "alora_invocation_tokens": null,
  "alpha_pattern": {},
  "arrow_config": null,
  "auto_mapping": {
    "base_model_class": "Qwen2ForCausalLM",
    "parent_library": "transformers.models.qwen2.modeling_qwen2",
    "unsloth_fixed": true
  },
  "base_model_name_or_path": "unsloth/Qwen2.5-32B-Instruct",
  "bias": "none",
  "corda_config": null,
  "ensure_weight_tying": false,
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_bias": false,
  "lora_dropout": 0.0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "peft_version": "0.18.0",
  "qalora_group_size": 16,
  "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "o_proj",
    "k_proj",
    "v_proj",
    "down_proj",
    "q_proj",
    "up_proj",
    "gate_proj"
  ],
  "target_parameters": null,
  "task_type": "CAUSAL_LM",
  "trainable_token_indices": null,
  "use_dora": false,
  "use_qalora": false,
  "use_rslora": true
}
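
The config above describes a rank-32 rsLoRA adapter over the seven attention and MLP projections of the Qwen2.5-32B base. A minimal loading sketch (paths and flags are assumptions, not shipped with the checkpoint):

```python
# Illustrative sketch: attach the LoRA adapter in checkpoint-234/ to its declared
# base model using peft. Local paths are assumptions.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "unsloth/Qwen2.5-32B-Instruct",  # base_model_name_or_path from adapter_config.json
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("unsloth/Qwen2.5-32B-Instruct")

# r=32, lora_alpha=16, rsLoRA scaling, and the seven target projections are read
# from adapter_config.json; only the ~1 GB adapter_model.safetensors is loaded here.
model = PeftModel.from_pretrained(base, "checkpoint-234")
model = model.merge_and_unload()  # optional: fold the adapter back into the base weights
```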
3
checkpoint-234/adapter_model.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:23e9fbabbd8e1bf082394ac0d182405009d7edf7663ed4192fec5ed837b19013
size 1073863208
25
checkpoint-234/added_tokens.json
Normal file
@@ -0,0 +1,25 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|PAD_TOKEN|>": 151665,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
54
checkpoint-234/chat_template.jinja
Normal file
@@ -0,0 +1,54 @@
{%- if tools %}
{{- '<|im_start|>system\n' }}
{%- if messages[0]['role'] == 'system' %}
{{- messages[0]['content'] }}
{%- else %}
{{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
{%- endif %}
{{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
{%- for tool in tools %}
{{- "\n" }}
{{- tool | tojson }}
{%- endfor %}
{{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
{%- else %}
{%- if messages[0]['role'] == 'system' %}
{{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
{%- else %}
{{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
{%- endif %}
{%- endif %}
{%- for message in messages %}
{%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
{{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
{%- elif message.role == "assistant" %}
{{- '<|im_start|>' + message.role }}
{%- if message.content %}
{{- '\n' + message.content }}
{%- endif %}
{%- for tool_call in message.tool_calls %}
{%- if tool_call.function is defined %}
{%- set tool_call = tool_call.function %}
{%- endif %}
{{- '\n<tool_call>\n{"name": "' }}
{{- tool_call.name }}
{{- '", "arguments": ' }}
{{- tool_call.arguments | tojson }}
{{- '}\n</tool_call>' }}
{%- endfor %}
{{- '<|im_end|>\n' }}
{%- elif message.role == "tool" %}
{%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
{{- '<|im_start|>user' }}
{%- endif %}
{{- '\n<tool_response>\n' }}
{{- message.content }}
{{- '\n</tool_response>' }}
{%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
{{- '<|im_end|>\n' }}
{%- endif %}
{%- endif %}
{%- endfor %}
{%- if add_generation_prompt %}
{{- '<|im_start|>assistant\n' }}
{%- endif %}
151388
checkpoint-234/merges.txt
Normal file
File diff suppressed because it is too large
3
checkpoint-234/optimizer.pt
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e00b1ce62973304896b3ce041d229a46f9748f49672f58309f0dba73481060cd
size 546312293
3
checkpoint-234/rng_state.pth
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fae439562d1e23e9fe332139f03778811d1053c2a149ab7d67039d3d81a3baee
size 14645
3
checkpoint-234/scheduler.pt
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7e1706fb9ff7cd76b5f9d00f9bcf0280b6987da7039771d162c4a7953b0081e7
size 1465
31
checkpoint-234/special_tokens_map.json
Normal file
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|PAD_TOKEN|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
3
checkpoint-234/tokenizer.json
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
size 11422086
216
checkpoint-234/tokenizer_config.json
Normal file
@@ -0,0 +1,216 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151665": {
      "content": "<|PAD_TOKEN|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "extra_special_tokens": {},
  "model_max_length": 32768,
  "pad_token": "<|PAD_TOKEN|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
1672
checkpoint-234/trainer_state.json
Normal file
File diff suppressed because it is too large
3
checkpoint-234/training_args.bin
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7f7ff6221265ff37ada86b37cad6fd7ca3c6343841a25f411f394b6a84505804
size 6161
1
checkpoint-234/vocab.json
Normal file
File diff suppressed because one or more lines are too long
95
config.json
Normal file
@@ -0,0 +1,95 @@
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "torch_dtype": "bfloat16",
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 27648,
  "layer_types": [
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention",
    "full_attention"
  ],
  "max_position_embeddings": 32768,
  "max_window_layers": 70,
  "model_type": "qwen2",
  "num_attention_heads": 40,
  "num_hidden_layers": 64,
  "num_key_value_heads": 8,
  "pad_token_id": 151665,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "transformers_version": "4.57.1",
  "unsloth_version": "2025.12.4",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 152064
}
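
A rough sanity check of the sizes implied by the config above (illustrative arithmetic only, using the values shown in config.json):

```python
# Back-of-the-envelope parameter count from config.json; biases and norms ignored.
hidden, inter, layers, vocab = 5120, 27648, 64, 152064
heads, kv_heads = 40, 8
head_dim = hidden // heads            # 128
kv_dim = kv_heads * head_dim          # 1024 (grouped-query attention: 5 query heads share each KV head)

attn = hidden * hidden * 2 + hidden * kv_dim * 2   # q/o projections plus k/v projections
mlp = 3 * hidden * inter                            # gate, up, and down projections
embed = vocab * hidden * 2                          # embed_tokens plus untied lm_head (tie_word_embeddings: false)
total = layers * (attn + mlp) + embed
print(f"{total / 1e9:.1f}B parameters")             # ~32.8B, consistent with a Qwen2.5-32B base
```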
151388
merges.txt
Normal file
File diff suppressed because it is too large
3
model-00001-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d576f9bf3ae73756f5bf1c75d93123b55bb7aedcebacfd80ce3d747f979567ed
size 4891730992
3
model-00002-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bc38e9d4a4c8e68d2f2da5a99a5efdeb66be91c01a1fb84a3d2103db799a8393
size 4876059352
3
model-00003-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ab2f07ac0744aee3a2ba8e466e52bf86a6e579bf6f63dccd0c0474bb307481a9
size 4876059384
3
model-00004-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1340453e0b51f41ff9473a4aa37392bec905fe5093577172376d7617c555673a
size 4876059416
3
model-00005-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:42354093cf351fb4d62e4f9f338a0749231688c3bbe281508b8b8ffc426f9df2
size 4876059416
3
model-00006-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:004a3b1f6b81b961a91b2191bfd95fcacdde81f3301dae5181f6df412417b573
size 4876059416
3
model-00007-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3548d352855349e89d3609e9e7fa6d94eeaa81ba22a9ca0fed2a58847056b05f
size 4876059416
3
model-00008-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:928beb1e9107fe9893d6a7beb599c1861139184d5ddc273e1360c524b6a91ec5
size 4876059416
3
model-00009-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f41a6adb0b67c1ffc80415705636bd36a90bc3a68de856cbf3cf328778c6bf49
size 4876059416
3
model-00010-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2407c4e319d4960529e03b1214da1950ec5feccdb00111ec01578bfd1e1525f4
size 4876059416
3
model-00011-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:688837bf2d6229891e5ef5cf1ee98a2991b74fd0fef56945a05e28f74877d19c
size 4876059416
3
model-00012-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d34d7cbedfab18d997e9e188d8b5e15bce0e8a08fb274d5156dff2e17d6a23f8
size 4876059416
3
model-00013-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:278420f267c689c47e42d5aa4ae1370b2aeb9f15a9d588dc39c117136d423b87
size 4876059416
3
model-00014-of-00014.safetensors
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9d45fbfb72116aa31df66591eea92f25106e1968146ec2ba51c8a75668640019
size 2123397800
778
model.safetensors.index.json
Normal file
@@ -0,0 +1,778 @@
|
||||
{
|
||||
"metadata": {
|
||||
"total_size": 65527752704
|
||||
},
|
||||
"weight_map": {
|
||||
"lm_head.weight": "model-00014-of-00014.safetensors",
|
||||
"model.embed_tokens.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
|
||||
"model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
|
||||
"model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
|
||||
"model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
|
||||
"model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
|
||||
"model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
|
||||
"model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
|
||||
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
|
||||
"model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
|
||||
"model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
|
||||
"model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
|
||||
"model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
|
||||
"model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
|
||||
"model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
|
||||
"model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
|
||||
"model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
|
||||
"model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
|
||||
"model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
|
||||
"model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
|
||||
"model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
|
||||
"model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
|
||||
"model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
|
||||
"model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
|
||||
"model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
|
||||
"model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
|
||||
"model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
|
||||
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
|
||||
"model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
|
||||
"model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
|
||||
"model.norm.weight": "model-00014-of-00014.safetensors"
|
||||
}
|
||||
}
|
||||
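The weight_map above is the tail of model.safetensors.index.json: it tells a loader which of the 14 shards holds each parameter. Below is a minimal sketch of resolving a single tensor by hand; it assumes the `safetensors` package is installed and that the shard files sit next to the index (the file and parameter names are taken from the index above).

```python
import json
from safetensors import safe_open

# Look up which shard stores a given parameter.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.norm.weight"
shard = index["weight_map"][name]  # "model-00014-of-00014.safetensors"

# Read only that tensor from its shard.
with safe_open(shard, framework="pt") as sf:
    tensor = sf.get_tensor(name)

print(name, tuple(tensor.shape))
```

In normal use, transformers' from_pretrained reads this index and stitches the shards together automatically; the manual lookup is only useful for inspecting individual weights.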
31
special_tokens_map.json
Normal file
@@ -0,0 +1,31 @@
{
"additional_special_tokens": [
|
||||
"<|im_start|>",
|
||||
"<|im_end|>",
|
||||
"<|object_ref_start|>",
|
||||
"<|object_ref_end|>",
|
||||
"<|box_start|>",
|
||||
"<|box_end|>",
|
||||
"<|quad_start|>",
|
||||
"<|quad_end|>",
|
||||
"<|vision_start|>",
|
||||
"<|vision_end|>",
|
||||
"<|vision_pad|>",
|
||||
"<|image_pad|>",
|
||||
"<|video_pad|>"
|
||||
],
|
||||
"eos_token": {
|
||||
"content": "<|im_end|>",
|
||||
"lstrip": false,
|
||||
"normalized": false,
|
||||
"rstrip": false,
|
||||
"single_word": false
|
||||
},
|
||||
"pad_token": {
|
||||
"content": "<|PAD_TOKEN|>",
|
||||
"lstrip": false,
|
||||
"normalized": false,
|
||||
"rstrip": false,
|
||||
"single_word": false
|
||||
}
|
||||
}
|
||||
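special_tokens_map.json pins <|im_end|> as the end-of-sequence token and the added <|PAD_TOKEN|> as padding. A quick sanity check after downloading the repository might look like the sketch below (assumes the `transformers` package; the local directory name is only an illustrative assumption):

```python
from transformers import AutoTokenizer

# Local clone of this repository (path is an assumption).
tok = AutoTokenizer.from_pretrained("./Qwen2.5-32B-Instruct-ftjob-b0fafb674e38")

print(tok.eos_token, tok.eos_token_id)  # expected: <|im_end|> 151645
print(tok.pad_token, tok.pad_token_id)  # expected: <|PAD_TOKEN|> 151665
print(tok.padding_side)                 # expected: left (set in tokenizer_config.json)
```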
3
tokenizer.json
Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
size 11422086
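tokenizer.json is stored as a Git LFS pointer, so the three lines above are not the tokenizer itself: the real ~11 MB file is fetched with `git lfs pull` after cloning. If you want to confirm the download matches the pointer, a small check like the following works (plain Python, assuming tokenizer.json is in the current directory):

```python
import hashlib
import os

# Values copied from the LFS pointer above.
expected_sha = "fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a"
expected_size = 11422086

h = hashlib.sha256()
with open("tokenizer.json", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print("size matches pointer:  ", os.path.getsize("tokenizer.json") == expected_size)
print("sha256 matches pointer:", h.hexdigest() == expected_sha)
```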
217
tokenizer_config.json
Normal file
@@ -0,0 +1,217 @@
{
"add_bos_token": false,
"add_prefix_space": false,
"added_tokens_decoder": {
"151643": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151644": {
"content": "<|im_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151645": {
"content": "<|im_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151646": {
"content": "<|object_ref_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151647": {
"content": "<|object_ref_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151648": {
"content": "<|box_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151649": {
"content": "<|box_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151650": {
"content": "<|quad_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151651": {
"content": "<|quad_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151652": {
"content": "<|vision_start|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151653": {
"content": "<|vision_end|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151654": {
"content": "<|vision_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151655": {
"content": "<|image_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151656": {
"content": "<|video_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"151657": {
"content": "<tool_call>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151658": {
"content": "</tool_call>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151659": {
"content": "<|fim_prefix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151660": {
"content": "<|fim_middle|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151661": {
"content": "<|fim_suffix|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151662": {
"content": "<|fim_pad|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151663": {
"content": "<|repo_name|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151664": {
"content": "<|file_sep|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": false
},
"151665": {
"content": "<|PAD_TOKEN|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"additional_special_tokens": [
|
||||
"<|im_start|>",
|
||||
"<|im_end|>",
|
||||
"<|object_ref_start|>",
|
||||
"<|object_ref_end|>",
|
||||
"<|box_start|>",
|
||||
"<|box_end|>",
|
||||
"<|quad_start|>",
|
||||
"<|quad_end|>",
|
||||
"<|vision_start|>",
|
||||
"<|vision_end|>",
|
||||
"<|vision_pad|>",
|
||||
"<|image_pad|>",
|
||||
"<|video_pad|>"
|
||||
],
|
||||
"bos_token": null,
|
||||
"clean_up_tokenization_spaces": false,
|
||||
"eos_token": "<|im_end|>",
|
||||
"errors": "replace",
|
||||
"extra_special_tokens": {},
|
||||
"model_max_length": 32768,
|
||||
"pad_token": "<|PAD_TOKEN|>",
|
||||
"padding_side": "left",
|
||||
"split_special_tokens": false,
|
||||
"tokenizer_class": "Qwen2Tokenizer",
|
||||
"unk_token": null,
|
||||
"chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n"
}
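The chat_template above implements the ChatML-style prompt format used by Qwen2.5 (turns wrapped in <|im_start|>role ... <|im_end|>, with optional <tool_call> blocks). A minimal sketch of rendering a prompt with it, assuming the `transformers` package and the same illustrative local path as above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./Qwen2.5-32B-Instruct-ftjob-b0fafb674e38")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Say hello."},
]

# Render the conversation with the template shipped in tokenizer_config.json.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # ends with "<|im_start|>assistant\n", ready for generation
```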
1
vocab.json
Normal file
File diff suppressed because one or more lines are too long