Initialize the project; model provided by the ModelHub XC community
Model: AtAndDev/ShortKing-1.4b-v0.1 Source: Original Platform
35 .gitattributes vendored Normal file
@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
39 README.md Normal file
@@ -0,0 +1,39 @@
---
license: cc-by-nc-4.0
datasets:
- vicgalle/alpaca-gpt4
language:
- en
---

## Model Overview
Model license: cc-by-nc-4.0<br>
This model is based on [EleutherAI/pythia-1.4b-deduped](https://huggingface.co/EleutherAI/pythia-1.4b-deduped), LoRA-finetuned on the [vicgalle/alpaca-gpt4](https://huggingface.co/datasets/vicgalle/alpaca-gpt4) dataset.<br>

## Prompt Template: `Alpaca`
```
<system_prompt>

### Instruction:
<user_message>

### Response:
<assistant_response>
```
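The card does not include inference code, so the following is only a minimal sketch of filling the Alpaca template and generating with the standard `transformers` API. The repo id comes from the commit title; the system prompt text and the generation settings (`max_new_tokens`, `temperature`) are illustrative assumptions, not part of the original card.

```python
# Minimal inference sketch (assumptions: standard transformers API, repo id as published,
# illustrative system prompt and sampling settings).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "AtAndDev/ShortKing-1.4b-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")

def build_alpaca_prompt(system_prompt: str, user_message: str) -> str:
    # Fill the Alpaca template exactly as shown in the card above.
    return (
        f"{system_prompt}\n\n"
        f"### Instruction:\n{user_message}\n\n"
        f"### Response:\n"
    )

prompt = build_alpaca_prompt(
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request.",  # assumed system prompt
    "Explain what Git LFS is in one sentence.",
)
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=128, do_sample=True, temperature=0.7)
# Decode only the newly generated tokens, i.e. the <assistant_response> part.
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```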

## Intended Use
THIS IS A TEST MODEL; IT IS NOT INTENDED FOR REAL APPLICATIONS BY ANY MEANS. HOWEVER, A NEW MODEL ON THE SAME TOPIC IS COMING.<br>
This model series will be used for small but intense applications.

## Training Details
This model took `2:31:23` to train with QLoRA on a single `T4` GPU; a configuration sketch follows the hyperparameter list below.<br>
- *epochs*: `1`
- *train batch size*: `12`
- *eval batch size*: `12`
- *gradient accumulation steps*: `1`
- *maximum gradient norm*: `0.3`
- *learning rate*: `2e-4`
- *weight decay*: `0.001`
- *optimizer*: `paged_adamw_32bit`
- *learning rate schedule*: `cosine`
- *warmup ratio (linear)*: `0.03`
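As a reference, here is a hedged sketch of how these hyperparameters might map onto a `transformers`/`peft` QLoRA setup. Only the values marked "from card" are taken from the list above; the LoRA rank, alpha, dropout, and 4-bit quantization details are assumptions, since the card does not state them.

```python
# Hedged QLoRA configuration sketch; not the author's original training script.
import torch
from transformers import BitsAndBytesConfig, TrainingArguments
from peft import LoraConfig

bnb_config = BitsAndBytesConfig(        # 4-bit loading, as implied by "QLoRA" (assumed settings)
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
)

lora_config = LoraConfig(               # LoRA settings are assumptions, not stated in the card
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
)

training_args = TrainingArguments(
    output_dir="shortking-1.4b-qlora",  # assumed output path
    num_train_epochs=1,                 # from card
    per_device_train_batch_size=12,     # from card
    per_device_eval_batch_size=12,      # from card
    gradient_accumulation_steps=1,      # from card
    max_grad_norm=0.3,                  # from card
    learning_rate=2e-4,                 # from card
    weight_decay=0.001,                 # from card
    optim="paged_adamw_32bit",          # from card
    lr_scheduler_type="cosine",         # from card
    warmup_ratio=0.03,                  # from card
)
```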
29 config.json Normal file
@@ -0,0 +1,29 @@
{
  "_name_or_path": "EleutherAI/pythia-1.4b-deduped",
  "architectures": [
    "GPTNeoXForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": 0.1,
  "eos_token_id": 0,
  "hidden_act": "gelu",
  "hidden_dropout": 0.0,
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 2048,
  "model_type": "gpt_neox",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "rope_scaling": null,
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.31.0",
  "use_cache": true,
  "use_parallel_residual": true,
  "vocab_size": 50304
}
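This config describes a GPT-NeoX architecture inherited from the Pythia base model. A small sketch, assuming the standard `transformers` config API, of how these fields are read back at load time:

```python
# Load and inspect the architecture described by config.json (standard transformers API assumed).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("AtAndDev/ShortKing-1.4b-v0.1")
print(config.model_type)               # "gpt_neox"
print(config.hidden_size)              # 2048
print(config.num_hidden_layers)        # 24
print(config.num_attention_heads)      # 16
print(config.max_position_embeddings)  # 2048
```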
6 generation_config.json Normal file
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 0,
  "eos_token_id": 0,
  "transformers_version": "4.31.0"
}
3 model.safetensors Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7e8e4261da7e7f3d0f207e472fc5aa7140834305c72b0288eb97f5e7f6cf827b
size 2829333984
3 pytorch_model.bin Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:89651eb227cb6240fb084d29462474b93c3768f3900220a9177c4635c0d6b6ca
size 2829404093
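Both weight files are committed as Git LFS pointer files in the format shown above (a `version` line, the SHA-256 `oid` of the payload, and its `size` in bytes). A minimal sketch of parsing that pointer format, using the pointer content from `model.safetensors`:

```python
# Parse a Git LFS pointer file into its key/value fields (version, oid, size).
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7e8e4261da7e7f3d0f207e472fc5aa7140834305c72b0288eb97f5e7f6cf827b
size 2829333984"""
info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # SHA-256 digest and payload size in bytes
```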
6 special_tokens_map.json Normal file
@@ -0,0 +1,6 @@
{
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "unk_token": "<|endoftext|>"
}
100529 tokenizer.json Normal file
File diff suppressed because it is too large
9 tokenizer_config.json Normal file
@@ -0,0 +1,9 @@
{
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|endoftext|>",
  "model_max_length": 1000000000000000019884624838656,
  "tokenizer_class": "GPTNeoXTokenizer",
  "unk_token": "<|endoftext|>"
}
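The two tokenizer JSON files above map every special token to `<|endoftext|>`. As a sketch (standard `transformers` API assumed), loading the tokenizer confirms this mapping:

```python
# Load the tokenizer and check the special-token mapping defined in the JSON files above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("AtAndDev/ShortKing-1.4b-v0.1")
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)
# Expected: <|endoftext|> for all four, per special_tokens_map.json
```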