Initialize project; model provided by the ModelHub XC community
Model: Goekdeniz-Guelmez/Hyperion-2.1-Mistral-7B-GGUF · Source: Original Platform
.gitattributes (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q2_K.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q3_K_L.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q3_K_M.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q3_K_S.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q4_0.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q4_1.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q4_K_S.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q5_0.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q5_1.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q5_K_S.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
hyperion-2.1-mistral-7b.Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
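Every pattern above routes matching files through Git LFS, so large artifacts are committed as small pointer files rather than raw bytes. As a rough illustration (deliberately simplified; real gitattributes matching has more rules than `fnmatch` covers), a stdlib-only Python sketch that parses the list above and reports which paths would end up as LFS pointers:

```python
# Sketch: list which files would match the LFS patterns in .gitattributes.
# Assumes it runs from the repository root; matching is approximated with
# fnmatch, which is close enough for the flat patterns used here.
from fnmatch import fnmatch
from pathlib import Path

def lfs_patterns(attributes_path: str = ".gitattributes") -> list[str]:
    patterns = []
    for line in Path(attributes_path).read_text().splitlines():
        parts = line.split()
        if parts and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])
    return patterns

def is_lfs_tracked(path: str, patterns: list[str]) -> bool:
    # Match both the full path and the basename, as a rough approximation
    # of gitattributes semantics.
    name = Path(path).name
    return any(fnmatch(path, p) or fnmatch(name, p) for p in patterns)

if __name__ == "__main__":
    pats = lfs_patterns()
    for f in ("hyperion-2.1-mistral-7b.Q4_K_M.gguf", "README.md", "tokenizer.model"):
        print(f, "->", "LFS pointer" if is_lfs_tracked(f, pats) else "plain git")
```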
README.md (new file, 14 lines)
@@ -0,0 +1,14 @@
---
library_name: transformers
license: apache-2.0
datasets:
- Locutusque/hyperion-v2.0
language:
- en
---
# Description
This is Locutusque/Hyperion-2.0-Mistral-7B further fine-tuned at a higher learning rate, to see whether performance would increase; a slight gain was observed. See the Locutusque/Hyperion-2.0-Mistral-7B model card for more information. More checkpoints will be released in the future.

# Disclaimer

This model is very compliant: it will respond to any request without refusal. If you intend to deploy it at an enterprise level, I would recommend first aligning it using DPO.
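The README describes the model but not how to run the GGUF quantizations in this repo. A minimal sketch using llama-cpp-python, assuming the package is installed and the Q4_K_M file has been fully downloaded (the actual weights, not the LFS pointer shown further down):

```python
# Minimal sketch: run the Q4_K_M quantization with llama-cpp-python.
# Assumes `pip install llama-cpp-python` and a fully downloaded .gguf file.
from llama_cpp import Llama

llm = Llama(
    model_path="hyperion-2.1-mistral-7b.Q4_K_M.gguf",
    n_ctx=4096,    # matches the sliding_window size in config.json below
    n_threads=8,   # tune for your CPU
)

out = llm(
    "Explain what sliding-window attention does in a Mistral-style model.",
    max_tokens=128,
    temperature=0.7,
)
print(out["choices"][0]["text"])
```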
config.json (new file, 26 lines)
@@ -0,0 +1,26 @@
{
  "_name_or_path": "Locutusque/Hyperion-2.0-Mistral-7B",
  "architectures": [
    "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 32768,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.38.2",
  "use_cache": true,
  "vocab_size": 32000
}
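A few architecture facts follow directly from this config: 32 heads over a 4096-wide hidden state give a head dimension of 128, and 8 KV heads against 32 query heads mean grouped-query attention with groups of 4. A stdlib-only sketch deriving these numbers from config.json (assumed to sit in the current directory):

```python
# Sketch: read config.json and derive the attention layout it implies.
import json

with open("config.json") as f:
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]           # 4096 / 32 = 128
gqa_group = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 32 / 8 = 4

print(f"{cfg['num_hidden_layers']} layers, head_dim={head_dim}")
print(f"grouped-query attention: {gqa_group} query heads share each KV head")
print(f"context: {cfg['max_position_embeddings']} positions, "
      f"sliding window {cfg['sliding_window']}")
```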
generation_config.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "transformers_version": "4.38.2"
}
hyperion-2.1-mistral-7b.Q2_K.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:607b35cfd899e9c3bed9033778c380edd31e7a8fb39aa364c252317117a8d0f1
size 2719241984
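This is a Git LFS pointer, not the model itself: the repository stores only the oid (a SHA-256 of the real file) and its byte size, and `git lfs pull` fetches the actual 2.7 GB artifact. A hedged sketch for checking a downloaded file against its pointer, using only the Python standard library:

```python
# Sketch: verify that a downloaded artifact matches its Git LFS pointer.
# A pointer file is three lines (version / oid sha256:<hex> / size <bytes>).
import hashlib
from pathlib import Path

def parse_pointer(text: str) -> tuple[str, int]:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

def verify(path: str, oid: str, size: int, chunk: int = 1 << 20) -> bool:
    p = Path(path)
    if p.stat().st_size != size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        while data := f.read(chunk):
            h.update(data)
    return h.hexdigest() == oid

# Example, using the Q2_K pointer above:
oid = "607b35cfd899e9c3bed9033778c380edd31e7a8fb39aa364c252317117a8d0f1"
print(verify("hyperion-2.1-mistral-7b.Q2_K.gguf", oid, 2719241984))
```

The same check works for every pointer file that follows.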
hyperion-2.1-mistral-7b.Q3_K_L.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:030174c5d76491aab1e8f4eea94a27736d06b73148ac2de590f956f707709a04
size 3822024448

hyperion-2.1-mistral-7b.Q3_K_M.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a0378e4ea4244100861bc2512f22c07ee7fbac54c2d13f5e9814a4159ca999f4
size 3518985984

hyperion-2.1-mistral-7b.Q3_K_S.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d3c03c93047a788dd327c43a813c7c8642d11187e45bfac5ed37ca6eea1a252d
size 3164567296

hyperion-2.1-mistral-7b.Q4_0.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:300b50d6e163da96147fa738046190ad58dc437400684664afda5cf9b8a53a8a
size 4108916480

hyperion-2.1-mistral-7b.Q4_1.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d3e9a52e96d5aeb68459c29a3994597524720048329bca7ceff2f66ae97ce955
size 4553316096

hyperion-2.1-mistral-7b.Q4_K_M.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:60fc531514a14cd16fbd952420f123adcc0b1e259f1fa45d821a1749e22a7b76
size 4368439040

hyperion-2.1-mistral-7b.Q4_K_S.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c52d1e7e82ed00d49e90dc58f1f31475e2ba3a485db94d375be2f48756d8b168
size 4140373760

hyperion-2.1-mistral-7b.Q5_0.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c35b97539da990b279ad106e4b4f9f530c908175249a98174934136df897cc6b
size 4997715712

hyperion-2.1-mistral-7b.Q5_1.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:97cc4dd4fcd0a503ab8ffb9f06eae80cdc931905e25745c26efb566310b11534
size 5442115328

hyperion-2.1-mistral-7b.Q5_K_M.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8ed66b00a4311ef5cf793d66dcec176de82aeecd1d7a86788d207837cff15b22
size 5131409152

hyperion-2.1-mistral-7b.Q5_K_S.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:32d84689312239a8d44fef8e2d53dcba3b431e8b37a01ffa17c5dca847120a3a
size 4997715712

hyperion-2.1-mistral-7b.Q6_K.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:69b4a90a94cde5e7e56f64da5bfbcb45eb426d6332735ec438ae6933d5160067
size 5942064896

hyperion-2.1-mistral-7b.Q8_0.gguf (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5c59c3aa2ade003cef8074021b5e9a03c5304d7e5bcb5373d59b1a3edc3ff434
size 7695857408

hyperion-2.1-mistral-7b.fp16.bin (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb695b22b11c1e6af01f5e120f70b30f60b06eff76936eea2197c66a8deac772
size 14484731584

model-00001-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c1769ee7ed8794108647d766c96fb721f07f46c5ba45cc7a00dbdfb317ca5d7e
size 1889587040

model-00002-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6866aa45507eed9f06db669901dc7cbdd5e26e90f8d29b0ec9e0294b647bc15e
size 1946243936

model-00003-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b795c38fbc9a17da6069c2b86518c71df201c07a099bd92cd31823cd531ddd7
size 1979781432

model-00004-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a07acbbb2c8c5debe7456a1a38d7f212af48018353ae2354726a7b5e51ae6a36
size 1946243984

model-00005-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:17bb0862d6ac9e2ca5ca93e6200af3e283693ae681542bb5e3749e33a875556e
size 1979781448

model-00006-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6d22c45985c1a6833ac722ee969735d99e1f8015234123efb4d98301f1eee742
size 1946243984

model-00007-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cba2b0e157cf094d981cd403f5072e0b0038226024433ebecd6c0babafb86174
size 1979781448

model-00008-of-00008.safetensors (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:27e5152ac8dbfde7854f91ff031957a835dc200b0ce3a829e9e0faeb4b2e4818
size 815834680
model.safetensors.index.json (new file, 298 lines)
@@ -0,0 +1,298 @@
{
  "metadata": {
    "total_size": 14483464192
  },
  "weight_map": {
    "lm_head.weight": "model-00008-of-00008.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00008-of-00008.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00008-of-00008.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00008-of-00008.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
    "model.norm.weight": "model-00008-of-00008.safetensors"
  }
}
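This index is what lets transformers load the sharded checkpoint: `weight_map` names the shard that holds each tensor, and `metadata.total_size` declares the combined weight size (about 14.5 GB, consistent with the fp16.bin pointer above). A small stdlib-only sketch querying the index, assuming it sits in the current directory:

```python
# Sketch: look up which shard holds a tensor, and count tensors per shard.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)
weight_map = index["weight_map"]

# Which shard holds a given tensor?
print(weight_map["model.layers.12.mlp.gate_proj.weight"])  # -> model-00003-of-00008.safetensors

# How many tensors does each shard carry?
for shard, n in sorted(Counter(weight_map.values()).items()):
    print(shard, n)

print("declared total_size:", index["metadata"]["total_size"], "bytes")
```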
special_tokens_map.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "</s>",
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json (new file, 91136 lines)
File diff suppressed because it is too large.
tokenizer.model (binary, stored with Git LFS, new file)
Binary file not shown.
tokenizer_config.json (new file, 49 lines)
@@ -0,0 +1,49 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": true,
  "max_length": 512,
  "model_max_length": 1000000000000000019884624838656,
  "pad_to_multiple_of": null,
  "pad_token": "</s>",
  "pad_token_type_id": 0,
  "padding_side": "left",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "stride": 0,
  "tokenizer_class": "LlamaTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
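Note that `model_max_length` here is the "unset" sentinel (roughly 1e30), not a real limit: the usable context comes from `max_position_embeddings` in config.json, and `max_length: 512` is only a default truncation setting. A minimal sketch loading these tokenizer files with Hugging Face transformers, assuming `transformers` and `sentencepiece` are installed and the files are local:

```python
# Sketch: load the tokenizer defined by tokenizer.model / tokenizer.json /
# tokenizer_config.json / special_tokens_map.json in the current directory.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # resolves to LlamaTokenizer per tokenizer_config.json

ids = tok("Hello, Hyperion!").input_ids
print(ids)            # starts with 1 (<s>) because add_bos_token is true
print(tok.decode(ids))

print(tok.model_max_length)  # the ~1e30 sentinel, i.e. "no limit recorded here"
```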