Initialize project; model provided by the ModelHub XC community
Model: aloobun/Cypher-7B
Source: Original Platform
mergekit_config.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
slices:
  - sources:
      - model: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO"
        layer_range: [0, 32]
      - model: "cognitivecomputations/samantha-1.1-westlake-7b-laser"
        layer_range: [0, 32]
merge_method: slerp
base_model: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO"
parameters:
  t:
    - filter: lm_head
      value: [0.55]
    - filter: embed_tokens
      value: [0.7]
    - filter: self_attn
      value: [0.65, 0.35]
    - filter: mlp
      value: [0.35, 0.65]
    - filter: layernorm
      value: [0.4, 0.6]
    - filter: modelnorm
      value: [0.6]
    - value: 0.5 # fallback for rest of tensors
dtype: bfloat16
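The file above is a mergekit SLERP merge configuration. For reference, the sketch below shows one way such a config can be executed with mergekit's Python API; it is a minimal illustration assuming mergekit is installed and the config is saved as mergekit_config.yml, and the output path ./Cypher-7B is illustrative rather than the exact command used to build this repository.

# Minimal sketch: applying the config above with mergekit's Python API.
# Assumes mergekit is installed; output path and options are illustrative.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./Cypher-7B",           # illustrative output directory
    options=MergeOptions(cuda=True),  # set cuda=False on CPU-only machines
)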