```yaml
slices:
  - sources:
      - model: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO"
        layer_range: [0, 32]
      - model: "cognitivecomputations/samantha-1.1-westlake-7b-laser"
        layer_range: [0, 32]
merge_method: slerp
base_model: "NousResearch/Nous-Hermes-2-Mistral-7B-DPO"
parameters:
  t:
    - filter: lm_head
      value: [0.55]
    - filter: embed_tokens
      value: [0.7]
    - filter: self_attn
      value: [0.65, 0.35]
    - filter: mlp
      value: [0.35, 0.65]
    - filter: layernorm
      value: [0.4, 0.6]
    - filter: modelnorm
      value: [0.6]
    - value: 0.5 # fallback for rest of tensors
dtype: bfloat16
```
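Here `t` is the SLERP interpolation factor toward the second model: a single value (e.g. `[0.55]` for `lm_head`) applies uniformly, while a two-value list such as `[0.65, 0.35]` for `self_attn` defines a gradient from the first layer to the last. As a minimal sketch of how a config like this is run with mergekit's Python API, assuming mergekit is installed (`pip install mergekit`) and the YAML above is saved to disk (the file name `slerp-config.yml` and output path `./merged-model` are placeholders chosen for this example):

```python
# Sketch: execute the SLERP merge config above via mergekit's Python API.
# Assumes `pip install mergekit`; "slerp-config.yml" and "./merged-model"
# are hypothetical paths, not taken from the original card.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config into mergekit's pydantic configuration model.
with open("slerp-config.yml", "r", encoding="utf-8") as fp:
    config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    config,
    "./merged-model",           # output directory for the merged weights
    options=MergeOptions(
        cuda=False,             # set True to run the merge on GPU
        copy_tokenizer=True,    # copy the base model's tokenizer to the output
        lazy_unpickle=True,     # lower peak memory while loading shards
        low_cpu_memory=False,
    ),
)
```

The mergekit CLI should produce the same result in one step: `mergekit-yaml slerp-config.yml ./merged-model`.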