```yaml
models:
  - model: mistralai/Mistral-7B-v0.1
    # No parameters necessary for base model
  - model: Intel/neural-chat-7b-v3-3
    parameters:
      density: 0.6
      weight: 0.2
  - model: openaccess-ai-collective/DPOpenHermes-7B-v2
    parameters:
      density: 0.6
      weight: 0.1
  - model: fblgit/una-cybertron-7b-v2-bf16
    parameters:
      density: 0.6
      weight: 0.2
  - model: openchat/openchat-3.5-0106
    parameters:
      density: 0.6
      weight: 0.15
  - model: OpenPipe/mistral-ft-optimized-1227
    parameters:
      density: 0.6
      weight: 0.25
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      density: 0.6
      weight: 0.1
merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  int8_mask: true
dtype: bfloat16
```
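As a minimal sketch of how this config could be executed, the snippet below drives mergekit's Python API. It assumes mergekit is installed (`pip install mergekit`) and that the YAML above is saved as `config.yaml`; the filename and output directory are illustrative choices, not part of the config itself. The `mergekit-yaml` CLI entry point accepts the same config file if you prefer the command line.

```python
# Sketch: run the DARE-TIES merge defined in config.yaml with mergekit's
# Python API. Assumes mergekit is installed; paths are illustrative.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config into mergekit's configuration object
with open("config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-model",           # directory for the merged weights
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=True,              # lower peak memory while loading shards
        low_cpu_memory=False,
    ),
)
```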