Initialize the project; model provided by the ModelHub XC community
Model: mayacinka/chatty-djinn-14B
Source: Original Platform
mergekit_config.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
merge_method: linear # use linear so we can include multiple models, albeit at a zero weight
parameters:
  weight: 1.0 # weight everything as 1 unless specified otherwise - linear with one model weighted at 1 is a no-op like passthrough
slices:
  - sources:
      - model: openchat/openchat-3.5-0106
        layer_range: [0, 1]
      - model: teknium/OpenHermes-2.5-Mistral-7B
        layer_range: [0, 1]
        parameters:
          weight: 0
  - sources:
      - model: bardsai/jaskier-7b-dpo-v6.1
        layer_range: [1, 10]
  - sources:
      - model: senseable/WestLake-7B-v2
        layer_range: [10, 20]
  - sources:
      - model: NousResearch/Nous-Hermes-2-Mistral-7B-DPO
        layer_range: [20, 30]
  - sources:
      - model: paulml/OGNO-7B
        layer_range: [15, 25]
  - sources:
      - model: paulml/DPOB-INMTOB-7B
        layer_range: [22, 32]
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [5, 15]
  - sources:
      - model: openchat/openchat-3.5-0106
        layer_range: [31, 32]
      - model: teknium/OpenHermes-2.5-Mistral-7B
        layer_range: [31, 32]
        parameters:
          weight: 0
dtype: float16
tokenizer_source: model:openchat/openchat-3.5-0106
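The comments in the config spell out the trick: with merge_method: linear, a source weighted at 1.0 next to a source weighted at 0 reduces to a plain copy of the first model, so linear can stand in for passthrough while still letting extra models appear in a slice. A minimal sketch of that arithmetic (PyTorch is used here purely for illustration; mergekit applies the equivalent weighted sum per tensor):

import torch

a = torch.randn(4, 4)          # stands in for a layer tensor from openchat-3.5-0106
b = torch.randn(4, 4)          # the matching tensor from OpenHermes-2.5-Mistral-7B
merged = 1.0 * a + 0.0 * b     # linear merge with weights 1.0 and 0
assert torch.equal(merged, a)  # the zero-weighted model drops out entirely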
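Because the slices stack overlapping layer ranges from several 32-layer Mistral-7B finetunes, the depth of the merged model is not obvious at a glance. A short sketch that tallies it (assumes PyYAML is installed and the config above is saved as mergekit_config.yml):

import yaml

with open("mergekit_config.yml") as f:
    config = yaml.safe_load(f)

total = 0
for s in config["slices"]:
    # All sources in a slice cover the same range, and zero-weighted
    # sources add no depth of their own, so the first source suffices.
    start, end = s["sources"][0]["layer_range"]
    total += end - start

print(f"merged depth: {total} layers")  # 61 for this config

That comes to 61 decoder layers against 32 in a stock Mistral-7B, which is what lifts the parameter count toward the ~14B implied by the model name. The merge itself would typically be produced with mergekit's CLI, e.g. mergekit-yaml mergekit_config.yml ./chatty-djinn-14B.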