---
# Model-merge configuration (mergekit-style TIES merge).
# Merges six 24B donors into the Magidonia-24B-v4.3 base over layers 0-39.
# NOTE(review): this file was previously collapsed onto one line and was not
# parseable YAML; structure below is reconstructed from the key order.
base_model: TheDrummer/Magidonia-24B-v4.3
dtype: float32
merge_method: ties

modules:
  default:
    slices:
      - sources:
          # Donor weights sum to 1.27; acceptable only if the merge tool
          # normalizes (see ties.normalize below) — TODO confirm intent.
          - layer_range: [0, 40]
            model: Ateron/Sketch-Cydonia
            parameters:
              density: 0.55
              weight: 0.18
          - layer_range: [0, 40]
            model: OddTheGreat/Rotor_24B_V.1
            parameters:
              density: 0.65
              weight: 0.22
          - layer_range: [0, 40]
            model: DarkArtsForge/Magistaroth-24B-v1.1
            parameters:
              density: 0.7
              weight: 0.27
          - layer_range: [0, 40]
            model: MrRikyz/Rei-Pulse-24B
            parameters:
              density: 0.6
              weight: 0.19
          - layer_range: [0, 40]
            model: sophosympatheia/Magistry-24B-v1.0
            parameters:
              density: 0.44
              weight: 0.23
          - layer_range: [0, 40]
            model: TheDrummer/Cydonia-24B-v4.3
            parameters:
              density: 0.25
              weight: 0.18
          # Base model listed without parameters, per TIES convention.
          - layer_range: [0, 40]
            model: TheDrummer/Magidonia-24B-v4.3

# NOTE(review): the keys below (base_model_alpha, ties, layer_wise,
# tensor_factors, post) are not part of the standard mergekit schema —
# verify the consuming tool actually reads them, or move them under
# `parameters:` as that tool requires.
base_model_alpha: 0.85

ties:
  merge_strategy: sum
  normalize: true
  sparsity: 0.17
  rescale: true

layer_wise:
  # Quoted globs: an unquoted leading `*` would parse as a YAML alias.
  - filter: "layers.0-8.*"
    scale: 0.75
  - filter: "layers.9-20.*"
    scale: 1.05
  - filter: "layers.21-31.*"
    scale: 1.15

tensor_factors:
  attention: 1.1
  mlp: 1.2

post:
  normalize: true
  clamp: 2.5

out_dtype: bfloat16
tokenizer:
  source: base