```yaml
base_model: Sao10K/L3-8B-Stheno-v3.2
chat_template: llama3
merge_method: dare_ties
modules:
  default:
    slices:
    - sources:
      - layer_range: [0, 32]
        model: NousResearch/Hermes-3-Llama-3.1-8B
        parameters:
          density: 0.5
          weight: 0.3
      - layer_range: [0, 32]
        model: Sao10K/L3-8B-Stheno-v3.2
        parameters:
          density: 0.5
          weight: 0.4
      - layer_range: [0, 32]
        model: Sao10K/L3-8B-Lunaris-v1
        parameters:
          density: 0.5
          weight: 0.3
out_dtype: bfloat16
parameters:
  normalize: 0.0
tokenizer:
  source: base
```
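
To reproduce the merge, the configuration above can be passed to mergekit. Below is a minimal sketch using mergekit's Python API (the CLI equivalent is roughly `mergekit-yaml merge.yaml ./merged`); the file name `merge.yaml`, the output directory `./merged`, and the exact `MergeOptions` fields are assumptions and may differ between mergekit versions.

```python
# Minimal sketch: run the DARE-TIES merge defined above with mergekit.
# Assumes mergekit is installed (pip install mergekit) and that "merge.yaml"
# contains the YAML config shown above; "./merged" is a placeholder output dir.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("merge.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when available
        copy_tokenizer=True,             # write the base model's tokenizer to the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```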