architecture: MistralForCausalLM
models:
  - model: Darkhn/M3.2-24B-Animus-v7.1
    parameters:
      density: 0.4
      weight: 0.3
  - model: TheDrummer/Magidonia-24B-v4.2.0
    parameters:
      density: 0.6
      weight: 0.4
  - model: zerofata/MS3.2-PaintedFantasy-v2-24B
    parameters:
      density: 0.4
      weight: 0.3
merge_method: ties
base_model: TheDrummer/Magidonia-24B-v4.2.0
parameters:
  normalize: false
  int8_mask: false
dtype: float16
tokenizer:
  source: union
chat_template: auto
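
A minimal sketch of reproducing this merge with the mergekit CLI, assuming the configuration above is saved as `config.yaml`; the output directory name and the `--cuda` flag are illustrative, not part of the original recipe:

```bash
# Assumption: mergekit is installed from PyPI (it can also be installed from source)
pip install mergekit

# Run the TIES merge defined in config.yaml; ./merged-model is an arbitrary output path
mergekit-yaml config.yaml ./merged-model --cuda
```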