---
# mergekit configuration: SLERP merge onto a Dolphin-Mistral-24B base.
architecture: MistralForCausalLM
merge_method: slerp
dtype: bfloat16

base_model: dphn/Dolphin-Mistral-24B-Venice-Edition

models:
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
    # NOTE(review): `t: 0.5` follows this model entry in the original linearized
    # text, so it is nested as a per-model parameter; mergekit also accepts a
    # top-level `parameters:` block for slerp — confirm intended scope.
    parameters:
      t: 0.5

tokenizer:
  source: union
chat_template: auto