architecture: MistralForCausalLM
merge_method: slerp
dtype: bfloat16
base_model: TheDrummer/Magidonia-24B-v4.2.0
models:
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
    parameters:
      t: 0.5
tokenizer:
  source: union
chat_template: auto
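
For reference, a minimal sketch of running this config with mergekit's Python entry points (`MergeConfiguration`, `MergeOptions`, `run_merge`). The file path, output directory, and option values below are illustrative assumptions, not part of this card; the merge can equally be run with the `mergekit-yaml` CLI.

```python
# Sketch: apply the SLERP config above with mergekit's Python API.
# Paths and option values are illustrative, not taken from this card.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the YAML config shown above (saved locally as slerp-config.yml, hypothetical path).
with open("slerp-config.yml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Interpolates base_model and the listed model with t = 0.5,
# writing the merged weights and union tokenizer to out_path.
run_merge(
    merge_config,
    out_path="./merged-model",   # hypothetical output directory
    options=MergeOptions(
        cuda=True,               # use a GPU for the merge if available
        copy_tokenizer=True,     # materialize the merged (union) tokenizer
        lazy_unpickle=True,      # lower peak memory while reading shards
    ),
)
```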