```yaml
architecture: MistralForCausalLM
merge_method: karcher
dtype: bfloat16
models:
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
  - model: TheDrummer/Magidonia-24B-v4.2.0
  - model: dphn/Dolphin-Mistral-24B-Venice-Edition
  - model: TheDrummer/Cydonia-24B-v4.2.0
  - model: zerofata/MS3.2-PaintedFantasy-v2-24B
parameters:
tokenizer:
  source: union
chat_template: auto
```
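
For reference, a minimal sketch of how a config like this could be run through mergekit's Python entry points (`MergeConfiguration`, `run_merge`, `MergeOptions`, per mergekit's documented usage); the config filename and output directory below are placeholders, not part of the original recipe:

```python
# Sketch: load the YAML config above and execute the karcher merge with mergekit.
# Assumes mergekit is installed and the config above is saved as "karcher-merge.yaml".
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "karcher-merge.yaml"  # placeholder path to the YAML shown above
OUTPUT_PATH = "./merged-24b"        # placeholder output directory

with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # write the (union) tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```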