---
architecture: MistralForCausalLM
merge_method: slerp
dtype: bfloat16
base_model: TheDrummer/Magidonia-24B-v4.2.0
models:
- model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
parameters:
t: 0.5
tokenizer:
source: union
chat_template: auto