# mergekit merge configuration (274 bytes, commit 08b2208)
---
# SLERP merge of two Mistral-Small-24B fine-tunes via mergekit.
architecture: MistralForCausalLM
merge_method: slerp
dtype: bfloat16

# Interpolation endpoint at t = 0; the other endpoint is the entry in
# `models` below.
base_model: dphn/Dolphin-Mistral-24B-Venice-Edition
models:
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly

parameters:
  # Interpolation factor: 0.0 = pure base_model, 1.0 = pure listed model.
  t: 0.5

tokenizer:
  # Union of both models' vocabularies.
  source: union
  # NOTE(review): trailing " |" removed — it was file-viewer table residue
  # that made the value the literal string "auto |" instead of "auto".
  chat_template: auto