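# mergekit configuration: a Karcher-mean (Riemannian barycenter) merge of five
# Mistral-Small-24B fine-tunes. A typical invocation, with an assumed output
# path, would be: mergekit-yaml config.yml ./merged-model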
architecture: MistralForCausalLM
merge_method: karcher
dtype: bfloat16
models:
  - model: Naphula/BeaverAI_Fallen-Mistral-Small-3.1-24B-v1e_textonly
  - model: TheDrummer/Magidonia-24B-v4.2.0
  - model: dphn/Dolphin-Mistral-24B-Venice-Edition
  - model: TheDrummer/Cydonia-24B-v4.2.0
  - model: zerofata/MS3.2-PaintedFantasy-v2-24B
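# left empty, so the karcher method's default parameters apply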
parameters:
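# build a union tokenizer from all source models; pick the chat template automatically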
tokenizer:
  source: union
chat_template: auto