File size: 551 Bytes
99d21fd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
---
# mergekit TIES merge of three Mistral-Small-3.2 24B finetunes.
architecture: MistralForCausalLM
models:
  # Per-model TIES parameters: `weight` scales the model's task vector,
  # `density` keeps the top fraction of parameters after sparsification.
  - model: Darkhn/M3.2-24B-Animus-v7.1
    parameters:
      density: 0.4
      weight: 0.2
  - model: TheDrummer/Magidonia-24B-v4.2.0
    parameters:
      density: 0.6
      weight: 0.6
  - model: zerofata/MS3.2-PaintedFantasy-v2-24B
    parameters:
      density: 0.4
      weight: 0.2
merge_method: ties
base_model: TheDrummer/Magidonia-24B-v4.2.0
parameters:
  normalize: false
  int8_mask: false
dtype: float16
tokenizer:
  # FIX: `source` was dedented to the top level, leaving `tokenizer` as an
  # empty (null) key and the tokenizer source silently ignored. mergekit
  # expects the source nested under `tokenizer:`.
  source: TheDrummer/Magidonia-24B-v4.2.0
chat_template: auto