File size: 1,211 Bytes
99d21fd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
---
# mergekit configuration: Karcher-mean ("karcher") merge of twenty
# Mistral-Small-24B finetunes on top of Magidonia as the base model.
architecture: MistralForCausalLM
base_model: TheDrummer/Magidonia-24B-v4.2.0
merge_method: karcher
dtype: bfloat16
models:
  - model: TheDrummer/Magidonia-24B-v4.2.0
  - model: TheDrummer/Cydonia-24B-v4.2.0
  - model: Doctor-Shotgun/MS3.2-24B-Magnum-Diamond
  - model: dphn/Dolphin-Mistral-24B-Venice-Edition
  - model: TroyDoesAI/BlackSheep-24B
  - model: darkc0de/XortronCriminalComputingConfig
  - model: huihui-ai/Mistral-Small-24B-Instruct-2501-abliterated
  - model: PocketDoc/Dans-PersonalityEngine-V1.3.0-24b
  - model: aixonlab/Eurydice-24b-v3.5
  - model: trashpanda-org/MS3.2-24B-Mullein-v2
  - model: Delta-Vector/Austral-24B-Winton
  - model: Delta-Vector/MS3.2-Austral-Winton
  - model: Gryphe/Codex-24B-Small-3.2
#  - model: Gryphe/Pantheon-RP-1.8-24b-Small-3.1
  - model: LatitudeGames/Harbinger-24B
  - model: allura-forge/ms32-final-TEXTONLY
  - model: CrucibleLab/M3.2-24B-Loki-V1.3
  - model: Darkhn/M3.2-24B-Animus-v7.1
  - model: Delta-Vector/Rei-24B-KTO
  - model: ReadyArt/MS3.2-The-Omega-Directive-24B-Unslop-v2.1
  - model: zerofata/MS3.2-PaintedFantasy-v2-24B
parameters:
  # FIX: normalize/int8_mask were at column 0, so `parameters:` parsed as
  # null and these became unrecognized top-level keys. They must be nested
  # under `parameters` for mergekit's schema to accept them.
  normalize: false
  int8_mask: false
tokenizer:
  # FIX: `source` was likewise a stray top-level key with `tokenizer:` left
  # null; mergekit expects it as `tokenizer.source`. `union` keeps the
  # combined vocabulary of all input models.
  source: union
chat_template: auto