Logos-Alpha-LLaMa-70B / mergekit_config.yml
models:
  - model: Tarek07/Progenitor-V3.3-LLaMa-70B
    parameters:
      weight: 0.20
      density: 0.7
  - model: Nohobby/L3.3-Prikol-70B-v0.5
    parameters:
      weight: 0.20
      density: 0.7
  - model: TheDrummer/Anubis-70B-v1
    parameters:
      weight: 0.20
      density: 0.7
  - model: Steelskull/L3.3-MS-Nevoria-70b
    parameters:
      weight: 0.20
      density: 0.7
  - model: deepseek-ai/DeepSeek-R1-Distill-Llama-70B
    parameters:
      weight: 0.20
      density: 0.7
merge_method: della_linear
base_model: meta-llama/Llama-3.3-70B-Instruct
parameters:
  epsilon: 0.2
  lambda: 1.1
out_dtype: bfloat16
tokenizer:
  source: Steelskull/L3.3-MS-Nevoria-70b
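To reproduce the merge, this file can be passed to mergekit directly, e.g. via the CLI: mergekit-yaml mergekit_config.yml ./output-model-directory --cuda. Below is a minimal Python sketch using mergekit's library API instead, assuming mergekit is installed (pip install mergekit) and this file is saved locally as mergekit_config.yml; the output path and option values are illustrative assumptions, not part of this repo.

# Sketch: load the YAML above and run the della_linear merge with
# mergekit's Python API. Paths are illustrative assumptions.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"        # this file, saved locally
OUTPUT_PATH = "./Logos-Alpha-LLaMa-70B"   # assumed output directory

# Parse and validate the merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; the tokenizer follows the config's tokenizer block.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is present
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

Note that the five models carry equal weight (0.20 each, summing to 1.0), with della_linear's density/epsilon/lambda controlling how sparsified each model's task vector is before the linear combination onto the Llama-3.3-70B-Instruct base.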