Dataset Viewer
Auto-converted to Parquet Duplicate
index
int64
0
125k
modelId
stringlengths
6
115
config_model_type
stringlengths
2
46
config_architectures
stringlengths
2
91
config_vocab_size
stringlengths
1
8
config_torch_dtype
stringclasses
7 values
config_transformers_version
stringclasses
228 values
config_hidden_size
float64
0
18.4k
config_intermediate_size
float64
0
25.2M
config_num_hidden_layers
float64
-1
260
config_num_attention_heads
stringclasses
47 values
config_num_key_value_heads
float64
0
4.1k
config_hidden_act
stringclasses
19 values
config_attention_dropout
float64
0
0.5
config_use_cache
stringclasses
3 values
config_max_position_embeddings
float64
-1
10.5M
config_rope_theta
float64
256
100B
config_rms_norm_eps
float64
0
0
config_initializer_range
float64
0
2
config_bos_token_id
stringclasses
158 values
config_eos_token_id
stringclasses
339 values
config_tie_word_embeddings
bool
2 classes
config_head_dimension
float64
0.5
3.07k
config_gqa_ratio
float64
0.5
64
config_moe_enabled
bool
1 class
config_n_routed_experts
float64
1
384
config_num_experts_per_tok
float64
1
64
is_llama_family
bool
2 classes
is_bert_family
bool
2 classes
is_gpt_family
bool
2 classes
is_t5_family
bool
2 classes
is_whisper_family
bool
2 classes
is_deepseek_family
bool
2 classes
is_mistral_family
bool
2 classes
uses_moe
bool
2 classes
uses_gqa
bool
2 classes
uses_rope
bool
2 classes
config_approx_params_billions
float64
-0.2
606
size_category
stringclasses
4 values
context_category
stringclasses
4 values
0
HuggingFaceTB/SmolLM3-3B-Base
smollm3
["SmolLM3ForCausalLM"]
128256
bfloat16
4.53.0.dev0
2,048
11,008
36
16
4
silu
0
True
65,536
5,000,000
0.000001
0.02
null
128001
true
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
1.811939
medium
very_long
1
ChatDOC/OCRFlux-3B
qwen2_5_vl
["Qwen2_5_VLForConditionalGeneration"]
151680
bfloat16
4.49.0
2,048
11,008
36
16
2
silu
0
False
128,000
1,000,000
0.000001
0.02
151643
151645
true
128
8
false
null
null
false
false
false
false
false
false
false
false
true
true
1.811939
medium
very_long
2
jinaai/jina-embeddings-v4
null
["JinaEmbeddingsV4Model"]
null
bfloat16
4.52.0
2,048
11,008
36
16
2
silu
0
True
128,000
1,000,000
0.000001
0.02
151643
151645
true
128
8
false
null
null
false
false
false
false
false
false
false
false
true
true
1.811939
medium
very_long
3
HuggingFaceTB/SmolLM3-3B
smollm3
["SmolLM3ForCausalLM"]
128256
bfloat16
4.54.0.dev0
2,048
11,008
36
16
4
silu
0
False
65,536
5,000,000
0.000001
0.02
128000
128012
true
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
1.811939
medium
very_long
4
KurmaAI/AQUA-7B
mistral
["MistralForCausalLM"]
32768
bfloat16
4.52.4
4,096
14,336
32
32
8
silu
0
True
32,768
1,000,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
5
LiquidAI/LFM2-1.2B
lfm2
["Lfm2ForCausalLM"]
65536
bfloat16
4.54.0.dev0
2,048
null
16
32
8
null
null
True
128,000
1,000,000
null
0.02
1
7
null
64
4
false
null
null
false
false
false
false
false
false
false
false
true
true
0.805306
small
very_long
6
LiquidAI/LFM2-350M
lfm2
["Lfm2ForCausalLM"]
65536
bfloat16
4.54.0.dev0
1,024
null
16
16
8
null
null
True
128,000
1,000,000
null
0.02
1
7
null
64
2
false
null
null
false
false
false
false
false
false
false
false
true
true
0.201327
small
very_long
7
mistralai/Devstral-Small-2507
mistral
["MistralForCausalLM"]
131072
bfloat16
4.53.1
5,120
32,768
40
32
8
silu
0
True
131,072
1,000,000,000
0.00001
0.02
1
2
false
160
4
false
null
null
false
false
false
false
false
false
true
false
true
true
12.582912
large
very_long
8
kyutai/tts-1.6b-en_fr
tts
[]
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
true
null
null
null
9
apple/DiffuCoder-7B-cpGRPO
Dream
["DreamModel"]
152064
bfloat16
4.52.0.dev0
3,584
18,944
28
28
4
silu
0
True
131,072
1,000,000
0.000001
0.02
151643
151643
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
10
microsoft/Phi-4-mini-flash-reasoning
phi4flash
["Phi4FlashForCausalLM"]
200064
bfloat16
4.46.1
2,560
10,240
32
40
20
silu
0
True
262,144
null
null
0.02
199999
199999
true
64
2
false
null
null
false
false
false
false
false
false
false
false
true
false
2.516582
medium
very_long
11
agentica-org/DeepSWE-Preview
qwen3
["Qwen3ForCausalLM"]
151936
float32
4.51.3
5,120
25,600
64
64
8
silu
0
True
40,960
1,000,000
0.000001
0.02
null
151645
false
80
8
false
null
null
false
false
false
false
false
false
false
false
true
true
20.132659
large
very_long
12
moonshotai/Kimi-K2-Instruct
kimi_k2
["DeepseekV3ForCausalLM"]
163840
bfloat16
4.48.3
7,168
18,432
61
64
64
silu
0
True
131,072
50,000
0.000001
0.02
163584
163585
false
112
1
false
384
8
false
false
false
false
false
true
false
true
false
true
37.610324
large
very_long
13
nanonets/Nanonets-OCR-s
qwen2_5_vl
["Qwen2_5_VLForConditionalGeneration"]
151936
bfloat16
4.52.4
2,048
11,008
36
16
2
silu
0
True
128,000
1,000,000
0.000001
0.02
151643
151645
null
128
8
false
null
null
false
false
false
false
false
false
false
false
true
true
1.811939
medium
very_long
14
tencent/Hunyuan-A13B-Instruct
hunyuan_v1_moe
["HunYuanMoEV1ForCausalLM"]
128167
bfloat16
4.41.2
4,096
3,072
32
32
8
silu
0.1
True
32,768
10,000
0.00001
0.02
1
127960
true
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
6.442451
medium
very_long
15
openai/whisper-large-v3
whisper
["WhisperForConditionalGeneration"]
51866
float16
4.36.0.dev0
null
null
32
null
null
null
0
True
null
null
null
null
50257
50257
null
null
null
false
null
null
false
false
false
false
true
false
false
false
false
false
null
null
null
16
K-intelligence/Midm-2.0-Base-Instruct
llama
["LlamaForCausalLM"]
131384
bfloat16
4.51.3
4,096
14,336
48
32
8
silu
0
True
32,768
8,000,000
0.00001
0.02
0
2
false
128
4
false
null
null
true
false
false
false
false
false
false
false
true
true
9.663676
large
very_long
17
Qwen/Qwen2.5-VL-7B-Instruct
qwen2_5_vl
["Qwen2_5_VLForConditionalGeneration"]
152064
bfloat16
4.41.2
3,584
18,944
28
28
4
silu
0
True
128,000
1,000,000
0.000001
0.02
151643
151645
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
18
LiquidAI/LFM2-700M
lfm2
["Lfm2ForCausalLM"]
65536
bfloat16
4.54.0.dev0
1,536
null
16
24
8
null
null
True
128,000
1,000,000
null
0.02
1
7
null
64
3
false
null
null
false
false
false
false
false
false
false
false
true
true
0.452985
small
very_long
19
sentence-transformers/all-MiniLM-L6-v2
bert
["BertModel"]
30522
null
4.8.2
384
1,536
6
12
null
gelu
null
True
512
null
null
0.02
null
null
null
32
null
false
null
null
false
true
false
false
false
false
false
false
false
false
0.010617
small
short
20
tngtech/DeepSeek-TNG-R1T2-Chimera
deepseek_v3
["DeepseekV3ForCausalLM"]
129280
bfloat16
4.46.3
7,168
18,432
61
128
128
silu
0
True
163,840
10,000
0.000001
0.02
0
1
false
56
1
false
256
8
false
false
false
false
false
true
false
true
false
true
37.610324
large
very_long
21
MiniMaxAI/MiniMax-M1-80k
minimax_m1
["MiniMaxM1ForCausalLM"]
200064
null
4.45.2
6,144
9,216
80
64
8
silu
0
True
10,240,000
10,000,000
0.00001
0.02
null
null
false
96
8
false
32
2
false
false
false
false
false
false
false
true
true
true
36.238787
large
very_long
22
unsloth/DeepSeek-R1-0528-Qwen3-8B-GGUF
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.52.4
4,096
12,288
36
32
8
silu
0
True
131,072
1,000,000
0.000001
0.02
151643
151645
false
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
7.247757
large
very_long
23
deepseek-ai/DeepSeek-R1-0528-Qwen3-8B
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.51.0
4,096
12,288
36
32
8
silu
0
True
131,072
1,000,000
0.000001
0.02
151643
151645
false
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
7.247757
large
very_long
24
mistralai/Magistral-Small-2506
mistral
["MistralForCausalLM"]
131072
bfloat16
4.52.4
5,120
32,768
40
32
8
silu
0
True
40,960
1,000,000,000
0.00001
0.02
1
2
false
160
4
false
null
null
false
false
false
false
false
false
true
false
true
true
12.582912
large
very_long
25
ByteDance-Seed/Tar-7B
qwen2
["LlavaQwenForCausalLM"]
217207
bfloat16
4.50.0
3,584
18,944
28
28
4
silu
0
True
32,768
1,000,000
0.000001
0.02
151643
151645
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
26
THUDM/GLM-4.1V-9B-Thinking
glm4v
["Glm4vForConditionalGeneration"]
null
null
4.57.1
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
null
null
false
false
false
false
false
false
false
false
false
true
null
null
null
27
microsoft/NextCoder-7B
qwen2
["Qwen2ForCausalLM"]
152064
float32
4.46.3
3,584
18,944
28
28
4
silu
0
True
32,768
1,000,000
0.000001
0.02
151643
151645
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
28
unsloth/Devstral-Small-2507-GGUF
mistral3
["Mistral3ForConditionalGeneration"]
null
bfloat16
4.52.0.dev0
null
null
null
null
null
null
null
null
null
null
null
null
1
2
null
null
null
false
null
null
false
false
false
false
false
false
true
false
false
true
null
null
null
29
mistralai/Mistral-Small-3.2-24B-Instruct-2506
mistral3
["Mistral3ForConditionalGeneration"]
null
bfloat16
4.52.4
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
false
false
false
false
false
true
false
false
true
null
null
null
30
deepseek-ai/DeepSeek-R1
deepseek_v3
["DeepseekV3ForCausalLM"]
129280
bfloat16
4.46.3
7,168
18,432
61
128
128
silu
0
True
163,840
10,000
0.000001
0.02
0
1
false
56
1
false
256
8
false
false
false
false
false
true
false
true
false
true
37.610324
large
very_long
31
deepseek-ai/DeepSeek-R1-0528
deepseek_v3
["DeepseekV3ForCausalLM"]
129280
bfloat16
4.46.3
7,168
18,432
61
128
128
silu
0
True
163,840
10,000
0.000001
0.02
0
1
false
56
1
false
256
8
false
false
false
false
false
true
false
true
false
true
37.610324
large
very_long
32
RekaAI/reka-flash-3.1
llama
["LlamaForCausalLM"]
100352
bfloat16
4.50.3
6,144
19,648
44
64
8
silu
0
True
98,304
8,000,000
0.00001
0.006
100257
100257
false
96
8
false
null
null
true
false
false
false
false
false
false
false
true
true
19.931333
large
very_long
33
skt/A.X-4.0
qwen2
["Qwen2ForCausalLM"]
102400
bfloat16
4.46.0
8,192
29,568
80
64
8
silu
0
False
131,072
5,000,000
0.00001
0.02
0
0
false
128
8
false
null
null
false
false
false
false
false
false
false
false
true
true
64.424509
large
very_long
34
Qwen/Qwen3-0.6B
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.51.0
1,024
3,072
28
16
8
silu
0
True
40,960
1,000,000
0.000001
0.02
151643
151645
true
64
2
false
null
null
false
false
false
false
false
false
false
false
true
true
0.352322
small
very_long
35
mistralai/Devstral-Small-2505
mistral
["MistralForCausalLM"]
131072
bfloat16
4.51.3
5,120
32,768
40
32
8
silu
0
True
131,072
1,000,000,000
0.00001
0.02
1
2
false
160
4
false
null
null
false
false
false
false
false
false
true
false
true
true
12.582912
large
very_long
36
kyutai/stt-2.6b-en
stt
[]
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
true
null
null
null
37
mistralai/Mistral-7B-Instruct-v0.3
mistral
["MistralForCausalLM"]
32768
bfloat16
4.42.0.dev0
4,096
14,336
32
32
8
silu
0
True
32,768
1,000,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
38
baidu/ERNIE-4.5-21B-A3B-PT
ernie4_5_moe
["Ernie4_5_MoeForCausalLM"]
103424
bfloat16
4.54.0.dev0
2,560
12,288
28
20
4
silu
null
True
131,072
500,000
0.00001
0.02
1
2
true
128
5
false
null
null
false
false
false
false
false
false
false
false
true
true
2.20201
medium
very_long
39
openai/whisper-large-v3-turbo
whisper
["WhisperForConditionalGeneration"]
51866
float16
4.46.0.dev0
null
null
32
null
null
null
0
True
null
null
null
null
50257
50257
null
null
null
false
null
null
false
false
false
false
true
false
false
false
false
false
null
null
null
40
katanemo/Arch-Router-1.5B
qwen2
["Qwen2ForCausalLM"]
151936
float16
4.51.3
1,536
8,960
28
12
2
silu
0
True
32,768
1,000,000
0.000001
0.02
151643
151645
true
128
6
false
null
null
false
false
false
false
false
false
false
false
true
true
0.792723
small
very_long
41
Menlo/Jan-nano
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.52.4
2,560
9,728
36
32
8
silu
0
True
40,960
1,000,000
0.000001
0.02
151643
151645
true
80
4
false
null
null
false
false
false
false
false
false
false
false
true
true
2.831155
medium
very_long
42
Qwen/Qwen3-Embedding-0.6B
qwen3
["Qwen3ForCausalLM"]
151669
bfloat16
4.51.3
1,024
3,072
28
16
8
silu
0
True
32,768
1,000,000
0.000001
0.02
151643
151643
true
64
2
false
null
null
false
false
false
false
false
false
false
false
true
true
0.352322
small
very_long
43
SicariusSicariiStuff/Impish_LLAMA_4B
llama
["LlamaForCausalLM"]
128256
bfloat16
4.51.3
3,072
9,216
32
32
8
silu
0
False
131,072
500,000
0.00001
0.02
128000
128019
false
96
4
false
null
null
true
false
false
false
false
false
false
false
true
true
3.623879
medium
very_long
44
zeroentropy/zerank-1-small
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.51.3
2,048
6,144
28
16
8
silu
0
True
40,960
1,000,000
0.000001
0.02
151643
151645
true
128
2
false
null
null
false
false
false
false
false
false
false
false
true
true
1.409286
medium
very_long
45
skt/A.X-4.0-Light
qwen2
["Qwen2ForCausalLM"]
102400
bfloat16
4.51.3
3,584
18,944
28
28
4
silu
0
False
16,384
1,000,000
0.00001
0.02
0
0
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
long
46
Intelligent-Internet/II-Medical-32B-Preview
qwen3
["Qwen3ForCausalLM"]
151936
bfloat16
4.52.1
5,120
25,600
64
64
8
silu
0
True
40,960
1,000,000
0.000001
0.02
null
151645
false
80
8
false
null
null
false
false
false
false
false
false
false
false
true
true
20.132659
large
very_long
47
nvidia/OpenCodeReasoning-Nemotron-1.1-32B
qwen2
["Qwen2ForCausalLM"]
152064
bfloat16
4.47.1
5,120
27,648
64
40
8
silu
0
True
65,536
1,000,000
0.000001
0.02
151643
151645
false
128
5
false
null
null
false
false
false
false
false
false
false
false
true
true
20.132659
large
very_long
48
internlm/POLAR-7B
internlm2
["InternLM2ForRewardModel"]
92544
float16
4.49.0
4,096
14,336
32
32
8
silu
null
True
262,144
50,000,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
false
false
true
true
6.442451
medium
very_long
49
skt/A.X-3.1-Light
llama
["LlamaForCausalLM"]
102400
bfloat16
4.51.3
4,096
10,880
32
32
32
silu
0.1
False
32,768
500,000
0.00001
0.02
0
0
false
128
1
false
null
null
true
false
false
false
false
false
false
false
false
true
6.442451
medium
very_long
50
microsoft/NextCoder-32B
qwen2
["Qwen2ForCausalLM"]
152064
float32
4.46.0
5,120
27,648
64
40
8
silu
0
True
32,768
1,000,000
0.000001
0.02
151643
151645
false
128
5
false
null
null
false
false
false
false
false
false
false
false
true
true
20.132659
large
very_long
51
KurmaAI/AQUA-1B
gemma3_text
["Gemma3ForCausalLM"]
262144
bfloat16
4.52.4
1,152
6,912
26
4
1
null
0
True
32,768
1,000,000
0.000001
0.02
2
[1, 106]
null
288
4
false
null
null
false
false
false
false
false
false
false
false
true
true
0.414056
small
very_long
52
naver/provence-reranker-debertav3-v1
Provence
["Provence"]
128100
float32
4.45.1
1,024
4,096
24
16
null
gelu
null
null
512
null
null
0.02
null
null
null
64
null
false
null
null
false
false
false
false
false
false
false
false
false
false
0.30199
small
short
53
cognitivecomputations/Devstral-Vision-Small-2507
mistral3
["Mistral3ForConditionalGeneration"]
null
bfloat16
4.52.4
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
false
false
false
false
false
true
false
false
true
null
null
null
54
moonshotai/Kimi-K2-Base
kimi_k2
["DeepseekV3ForCausalLM"]
163840
bfloat16
4.48.3
7,168
18,432
61
64
64
silu
0
True
131,072
50,000
0.000001
0.02
163584
163585
false
112
1
false
384
8
false
false
false
false
false
true
false
true
false
true
37.610324
large
very_long
55
marcelbinz/Llama-3.1-Centaur-70B
llama
["LlamaForCausalLM"]
128256
bfloat16
4.43.3
8,192
28,672
80
64
8
silu
0
True
131,072
500,000
0.00001
0.02
128000
128001
false
128
8
false
null
null
true
false
false
false
false
false
false
false
true
true
64.424509
large
very_long
56
0-hero/Matter-0.1-7B-DPO-preview
mistral
["MistralForCausalLM"]
32006
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
1,000,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
57
0-hero/Matter-0.1-7B-boost
mistral
["MistralForCausalLM"]
32006
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
1,000,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
58
0-hero/Matter-0.2-32B
qwen2
["Qwen2ForCausalLM"]
152064
bfloat16
4.40.0.dev0
5,120
27,392
64
40
8
silu
0
False
32,768
1,000,000
0.000001
0.02
128245
151645
false
128
5
false
null
null
false
false
false
false
false
false
false
false
true
true
20.132659
large
very_long
59
0-hero/Matter-0.2-8x22B
mixtral
["MixtralForCausalLM"]
32006
bfloat16
4.40.0.dev0
6,144
16,384
56
48
8
silu
0
False
65,536
1,000,000
0.00001
0.02
1
32000
false
128
6
false
8
2
false
false
false
false
false
false
false
true
true
true
25.367151
large
very_long
60
0-hero/flan-OIG-base
t5
["T5ForConditionalGeneration"]
32128
float32
4.27.1
null
null
null
null
null
null
null
True
null
null
null
null
null
1
false
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
61
0-hero/flan-OIG-small
t5
["T5ForConditionalGeneration"]
32128
float32
4.27.1
null
null
null
null
null
null
null
True
null
null
null
null
null
1
false
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
62
0-hero/flan-OIG-ul2
t5
["T5ForConditionalGeneration"]
32128
float32
4.27.1
null
null
null
null
null
null
null
True
null
null
null
null
null
1
null
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
63
0-hero/flan-OIG-xl
t5
["T5ForConditionalGeneration"]
32128
float32
4.27.1
null
null
null
null
null
null
null
True
null
null
null
null
null
1
false
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
64
0-hero/flan-alpaca-ul2
t5
["T5ForConditionalGeneration"]
32128
float32
4.27.1
null
null
null
null
null
null
null
True
null
null
null
null
null
1
null
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
65
0-hero/r1-7B-grpo-v3.1-epoch-2
qwen2
["Qwen2ForCausalLM"]
152064
bfloat16
4.50.0
3,584
18,944
28
28
4
silu
0
True
131,072
10,000
0.000001
0.02
151643
151643
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
66
0-hero/r1-7b-grpo-full
qwen2
["Qwen2ForCausalLM"]
152064
bfloat16
4.49.0
3,584
18,944
28
28
4
silu
0
False
131,072
10,000
0.000001
0.02
151643
151643
false
128
7
false
null
null
false
false
false
false
false
false
false
false
true
true
4.315939
medium
very_long
67
0-ma/beit-geometric-shapes-base
beit
["BeitForImageClassification"]
8192
float32
4.44.2
768
3,072
12
12
null
gelu
null
null
null
null
null
0.02
null
null
null
64
null
false
null
null
false
false
false
false
false
false
false
false
false
false
0.084935
small
null
68
0-ma/efficientnet-b2-geometric-shapes
efficientnet
["EfficientNetForImageClassification"]
null
float32
4.44.2
null
null
64
null
null
swish
null
null
null
null
null
0.02
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
false
null
null
null
69
0-ma/focalnet-geometric-shapes-tiny
focalnet
["FocalNetForImageClassification"]
null
float32
4.44.2
null
null
null
null
null
gelu
null
null
null
null
null
0.02
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
false
null
null
null
70
0-ma/mobilenet-v2-geometric-shapes
mobilenet_v2
["MobileNetV2ForImageClassification"]
null
float32
4.44.2
null
null
null
null
null
relu6
null
null
null
null
null
0.02
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
false
null
null
null
71
0-ma/swin-geometric-shapes-tiny
swin
["SwinForImageClassification"]
null
float32
4.44.2
768
null
null
null
null
gelu
null
null
null
null
null
0.02
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
false
null
null
null
72
00000-X/Dolphin-2.6-FC_Hermes-2-DPO
mistral
["MistralForCausalLM"]
32001
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
True
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
73
00000-X/Dolphin-2.6-FC_Hermes-2-Pro
mistral
["MistralForCausalLM"]
32001
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
True
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
74
00000-X/Hermes-2-DPO_WestLake-7B-v2
mistral
["MistralForCausalLM"]
32002
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
10,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
75
00000-X/Nous-Hermes-2-DPO_into_Dolphin_Mistral_2.8_v02
mistral
["MistralForCausalLM"]
32002
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
1,000,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
76
00000-X/Nous-Hermes-2-DPO_into_Nous_Hermes-2-Pro
mistral
["MistralForCausalLM"]
32002
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
10,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
77
00000-X/Nous-Hermes-2-Pro_into_Nous_Hermes-2-DPO
mistral
["MistralForCausalLM"]
32032
bfloat16
4.38.2
4,096
14,336
32
32
8
silu
0
False
32,768
10,000
0.00001
0.02
1
32000
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
78
000Yash000/crickaichat-llama-3.2-7b
llama
["LlamaForCausalLM"]
32000
float16
4.31.0
4,096
11,008
32
32
32
silu
null
True
4,096
null
0.00001
0.02
1
2
false
128
1
false
null
null
true
false
false
false
false
false
false
false
false
true
6.442451
medium
medium
79
002311-A/RoBERTwito
xlm-roberta
["XLMRobertaForSequenceClassification"]
250002
float32
4.32.1
768
3,072
12
12
null
gelu
null
True
514
null
null
0.02
0
2
null
64
null
false
null
null
false
true
false
false
false
false
false
false
false
false
0.084935
small
short
80
002311-A/robertinho
xlm-roberta
["XLMRobertaForSequenceClassification"]
250002
float32
4.32.1
768
3,072
12
12
null
gelu
null
True
514
null
null
0.02
0
2
null
64
null
false
null
null
false
true
false
false
false
false
false
false
false
false
0.084935
small
short
81
007ankit/Phi-Marketing-EMO-phi-128k-3.8b
phi3
["Phi3ForCausalLM"]
32011
float16
4.48.2
3,072
8,192
32
32
32
silu
0
True
131,072
10,000
0.00001
0.02
1
32000
false
96
1
false
null
null
false
false
false
false
false
false
false
false
false
true
3.623879
medium
very_long
82
007ankit/mergekit-della-cczeply
phi3
["Phi3ForCausalLM"]
32011
float16
4.48.2
3,072
8,192
32
32
32
silu
0
True
131,072
10,000
0.00001
0.02
1
32000
false
96
1
false
null
null
false
false
false
false
false
false
false
false
false
true
3.623879
medium
very_long
83
007ankit/mergekit-della-igvenqz
mistral
["MistralForCausalLM"]
32000
bfloat16
4.48.2
4,096
14,336
32
32
8
silu
0
False
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
84
007ankit/mergekit-ties-kjfuaal
llama
["LlamaForCausalLM"]
32000
bfloat16
4.48.2
4,096
14,336
32
32
8
silu
0
True
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
true
false
false
false
false
false
false
false
true
true
6.442451
medium
very_long
85
007ankit/mergekit-ties-xbmrcyj
llama
["LlamaForCausalLM"]
32000
bfloat16
4.48.2
4,096
14,336
32
32
8
silu
0
True
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
true
false
false
false
false
false
false
false
true
true
6.442451
medium
very_long
86
007ankit/mergekit-ties-xxazzps
llama
["LlamaForCausalLM"]
32000
bfloat16
4.48.2
4,096
14,336
32
32
8
silu
0
True
32,768
10,000
0.00001
0.02
1
2
false
128
4
false
null
null
true
false
false
false
false
false
false
false
true
true
6.442451
medium
very_long
87
007imvishesh/gemma-Code-Instruct-Finetune-test
gemma
["GemmaForCausalLM"]
256000
float16
4.38.0
2,048
16,384
18
8
1
gelu
0
True
8,192
10,000
0.000001
0.02
2
1
null
256
8
false
null
null
false
false
false
false
false
false
false
false
true
true
0.90597
small
long
88
00K4M1/ppo_LunarLander-v2
null
[]
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
null
false
null
null
false
false
false
false
false
false
false
false
false
false
null
null
null
89
01-ai/Yi-1.5-34B-32K
llama
["LlamaForCausalLM"]
64000
bfloat16
4.37.1
7,168
20,480
60
56
8
silu
0
True
32,768
5,000,000
0.000001
0.02
1
2
false
128
7
false
null
null
true
false
false
false
false
false
false
false
true
true
36.993761
large
very_long
90
01-ai/Yi-1.5-34B-Chat
llama
["LlamaForCausalLM"]
64000
bfloat16
4.40.0
7,168
20,480
60
56
8
silu
0
False
4,096
5,000,000
0.000001
0.02
1
2
false
128
7
false
null
null
true
false
false
false
false
false
false
false
true
true
36.993761
large
medium
91
01-ai/Yi-1.5-34B-Chat-16K
llama
["LlamaForCausalLM"]
64000
bfloat16
4.40.0
7,168
20,480
60
56
8
silu
0
False
16,384
5,000,000
0.000001
0.02
1
2
false
128
7
false
null
null
true
false
false
false
false
false
false
false
true
true
36.993761
large
long
92
01-ai/Yi-1.5-9B-32K
llama
["LlamaForCausalLM"]
64000
bfloat16
4.37.1
4,096
11,008
48
32
4
silu
0
True
32,768
5,000,000
0.000001
0.02
1
2
false
128
8
false
null
null
true
false
false
false
false
false
false
false
true
true
9.663676
large
very_long
93
01-ai/Yi-1.5-9B-Chat
llama
["LlamaForCausalLM"]
64000
bfloat16
4.40.0
4,096
11,008
48
32
4
silu
0
False
4,096
5,000,000
0.000001
0.02
1
2
false
128
8
false
null
null
true
false
false
false
false
false
false
false
true
true
9.663676
large
medium
94
01-ai/Yi-1.5-9B-Chat-16K
llama
["LlamaForCausalLM"]
64000
bfloat16
4.40.0
4,096
11,008
48
32
4
silu
0
False
16,384
5,000,000
0.000001
0.02
1
2
false
128
8
false
null
null
true
false
false
false
false
false
false
false
true
true
9.663676
large
long
95
01-ai/Yi-34B-200K
llama
["LlamaForCausalLM"]
64000
bfloat16
4.34.0
7,168
20,480
60
56
8
silu
null
True
200,000
10,000,000
0.00001
0.02
1
2
false
128
7
false
null
null
true
false
false
false
false
false
false
false
true
true
36.993761
large
very_long
96
01-ai/Yi-Coder-1.5B-Chat
llama
["LlamaForCausalLM"]
64000
bfloat16
4.44.0
2,048
5,504
24
16
16
silu
0
False
131,072
10,000,000
0.00001
0.02
1
7
false
128
1
false
null
null
true
false
false
false
false
false
false
false
false
true
1.20796
medium
very_long
97
019100385b/SitiosTuristicosyArqueologicosCusco
vit
["ViTForImageClassification"]
null
float32
4.40.2
768
3,072
12
12
null
gelu
null
null
null
null
null
0.02
null
null
null
64
null
false
null
null
false
false
false
false
false
false
false
false
false
false
0.084935
small
null
98
01GangaPutraBheeshma/ut_imdb_movies_classification
t5
["T5ForConditionalGeneration"]
32128
float32
4.35.2
null
null
null
null
null
null
null
True
null
null
null
null
null
1
null
null
null
false
null
null
false
false
false
true
false
false
false
false
false
false
null
null
null
99
01PrathamS/mistral-v2_finetune_unsloth_train_merged
mistral
["MistralForCausalLM"]
32000
float16
4.51.1
4,096
14,336
32
32
8
silu
0
True
32,768
1,000,000
0.00001
0.02
1
2
false
128
4
false
null
null
false
false
false
false
false
false
true
false
true
true
6.442451
medium
very_long
End of preview. Expand in Data Studio
README.md exists but content is empty.
Downloads last month
13