Dataset schema (40 columns; dtype plus the observed range, string-length range, or number of distinct values):

| column | dtype | observed range / classes |
| --- | --- | --- |
| index | int64 | 0 – 125k |
| modelId | string | length 6 – 115 |
| config_model_type | string | length 2 – 46 |
| config_architectures | string | length 2 – 91 |
| config_vocab_size | string | length 1 – 8 |
| config_torch_dtype | string | 7 distinct values |
| config_transformers_version | string | 228 distinct values |
| config_hidden_size | float64 | 0 – 18.4k |
| config_intermediate_size | float64 | 0 – 25.2M |
| config_num_hidden_layers | float64 | -1 – 260 |
| config_num_attention_heads | string | 47 distinct values |
| config_num_key_value_heads | float64 | 0 – 4.1k |
| config_hidden_act | string | 19 distinct values |
| config_attention_dropout | float64 | 0 – 0.5 |
| config_use_cache | string | 3 distinct values |
| config_max_position_embeddings | float64 | -1 – 10.5M |
| config_rope_theta | float64 | 256 – 100B |
| config_rms_norm_eps | float64 | 0 – 0 |
| config_initializer_range | float64 | 0 – 2 |
| config_bos_token_id | string | 158 distinct values |
| config_eos_token_id | string | 339 distinct values |
| config_tie_word_embeddings | bool | 2 classes |
| config_head_dimension | float64 | 0.5 – 3.07k |
| config_gqa_ratio | float64 | 0.5 – 64 |
| config_moe_enabled | bool | 1 class |
| config_n_routed_experts | float64 | 1 – 384 |
| config_num_experts_per_tok | float64 | 1 – 64 |
| is_llama_family | bool | 2 classes |
| is_bert_family | bool | 2 classes |
| is_gpt_family | bool | 2 classes |
| is_t5_family | bool | 2 classes |
| is_whisper_family | bool | 2 classes |
| is_deepseek_family | bool | 2 classes |
| is_mistral_family | bool | 2 classes |
| uses_moe | bool | 2 classes |
| uses_gqa | bool | 2 classes |
| uses_rope | bool | 2 classes |
| config_approx_params_billions | float64 | -0.2 – 606 |
| size_category | string | 4 distinct values |
| context_category | string | 4 distinct values |
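To work with the table rather than just read it, one would typically load it into a dataframe. A minimal sketch, assuming the dump has been exported to a Parquet file named `model_configs.parquet` (the file name and format are assumptions; only the column names come from the schema above):

```python
import pandas as pd

# Hypothetical export of the ~125k-row model-config table shown here.
df = pd.read_parquet("model_configs.parquet")

# `config_*` columns mirror each model's config.json; the is_*_family
# and uses_* columns are precomputed boolean flags.
llama_gqa = df[df["is_llama_family"] & df["uses_gqa"]]
print(llama_gqa[["modelId", "config_hidden_size",
                 "config_num_hidden_layers", "size_category"]].head())
```

Note that several numeric-looking columns (e.g. `config_vocab_size`, `config_num_attention_heads`, `config_use_cache`) are typed as strings in the schema, so they may need explicit casting before numeric filtering.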
Sample rows (index 125,200 – 125,270):

| index | modelId | config_model_type | config_architectures | config_vocab_size | config_torch_dtype | config_transformers_version | config_hidden_size | config_intermediate_size | config_num_hidden_layers | config_num_attention_heads | config_num_key_value_heads | config_hidden_act | config_attention_dropout | config_use_cache | config_max_position_embeddings | config_rope_theta | config_rms_norm_eps | config_initializer_range | config_bos_token_id | config_eos_token_id | config_tie_word_embeddings | config_head_dimension | config_gqa_ratio | config_moe_enabled | config_n_routed_experts | config_num_experts_per_tok | is_llama_family | is_bert_family | is_gpt_family | is_t5_family | is_whisper_family | is_deepseek_family | is_mistral_family | uses_moe | uses_gqa | uses_rope | config_approx_params_billions | size_category | context_category |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 125,200 | zypchn/dqn-Lunar-Lander | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,201 | zyr4c31/layoutlm-funsd | layoutlm | ["LayoutLMForTokenClassification"] | 30522 | float32 | 4.48.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | false | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 125,202 | zyshan-ds/lab1_random | marian | ["MarianMTModel"] | 59514 | float32 | 4.48.2 | null | null | 6 | null | null | null | 0 | True | 512 | null | null | null | 0 | 0 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 125,203 | zyt020713/fortunetelling | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,204 | zyusc/meta-llama-Meta-Llama-3-8B-Instruct-fine-tune-alpaca-english-avg-similarity | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,205 | zyusc/meta-llama-Meta-Llama-3-8B-Instruct-fine-tune-alpaca-english-top2-similarity | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,206 | zyusc/meta-llama-Meta-Llama-3.1-8B-Instruct-fine-tune-alpaca-english-similarity-0.156022 | llama | ["LlamaForCausalLM"] | 128258 | float16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,207 | zyznull/RankingGPT-bloom-560m | bloom | ["BloomForCausalLM"] | 250880 | float32 | 4.29.0 | 1,024 | null | null | null | null | null | 0 | True | null | null | null | 0.02 | 1 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,208 | zz-xx/gemma-7b-bnb-4bit-bias-f16 | gemma | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,209 | zz-xx/gemma-7b-bnb-4bit-bias-merged-16-bit | gemma | ["GemmaForCausalLM"] | 256000 | float16 | 4.40.0 | 3,072 | 24,576 | 28 | 16 | 16 | gelu | 0 | True | 8,192 | 10,000 | 0.000001 | 0.02 | 2 | 1 | null | 192 | 1 | false | null | null | false | false | false | false | false | false | false | false | false | true | 3.170894 | medium | long |
| 125,210 | zz-xx/gemma-7b-bnb-4bit-bias-q5_k_m | gemma | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,211 | zz-xx/gemma-7b-bnb-4bit-bias-q8_0 | gemma | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,212 | zz-xx/gemma-7b-bnb-4bit-orpo-bias-detection-f16 | gemma | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,213 | zz-xx/llama-3-8b-bnb-4bit-bias-detection-16-bit | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.40.0 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,214 | zz-xx/llama-3-8b-bnb-4bit-bias-detection-q8_0 | llama | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | true | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,215 | zz123tym/sd-class-butterflies-32 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,216 | zz990906/GPTNeoX-160M-minipile-final | gpt_neox | ["GPTNeoXForCausalLM"] | 50304 | bfloat16 | 4.46.2 | 768 | 3,072 | 12 | 12 | null | gelu | 0 | True | 2,048 | 10,000 | null | 0.02 | 0 | 0 | false | 64 | null | false | null | null | false | false | true | false | false | false | false | false | false | true | 0.084935 | small | medium |
| 125,217 | zz990906/bert-base-uncased-finetuned-cda | bert | ["BertForMaskedLM"] | 30522 | float32 | 4.36.0.dev0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 512 | null | null | 0.02 | null | null | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 125,218 | zzejiao/581_milestone5 | bart | ["BartForConditionalGeneration"] | 50265 | float32 | 4.50.3 | null | null | 12 | null | null | null | 0.1 | True | 1,024 | null | null | null | 0 | 2 | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | short |
| 125,219 | zzen0008/DeepCoder-14B-Preview-W8A8-Dynamic-Per-Token | qwen2 | ["Qwen2ForCausalLM"] | 152064 | float32 | 4.49.0 | 5,120 | 13,824 | 48 | 40 | 8 | silu | 0 | True | 131,072 | 1,000,000 | 0.00001 | 0.02 | 151646 | 151643 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 15.099494 | large | very_long |
| 125,220 | zzfive/ComfyChat-InternLM2.5-7b-v2-2 | internlm2 | ["InternLM2ForCausalLM"] | 92544 | float16 | 4.39.3 | 4,096 | 14,336 | 32 | 32 | 8 | silu | null | True | 32,768 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | false | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,221 | zzhang1987/Qwen2.5-3B-Instruct-GRPO | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.51.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 125,222 | zzhang1987/Qwen2.5-7B-Instruct-GRPO | qwen2 | ["Qwen2ForCausalLM"] | 152064 | bfloat16 | 4.51.0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 125,223 | zzhang1987/Qwen2.5-VL-3B-Instruct-Open-R1-Distill-select | qwen2_5_vl | ["Qwen2_5_VLForConditionalGeneration"] | 151936 | bfloat16 | 4.49.0.dev0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | False | 128,000 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 125,224 | zzhnb/BioGPT_finetuned_ner | gpt2 | ["GPT2ForTokenClassification"] | 42384 | float32 | 4.28.1 | null | 4,096 | null | null | null | gelu | null | True | null | null | null | 0.02 | 0 | 2 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 125,225 | zzhnb/biogpt-finetuned-ner | gpt2 | ["GPT2ForTokenClassification"] | 42384 | float32 | 4.28.1 | null | 4,096 | null | null | null | gelu | null | True | null | null | null | 0.02 | 0 | 2 | null | null | null | false | null | null | false | false | true | false | false | false | false | false | false | false | null | null | null |
| 125,226 | zzingo5/roberta-base-klue-ynat-classification | roberta | ["RobertaForSequenceClassification"] | 32000 | float32 | 4.46.3 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 125,227 | zzjo/whisper_medium_zh | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.30.0.dev0 | null | null | 24 | null | null | null | 0 | True | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 125,228 | zzman/Qwen-SFT-training | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.50.0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 125,229 | zzman/Qwen2.5-3B-SFT | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.50.0 | 2,048 | 11,008 | 36 | 16 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 128 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 1.811939 | medium | very_long |
| 125,230 | zzmez/ppo-LunarLander-v2 | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,231 | zzra1n/CodeLlama-syz-toy | llama | ["LlamaForCausalLM"] | 32016 | float16 | 4.39.3 | 4,096 | 11,008 | 32 | 32 | 32 | silu | 0 | True | 16,384 | 1,000,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 6.442451 | medium | long |
| 125,232 | zztaoqaq/surgix-QwQ-32b-finetune-siglip2-image-final-ckpt-3000 | qwen2 | ["LlavaQwenForCausalLM"] | 152064 | bfloat16 | 4.40.0.dev0 | 5,120 | 27,648 | 64 | 40 | 8 | silu | 0 | False | 131,072 | 1,000,000 | 0.00001 | 0.02 | 151643 | 151645 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 20.132659 | large | very_long |
| 125,233 | zztaoqaq/surgix-QwQ-32b-finetune-siglip2-image-final-no-system-prompt-ckpt-7000 | qwen2 | ["LlavaQwenForCausalLM"] | 152064 | bfloat16 | 4.40.0.dev0 | 5,120 | 27,648 | 64 | 40 | 8 | silu | 0 | False | 131,072 | 1,000,000 | 0.00001 | 0.02 | 151643 | 151645 | false | 128 | 5 | false | null | null | false | false | false | false | false | false | false | false | true | true | 20.132659 | large | very_long |
| 125,234 | zztaoqaq/surgix-deepseek-R1-Qwen2.5-7B-finetune-image | qwen2 | ["LlavaQwenForCausalLM"] | 152064 | bfloat16 | 4.40.0.dev0 | 3,584 | 18,944 | 28 | 28 | 4 | silu | 0 | True | 131,072 | 10,000 | 0.000001 | 0.02 | 151643 | 151643 | false | 128 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 4.315939 | medium | very_long |
| 125,235 | zztheaven/Llama-3.2-1B-Instruct-skyt1-GRPO | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.50.0.dev0 | 2,048 | 8,192 | 16 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 64 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 0.805306 | small | very_long |
| 125,236 | zztheaven/Llama-3.2-3B-Instruct-skyt1-GRPO | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.50.0.dev0 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 125,237 | zzttbrdd/sn6_00 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,238 | zzttbrdd/sn6_00l | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,239 | zzttbrdd/sn6_01 | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,240 | zzttbrdd/sn6_01_new | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,241 | zzttbrdd/sn6_01l | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,242 | zzttbrdd/sn6_02g | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,243 | zzttbrdd/sn6_02l | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,244 | zzttbrdd/sn6_02m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,245 | zzttbrdd/sn6_03l | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,246 | zzttbrdd/sn6_03m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,247 | zzttbrdd/sn6_04g | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,248 | zzttbrdd/sn6_07l | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128001 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,249 | zzttbrdd/sn6_07m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,250 | zzttbrdd/sn6_08m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,251 | zzttbrdd/sn6_09m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,252 | zzttbrdd/sn6_10m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,253 | zzttbrdd/sn6_20_new | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,254 | zzttbrdd/sn6_21g | gemma | ["GemmaForCausalLM"] | 256000 | bfloat16 | 4.38.1 | 2,048 | 16,384 | 18 | 8 | 1 | gelu | 0 | False | 8,192 | 10,000 | 0.000001 | 0.02 | 204 | 213 | null | 256 | 8 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.90597 | small | long |
| 125,255 | zzttbrdd/sn6_6m | llama | ["LlamaForCausalLM"] | 32000 | bfloat16 | 4.38.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | False | 32,768 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,256 | zzxslp/som-llava-v1.5-13b-listing | llava_llama | ["LlavaLlamaForCausalLM"] | 32000 | bfloat16 | 4.36.2 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 125,257 | zzxslp/som-llava-v1.5-13b-qa | llava_llama | ["LlavaLlamaForCausalLM"] | 32000 | bfloat16 | 4.36.2 | 5,120 | 13,824 | 40 | 40 | 40 | silu | 0 | True | 4,096 | 10,000 | 0.00001 | 0.02 | 1 | 2 | false | 128 | 1 | false | null | null | true | false | false | false | false | false | false | false | false | true | 12.582912 | large | medium |
| 125,258 | zzy0123/com | llava_next | ["LlavaNextForConditionalGeneration"] | 32064 | float16 | 4.51.3 | 4,096 | null | null | null | null | null | null | True | null | null | null | null | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 125,259 | zzy0123/text | llava_next | ["LlavaNextForConditionalGeneration"] | 32064 | float16 | 4.51.3 | 4,096 | null | null | null | null | null | null | True | null | null | null | null | null | null | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | true | null | null | null |
| 125,260 | zzzch/Qwen2.5-0.5B-Open-R1-Distill | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | False | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 125,261 | zzzch/Qwen2.5-0.5B-Open-R1-GRPO | qwen2 | ["Qwen2ForCausalLM"] | 151936 | bfloat16 | 4.49.0.dev0 | 896 | 4,864 | 24 | 14 | 2 | silu | 0 | True | 32,768 | 1,000,000 | 0.000001 | 0.02 | 151643 | 151645 | true | 64 | 7 | false | null | null | false | false | false | false | false | false | false | false | true | true | 0.231211 | small | very_long |
| 125,262 | zzzdonut/cs224s-ascend-finetuned-2 | whisper | ["WhisperForConditionalGeneration"] | 51865 | float32 | 4.37.2 | null | null | 6 | null | null | null | 0 | False | null | null | null | null | 50257 | 50257 | null | null | null | false | null | null | false | false | false | false | true | false | false | false | false | false | null | null | null |
| 125,263 | zzzmahesh/Flowable-Docs-Llama-3.1-8B | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.2 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | very_long |
| 125,264 | zzzmahesh/Flowable-Docs-Llama-3.2-3B | llama | ["LlamaForCausalLM"] | 128256 | bfloat16 | 4.44.2 | 3,072 | 8,192 | 28 | 24 | 8 | silu | 0 | True | 131,072 | 500,000 | 0.00001 | 0.02 | 128000 | [128001, 128008, 128009] | true | 128 | 3 | false | null | null | true | false | false | false | false | false | false | false | true | true | 3.170894 | medium | very_long |
| 125,265 | zzzmahesh/Meta-Llama-3-8B-Instruct-quantized.w4 | llama | ["LlamaForCausalLM"] | 128256 | float16 | 4.43.4 | 4,096 | 14,336 | 32 | 32 | 8 | silu | 0 | True | 8,192 | 500,000 | 0.00001 | 0.02 | 128000 | 128009 | false | 128 | 4 | false | null | null | true | false | false | false | false | false | false | false | true | true | 6.442451 | medium | long |
| 125,266 | zzzotop/cross-lingual-transfer-ner-demo-1 | roberta | ["RobertaForTokenClassification"] | 50000 | float32 | 4.32.0 | 768 | 3,072 | 12 | 12 | null | gelu | null | True | 514 | null | null | 0.02 | 0 | 2 | null | 64 | null | false | null | null | false | true | false | false | false | false | false | false | false | false | 0.084935 | small | short |
| 125,267 | zzzotop/zero-shot-cross-lingual-transfer-demo-masked | distilbert | ["DistilBertForMaskedLM"] | 119547 | float32 | 4.31.0 | null | null | null | null | null | null | 0.1 | null | 512 | null | null | 0.02 | null | null | null | null | null | false | null | null | false | true | false | false | false | false | false | false | false | false | null | null | short |
| 125,268 | zzzyuqing/light-geo-controlnet | null | [] | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,269 | zzzzzzttt/swin-tiny-patch4-window7-224-finetuned-eurosat | swin | ["SwinForImageClassification"] | null | float32 | 4.18.0 | 768 | null | null | null | null | gelu | null | null | null | null | null | 0.02 | null | null | null | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
| 125,270 | zzzzzzz11/vit-gpt2-image-captioning | vision-encoder-decoder | ["VisionEncoderDecoderModel"] | null | float32 | null | null | null | null | null | null | null | null | null | null | null | null | null | 50256 | 50256 | false | null | null | false | null | null | false | false | false | false | false | false | false | false | false | false | null | null | null |
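The derived columns in these rows are consistent with simple functions of the raw config fields: in every fully populated row above, `config_head_dimension` equals `config_hidden_size / config_num_attention_heads`, `config_gqa_ratio` equals `config_num_attention_heads / config_num_key_value_heads`, and `config_approx_params_billions` matches 12 · num_hidden_layers · hidden_size² / 10⁹ (e.g. the Llama-3-8B rows: 4096 / 32 = 128, 32 / 8 = 4, 12 · 32 · 4096² / 10⁹ ≈ 6.442451). A minimal sketch of that recomputation; the formulas are inferred from the sample rows, not taken from dataset documentation:

```python
def derived_config_columns(hidden_size: int,
                           num_attention_heads: int,
                           num_key_value_heads: int,
                           num_hidden_layers: int) -> dict:
    """Recompute the derived columns from raw config fields.

    Assumption: these formulas are reverse-engineered from the sample
    rows above, not confirmed by the dataset authors.
    """
    return {
        "config_head_dimension": hidden_size / num_attention_heads,
        "config_gqa_ratio": num_attention_heads / num_key_value_heads,
        # 12 * L * d^2 approximates the attention + MLP weights of a
        # standard transformer block and ignores embeddings, which is
        # why an "8B" Llama shows up as ~6.44 approx billions.
        "config_approx_params_billions":
            12 * num_hidden_layers * hidden_size ** 2 / 1e9,
    }

# Llama-3-8B-style config, matching rows 125,204 and 125,205 above.
print(derived_config_columns(4096, 32, 8, 32))
# -> {'config_head_dimension': 128.0, 'config_gqa_ratio': 4.0,
#     'config_approx_params_billions': 6.442450944}
```

The categorical columns appear to follow the same derived values in this sample: `uses_gqa` is true exactly when `config_gqa_ratio` exceeds 1, `context_category` tracks `config_max_position_embeddings` (512–1,024 → short, 2,048–4,096 → medium, 8,192–16,384 → long, 32,768 and above → very_long here), and `size_category` tracks `config_approx_params_billions` (roughly ≤0.9 → small, ~1.8–6.4 → medium, ≥12.6 → large here); the exact bucket boundaries elsewhere in the dataset are not visible from these rows.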