junnei committed on
Commit
173afa3
·
verified ·
1 Parent(s): 7280c99

Update config.json

Browse files

fix model config file

Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "Gemma3ForConditionalGeneration"
4
  ],
5
  "audio_config": {
6
  "activation": "swish",
@@ -55,7 +55,7 @@
55
  "image_token_index": 262144,
56
  "initializer_range": 0.02,
57
  "mm_tokens_per_image": 256,
58
- "model_type": "gemma3",
59
  "speech_lora": {
60
  "dp": 0.01,
61
  "layer": "((layers.*self_attn\\.(q|k|v|o)_proj)|(layers.*mlp\\.(gate|up|down)_proj))",
@@ -94,7 +94,7 @@
94
  "vocab_size": 262208
95
  },
96
  "torch_dtype": "bfloat16",
97
- "transformers_version": "4.50.0.dev0",
98
  "use_cache": false,
99
  "vision_config": {
100
  "attention_dropout": 0.0,
 
1
  {
2
  "architectures": [
3
+ "Gemma3MMForConditionalGeneration"
4
  ],
5
  "audio_config": {
6
  "activation": "swish",
 
55
  "image_token_index": 262144,
56
  "initializer_range": 0.02,
57
  "mm_tokens_per_image": 256,
58
+ "model_type": "gemma3mm",
59
  "speech_lora": {
60
  "dp": 0.01,
61
  "layer": "((layers.*self_attn\\.(q|k|v|o)_proj)|(layers.*mlp\\.(gate|up|down)_proj))",
 
94
  "vocab_size": 262208
95
  },
96
  "torch_dtype": "bfloat16",
97
+ "transformers_version": "4.50.2",
98
  "use_cache": false,
99
  "vision_config": {
100
  "attention_dropout": 0.0,