{
  "_name_or_path": "/data/cambrian-u/ckpt/20251012_mix70960k_SPMD_7b_finetune_diffloss_token256_dit2_4_cfg0_1_context512_ep1_v6-rescue",
  "architectures": [
    "CambrianQwenForCausalLM"
  ],
  "attention_dropout": 0.0,
  "aux_regression": false,
  "aux_regression_coef": 1.0,
  "bos_token_id": 151643,
  "connector_only": true,
  "ddt_encoder_depth": 2,
  "diff_head_lr": 0.000565,
  "diffusion_base_dim": null,
  "diffusion_class_dropout_prob": 0.1,
  "diffusion_model_channels": 1152,
  "diffusion_model_depth": 32,
  "diffusion_model_heads": 32,
  "diffusion_model_hidden_size": 2048,
  "diffusion_model_z_channels": 2048,
  "diffusion_norm_stats_path": null,
  "diffusion_split_per_token": 256,
  "diffusion_timesteps_per_sample": 1,
  "eos_token_id": 151645,
  "freeze_mm_mlp_adapter": false,
  "hidden_act": "silu",
  "hidden_size": 3584,
  "image_aspect_ratio": "square",
  "image_position": 35,
  "initializer_range": 0.02,
  "intermediate_size": 18944,
  "max_position_embeddings": 32768,
  "max_window_layers": 28,
  "miv_token_len": 0,
  "mm_hidden_size": 1152,
  "mm_projector_lr": null,
  "mm_projector_type": "mlp2x_gelu",
  "mm_use_im_patch_token": false,
  "mm_use_im_start_end": true,
  "mm_vision_sampler_lr": null,
  "mm_vision_select_feature": "patch",
  "mm_vision_select_layer": -1,
  "mm_vision_tower_aux_list": [
    "google/siglip2-so400m-patch14-224"
  ],
  "mm_vision_tower_aux_token_len_list": [
    256
  ],
  "mm_vision_tower_lr": 2e-06,
  "model_type": "cambrian_qwen",
  "num_attention_heads": 28,
  "num_hidden_layers": 28,
  "num_key_value_heads": 4,
  "pretrain_adapter_and_vision_head": null,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "si_token_len": 729,
  "sliding_window": 131072,
  "tie_word_embeddings": false,
  "tokenizer_model_max_length": 512,
  "tokenizer_padding_side": "right",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.0",
  "tune_adapter_and_vision_head": false,
  "tune_mm_mlp_adapter": false,
  "tune_vision_head": false,
  "unfreeze_mm_vision_tower": false,
  "use_cache": false,
  "use_mm_proj": true,
  "use_sliding_window": false,
  "vision_coef": 2.0,
  "vision_hidden_size": 1152,
  "vision_loss": "diffusion-loss",
  "vision_loss_mode": "query",
  "vision_tower_aux_token_len_list": [
    256
  ],
  "vocab_size": 151667
}
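For reference, a minimal sketch of inspecting this configuration with the Python standard library. The file path "config.json" is an assumption for illustration; actually instantiating the model requires the Cambrian codebase that registers the custom "cambrian_qwen" model type, which is not shown here.

import json

# Load the raw config file shown above (path assumed for illustration).
with open("config.json") as f:
    cfg = json.load(f)

# A few fields that characterize the checkpoint: a Qwen-based language backbone
# (hidden_size 3584, 28 layers, GQA with 4 KV heads), an auxiliary SigLIP2
# vision tower, and a diffusion-based vision loss applied in "query" mode.
print(cfg["model_type"])                              # cambrian_qwen
print(cfg["hidden_size"], cfg["num_hidden_layers"])   # 3584 28
print(cfg["mm_vision_tower_aux_list"])                # ['google/siglip2-so400m-patch14-224']
print(cfg["vision_loss"], cfg["vision_loss_mode"])    # diffusion-loss query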