{ "architectures": [ "LlavaQwen2ForCausalLM" ], "auto_map": { "AutoConfig": "llava_qwen.LlavaConfig", "AutoModelForCausalLM": "llava_qwen.LlavaQwen2ForCausalLM", "AutoProcessor": "processing_fastvlm.FastVLMProcessor" }, "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151645, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 896, "image_aspect_ratio": "pad", "image_grid_pinpoints": null, "initializer_range": 0.02, "intermediate_size": 4864, "max_position_embeddings": 32768, "max_window_layers": 24, "mm_hidden_size": 3072, "mm_patch_merge_type": "flat", "mm_projector_lr": null, "mm_projector_type": "mlp2x_gelu", "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "mm_vision_tower": "mobileclip_l_1024", "model_type": "llava_qwen2", "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": 32768, "tie_word_embeddings": true, "tokenizer_model_max_length": 8192, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.39.3", "tune_mm_mlp_adapter": false, "unfreeze_mm_vision_tower": true, "use_cache": true, "use_mm_proj": true, "use_sliding_window": false, "vision_config": { "cls_ratio": 2.0, "down_patch_size": 7, "down_stride": 2, "downsamples": [true, true, true, true, true], "embed_dims": [96, 192, 384, 768, 1536], "hidden_size": 1024, "image_size": 1024, "intermediate_size": 3072, "layer_scale_init_value": 1e-5, "layers": [2, 12, 24, 4, 2], "mlp_ratios": [4, 4, 4, 4, 4], "num_classes": 1000, "patch_size": 64, "pos_embs_shapes": [null, null, null, [7, 7], [7, 7]], "projection_dim": 768, "repmixer_kernel_size": 3, "token_mixers": ["repmixer", "repmixer", "repmixer", "attention", "attention"] }, "vocab_size": 151936 }