{ "image_grid_pinpoints": [ [ 384, 768 ], [ 384, 1152 ], [ 384, 1536 ], [ 384, 1920 ], [ 384, 2304 ], [ 384, 2688 ], [ 384, 3072 ], [ 384, 3456 ], [ 384, 3840 ], [ 768, 384 ], [ 768, 768 ], [ 768, 1152 ], [ 768, 1536 ], [ 768, 1920 ], [ 1152, 384 ], [ 1152, 768 ], [ 1152, 1152 ], [ 1536, 384 ], [ 1536, 768 ], [ 1920, 384 ], [ 1920, 768 ], [ 2304, 384 ], [ 2688, 384 ], [ 3072, 384 ], [ 3456, 384 ], [ 3840, 384 ] ], "tie_word_embeddings": true, "transformers_version": "4.45.0.dev0", "architectures": [ "LlavaNextForConditionalGeneration" ], "model_type": "llava_next", "use_image_newline_parameter": true, "vision_feature_layer": [ 3, 7, 15, 26 ], "vision_feature_select_strategy": "full", "text_config": { "architectures": [ "GraniteForCausalLM" ], "attention_bias": false, "attention_dropout": 0.1, "attention_multiplier": 0.015625, "bos_token_id": 0, "embedding_multiplier": 12.0, "eos_token_id": 0, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 8192, "logits_scaling": 8.0, "max_position_embeddings": 16384, "mlp_bias": false, "model_type": "granite", "num_attention_heads": 32, "num_hidden_layers": 40, "num_key_value_heads": 8, "pad_token_id": 0, "residual_multiplier": 0.22, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 300000, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "transformers_version": "4.46.0.dev0", "use_cache": true, "vocab_size": 49156 }, "image_token_index": 49155, "vision_config": { "hidden_size": 1152, "image_size": 384, "intermediate_size": 4304, "model_type": "siglip_vision_model", "num_attention_heads": 16, "num_hidden_layers": 27, "patch_size": 14 } }