{ "_name_or_path": "yzhuang/llava-jamba-1.5-Mini-linear-anyres-patch", "architectures": [ "LlavaJambaForCausalLM" ], "attention_dropout": 0.0, "attn_layer_offset": 4, "attn_layer_period": 8, "bos_token_id": 1, "eos_token_id": [ 2, 518 ], "expert_layer_offset": 1, "expert_layer_period": 2, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 4096, "image_aspect_ratio": "anyres", "image_grid_pinpoints": [ [ 336, 336 ], [ 336, 672 ], [ 336, 1008 ], [ 336, 1344 ], [ 336, 1680 ], [ 336, 2016 ], [ 336, 2352 ], [ 336, 2688 ], [ 336, 3024 ], [ 336, 3360 ], [ 336, 3696 ], [ 336, 4032 ], [ 336, 4368 ], [ 336, 4704 ], [ 336, 5040 ], [ 672, 336 ], [ 672, 672 ], [ 672, 1008 ], [ 672, 1344 ], [ 672, 1680 ], [ 672, 2016 ], [ 672, 2352 ], [ 672, 2688 ], [ 1008, 336 ], [ 1008, 672 ], [ 1008, 1008 ], [ 1008, 1344 ], [ 1008, 1680 ], [ 1344, 336 ], [ 1344, 672 ], [ 1344, 1008 ], [ 1344, 1344 ], [ 1680, 336 ], [ 1680, 672 ], [ 1680, 1008 ], [ 2016, 336 ], [ 2016, 672 ], [ 2352, 336 ], [ 2352, 672 ], [ 2688, 336 ], [ 2688, 672 ], [ 3024, 336 ], [ 3360, 336 ], [ 3696, 336 ], [ 4032, 336 ], [ 4368, 336 ], [ 4704, 336 ], [ 5040, 336 ] ], "initializer_range": 0.02, "intermediate_size": 14336, "mamba_conv_bias": true, "mamba_d_conv": 4, "mamba_d_state": 16, "mamba_dt_rank": 256, "mamba_expand": 2, "mamba_proj_bias": false, "max_position_embeddings": 262144, "mm_hidden_size": 1024, "mm_patch_merge_type": "spatial_unpad", "mm_projector_lr": null, "mm_projector_type": "linear", "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "mm_vision_tower": "openai/clip-vit-large-patch14-336", "mm_vision_tower_lr": 2e-06, "model_type": "jamba", "num_attention_heads": 32, "num_experts": 16, "num_experts_per_tok": 2, "num_hidden_layers": 32, "num_key_value_heads": 8, "num_logits_to_keep": 1, "output_router_logits": false, "pad_token_id": 0, "rms_norm_eps": 1e-06, "router_aux_loss_coef": 0.001, "sliding_window": null, "tie_word_embeddings": false, "tokenizer_model_max_length": 18240, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.45.0.dev0", "tune_mm_mlp_adapter": false, "unfreeze_mm_vision_tower": false, "use_cache": false, "use_image_rope": false, "use_mamba_kernels": true, "use_mm_proj": true, "vocab_size": 65536 }