{
  "_vocab_size": 257152,
  "architectures": [
    "PaliGemmaForConditionalGeneration"
  ],
  "hidden_size": 2048,
  "image_token_index": 0,
  "model_type": "paligemma",
  "projection_dim": 32,
  "projector_hidden_act": "gelu",
  "text_config": {
    "attention_probs_dropout_prob": 0.1,
    "head_dim": 8,
    "hidden_activation": "gelu_pytorch_tanh",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 32,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1",
      "2": "LABEL_2"
    },
    "intermediate_size": 37,
    "is_training": true,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1,
      "LABEL_2": 2
    },
    "max_position_embeddings": 512,
    "model_type": "gemma",
    "num_attention_heads": 4,
    "num_choices": 4,
    "num_hidden_layers": 2,
    "num_image_tokens": 16,
    "num_key_value_heads": 1,
    "pad_token_id": 1,
    "seq_length": 264,
    "type_sequence_label_size": 2,
    "type_vocab_size": 16,
    "use_labels": true,
    "use_token_type_ids": false,
    "vocab_size": 257153
  },
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vision_config": {
    "attention_dropout": 0.1,
    "dropout": 0.1,
    "hidden_size": 32,
    "image_size": 20,
    "initializer_range": 0.02,
    "intermediate_size": 37,
    "is_training": true,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 4,
    "num_hidden_layers": 2,
    "num_image_tokens": 4,
    "num_key_value_heads": 1,
    "patch_size": 5,
    "projection_dim": 32,
    "use_labels": true
  },
  "vision_feature_layer": -1,
  "vision_feature_select_strategy": "default"
}