fede97 committed (verified)
Commit 7cb5e22 · 1 Parent(s): 1bd4b90

Update config.json

Files changed (1):
  config.json (+2 -2)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/leonardo_scratch/large/userexternal/fcocchi0/rag_mlmm/checkpoints/visual_rag/LLaMA_3.1-generator-template_1-enc_info_llavaX3-freq/checkpoint-600",
+  "_name_or_path": "reflectiva",
   "architectures": [
     "LlavaLlamaForCausalLM"
   ],
@@ -27,7 +27,7 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "/leonardo_scratch/large/userexternal/fcocchi0/rag_mlmm/hf_models/openai/clip-vit-large-patch14-336/models--openai--clip-vit-large-patch14-336/snapshots/ce19dc912ca5cd21c8a653c79e251e808ccabcd1",
+  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
   "model_type": "llava_llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,