Himetsu committed
Commit cb1ea9f · verified · 1 Parent(s): d04b7a5

Upload LlavaForConditionalGeneration

Files changed (1):
  1. config.json +5 -2
config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "_commit_hash": null,
+  "_name_or_path": "../pixtral",
   "architectures": [
     "LlavaForConditionalGeneration"
   ],
@@ -9,8 +11,8 @@
   "projector_hidden_act": "gelu",
   "text_config": {
     "hidden_size": 5120,
-    "head_dim": 128,
     "intermediate_size": 14336,
+    "is_composition": true,
     "max_position_embeddings": 1024000,
     "model_type": "mistral",
     "num_hidden_layers": 40,
@@ -21,11 +23,12 @@
     "vocab_size": 131072
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.0.dev0",
+  "transformers_version": null,
   "vision_config": {
     "head_dim": 64,
     "hidden_act": "gelu",
     "image_size": 1024,
+    "is_composition": true,
     "model_type": "pixtral",
     "patch_size": 16,
     "rope_theta": 10000.0,