khulnasoft committed
Commit e44a96f · verified · Parent: 74d20d4

Create config.json

Files changed (1): config.json (+66 -24)
config.json CHANGED
@@ -1,28 +1,70 @@
 {
+  "aligner_config": {
+    "cls": "MlpProjector",
+    "model_type": "aligner",
+    "params": {
+      "depth": 2,
+      "input_dim": 1024,
+      "n_embed": 4096,
+      "projector_type": "low_high_hybrid_split_mlp_gelu"
+    }
+  },
   "architectures": [
-    "LlamaForCausalLM"
+    "MultiModalityCausalLM"
   ],
-  "bos_token_id": 32013,
-  "eos_token_id": 32014,
-  "hidden_act": "silu",
-  "hidden_size": 2048,
-  "initializer_range": 0.02,
-  "intermediate_size": 5504,
-  "max_position_embeddings": 16384,
-  "model_type": "llama",
-  "num_attention_heads": 16,
-  "num_hidden_layers": 24,
-  "num_key_value_heads": 16,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-06,
-  "rope_scaling": {
-    "factor": 4.0,
-    "type": "linear"
+  "language_config": {
+    "max_position_embeddings": 16384,
+    "model_type": "llama",
+    "num_hidden_layers": 30,
+    "torch_dtype": "float16",
+    "vocab_size": 102400
   },
-  "rope_theta": 100000,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.34.1",
-  "use_cache": true,
-  "vocab_size": 32256
-}
+  "model_type": "multi_modality",
+  "torch_dtype": "float16",
+  "transformers_version": "4.38.2",
+  "vision_config": {
+    "cls": "HybridVisionTower",
+    "model_type": "vision",
+    "params": {
+      "concat_type": "tuple",
+      "freeze_high": true,
+      "freeze_low": true,
+      "high_res_cfg": {
+        "ckpt_path": "",
+        "image_size": 1024,
+        "model_name": "sam_b_downsample",
+        "output_dim": 1024,
+        "pixel_mean": [
+          0.48145466,
+          0.4578275,
+          0.40821073
+        ],
+        "pixel_std": [
+          0.26862954,
+          0.26130258,
+          0.27577711
+        ],
+        "select_feature": "same",
+        "select_layer": -1
+      },
+      "low_res_cfg": {
+        "ckpt_path": "",
+        "image_size": 384,
+        "model_name": "siglip_large_patch16_384",
+        "output_dim": 1024,
+        "pixel_mean": [
+          0.5,
+          0.5,
+          0.5
+        ],
+        "pixel_std": [
+          0.5,
+          0.5,
+          0.5
+        ],
+        "select_feature": "same",
+        "select_layer": -1
+      }
+    }
+  }
+}
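The commit replaces a plain LlamaForCausalLM text config with a multi-modality config that nests three sub-configs: an MLP aligner, a Llama-style language model, and a hybrid vision tower with a high-resolution SAM branch and a low-resolution SigLIP branch. A minimal sketch for sanity-checking the new file with only the standard library; the local file path is an assumption, point it at your clone of the repo:

import json

# Load the new config.json from a local checkout (path is an assumption).
with open("config.json", "r") as f:
    cfg = json.load(f)

# Top-level fields introduced by this commit.
assert cfg["model_type"] == "multi_modality"
assert cfg["architectures"] == ["MultiModalityCausalLM"]

# Inspect the three sub-configs.
print("aligner :", cfg["aligner_config"]["params"]["projector_type"])
print("language:", cfg["language_config"]["model_type"],
      "| layers:", cfg["language_config"]["num_hidden_layers"],
      "| vocab:", cfg["language_config"]["vocab_size"])
for branch in ("high_res_cfg", "low_res_cfg"):
    sub = cfg["vision_config"]["params"][branch]
    print(branch, ":", sub["model_name"], "@", sub["image_size"], "px")

Note that because the top-level model_type is the custom value "multi_modality", which stock transformers does not register, loading this repo via transformers.AutoConfig.from_pretrained would only work if the repository also ships the matching custom configuration code and the call passes trust_remote_code=True; otherwise read the JSON directly, as above.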