leafspark committed on
Commit
a56282a
1 Parent(s): 412ba4a

Add model config

Files changed (1)
  1. config.json +45 -0
config.json ADDED
@@ -0,0 +1,45 @@
+ {
+   "add_qkv_bias": true,
+   "asr_adapter": "llamamlp",
+   "attn_dropout": 0,
+   "bias": false,
+   "block_size": 2048,
+   "force_align": false,
+   "gelu_approximate": "none",
+   "head_size": 64,
+   "hf_config": {
+     "name": "Qwen2-0.5B",
+     "org": "Qwen"
+   },
+   "intermediate_size": 4864,
+   "lm_head_bias": false,
+   "mlp_class_name": "LLaMAMLP",
+   "n_embd": 896,
+   "n_expert": 0,
+   "n_expert_per_token": 0,
+   "n_head": 14,
+   "n_layer": 24,
+   "n_query_groups": 2,
+   "name": "Qwen2-0.5B",
+   "norm_class_name": "RMSNorm",
+   "norm_eps": 0.000001,
+   "padded_vocab_size": 181120,
+   "padding_multiple": 512,
+   "parallel_residual": false,
+   "pos_type": "rope",
+   "post_adapter": false,
+   "post_adapter_layers": 6,
+   "prompt_vocab_size": null,
+   "rope_base": 1000000,
+   "rope_condense_ratio": 1,
+   "rotary_percentage": 1,
+   "scale_embeddings": false,
+   "shared_attention_norm": false,
+   "tie_word_embeddings": true,
+   "use_pretrain_phoneme_emb": false,
+   "vocab_size": 50254,
+   "text_vocab_size": 152000,
+   "cat_audio_vocab_size": 29120,
+   "audio_vocab_size": 4160,
+   "whisper_adapter_dim": 768
+ }
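
As a quick sanity check of the values above, here is a minimal Python sketch that loads the file and verifies a few relationships implied by common GPT-style config conventions. The local path and the field interpretations are assumptions on my part; this commit only adds the JSON and does not document how it is consumed.

import json

# Load the config added in this commit (local path is assumed).
with open("config.json") as f:
    cfg = json.load(f)

# Embedding width should equal query heads times per-head size: 14 * 64 == 896.
assert cfg["n_embd"] == cfg["n_head"] * cfg["head_size"]

# The padded vocab appears to cover the text vocab plus the concatenated
# audio vocab: 152000 + 29120 == 181120.
assert cfg["padded_vocab_size"] == cfg["text_vocab_size"] + cfg["cat_audio_vocab_size"]

# Grouped-query attention: 14 query heads share 2 KV groups, i.e. 7 per group.
print(cfg["n_head"] // cfg["n_query_groups"], "query heads per KV group")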