{
    "architectures": [
        "Phi3ForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "auto_map": {
        "AutoConfig": "configuration_phi3.Phi3Config",
        "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
        "AutoTokenizer": "Xenova/gpt-4o"
    },
    "bos_token_id": 199999,
    "embd_pdrop": 0.0,
    "eos_token_id": 199999,
    "full_attn_mod": 1,
    "hidden_act": "silu",
    "hidden_size": 3072,
    "initializer_range": 0.02,
    "intermediate_size": 8192,
    "interpolate_factor": 1,
    "lm_head_bias": false,
    "max_position_embeddings": 131072,
    "mlp_bias": false,
    "model_type": "phi3",
    "num_attention_heads": 24,
    "num_hidden_layers": 32,
    "num_key_value_heads": 8,
    "original_max_position_embeddings": 4096,
    "pad_token_id": 199999,
    "partial_rotary_factor": 0.75,
    "quantization": {
        "group_size": 64,
        "bits": 8
    },
    "quantization_config": {
        "group_size": 64,
        "bits": 8
    },
    "resid_pdrop": 0.0,
    "rms_norm_eps": 1e-05,
    "rope_scaling": {
        "long_factor": [
            1,
            1.118320672,
            1.250641126,
            1.398617824,
            1.564103225,
            1.74916897,
            1.956131817,
            2.187582649,
            2.446418898,
            2.735880826,
            3.059592084,
            3.421605075,
            3.826451687,
            4.279200023,
            4.785517845,
            5.351743533,
            5.984965424,
            6.693110555,
            7.485043894,
            8.370679318,
            9.36110372,
            10.4687158,
            11.70738129,
            13.09260651,
            14.64173252,
            16.37415215,
            18.31155283,
            20.47818807,
            22.90118105,
            25.61086418,
            28.64115884,
            32.03,
            32.1,
            32.13,
            32.23,
            32.6,
            32.61,
            32.64,
            32.66,
            32.7,
            32.71,
            32.93,
            32.97,
            33.28,
            33.49,
            33.5,
            44.16,
            47.77
        ],
        "short_factor": [
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0,
            1.0
        ],
        "type": "longrope"
    },
    "rope_theta": 10000.0,
    "sliding_window": 262144,
    "tie_word_embeddings": true,
    "torch_dtype": "bfloat16",
    "transformers_version": "4.45.0",
    "use_cache": true,
    "vocab_size": 200064
}
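
Several architectural facts follow directly from these fields. A minimal stdlib-only Python sketch (the local path `config.json` is an assumption about where this file is saved) that parses the config and derives the per-head dimension, the rotary dimension implied by `partial_rotary_factor`, and the grouped-query-attention ratio:

```python
import json

# Assumes this file was saved locally as "config.json" (hypothetical path).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension: hidden_size split evenly across attention heads.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]           # 3072 / 24 = 128

# Only a fraction of each head's dimensions receive rotary embeddings.
rotary_dim = int(head_dim * cfg["partial_rotary_factor"])             # 128 * 0.75 = 96

# Grouped-query attention: how many query heads share one KV head.
gqa_ratio = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 24 / 8 = 3

# LongRoPE supplies one scale factor per rotary frequency pair,
# which is why long_factor and short_factor each have 48 entries.
n_freq_pairs = rotary_dim // 2                                        # 96 / 2 = 48
assert len(cfg["rope_scaling"]["long_factor"]) == n_freq_pairs
assert len(cfg["rope_scaling"]["short_factor"]) == n_freq_pairs

print(f"head_dim={head_dim}, rotary_dim={rotary_dim}, "
      f"gqa_ratio={gqa_ratio}:1, freq_pairs={n_freq_pairs}")
```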
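The `rope_scaling` block selects between the two factor lists at runtime: in the Phi-3 reference implementation, `short_factor` applies while the sequence fits within `original_max_position_embeddings` (4096) and `long_factor` applies beyond it, with each factor dividing into the corresponding base RoPE inverse frequency. A sketch of that computation, assuming the LongRoPE convention used by the Hugging Face `transformers` Phi-3 code (check `modeling_phi3.py` in this repo for the authoritative version):

```python
import math

def longrope_inv_freq(cfg: dict, seq_len: int) -> tuple[list[float], float]:
    """Scaled RoPE inverse frequencies plus the attention rescale factor.

    Follows the LongRoPE convention from the Phi-3 reference code
    (an assumption; the bundled modeling_phi3.py is authoritative).
    """
    head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
    rotary_dim = int(head_dim * cfg["partial_rotary_factor"])  # 96
    theta = cfg["rope_theta"]                                  # 10000.0
    orig_max = cfg["original_max_position_embeddings"]         # 4096

    # Long factors stretch positions once the sequence exceeds the
    # original 4k training window; short factors cover the rest.
    factors = (cfg["rope_scaling"]["long_factor"] if seq_len > orig_max
               else cfg["rope_scaling"]["short_factor"])

    # Standard RoPE frequencies, each divided by its per-pair factor.
    inv_freq = [1.0 / (factors[j] * theta ** (2 * j / rotary_dim))
                for j in range(rotary_dim // 2)]

    # Attention outputs are rescaled to compensate for the longer context:
    # sqrt(1 + ln(scale) / ln(orig_max)), scale = 131072 / 4096 = 32.
    scale = cfg["max_position_embeddings"] / orig_max
    mscale = math.sqrt(1 + math.log(scale) / math.log(orig_max)) if scale > 1 else 1.0
    return inv_freq, mscale
```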
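The `quantization` block follows the MLX convention (8-bit weights in groups of 64; `quantization_config` duplicates it for compatibility), so the checkpoint this config belongs to should load with `mlx-lm`. A sketch, where the repository id is a placeholder to be replaced with the actual model repo:

```python
from mlx_lm import load, generate

# Placeholder repo id; substitute the repository this config.json came from.
model, tokenizer = load("mlx-community/<model-repo>")

# The tokenizer wrapper delegates to the underlying GPT-4o-style tokenizer,
# so the usual chat template call applies.
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Explain grouped-query attention briefly."}],
    add_generation_prompt=True,
)
print(generate(model, tokenizer, prompt=prompt, max_tokens=128))
```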