[baichuan]
model_name = Baichuan2_7B
head_num = 32
size_per_head = 128
inter_size = 11008
num_layer = 32
rotary_embedding = 128
layernorm_eps = 1e-06
vocab_size = 125696
start_id = 1
end_id = 2
tensor_para_size = 1
weight_data_type = fp16
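
For reference, a minimal sketch of how a config in this format could be loaded with Python's standard-library configparser. The file name `config.ini` and the derived `hidden_size` are assumptions for illustration, not part of the listing above.

```python
import configparser

# Assumed file name; the listing above would be saved as this file.
cfg = configparser.ConfigParser()
cfg.read("config.ini")

m = cfg["baichuan"]
head_num      = m.getint("head_num")         # 32 attention heads
size_per_head = m.getint("size_per_head")    # 128 dims per head
hidden_size   = head_num * size_per_head     # 4096 (derived, not stored in the file)
num_layer     = m.getint("num_layer")        # 32 transformer layers
vocab_size    = m.getint("vocab_size")       # 125696-token vocabulary
layernorm_eps = m.getfloat("layernorm_eps")  # 1e-06
weight_dtype  = m.get("weight_data_type")    # "fp16"

print(f"{m.get('model_name')}: hidden={hidden_size}, "
      f"layers={num_layer}, vocab={vocab_size}, dtype={weight_dtype}")
```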