{
  "attention_dropout": 0.0,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
  "calibration_type": "transformer",
  "feature_key": "hidden_states",
  "freeze_base_model": true,
  "hidden_act": "silu",
  "in_features": 4096,
  "inference_mode": true,
  "init_temperature": 1.0,
  "intermediate_size": 11008,
  "label_smoothing": 1.0,
  "label_smoothing_type": "uniform",
  "layer_idx": 33,
  "log_auxiliary_info": true,
  "loss_type": "selective_smoothing",
  "max_position_embeddings": 4096,
  "normalize_logits": false,
  "num_attention_heads": 32,
  "num_key_value_heads": 32,
  "smooth_loss_weight": 0.5,
  "smoothing_topk": 5,
  "task_type": "CAUSAL_LM"
}
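
The config describes a small calibration head trained on top of a frozen Llama-2-7b-chat base model. As a minimal sketch (not this repository's actual implementation), the snippet below shows one plausible way a script could consume these fields: the base model is frozen and exposes hidden states, a single transformer layer (matching "calibration_type": "transformer") reads the layer named by "layer_idx", and logits are divided by a learned temperature initialized from "init_temperature". The names CalibrationHead and selective_smoothing_loss are hypothetical; only the config keys come from the file above, and the reading of "selective_smoothing" as a top-k smoothed cross-entropy term is an assumption.

```python
import json

import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import AutoModelForCausalLM

cfg = json.load(open("config.json"))

# "freeze_base_model": true -- the Llama-2 weights stay fixed;
# output_hidden_states=True exposes the per-layer activations the head reads.
base = AutoModelForCausalLM.from_pretrained(
    cfg["base_model_name_or_path"],
    output_hidden_states=True,
    torch_dtype=torch.float16,
)
base.requires_grad_(False)


class CalibrationHead(nn.Module):
    """Hypothetical head: one transformer layer over a chosen hidden-state
    layer, followed by temperature scaling of the resulting logits."""

    def __init__(self, cfg):
        super().__init__()
        # Indexing convention for "layer_idx" is the repo's own; note that
        # hidden_states from the base model has num_layers + 1 entries.
        self.layer_idx = cfg["layer_idx"]
        self.block = nn.TransformerEncoderLayer(
            d_model=cfg["in_features"],                 # 4096
            nhead=cfg["num_attention_heads"],           # 32
            dim_feedforward=cfg["intermediate_size"],   # 11008
            dropout=cfg["attention_dropout"],           # 0.0
            activation=F.silu,                          # "hidden_act": "silu"
            batch_first=True,
        )
        # Learned log-temperature; "init_temperature": 1.0 means log T = 0.
        self.log_temp = nn.Parameter(
            torch.tensor(float(cfg["init_temperature"])).log()
        )

    def forward(self, hidden_states, lm_head):
        # "feature_key": "hidden_states" selects the tuple returned by the
        # base model; "layer_idx" picks the layer the head calibrates.
        h = self.block(hidden_states[self.layer_idx])
        return lm_head(h) / self.log_temp.exp()        # temperature scaling


def selective_smoothing_loss(logits, labels, cfg):
    """One plausible reading of "loss_type": "selective_smoothing": plain
    cross-entropy mixed with a smoothed term whose mass is spread uniformly
    ("label_smoothing_type": "uniform") over the top-k predicted tokens
    ("smoothing_topk": 5), mixed via "smooth_loss_weight"."""
    logp = F.log_softmax(logits, dim=-1)               # (batch, seq, vocab)
    ce = F.nll_loss(logp.transpose(1, 2), labels)
    topk = logp.topk(cfg["smoothing_topk"], dim=-1).indices
    # Uniform target over the top-k candidates, scaled by "label_smoothing".
    smooth = -(logp.gather(-1, topk).mean(-1) * cfg["label_smoothing"]).mean()
    w = cfg["smooth_loss_weight"]
    return (1 - w) * ce + w * smooth
```

A usage pass would run the frozen base model once, then feed its hidden states and lm_head into the head, e.g. `out = base(input_ids); calibrated = head(out.hidden_states, base.lm_head)`. Fields such as "inference_mode", "normalize_logits", and "log_auxiliary_info" are not exercised by this sketch.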