{ "attention_dropout": 0.0, "base_model_name_or_path": "meta-llama/Llama-2-13b-chat-hf", "calibration_type": "transformer", "feature_key": "hidden_states", "freeze_base_model": true, "hidden_act": "silu", "in_features": 5120, "inference_mode": true, "init_temperature": 1.0, "intermediate_size": 13824, "label_smoothing": 0.0, "label_smoothing_type": "topk", "layer_idx": 40, "log_auxiliary_info": false, "loss_type": "xent", "max_position_embeddings": 5120, "normalize_logits": false, "num_attention_heads": 40, "num_key_value_heads": 40, "smooth_loss_weight": 0.5, "smoothing_topk": 5, "task_type": "CAUSAL_LM" }