{ "attention_dropout": 0.0, "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf", "calibration_type": "transformer", "feature_key": "hidden_states", "freeze_base_model": true, "hidden_act": "silu", "in_features": 4096, "inference_mode": true, "init_temperature": 1.0, "intermediate_size": 11008, "label_smoothing": 1.0, "label_smoothing_type": "uniform", "layer_idx": 33, "log_auxiliary_info": true, "loss_type": "selective_smoothing", "max_position_embeddings": 4096, "normalize_logits": false, "num_attention_heads": 32, "num_key_value_heads": 32, "smooth_loss_weight": 0.5, "smoothing_topk": 5, "task_type": "CAUSAL_LM" }