{
  "block_size": 1024,
  "model_max_length": 2048,
  "use_flash_attention_2": false,
  "disable_gradient_checkpointing": false,
  "logging_steps": -1,
  "evaluation_strategy": "epoch",
  "save_total_limit": 1,
  "save_strategy": "epoch",
  "auto_find_batch_size": false,
  "mixed_precision": "fp16",
  "lr": 0.00003,
  "epochs": 3,
  "batch_size": 2,
  "warmup_ratio": 0.1,
  "gradient_accumulation": 1,
  "optimizer": "adamw_torch",
  "scheduler": "linear",
  "weight_decay": 0,
  "max_grad_norm": 1,
  "seed": 42,
  "quantization": "int4",
  "target_modules": "",
  "merge_adapter": false,
  "peft": true,
  "lora_r": 16,
  "lora_alpha": 32,
  "lora_dropout": 0.05
}
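
The config above follows the parameter names used by Hugging Face AutoTrain for LLM fine-tuning. As a rough illustration only, here is a minimal sketch of how these fields could be mapped onto the standard `peft` / `transformers` objects; the file path, `output_dir`, `task_type`, and the fallback for the `-1` logging value are assumptions, not part of the config itself.

```python
import json

from peft import LoraConfig
from transformers import BitsAndBytesConfig, TrainingArguments

# Load the parameters shown above (file name is an assumption).
with open("training_params.json") as f:
    params = json.load(f)

# "quantization": "int4" suggests 4-bit loading via bitsandbytes.
bnb_config = BitsAndBytesConfig(load_in_4bit=(params["quantization"] == "int4"))

# "peft": true plus lora_r / lora_alpha / lora_dropout describe a LoRA adapter.
lora_config = LoraConfig(
    r=params["lora_r"],
    lora_alpha=params["lora_alpha"],
    lora_dropout=params["lora_dropout"],
    target_modules=params["target_modules"] or None,  # empty string -> library defaults
    task_type="CAUSAL_LM",  # assumption: causal language modeling
)

# Most remaining keys line up with standard Trainer arguments.
training_args = TrainingArguments(
    output_dir="out",  # hypothetical; not specified in the config
    learning_rate=params["lr"],
    num_train_epochs=params["epochs"],
    per_device_train_batch_size=params["batch_size"],
    gradient_accumulation_steps=params["gradient_accumulation"],
    warmup_ratio=params["warmup_ratio"],
    weight_decay=params["weight_decay"],
    max_grad_norm=params["max_grad_norm"],
    seed=params["seed"],
    fp16=(params["mixed_precision"] == "fp16"),
    optim=params["optimizer"],
    lr_scheduler_type=params["scheduler"],
    evaluation_strategy=params["evaluation_strategy"],
    save_strategy=params["save_strategy"],
    save_total_limit=params["save_total_limit"],
    # -1 is not a valid logging interval for Trainer; fall back to a default.
    logging_steps=params["logging_steps"] if params["logging_steps"] > 0 else 500,
    gradient_checkpointing=not params["disable_gradient_checkpointing"],
)
```

Note that `block_size`, `model_max_length`, `use_flash_attention_2`, `auto_find_batch_size`, and `merge_adapter` have no direct `TrainingArguments` equivalents here; they are handled by the training pipeline (tokenization, model loading, and adapter merging) rather than by the Trainer itself.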