Ikeofai committed on
Commit 28023bc
1 Parent(s): 5215e6a

Update adapter_config.json

Files changed (1):
  1. adapter_config.json +6 -29
adapter_config.json CHANGED
@@ -6,10 +6,14 @@
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
+  "loftq_config": {},
   "lora_alpha": 32,
   "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 8,
@@ -19,33 +23,6 @@
     "c_attn"
   ],
   "task_type": "CAUSAL_LM",
-  "activation_function": "gelu_new",
-  "architectures": [
-    "GPT2LMHeadModel"
-  ],
-  "attn_pdrop": 0.1,
-  "bos_token_id": 50256,
-  "embd_pdrop": 0.1,
-  "eos_token_id": 50256,
-  "initializer_range": 0.02,
-  "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
-  "n_layer": 12,
-  "n_positions": 1024,
-  "resid_pdrop": 0.1,
-  "summary_activation": null,
-  "summary_first_dropout": 0.1,
-  "summary_proj_to_labels": true,
-  "summary_type": "cls_index",
-  "summary_use_proj": true,
-  "task_specific_params": {
-    "text-generation": {
-      "do_sample": true,
-      "max_length": 50
-    }
-  },
-  "vocab_size": 50257
+  "use_dora": false,
+  "use_rslora": false
 }
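
For context, adapter_config.json is the file PEFT reads when this LoRA adapter is loaded. Below is a minimal sketch of how the updated config is typically consumed; the repository id is a placeholder (not taken from this commit), and it assumes the file's base_model_name_or_path field, which sits outside this hunk, names the GPT-2 base model.

```python
# Minimal sketch: load the adapter described by adapter_config.json with PEFT.
# "Ikeofai/adapter-repo" is a hypothetical repository id used for illustration.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftConfig, PeftModel

adapter_id = "Ikeofai/adapter-repo"  # placeholder, replace with the real repo id

# adapter_config.json supplies the LoRA settings shown in this diff
# (r=8, lora_alpha=32, target_modules=["c_attn"], task_type="CAUSAL_LM", ...)
# plus the name of the base model the adapter was trained on.
peft_config = PeftConfig.from_pretrained(adapter_id)

# Load the base model and tokenizer referenced by the adapter config.
base_model = AutoModelForCausalLM.from_pretrained(peft_config.base_model_name_or_path)
tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)

# Attach the LoRA adapter weights on top of the base model.
model = PeftModel.from_pretrained(base_model, adapter_id)
```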