deepakkoli93 committed
Commit 48ebbbf
1 Parent(s): d522cf7

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5d09c24ab5fbe534ebb1d0ffe3b419166e885caabb506e2be2ef81b319ad38e1
+ oid sha256:db30dbd64d414741116499814385b4016321813f7ca4d6afc083bb45f5b97016
  size 12595704
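
adapter_model.safetensors is tracked with Git LFS, so the diff only swaps the pointer's object hash for the newly trained adapter weights; the file size is unchanged. A minimal sketch of loading those weights with peft (the base model id below is a placeholder assumption, since the diff does not record which model the adapter targets):

# Minimal sketch; the base model id is an illustrative assumption.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")  # assumed base model
model = PeftModel.from_pretrained(base, "./")  # local checkout with adapter_config.json + adapter_model.safetensors
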
runs/Mar18_04-37-25_8de3cc7059d8/events.out.tfevents.1710736720.8de3cc7059d8.1171.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4387d548dc9d4941e3c693028074b72e669454e120a20f350a60d79021d66049
+ size 168600
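
The new events.out.tfevents file is a TensorBoard log for this training run, also stored through Git LFS. A minimal sketch of inspecting it once the LFS object has been pulled (the "train/loss" tag is an assumption; actual tags depend on the Trainer configuration):

# Minimal sketch, assuming tensorboard is installed and the LFS file has been fetched.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Mar18_04-37-25_8de3cc7059d8")
acc.Reload()                              # parse the event file(s) in the run directory
print(acc.Tags()["scalars"])              # list the scalar tags that were logged
for event in acc.Scalars("train/loss"):   # assumed tag name
    print(event.step, event.value)
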
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ad8b5d9e6a825abea80df077d912a796b6497aa48039169859ef00db57e857aa
+ oid sha256:bb0a15ee64669f385911ac5dc55d50f45cc22a4981a4fc0599caa8c226b72d5a
  size 4920