ShahlaDnshi96 committed
Commit 28e0b23
1 Parent(s): 3239fb8

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -23,11 +23,11 @@
   "target_modules": [
     "gate_proj",
     "down_proj",
+    "v_proj",
+    "o_proj",
     "q_proj",
     "k_proj",
-    "v_proj",
-    "up_proj",
-    "o_proj"
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b725e2f858289fd80503c16f2a32acc87c35712e03debe13829fd15dfc4dc4a
+oid sha256:26ebe5d4667239f93fa46434de19e59f767815c031592fbd02bb6472d019a647
 size 1090646616
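Only the Git LFS pointer changes here: new adapter weights, same byte size. A short sketch for checking a locally downloaded adapter_model.safetensors against the new pointer oid; the local path is an assumption about where the checkout lives.

```python
# Sketch: verify a downloaded LFS object against the sha256 oid in its pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its hex sha256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "26ebe5d4667239f93fa46434de19e59f767815c031592fbd02bb6472d019a647"
actual = sha256_of("adapter_model.safetensors")  # assumed local path
print("match" if actual == expected else f"mismatch: {actual}")
```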
runs/Apr07_06-26-43_f076ffe2ed1d/events.out.tfevents.1712471212.f076ffe2ed1d.348.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc5c325d4c3dc3dcd7397694312a707705ab637a8affada73b0abd02d77100f8
+size 13067
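The added file is a Git LFS pointer to the TensorBoard event log for this run. A sketch for listing the scalars it contains with TensorBoard's EventAccumulator, assuming the run directory has been pulled locally; which tags appear depends on what the Trainer actually logged.

```python
# Sketch: read the scalar tags recorded in the newly added tfevents file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "runs/Apr07_06-26-43_f076ffe2ed1d"   # path from this repo's layout
acc = EventAccumulator(run_dir)
acc.Reload()                                   # parse every event file in the run dir

tags = acc.Tags()["scalars"]
print(tags)                                    # e.g. train/loss, if it was logged
if tags:
    for event in acc.Scalars(tags[0]):
        print(event.step, event.value)
```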
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 3072,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
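The tokenizer now truncates to 3072 tokens from the right with the longest-first strategy and no stride, where it previously had truncation disabled. A sketch of setting the same options through the tokenizers library; the JSON serializes them as "Right"/"LongestFirst", while the Python API takes the lowercase forms shown below.

```python
# Sketch: reproduce the truncation settings from the updated tokenizer.json.
# The direction argument requires a reasonably recent tokenizers release.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(
    max_length=3072,
    stride=0,
    strategy="longest_first",
    direction="right",
)
tok.save("tokenizer.json")
```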
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db107a659e36681c588786c4c1f79eeff989b22f58b53f8873df602a1cf6b26c
+oid sha256:0b4424221ec8ff011480393e346863fb788941b267add51945647271214df8d9
 size 4728
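training_args.bin is the pickled TrainingArguments object that the Trainer writes alongside its checkpoints, so only the LFS pointer changes here. A sketch for inspecting it locally; it unpickles an arbitrary Python object, so only load files from a source you trust.

```python
# Sketch: inspect the hyperparameters stored in training_args.bin.
# weights_only=False is needed on newer torch versions because this is a
# pickled transformers object, not a plain tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)   # expected: TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```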