imdatta0 committed on
Commit e1ccc84
1 Parent(s): e869cf6

Training in progress, step 4

adapter_config.json CHANGED
@@ -61,13 +61,13 @@
   "rank_pattern": {},
   "revision": "unsloth",
   "target_modules": [
-    "down_proj",
-    "up_proj",
     "k_proj",
-    "q_proj",
     "v_proj",
     "o_proj",
-    "gate_proj"
+    "down_proj",
+    "q_proj",
+    "gate_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
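For context, a minimal sketch (not taken from this repo's training code) of how the updated "target_modules" list above would be expressed as a PEFT LoraConfig. Rank, alpha, and other hyperparameters are left at library defaults here because they are outside this hunk.

# Illustrative only: mirrors the new "target_modules" and "task_type" values shown above.
from peft import LoraConfig

lora_config = LoraConfig(
    target_modules=[
        "k_proj",
        "v_proj",
        "o_proj",
        "down_proj",
        "q_proj",
        "gate_proj",
        "up_proj",
    ],
    task_type="CAUSAL_LM",  # matches "task_type" in adapter_config.json
)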
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c1889b3bf7aa86083eeae35bceb60d2f6cc3e9d3e40f5f73e5527980b06ebf02
+oid sha256:dd43aa6d1bde1a9d2fac1440512587cd4df5ee7db80074643463c642c2039c15
 size 80792096
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:398394965e1623dde060520db382cd06180e592c8efa0f9c971040fd0fb3a05f
+oid sha256:638823074294d507ba5584f417340a94ed667979de4aadbdce717b7c083caf08
 size 5112