LeeSB committed on
Commit 7f201c0
1 Parent(s): 3c4e38b

Training in progress, step 100

adapter_config.json CHANGED
@@ -1,11 +1,12 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "alignment-handbook/zephyr-7b-sft-full",
+  "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
+  "layer_replication": null,
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
@@ -19,13 +20,15 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "gate_proj",
+    "q_proj",
     "down_proj",
-    "v_proj",
+    "up_proj",
     "k_proj",
+    "v_proj",
     "o_proj",
-    "q_proj"
+    "gate_proj"
   ],
-  "task_type": "CAUSAL_LM"
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
 }
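
The diff above retargets the LoRA adapter from alignment-handbook/zephyr-7b-sft-full to its mistralai/Mistral-7B-v0.1 base model, reorders target_modules, and gains three fields (layer_replication, use_dora, use_rslora) that newer peft releases serialize by default. A minimal sketch of an equivalent peft LoraConfig follows; the rank and alpha values sit outside the hunks shown, so the numbers below are placeholders, not this repo's actual settings.

from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # placeholder: the actual rank is not shown in this diff
    lora_alpha=32,             # placeholder: the actual alpha is not shown in this diff
    bias="none",
    init_lora_weights=True,
    task_type="CAUSAL_LM",
    target_modules=[
        "q_proj", "down_proj", "up_proj", "k_proj",
        "v_proj", "o_proj", "gate_proj",
    ],
    use_rslora=False,          # serialized only by newer peft versions,
    use_dora=False,            # which is why they appear only on the "+" side
)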
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ff5fba4c0210a09687a0b6e2a465355eafaaa5d60db0343526cd8882b715b7b7
-size 671150064
+oid sha256:572a53bab7df021283a34a157b81b013f50d439e572374f7803d7e4f9771d10f
+size 1342238560
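
adapter_model.safetensors is stored through Git LFS, so the repository tracks only this pointer (a sha256 oid plus a byte size); the weight file itself grows from about 671 MB to about 1.34 GB in this commit. As a sketch (not part of the repo), a downloaded copy can be checked against the pointer with nothing more than hashlib:

import hashlib

EXPECTED_OID = "572a53bab7df021283a34a157b81b013f50d439e572374f7803d7e4f9771d10f"

sha = hashlib.sha256()
with open("adapter_model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "file does not match its LFS pointer"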
runs/Aug02_17-03-41_node06/events.out.tfevents.1722586250.node06.2914461.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d907e1c7ee69c4ff49f16632e444186dcbcc584f3a1c602b2efd4451d2ef7480
+size 6162
runs/Aug02_20-34-41_node06/events.out.tfevents.1722598893.node06.2921152.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dab1df95fbbeca0a0a53ed04a2360139211bc72c5cb58a87468a45c553ae8068
+size 5680
runs/Aug02_20-55-33_node06/events.out.tfevents.1722599880.node06.2921935.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b5933351c10f9239d969f860a38dd3ff6cc6584b379d1bd987cac07d6d7fa47
+size 5680
runs/Aug02_21-06-34_node06/events.out.tfevents.1722600547.node06.2922459.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53f286ca6be41e4552fa477e8c1d5f55a90c95ef46ed5cb7a2cfc1220582aeaf
+size 5680
runs/Aug02_21-17-49_node06/events.out.tfevents.1722601226.node06.2922889.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7fddad71da0ed2dfba5c7f18b322efd93224ba48720d6a4d933484762bfd32c
+size 5680
runs/Aug02_21-28-49_node06/events.out.tfevents.1722601838.node06.2923376.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca42078ad9d0eff32c7aba405dc0a6d5c7295b11991d0bd32ee1413c40d889cf
+size 11570
runs/Aug03_18-25-20_node01/events.out.tfevents.1722678181.node01.2349725.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5df2f76df0257ef20855651831a8dfabdae3e04c8002f5df078db0bfa342a57c
+size 14325
runs/Jul31_21-34-45_node15/events.out.tfevents.1722429821.node15.2818104.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc74be5d7ccb78726556804db7dbca7fbb458d5cd4d5861d7a3bd9b4a4a73f96
+size 13598
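
The eight files above are TensorBoard event logs from successive runs on node01, node06, and node15, each tracked as an LFS pointer. A minimal sketch for pulling the logged scalars out of one run directory, assuming tensorboard is installed:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug03_18-25-20_node01")
acc.Reload()  # parse the events file from disk

for tag in acc.Tags()["scalars"]:  # tag names depend on the Trainer, e.g. "train/loss"
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)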
tokenizer.json CHANGED
@@ -134,6 +134,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": true,
   "byte_fallback": true,
+  "ignore_merges": false,
   "vocab": {
     "<unk>": 0,
     "<s>": 1,
tokenizer_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4d93aab22cff04f95b419457da2abad617b73e321789fd03b6cf30ca659e8eae
-size 5112
+oid sha256:9baab8a91c388af3739f31943c313c413637b267399ed8d5666364229c9e0c75
+size 5944
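
training_args.bin is the TrainingArguments object that the Hugging Face Trainer pickles next to each checkpoint; its oid and size change whenever the arguments or library versions change. A sketch for inspecting it, assuming a compatible transformers version is installed (this is an arbitrary pickle, not a weights file):

import torch

# weights_only=False is needed on newer torch versions, since this file
# is a pickled TrainingArguments object rather than a tensor checkpoint.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size)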