chchen committed
Commit: 5f7d25c
Parent: aae8d22

Training in progress, step 1500

Files changed (2)
  1. adapter_model.safetensors +1 -1
  2. trainer_log.jsonl +51 -0
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:10a19d704e4d53853df933e1353ec1b34230b2ff53b0cdbff6826ddb4012ddb3
+ oid sha256:b3eb551b64f819671bedb05cfaa7e4125137fc1941fb264cb350c0500641d496
  size 83945296
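
The adapter weights themselves live in Git LFS, so this diff only changes the pointer file: the sha256 oid and the byte size. A minimal sketch, assuming the actual adapter_model.safetensors has already been downloaded locally (the path is an illustrative assumption, not part of this commit), for checking that the local file matches the new pointer:

```python
# Sketch: verify a locally downloaded adapter_model.safetensors against the
# LFS pointer committed above (sha256 oid and size). Path is an assumption.
import hashlib
from pathlib import Path

EXPECTED_OID = "b3eb551b64f819671bedb05cfaa7e4125137fc1941fb264cb350c0500641d496"
EXPECTED_SIZE = 83945296

path = Path("adapter_model.safetensors")  # hypothetical local path
digest = hashlib.sha256(path.read_bytes()).hexdigest()

assert path.stat().st_size == EXPECTED_SIZE, "size does not match the LFS pointer"
assert digest == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("local file matches the committed LFS pointer")
```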
trainer_log.jsonl CHANGED
@@ -100,3 +100,54 @@
  {"current_steps": 990, "total_steps": 1854, "loss": 0.7727, "accuracy": 0.543749988079071, "learning_rate": 2.2338200545580577e-06, "epoch": 1.600323297635886, "percentage": 53.4, "elapsed_time": "5:00:24", "remaining_time": "4:22:10"}
  {"current_steps": 1000, "total_steps": 1854, "loss": 0.7341, "accuracy": 0.6312500238418579, "learning_rate": 2.191736455761947e-06, "epoch": 1.616488179430188, "percentage": 53.94, "elapsed_time": "5:03:14", "remaining_time": "4:18:57"}
  {"current_steps": 1000, "total_steps": 1854, "eval_loss": 0.8449718356132507, "epoch": 1.616488179430188, "percentage": 53.94, "elapsed_time": "5:09:21", "remaining_time": "4:24:11"}
+ {"current_steps": 1010, "total_steps": 1854, "loss": 0.8184, "accuracy": 0.643750011920929, "learning_rate": 2.1497413764574673e-06, "epoch": 1.6326530612244898, "percentage": 54.48, "elapsed_time": "5:12:41", "remaining_time": "4:21:17"}
+ {"current_steps": 1020, "total_steps": 1854, "loss": 0.762, "accuracy": 0.59375, "learning_rate": 2.1078468757516395e-06, "epoch": 1.6488179430187917, "percentage": 55.02, "elapsed_time": "5:15:34", "remaining_time": "4:18:02"}
+ {"current_steps": 1030, "total_steps": 1854, "loss": 0.8102, "accuracy": 0.574999988079071, "learning_rate": 2.0660649838698145e-06, "epoch": 1.6649828248130936, "percentage": 55.56, "elapsed_time": "5:18:32", "remaining_time": "4:14:50"}
+ {"current_steps": 1040, "total_steps": 1854, "loss": 0.8507, "accuracy": 0.5687500238418579, "learning_rate": 2.0244076987011284e-06, "epoch": 1.6811477066073954, "percentage": 56.09, "elapsed_time": "5:21:32", "remaining_time": "4:11:40"}
+ {"current_steps": 1050, "total_steps": 1854, "loss": 0.8306, "accuracy": 0.581250011920929, "learning_rate": 1.982886982353251e-06, "epoch": 1.6973125884016973, "percentage": 56.63, "elapsed_time": "5:24:31", "remaining_time": "4:08:29"}
+ {"current_steps": 1060, "total_steps": 1854, "loss": 0.8194, "accuracy": 0.6312500238418579, "learning_rate": 1.941514757717392e-06, "epoch": 1.7134774701959992, "percentage": 57.17, "elapsed_time": "5:27:34", "remaining_time": "4:05:22"}
+ {"current_steps": 1070, "total_steps": 1854, "loss": 0.8345, "accuracy": 0.5562499761581421, "learning_rate": 1.9003029050445953e-06, "epoch": 1.729642351990301, "percentage": 57.71, "elapsed_time": "5:30:43", "remaining_time": "4:02:19"}
+ {"current_steps": 1080, "total_steps": 1854, "loss": 0.7903, "accuracy": 0.550000011920929, "learning_rate": 1.8592632585342523e-06, "epoch": 1.745807233784603, "percentage": 58.25, "elapsed_time": "5:33:40", "remaining_time": "3:59:07"}
+ {"current_steps": 1090, "total_steps": 1854, "loss": 0.797, "accuracy": 0.574999988079071, "learning_rate": 1.8184076029358527e-06, "epoch": 1.7619721155789048, "percentage": 58.79, "elapsed_time": "5:36:32", "remaining_time": "3:55:53"}
+ {"current_steps": 1100, "total_steps": 1854, "loss": 0.7993, "accuracy": 0.574999988079071, "learning_rate": 1.7777476701649318e-06, "epoch": 1.7781369973732066, "percentage": 59.33, "elapsed_time": "5:39:35", "remaining_time": "3:52:46"}
+ {"current_steps": 1110, "total_steps": 1854, "loss": 0.7819, "accuracy": 0.53125, "learning_rate": 1.7372951359341925e-06, "epoch": 1.7943018791675085, "percentage": 59.87, "elapsed_time": "5:42:25", "remaining_time": "3:49:30"}
+ {"current_steps": 1120, "total_steps": 1854, "loss": 0.7723, "accuracy": 0.606249988079071, "learning_rate": 1.6970616164007547e-06, "epoch": 1.8104667609618104, "percentage": 60.41, "elapsed_time": "5:45:07", "remaining_time": "3:46:10"}
+ {"current_steps": 1130, "total_steps": 1854, "loss": 0.8279, "accuracy": 0.606249988079071, "learning_rate": 1.6570586648305276e-06, "epoch": 1.8266316427561122, "percentage": 60.95, "elapsed_time": "5:48:01", "remaining_time": "3:42:59"}
+ {"current_steps": 1140, "total_steps": 1854, "loss": 0.8118, "accuracy": 0.606249988079071, "learning_rate": 1.6172977682806151e-06, "epoch": 1.842796524550414, "percentage": 61.49, "elapsed_time": "5:50:52", "remaining_time": "3:39:45"}
+ {"current_steps": 1150, "total_steps": 1854, "loss": 0.8238, "accuracy": 0.59375, "learning_rate": 1.5777903443007586e-06, "epoch": 1.858961406344716, "percentage": 62.03, "elapsed_time": "5:53:47", "remaining_time": "3:36:35"}
+ {"current_steps": 1160, "total_steps": 1854, "loss": 0.8087, "accuracy": 0.625, "learning_rate": 1.5385477376547226e-06, "epoch": 1.8751262881390178, "percentage": 62.57, "elapsed_time": "5:56:46", "remaining_time": "3:33:27"}
+ {"current_steps": 1170, "total_steps": 1854, "loss": 0.8049, "accuracy": 0.5874999761581421, "learning_rate": 1.4995812170625845e-06, "epoch": 1.89129116993332, "percentage": 63.11, "elapsed_time": "5:59:46", "remaining_time": "3:30:19"}
+ {"current_steps": 1180, "total_steps": 1854, "loss": 0.8149, "accuracy": 0.6187499761581421, "learning_rate": 1.4609019719648666e-06, "epoch": 1.9074560517276218, "percentage": 63.65, "elapsed_time": "6:02:46", "remaining_time": "3:27:12"}
+ {"current_steps": 1190, "total_steps": 1854, "loss": 0.7314, "accuracy": 0.612500011920929, "learning_rate": 1.42252110930943e-06, "epoch": 1.9236209335219236, "percentage": 64.19, "elapsed_time": "6:05:31", "remaining_time": "3:23:57"}
+ {"current_steps": 1200, "total_steps": 1854, "loss": 0.8436, "accuracy": 0.512499988079071, "learning_rate": 1.3844496503620493e-06, "epoch": 1.9397858153162255, "percentage": 64.72, "elapsed_time": "6:08:36", "remaining_time": "3:20:53"}
+ {"current_steps": 1210, "total_steps": 1854, "loss": 0.9006, "accuracy": 0.612500011920929, "learning_rate": 1.3466985275416081e-06, "epoch": 1.9559506971105274, "percentage": 65.26, "elapsed_time": "6:11:40", "remaining_time": "3:17:48"}
+ {"current_steps": 1220, "total_steps": 1854, "loss": 0.7562, "accuracy": 0.625, "learning_rate": 1.309278581280791e-06, "epoch": 1.9721155789048292, "percentage": 65.8, "elapsed_time": "6:14:32", "remaining_time": "3:14:38"}
+ {"current_steps": 1230, "total_steps": 1854, "loss": 0.827, "accuracy": 0.5687500238418579, "learning_rate": 1.272200556913199e-06, "epoch": 1.9882804606991311, "percentage": 66.34, "elapsed_time": "6:17:32", "remaining_time": "3:11:31"}
+ {"current_steps": 1240, "total_steps": 1854, "loss": 0.7717, "accuracy": 0.612500011920929, "learning_rate": 1.2354751015877698e-06, "epoch": 2.004445342493433, "percentage": 66.88, "elapsed_time": "6:20:24", "remaining_time": "3:08:21"}
+ {"current_steps": 1250, "total_steps": 1854, "loss": 0.8062, "accuracy": 0.59375, "learning_rate": 1.1991127612113945e-06, "epoch": 2.020610224287735, "percentage": 67.42, "elapsed_time": "6:23:22", "remaining_time": "3:05:14"}
+ {"current_steps": 1260, "total_steps": 1854, "loss": 0.7998, "accuracy": 0.550000011920929, "learning_rate": 1.1631239774206035e-06, "epoch": 2.036775106082037, "percentage": 67.96, "elapsed_time": "6:26:13", "remaining_time": "3:02:04"}
+ {"current_steps": 1270, "total_steps": 1854, "loss": 0.7462, "accuracy": 0.65625, "learning_rate": 1.1275190845831978e-06, "epoch": 2.052939987876339, "percentage": 68.5, "elapsed_time": "6:29:19", "remaining_time": "2:59:01"}
+ {"current_steps": 1280, "total_steps": 1854, "loss": 0.7958, "accuracy": 0.6312500238418579, "learning_rate": 1.0923083068306778e-06, "epoch": 2.0691048696706407, "percentage": 69.04, "elapsed_time": "6:32:22", "remaining_time": "2:55:57"}
+ {"current_steps": 1290, "total_steps": 1854, "loss": 0.7477, "accuracy": 0.581250011920929, "learning_rate": 1.0575017551223348e-06, "epoch": 2.0852697514649425, "percentage": 69.58, "elapsed_time": "6:35:10", "remaining_time": "2:52:46"}
+ {"current_steps": 1300, "total_steps": 1854, "loss": 0.8002, "accuracy": 0.6187499761581421, "learning_rate": 1.023109424341833e-06, "epoch": 2.1014346332592444, "percentage": 70.12, "elapsed_time": "6:38:09", "remaining_time": "2:49:40"}
+ {"current_steps": 1310, "total_steps": 1854, "loss": 0.7877, "accuracy": 0.6000000238418579, "learning_rate": 9.891411904271273e-07, "epoch": 2.1175995150535463, "percentage": 70.66, "elapsed_time": "6:41:07", "remaining_time": "2:46:34"}
+ {"current_steps": 1320, "total_steps": 1854, "loss": 0.8047, "accuracy": 0.6000000238418579, "learning_rate": 9.556068075345363e-07, "epoch": 2.133764396847848, "percentage": 71.2, "elapsed_time": "6:44:02", "remaining_time": "2:43:27"}
+ {"current_steps": 1330, "total_steps": 1854, "loss": 0.8227, "accuracy": 0.581250011920929, "learning_rate": 9.225159052377838e-07, "epoch": 2.14992927864215, "percentage": 71.74, "elapsed_time": "6:47:03", "remaining_time": "2:40:22"}
+ {"current_steps": 1340, "total_steps": 1854, "loss": 0.7265, "accuracy": 0.581250011920929, "learning_rate": 8.898779857628184e-07, "epoch": 2.166094160436452, "percentage": 72.28, "elapsed_time": "6:49:58", "remaining_time": "2:37:15"}
+ {"current_steps": 1350, "total_steps": 1854, "loss": 0.8458, "accuracy": 0.6000000238418579, "learning_rate": 8.577024212591975e-07, "epoch": 2.1822590422307537, "percentage": 72.82, "elapsed_time": "6:52:55", "remaining_time": "2:34:09"}
+ {"current_steps": 1360, "total_steps": 1854, "loss": 0.8321, "accuracy": 0.5874999761581421, "learning_rate": 8.259984511088276e-07, "epoch": 2.1984239240250556, "percentage": 73.35, "elapsed_time": "6:55:52", "remaining_time": "2:31:03"}
+ {"current_steps": 1370, "total_steps": 1854, "loss": 0.7973, "accuracy": 0.6000000238418579, "learning_rate": 7.947751792728237e-07, "epoch": 2.2145888058193575, "percentage": 73.89, "elapsed_time": "6:58:44", "remaining_time": "2:27:55"}
+ {"current_steps": 1380, "total_steps": 1854, "loss": 0.7991, "accuracy": 0.6499999761581421, "learning_rate": 7.640415716772626e-07, "epoch": 2.2307536876136593, "percentage": 74.43, "elapsed_time": "7:01:51", "remaining_time": "2:24:54"}
+ {"current_steps": 1390, "total_steps": 1854, "loss": 0.7805, "accuracy": 0.6187499761581421, "learning_rate": 7.338064536385722e-07, "epoch": 2.246918569407961, "percentage": 74.97, "elapsed_time": "7:04:56", "remaining_time": "2:21:50"}
+ {"current_steps": 1400, "total_steps": 1854, "loss": 0.8702, "accuracy": 0.5625, "learning_rate": 7.040785073292883e-07, "epoch": 2.263083451202263, "percentage": 75.51, "elapsed_time": "7:07:52", "remaining_time": "2:18:45"}
+ {"current_steps": 1410, "total_steps": 1854, "loss": 0.747, "accuracy": 0.6499999761581421, "learning_rate": 6.748662692849297e-07, "epoch": 2.279248332996565, "percentage": 76.05, "elapsed_time": "7:10:44", "remaining_time": "2:15:38"}
+ {"current_steps": 1420, "total_steps": 1854, "loss": 0.7669, "accuracy": 0.6187499761581421, "learning_rate": 6.46178127952686e-07, "epoch": 2.295413214790867, "percentage": 76.59, "elapsed_time": "7:13:39", "remaining_time": "2:12:32"}
+ {"current_steps": 1430, "total_steps": 1854, "loss": 0.7933, "accuracy": 0.5625, "learning_rate": 6.180223212826289e-07, "epoch": 2.3115780965851687, "percentage": 77.13, "elapsed_time": "7:16:34", "remaining_time": "2:09:26"}
+ {"current_steps": 1440, "total_steps": 1854, "loss": 0.7844, "accuracy": 0.6312500238418579, "learning_rate": 5.904069343621443e-07, "epoch": 2.3277429783794705, "percentage": 77.67, "elapsed_time": "7:19:39", "remaining_time": "2:06:23"}
+ {"current_steps": 1450, "total_steps": 1854, "loss": 0.7809, "accuracy": 0.5874999761581421, "learning_rate": 5.633398970942544e-07, "epoch": 2.3439078601737724, "percentage": 78.21, "elapsed_time": "7:22:34", "remaining_time": "2:03:18"}
+ {"current_steps": 1460, "total_steps": 1854, "loss": 0.7373, "accuracy": 0.574999988079071, "learning_rate": 5.368289819205069e-07, "epoch": 2.3600727419680743, "percentage": 78.75, "elapsed_time": "7:25:22", "remaining_time": "2:00:11"}
+ {"current_steps": 1470, "total_steps": 1854, "loss": 0.8661, "accuracy": 0.5687500238418579, "learning_rate": 5.108818015890785e-07, "epoch": 2.376237623762376, "percentage": 79.29, "elapsed_time": "7:28:23", "remaining_time": "1:57:07"}
+ {"current_steps": 1480, "total_steps": 1854, "loss": 0.7594, "accuracy": 0.643750011920929, "learning_rate": 4.855058069687291e-07, "epoch": 2.392402505556678, "percentage": 79.83, "elapsed_time": "7:31:13", "remaining_time": "1:54:01"}
+ {"current_steps": 1490, "total_steps": 1854, "loss": 0.8511, "accuracy": 0.5687500238418579, "learning_rate": 4.607082849092523e-07, "epoch": 2.40856738735098, "percentage": 80.37, "elapsed_time": "7:34:24", "remaining_time": "1:51:00"}
+ {"current_steps": 1500, "total_steps": 1854, "loss": 0.7691, "accuracy": 0.5625, "learning_rate": 4.3649635614901405e-07, "epoch": 2.4247322691452817, "percentage": 80.91, "elapsed_time": "7:37:24", "remaining_time": "1:47:56"}
+ {"current_steps": 1500, "total_steps": 1854, "eval_loss": 0.8434417247772217, "epoch": 2.4247322691452817, "percentage": 80.91, "elapsed_time": "7:43:36", "remaining_time": "1:49:24"}
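
Each appended record is one self-contained JSON object: training-step entries carry loss, accuracy, and learning_rate, while evaluation entries carry eval_loss instead. A minimal sketch, assuming trainer_log.jsonl has been downloaded locally (the path is an assumption for illustration), for loading the log and separating the two kinds of records:

```python
# Sketch: parse trainer_log.jsonl (one JSON object per line) and split
# training-step records from evaluation records. Path is an assumption.
import json
from pathlib import Path

lines = Path("trainer_log.jsonl").read_text().splitlines()
records = [json.loads(line) for line in lines if line.strip()]

train_steps = [r for r in records if "loss" in r]       # per-step training metrics
eval_steps = [r for r in records if "eval_loss" in r]   # periodic evaluation results

for r in eval_steps:
    print(f"step {r['current_steps']}: eval_loss={r['eval_loss']:.4f}")
```

On the records shown in this diff, that loop would report the two evaluation points, eval_loss 0.8450 at step 1000 and 0.8434 at step 1500.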