|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 363,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 1.0526315789473685e-06, "loss": 1.8901, "step": 1 },
    { "epoch": 0.02, "learning_rate": 2.105263157894737e-06, "loss": 1.9537, "step": 2 },
    { "epoch": 0.02, "learning_rate": 3.157894736842105e-06, "loss": 1.9589, "step": 3 },
    { "epoch": 0.03, "learning_rate": 4.210526315789474e-06, "loss": 1.9071, "step": 4 },
    { "epoch": 0.04, "learning_rate": 5.263157894736842e-06, "loss": 1.8846, "step": 5 },
    { "epoch": 0.05, "learning_rate": 6.31578947368421e-06, "loss": 1.8558, "step": 6 },
    { "epoch": 0.06, "learning_rate": 7.368421052631579e-06, "loss": 1.8128, "step": 7 },
    { "epoch": 0.07, "learning_rate": 8.421052631578948e-06, "loss": 1.7392, "step": 8 },
    { "epoch": 0.07, "learning_rate": 9.473684210526315e-06, "loss": 1.7129, "step": 9 },
    { "epoch": 0.08, "learning_rate": 1.0526315789473684e-05, "loss": 1.6144, "step": 10 },
    { "epoch": 0.09, "learning_rate": 1.1578947368421053e-05, "loss": 1.5617, "step": 11 },
    { "epoch": 0.1, "learning_rate": 1.263157894736842e-05, "loss": 1.6225, "step": 12 },
    { "epoch": 0.11, "learning_rate": 1.3684210526315791e-05, "loss": 1.4294, "step": 13 },
    { "epoch": 0.12, "learning_rate": 1.4736842105263159e-05, "loss": 1.4533, "step": 14 },
    { "epoch": 0.12, "learning_rate": 1.578947368421053e-05, "loss": 1.4398, "step": 15 },
    { "epoch": 0.13, "learning_rate": 1.6842105263157896e-05, "loss": 1.3575, "step": 16 },
    { "epoch": 0.14, "learning_rate": 1.7894736842105264e-05, "loss": 1.4498, "step": 17 },
    { "epoch": 0.15, "learning_rate": 1.894736842105263e-05, "loss": 1.3377, "step": 18 },
    { "epoch": 0.16, "learning_rate": 2e-05, "loss": 1.3156, "step": 19 },
    { "epoch": 0.17, "learning_rate": 1.999958298675784e-05, "loss": 1.2075, "step": 20 },
    { "epoch": 0.17, "learning_rate": 1.999833198181137e-05, "loss": 1.1799, "step": 21 },
    { "epoch": 0.18, "learning_rate": 1.9996247089497703e-05, "loss": 1.1307, "step": 22 },
    { "epoch": 0.19, "learning_rate": 1.9993328483702393e-05, "loss": 1.3141, "step": 23 },
    { "epoch": 0.2, "learning_rate": 1.9989576407844894e-05, "loss": 1.2313, "step": 24 },
    { "epoch": 0.21, "learning_rate": 1.998499117485826e-05, "loss": 1.1836, "step": 25 },
    { "epoch": 0.21, "learning_rate": 1.997957316716307e-05, "loss": 1.1769, "step": 26 },
    { "epoch": 0.22, "learning_rate": 1.9973322836635517e-05, "loss": 1.1203, "step": 27 },
    { "epoch": 0.23, "learning_rate": 1.9966240704569722e-05, "loss": 1.1273, "step": 28 },
    { "epoch": 0.24, "learning_rate": 1.9958327361634248e-05, "loss": 1.0204, "step": 29 },
    { "epoch": 0.25, "learning_rate": 1.9949583467822863e-05, "loss": 1.1182, "step": 30 },
    { "epoch": 0.26, "learning_rate": 1.9940009752399462e-05, "loss": 1.105, "step": 31 },
    { "epoch": 0.26, "learning_rate": 1.9929607013837268e-05, "loss": 1.1114, "step": 32 },
    { "epoch": 0.27, "learning_rate": 1.991837611975223e-05, "loss": 1.0798, "step": 33 },
    { "epoch": 0.28, "learning_rate": 1.990631800683066e-05, "loss": 1.075, "step": 34 },
    { "epoch": 0.29, "learning_rate": 1.9893433680751105e-05, "loss": 1.0588, "step": 35 },
    { "epoch": 0.3, "learning_rate": 1.9879724216100488e-05, "loss": 1.0657, "step": 36 },
    { "epoch": 0.31, "learning_rate": 1.9865190756284467e-05, "loss": 1.0628, "step": 37 },
    { "epoch": 0.31, "learning_rate": 1.9849834513432084e-05, "loss": 1.0119, "step": 38 },
    { "epoch": 0.32, "learning_rate": 1.983365676829466e-05, "loss": 1.0563, "step": 39 },
    { "epoch": 0.33, "learning_rate": 1.981665887013899e-05, "loss": 0.9891, "step": 40 },
    { "epoch": 0.34, "learning_rate": 1.9798842236634797e-05, "loss": 1.1157, "step": 41 },
    { "epoch": 0.35, "learning_rate": 1.9780208353736493e-05, "loss": 1.1446, "step": 42 },
    { "epoch": 0.36, "learning_rate": 1.9760758775559275e-05, "loss": 1.0876, "step": 43 },
    { "epoch": 0.36, "learning_rate": 1.9740495124249462e-05, "loss": 1.0892, "step": 44 },
    { "epoch": 0.37, "learning_rate": 1.971941908984925e-05, "loss": 1.0958, "step": 45 },
    { "epoch": 0.38, "learning_rate": 1.9697532430155718e-05, "loss": 1.1405, "step": 46 },
    { "epoch": 0.39, "learning_rate": 1.9674836970574253e-05, "loss": 0.963, "step": 47 },
    { "epoch": 0.4, "learning_rate": 1.9651334603966298e-05, "loss": 1.0596, "step": 48 },
    { "epoch": 0.4, "learning_rate": 1.962702729049146e-05, "loss": 1.2334, "step": 49 },
    { "epoch": 0.41, "learning_rate": 1.960191705744407e-05, "loss": 1.0826, "step": 50 },
    { "epoch": 0.42, "learning_rate": 1.957600599908406e-05, "loss": 0.9767, "step": 51 },
    { "epoch": 0.43, "learning_rate": 1.9549296276462326e-05, "loss": 0.9579, "step": 52 },
    { "epoch": 0.44, "learning_rate": 1.9521790117240472e-05, "loss": 0.9171, "step": 53 },
    { "epoch": 0.45, "learning_rate": 1.949348981550502e-05, "loss": 1.1456, "step": 54 },
    { "epoch": 0.45, "learning_rate": 1.9464397731576093e-05, "loss": 1.0953, "step": 55 },
    { "epoch": 0.46, "learning_rate": 1.943451629181054e-05, "loss": 1.137, "step": 56 },
    { "epoch": 0.47, "learning_rate": 1.940384798839957e-05, "loss": 1.1126, "step": 57 },
    { "epoch": 0.48, "learning_rate": 1.9372395379160913e-05, "loss": 0.9716, "step": 58 },
    { "epoch": 0.49, "learning_rate": 1.9340161087325483e-05, "loss": 0.9391, "step": 59 },
    { "epoch": 0.5, "learning_rate": 1.9307147801318585e-05, "loss": 1.0386, "step": 60 },
    { "epoch": 0.5, "learning_rate": 1.9273358274535703e-05, "loss": 0.9707, "step": 61 },
    { "epoch": 0.51, "learning_rate": 1.9238795325112867e-05, "loss": 0.989, "step": 62 },
    { "epoch": 0.52, "learning_rate": 1.9203461835691596e-05, "loss": 0.9676, "step": 63 },
    { "epoch": 0.53, "learning_rate": 1.916736075317848e-05, "loss": 1.0501, "step": 64 },
    { "epoch": 0.54, "learning_rate": 1.9130495088499417e-05, "loss": 0.9988, "step": 65 },
    { "epoch": 0.55, "learning_rate": 1.909286791634848e-05, "loss": 1.0927, "step": 66 },
    { "epoch": 0.55, "learning_rate": 1.905448237493147e-05, "loss": 1.0531, "step": 67 },
    { "epoch": 0.56, "learning_rate": 1.9015341665704206e-05, "loss": 1.0239, "step": 68 },
    { "epoch": 0.57, "learning_rate": 1.8975449053105505e-05, "loss": 1.0, "step": 69 },
    { "epoch": 0.58, "learning_rate": 1.8934807864284904e-05, "loss": 0.967, "step": 70 },
    { "epoch": 0.59, "learning_rate": 1.889342148882519e-05, "loss": 1.0019, "step": 71 },
    { "epoch": 0.6, "learning_rate": 1.8851293378459685e-05, "loss": 0.9319, "step": 72 },
    { "epoch": 0.6, "learning_rate": 1.8808427046784365e-05, "loss": 0.9753, "step": 73 },
    { "epoch": 0.61, "learning_rate": 1.876482606896482e-05, "loss": 0.9619, "step": 74 },
    { "epoch": 0.62, "learning_rate": 1.872049408143808e-05, "loss": 0.9042, "step": 75 },
    { "epoch": 0.63, "learning_rate": 1.8675434781609305e-05, "loss": 1.0871, "step": 76 },
    { "epoch": 0.64, "learning_rate": 1.8629651927543447e-05, "loss": 0.9747, "step": 77 },
    { "epoch": 0.64, "learning_rate": 1.858314933765178e-05, "loss": 1.0254, "step": 78 },
    { "epoch": 0.65, "learning_rate": 1.8535930890373467e-05, "loss": 1.0032, "step": 79 },
    { "epoch": 0.66, "learning_rate": 1.848800052385206e-05, "loss": 0.8816, "step": 80 },
    { "epoch": 0.67, "learning_rate": 1.8439362235607074e-05, "loss": 0.9593, "step": 81 },
    { "epoch": 0.68, "learning_rate": 1.8390020082200553e-05, "loss": 1.012, "step": 82 },
    { "epoch": 0.69, "learning_rate": 1.833997817889878e-05, "loss": 1.0523, "step": 83 },
    { "epoch": 0.69, "learning_rate": 1.828924069932902e-05, "loss": 1.0015, "step": 84 },
    { "epoch": 0.7, "learning_rate": 1.8237811875131446e-05, "loss": 0.9485, "step": 85 },
    { "epoch": 0.71, "learning_rate": 1.8185695995606196e-05, "loss": 0.8034, "step": 86 },
    { "epoch": 0.72, "learning_rate": 1.8132897407355657e-05, "loss": 0.9674, "step": 87 },
    { "epoch": 0.73, "learning_rate": 1.8079420513921913e-05, "loss": 1.0236, "step": 88 },
    { "epoch": 0.74, "learning_rate": 1.802526977541951e-05, "loss": 0.9783, "step": 89 },
    { "epoch": 0.74, "learning_rate": 1.7970449708163455e-05, "loss": 1.0106, "step": 90 },
    { "epoch": 0.75, "learning_rate": 1.7914964884292543e-05, "loss": 1.0462, "step": 91 },
    { "epoch": 0.76, "learning_rate": 1.785881993138803e-05, "loss": 0.9723, "step": 92 },
    { "epoch": 0.77, "learning_rate": 1.7802019532087692e-05, "loss": 0.9475, "step": 93 },
    { "epoch": 0.78, "learning_rate": 1.7744568423695257e-05, "loss": 0.9122, "step": 94 },
    { "epoch": 0.79, "learning_rate": 1.7686471397785322e-05, "loss": 0.9502, "step": 95 },
    { "epoch": 0.79, "learning_rate": 1.7627733299803714e-05, "loss": 0.9869, "step": 96 },
    { "epoch": 0.8, "learning_rate": 1.7568359028663365e-05, "loss": 0.8826, "step": 97 },
    { "epoch": 0.81, "learning_rate": 1.750835353633574e-05, "loss": 0.9557, "step": 98 },
    { "epoch": 0.82, "learning_rate": 1.744772182743782e-05, "loss": 0.979, "step": 99 },
    { "epoch": 0.83, "learning_rate": 1.7386468958814706e-05, "loss": 1.0202, "step": 100 },
    { "epoch": 0.83, "learning_rate": 1.7324600039117862e-05, "loss": 1.0266, "step": 101 },
    { "epoch": 0.84, "learning_rate": 1.7262120228379053e-05, "loss": 0.9527, "step": 102 },
    { "epoch": 0.85, "learning_rate": 1.7199034737579962e-05, "loss": 0.9514, "step": 103 },
    { "epoch": 0.86, "learning_rate": 1.71353488282176e-05, "loss": 0.9188, "step": 104 },
    { "epoch": 0.87, "learning_rate": 1.7071067811865477e-05, "loss": 0.8725, "step": 105 },
    { "epoch": 0.88, "learning_rate": 1.70061970497306e-05, "loss": 0.9311, "step": 106 },
    { "epoch": 0.88, "learning_rate": 1.6940741952206342e-05, "loss": 0.9228, "step": 107 },
    { "epoch": 0.89, "learning_rate": 1.687470797842118e-05, "loss": 1.0714, "step": 108 },
    { "epoch": 0.9, "learning_rate": 1.680810063578342e-05, "loss": 0.8246, "step": 109 },
    { "epoch": 0.91, "learning_rate": 1.6740925479521844e-05, "loss": 0.9465, "step": 110 },
    { "epoch": 0.92, "learning_rate": 1.6673188112222394e-05, "loss": 0.9637, "step": 111 },
    { "epoch": 0.93, "learning_rate": 1.66048941833609e-05, "loss": 0.8188, "step": 112 },
    { "epoch": 0.93, "learning_rate": 1.6536049388831897e-05, "loss": 0.864, "step": 113 },
    { "epoch": 0.94, "learning_rate": 1.646665947047358e-05, "loss": 0.8149, "step": 114 },
    { "epoch": 0.95, "learning_rate": 1.6396730215588913e-05, "loss": 0.9013, "step": 115 },
    { "epoch": 0.96, "learning_rate": 1.6326267456462965e-05, "loss": 0.9409, "step": 116 },
    { "epoch": 0.97, "learning_rate": 1.6255277069876454e-05, "loss": 0.9824, "step": 117 },
    { "epoch": 0.98, "learning_rate": 1.618376497661564e-05, "loss": 0.9688, "step": 118 },
    { "epoch": 0.98, "learning_rate": 1.6111737140978495e-05, "loss": 0.8832, "step": 119 },
    { "epoch": 0.99, "learning_rate": 1.603919957027727e-05, "loss": 1.015, "step": 120 },
    { "epoch": 1.0, "learning_rate": 1.5966158314337472e-05, "loss": 0.9867, "step": 121 },
    { "epoch": 1.01, "learning_rate": 1.589261946499329e-05, "loss": 0.9525, "step": 122 },
    { "epoch": 1.02, "learning_rate": 1.581858915557953e-05, "loss": 0.8725, "step": 123 },
    { "epoch": 1.02, "learning_rate": 1.574407356042005e-05, "loss": 0.8788, "step": 124 },
    { "epoch": 1.03, "learning_rate": 1.5669078894312848e-05, "loss": 0.997, "step": 125 },
    { "epoch": 1.04, "learning_rate": 1.5593611412011685e-05, "loss": 0.9908, "step": 126 },
    { "epoch": 1.05, "learning_rate": 1.551767740770446e-05, "loss": 0.8651, "step": 127 },
    { "epoch": 1.06, "learning_rate": 1.544128321448824e-05, "loss": 0.9856, "step": 128 },
    { "epoch": 1.07, "learning_rate": 1.5364435203841058e-05, "loss": 0.9477, "step": 129 },
    { "epoch": 1.07, "learning_rate": 1.5287139785090534e-05, "loss": 0.872, "step": 130 },
    { "epoch": 1.08, "learning_rate": 1.5209403404879305e-05, "loss": 0.882, "step": 131 },
    { "epoch": 1.09, "learning_rate": 1.5131232546627355e-05, "loss": 0.9071, "step": 132 },
    { "epoch": 1.1, "learning_rate": 1.5052633729991296e-05, "loss": 0.9204, "step": 133 },
    { "epoch": 1.11, "learning_rate": 1.4973613510320595e-05, "loss": 0.9915, "step": 134 },
    { "epoch": 1.12, "learning_rate": 1.4894178478110856e-05, "loss": 0.8548, "step": 135 },
    { "epoch": 1.12, "learning_rate": 1.4814335258454144e-05, "loss": 0.8516, "step": 136 },
    { "epoch": 1.13, "learning_rate": 1.4734090510486435e-05, "loss": 0.8408, "step": 137 },
    { "epoch": 1.14, "learning_rate": 1.4653450926832236e-05, "loss": 0.9548, "step": 138 },
    { "epoch": 1.15, "learning_rate": 1.4572423233046386e-05, "loss": 0.8366, "step": 139 },
    { "epoch": 1.16, "learning_rate": 1.449101418705315e-05, "loss": 0.9097, "step": 140 },
    { "epoch": 1.17, "learning_rate": 1.4409230578582566e-05, "loss": 0.9336, "step": 141 },
    { "epoch": 1.17, "learning_rate": 1.4327079228604177e-05, "loss": 0.9178, "step": 142 },
    { "epoch": 1.18, "learning_rate": 1.4244566988758152e-05, "loss": 0.9283, "step": 143 },
    { "epoch": 1.19, "learning_rate": 1.4161700740783815e-05, "loss": 0.8792, "step": 144 },
    { "epoch": 1.2, "learning_rate": 1.4078487395945712e-05, "loss": 0.9653, "step": 145 },
    { "epoch": 1.21, "learning_rate": 1.3994933894457193e-05, "loss": 0.8916, "step": 146 },
    { "epoch": 1.21, "learning_rate": 1.391104720490156e-05, "loss": 0.9913, "step": 147 },
    { "epoch": 1.22, "learning_rate": 1.3826834323650899e-05, "loss": 0.8563, "step": 148 },
    { "epoch": 1.23, "learning_rate": 1.3742302274282532e-05, "loss": 0.9575, "step": 149 },
    { "epoch": 1.24, "learning_rate": 1.3657458106993257e-05, "loss": 0.9216, "step": 150 },
    { "epoch": 1.25, "learning_rate": 1.3572308898011328e-05, "loss": 0.9395, "step": 151 },
    { "epoch": 1.26, "learning_rate": 1.3486861749006286e-05, "loss": 0.8954, "step": 152 },
    { "epoch": 1.26, "learning_rate": 1.3401123786496664e-05, "loss": 0.9628, "step": 153 },
    { "epoch": 1.27, "learning_rate": 1.3315102161255603e-05, "loss": 0.8323, "step": 154 },
    { "epoch": 1.28, "learning_rate": 1.3228804047714462e-05, "loss": 0.8951, "step": 155 },
    { "epoch": 1.29, "learning_rate": 1.3142236643364481e-05, "loss": 0.9588, "step": 156 },
    { "epoch": 1.3, "learning_rate": 1.3055407168156438e-05, "loss": 0.8521, "step": 157 },
    { "epoch": 1.31, "learning_rate": 1.2968322863898533e-05, "loss": 0.9239, "step": 158 },
    { "epoch": 1.31, "learning_rate": 1.2880990993652379e-05, "loss": 0.8886, "step": 159 },
    { "epoch": 1.32, "learning_rate": 1.2793418841127242e-05, "loss": 0.8379, "step": 160 },
    { "epoch": 1.33, "learning_rate": 1.2705613710072575e-05, "loss": 0.9035, "step": 161 },
    { "epoch": 1.34, "learning_rate": 1.2617582923668855e-05, "loss": 0.8227, "step": 162 },
    { "epoch": 1.35, "learning_rate": 1.2529333823916807e-05, "loss": 0.9045, "step": 163 },
    { "epoch": 1.36, "learning_rate": 1.2440873771025079e-05, "loss": 0.8571, "step": 164 },
    { "epoch": 1.36, "learning_rate": 1.2352210142796359e-05, "loss": 0.8233, "step": 165 },
    { "epoch": 1.37, "learning_rate": 1.2263350334012059e-05, "loss": 0.9579, "step": 166 },
    { "epoch": 1.38, "learning_rate": 1.2174301755815572e-05, "loss": 0.9478, "step": 167 },
    { "epoch": 1.39, "learning_rate": 1.208507183509416e-05, "loss": 0.8041, "step": 168 },
    { "epoch": 1.4, "learning_rate": 1.199566801385953e-05, "loss": 0.8119, "step": 169 },
    { "epoch": 1.4, "learning_rate": 1.190609774862715e-05, "loss": 0.9701, "step": 170 },
    { "epoch": 1.41, "learning_rate": 1.1816368509794365e-05, "loss": 0.9351, "step": 171 },
    { "epoch": 1.42, "learning_rate": 1.1726487781017337e-05, "loss": 0.8202, "step": 172 },
    { "epoch": 1.43, "learning_rate": 1.1636463058586882e-05, "loss": 0.8849, "step": 173 },
    { "epoch": 1.44, "learning_rate": 1.1546301850803283e-05, "loss": 0.9304, "step": 174 },
    { "epoch": 1.45, "learning_rate": 1.1456011677350052e-05, "loss": 0.9295, "step": 175 },
    { "epoch": 1.45, "learning_rate": 1.1365600068666781e-05, "loss": 0.9849, "step": 176 },
    { "epoch": 1.46, "learning_rate": 1.127507456532108e-05, "loss": 0.7768, "step": 177 },
    { "epoch": 1.47, "learning_rate": 1.1184442717379687e-05, "loss": 1.0072, "step": 178 },
    { "epoch": 1.48, "learning_rate": 1.1093712083778748e-05, "loss": 0.9502, "step": 179 },
    { "epoch": 1.49, "learning_rate": 1.1002890231693395e-05, "loss": 0.7969, "step": 180 },
    { "epoch": 1.5, "learning_rate": 1.0911984735906635e-05, "loss": 0.8685, "step": 181 },
    { "epoch": 1.5, "learning_rate": 1.0821003178177572e-05, "loss": 0.8699, "step": 182 },
    { "epoch": 1.51, "learning_rate": 1.0729953146609076e-05, "loss": 0.9276, "step": 183 },
    { "epoch": 1.52, "learning_rate": 1.0638842235014923e-05, "loss": 0.8302, "step": 184 },
    { "epoch": 1.53, "learning_rate": 1.0547678042286435e-05, "loss": 0.7993, "step": 185 },
    { "epoch": 1.54, "learning_rate": 1.045646817175874e-05, "loss": 0.8794, "step": 186 },
    { "epoch": 1.55, "learning_rate": 1.0365220230576592e-05, "loss": 0.8809, "step": 187 },
    { "epoch": 1.55, "learning_rate": 1.027394182905995e-05, "loss": 0.9458, "step": 188 },
    { "epoch": 1.56, "learning_rate": 1.0182640580069249e-05, "loss": 0.7746, "step": 189 },
    { "epoch": 1.57, "learning_rate": 1.009132409837046e-05, "loss": 0.8053, "step": 190 },
    { "epoch": 1.58, "learning_rate": 1e-05, "loss": 0.8711, "step": 191 },
    { "epoch": 1.59, "learning_rate": 9.908675901629542e-06, "loss": 0.8419, "step": 192 },
    { "epoch": 1.6, "learning_rate": 9.817359419930753e-06, "loss": 0.8391, "step": 193 },
    { "epoch": 1.6, "learning_rate": 9.726058170940053e-06, "loss": 0.8785, "step": 194 },
    { "epoch": 1.61, "learning_rate": 9.634779769423412e-06, "loss": 0.8864, "step": 195 },
    { "epoch": 1.62, "learning_rate": 9.543531828241263e-06, "loss": 0.9405, "step": 196 },
    { "epoch": 1.63, "learning_rate": 9.452321957713563e-06, "loss": 0.8087, "step": 197 },
    { "epoch": 1.64, "learning_rate": 9.361157764985079e-06, "loss": 0.9207, "step": 198 },
    { "epoch": 1.64, "learning_rate": 9.270046853390924e-06, "loss": 0.8302, "step": 199 },
    { "epoch": 1.65, "learning_rate": 9.17899682182243e-06, "loss": 0.9462, "step": 200 },
    { "epoch": 1.66, "learning_rate": 9.088015264093365e-06, "loss": 1.0384, "step": 201 },
    { "epoch": 1.67, "learning_rate": 8.997109768306607e-06, "loss": 0.9399, "step": 202 },
    { "epoch": 1.68, "learning_rate": 8.906287916221259e-06, "loss": 0.8899, "step": 203 },
    { "epoch": 1.69, "learning_rate": 8.81555728262032e-06, "loss": 0.9902, "step": 204 },
    { "epoch": 1.69, "learning_rate": 8.724925434678923e-06, "loss": 0.8855, "step": 205 },
    { "epoch": 1.7, "learning_rate": 8.634399931333226e-06, "loss": 0.8417, "step": 206 },
    { "epoch": 1.71, "learning_rate": 8.543988322649954e-06, "loss": 0.8521, "step": 207 },
    { "epoch": 1.72, "learning_rate": 8.45369814919672e-06, "loss": 0.867, "step": 208 },
    { "epoch": 1.73, "learning_rate": 8.363536941413121e-06, "loss": 0.8874, "step": 209 },
    { "epoch": 1.74, "learning_rate": 8.273512218982666e-06, "loss": 0.8213, "step": 210 },
    { "epoch": 1.74, "learning_rate": 8.183631490205636e-06, "loss": 0.7704, "step": 211 },
    { "epoch": 1.75, "learning_rate": 8.093902251372854e-06, "loss": 0.8162, "step": 212 },
    { "epoch": 1.76, "learning_rate": 8.004331986140474e-06, "loss": 0.8831, "step": 213 },
    { "epoch": 1.77, "learning_rate": 7.914928164905844e-06, "loss": 0.7004, "step": 214 },
    { "epoch": 1.78, "learning_rate": 7.825698244184432e-06, "loss": 0.8225, "step": 215 },
    { "epoch": 1.79, "learning_rate": 7.736649665987944e-06, "loss": 0.823, "step": 216 },
    { "epoch": 1.79, "learning_rate": 7.647789857203644e-06, "loss": 0.8252, "step": 217 },
    { "epoch": 1.8, "learning_rate": 7.559126228974921e-06, "loss": 0.8729, "step": 218 },
    { "epoch": 1.81, "learning_rate": 7.470666176083193e-06, "loss": 0.9415, "step": 219 },
    { "epoch": 1.82, "learning_rate": 7.382417076331148e-06, "loss": 0.9198, "step": 220 },
    { "epoch": 1.83, "learning_rate": 7.294386289927425e-06, "loss": 0.9263, "step": 221 },
    { "epoch": 1.83, "learning_rate": 7.206581158872761e-06, "loss": 0.9364, "step": 222 },
    { "epoch": 1.84, "learning_rate": 7.119009006347625e-06, "loss": 0.8889, "step": 223 },
    { "epoch": 1.85, "learning_rate": 7.031677136101471e-06, "loss": 0.8712, "step": 224 },
    { "epoch": 1.86, "learning_rate": 6.944592831843566e-06, "loss": 0.895, "step": 225 },
    { "epoch": 1.87, "learning_rate": 6.857763356635525e-06, "loss": 0.7805, "step": 226 },
    { "epoch": 1.88, "learning_rate": 6.771195952285541e-06, "loss": 0.7608, "step": 227 },
    { "epoch": 1.88, "learning_rate": 6.684897838744403e-06, "loss": 0.8335, "step": 228 },
    { "epoch": 1.89, "learning_rate": 6.5988762135033405e-06, "loss": 0.8861, "step": 229 },
    { "epoch": 1.9, "learning_rate": 6.513138250993716e-06, "loss": 0.9386, "step": 230 },
    { "epoch": 1.91, "learning_rate": 6.427691101988673e-06, "loss": 0.9067, "step": 231 },
    { "epoch": 1.92, "learning_rate": 6.342541893006746e-06, "loss": 0.9105, "step": 232 },
    { "epoch": 1.93, "learning_rate": 6.257697725717469e-06, "loss": 0.8314, "step": 233 },
    { "epoch": 1.93, "learning_rate": 6.173165676349103e-06, "loss": 0.8538, "step": 234 },
    { "epoch": 1.94, "learning_rate": 6.088952795098442e-06, "loss": 0.7411, "step": 235 },
    { "epoch": 1.95, "learning_rate": 6.00506610554281e-06, "loss": 0.8632, "step": 236 },
    { "epoch": 1.96, "learning_rate": 5.921512604054289e-06, "loss": 0.8846, "step": 237 },
    { "epoch": 1.97, "learning_rate": 5.838299259216187e-06, "loss": 0.8266, "step": 238 },
    { "epoch": 1.98, "learning_rate": 5.755433011241851e-06, "loss": 0.9274, "step": 239 },
    { "epoch": 1.98, "learning_rate": 5.672920771395822e-06, "loss": 0.9221, "step": 240 },
    { "epoch": 1.99, "learning_rate": 5.590769421417435e-06, "loss": 0.8928, "step": 241 },
    { "epoch": 2.0, "learning_rate": 5.50898581294685e-06, "loss": 0.8753, "step": 242 },
    { "epoch": 2.01, "learning_rate": 5.427576766953615e-06, "loss": 0.8497, "step": 243 },
    { "epoch": 2.02, "learning_rate": 5.346549073167766e-06, "loss": 0.7893, "step": 244 },
    { "epoch": 2.02, "learning_rate": 5.265909489513568e-06, "loss": 0.8574, "step": 245 },
    { "epoch": 2.03, "learning_rate": 5.185664741545862e-06, "loss": 0.7664, "step": 246 },
    { "epoch": 2.04, "learning_rate": 5.105821521889147e-06, "loss": 0.7805, "step": 247 },
    { "epoch": 2.05, "learning_rate": 5.026386489679408e-06, "loss": 0.7907, "step": 248 },
    { "epoch": 2.06, "learning_rate": 4.947366270008708e-06, "loss": 0.9125, "step": 249 },
    { "epoch": 2.07, "learning_rate": 4.868767453372649e-06, "loss": 0.9681, "step": 250 },
    { "epoch": 2.07, "learning_rate": 4.790596595120699e-06, "loss": 0.9316, "step": 251 },
    { "epoch": 2.08, "learning_rate": 4.712860214909466e-06, "loss": 0.8419, "step": 252 },
    { "epoch": 2.09, "learning_rate": 4.635564796158946e-06, "loss": 0.8667, "step": 253 },
    { "epoch": 2.1, "learning_rate": 4.558716785511764e-06, "loss": 0.753, "step": 254 },
    { "epoch": 2.11, "learning_rate": 4.482322592295541e-06, "loss": 0.8154, "step": 255 },
    { "epoch": 2.12, "learning_rate": 4.406388587988318e-06, "loss": 0.8494, "step": 256 },
    { "epoch": 2.12, "learning_rate": 4.330921105687155e-06, "loss": 0.7057, "step": 257 },
    { "epoch": 2.13, "learning_rate": 4.255926439579948e-06, "loss": 0.8995, "step": 258 },
    { "epoch": 2.14, "learning_rate": 4.181410844420473e-06, "loss": 0.8642, "step": 259 },
    { "epoch": 2.15, "learning_rate": 4.107380535006709e-06, "loss": 0.7852, "step": 260 },
    { "epoch": 2.16, "learning_rate": 4.033841685662529e-06, "loss": 0.7933, "step": 261 },
    { "epoch": 2.17, "learning_rate": 3.960800429722734e-06, "loss": 0.8312, "step": 262 },
    { "epoch": 2.17, "learning_rate": 3.888262859021508e-06, "loss": 0.8643, "step": 263 },
    { "epoch": 2.18, "learning_rate": 3.81623502338436e-06, "loss": 0.775, "step": 264 },
    { "epoch": 2.19, "learning_rate": 3.7447229301235443e-06, "loss": 0.8607, "step": 265 },
    { "epoch": 2.2, "learning_rate": 3.6737325435370376e-06, "loss": 0.8145, "step": 266 },
    { "epoch": 2.21, "learning_rate": 3.6032697844110896e-06, "loss": 0.8945, "step": 267 },
    { "epoch": 2.21, "learning_rate": 3.5333405295264255e-06, "loss": 0.9856, "step": 268 },
    { "epoch": 2.22, "learning_rate": 3.463950611168111e-06, "loss": 0.8377, "step": 269 },
    { "epoch": 2.23, "learning_rate": 3.395105816639106e-06, "loss": 0.915, "step": 270 },
    { "epoch": 2.24, "learning_rate": 3.326811887777607e-06, "loss": 0.8007, "step": 271 },
    { "epoch": 2.25, "learning_rate": 3.2590745204781537e-06, "loss": 0.8139, "step": 272 },
    { "epoch": 2.26, "learning_rate": 3.191899364216581e-06, "loss": 0.8724, "step": 273 },
    { "epoch": 2.26, "learning_rate": 3.125292021578822e-06, "loss": 0.8596, "step": 274 },
    { "epoch": 2.27, "learning_rate": 3.0592580477936606e-06, "loss": 0.8309, "step": 275 },
    { "epoch": 2.28, "learning_rate": 2.993802950269402e-06, "loss": 0.8712, "step": 276 },
    { "epoch": 2.29, "learning_rate": 2.9289321881345257e-06, "loss": 0.8146, "step": 277 },
    { "epoch": 2.3, "learning_rate": 2.864651171782402e-06, "loss": 0.814, "step": 278 },
    { "epoch": 2.31, "learning_rate": 2.8009652624200436e-06, "loss": 0.7858, "step": 279 },
    { "epoch": 2.31, "learning_rate": 2.7378797716209506e-06, "loss": 0.8521, "step": 280 },
    { "epoch": 2.32, "learning_rate": 2.675399960882138e-06, "loss": 0.9599, "step": 281 },
    { "epoch": 2.33, "learning_rate": 2.6135310411852977e-06, "loss": 0.7842, "step": 282 },
    { "epoch": 2.34, "learning_rate": 2.5522781725621814e-06, "loss": 0.8024, "step": 283 },
    { "epoch": 2.35, "learning_rate": 2.491646463664261e-06, "loss": 0.7793, "step": 284 },
    { "epoch": 2.36, "learning_rate": 2.4316409713366353e-06, "loss": 0.8248, "step": 285 },
    { "epoch": 2.36, "learning_rate": 2.3722667001962898e-06, "loss": 0.7695, "step": 286 },
    { "epoch": 2.37, "learning_rate": 2.3135286022146785e-06, "loss": 0.8053, "step": 287 },
    { "epoch": 2.38, "learning_rate": 2.255431576304744e-06, "loss": 0.8474, "step": 288 },
    { "epoch": 2.39, "learning_rate": 2.1979804679123108e-06, "loss": 0.9379, "step": 289 },
    { "epoch": 2.4, "learning_rate": 2.141180068611971e-06, "loss": 0.8415, "step": 290 },
    { "epoch": 2.4, "learning_rate": 2.08503511570746e-06, "loss": 0.7226, "step": 291 },
    { "epoch": 2.41, "learning_rate": 2.0295502918365473e-06, "loss": 0.8963, "step": 292 },
    { "epoch": 2.42, "learning_rate": 1.9747302245804944e-06, "loss": 0.8423, "step": 293 },
    { "epoch": 2.43, "learning_rate": 1.920579486078091e-06, "loss": 0.7995, "step": 294 },
    { "epoch": 2.44, "learning_rate": 1.8671025926443464e-06, "loss": 0.8051, "step": 295 },
    { "epoch": 2.45, "learning_rate": 1.8143040043938054e-06, "loss": 0.7731, "step": 296 },
    { "epoch": 2.45, "learning_rate": 1.7621881248685569e-06, "loss": 0.8458, "step": 297 },
    { "epoch": 2.46, "learning_rate": 1.7107593006709799e-06, "loss": 0.7302, "step": 298 },
    { "epoch": 2.47, "learning_rate": 1.660021821101222e-06, "loss": 0.881, "step": 299 },
    { "epoch": 2.48, "learning_rate": 1.6099799177994491e-06, "loss": 0.7728, "step": 300 },
    { "epoch": 2.49, "learning_rate": 1.5606377643929305e-06, "loss": 0.8095, "step": 301 },
    { "epoch": 2.5, "learning_rate": 1.5119994761479429e-06, "loss": 0.8324, "step": 302 },
    { "epoch": 2.5, "learning_rate": 1.4640691096265358e-06, "loss": 0.8192, "step": 303 },
    { "epoch": 2.51, "learning_rate": 1.4168506623482202e-06, "loss": 0.7712, "step": 304 },
    { "epoch": 2.52, "learning_rate": 1.3703480724565577e-06, "loss": 0.7861, "step": 305 },
    { "epoch": 2.53, "learning_rate": 1.3245652183906965e-06, "loss": 0.7886, "step": 306 },
    { "epoch": 2.54, "learning_rate": 1.279505918561923e-06, "loss": 0.8942, "step": 307 },
    { "epoch": 2.55, "learning_rate": 1.2351739310351796e-06, "loss": 0.911, "step": 308 },
    { "epoch": 2.55, "learning_rate": 1.1915729532156372e-06, "loss": 0.7743, "step": 309 },
    { "epoch": 2.56, "learning_rate": 1.1487066215403186e-06, "loss": 0.8076, "step": 310 },
    { "epoch": 2.57, "learning_rate": 1.1065785111748117e-06, "loss": 0.7571, "step": 311 },
    { "epoch": 2.58, "learning_rate": 1.0651921357150997e-06, "loss": 0.8203, "step": 312 },
    { "epoch": 2.59, "learning_rate": 1.0245509468944992e-06, "loss": 0.7785, "step": 313 },
    { "epoch": 2.6, "learning_rate": 9.84658334295796e-07, "loss": 0.8648, "step": 314 },
    { "epoch": 2.6, "learning_rate": 9.455176250685338e-07, "loss": 0.8401, "step": 315 },
    { "epoch": 2.61, "learning_rate": 9.071320836515263e-07, "loss": 0.8123, "step": 316 },
    { "epoch": 2.62, "learning_rate": 8.695049115005838e-07, "loss": 0.7879, "step": 317 },
    { "epoch": 2.63, "learning_rate": 8.326392468215206e-07, "loss": 0.8958, "step": 318 },
    { "epoch": 2.64, "learning_rate": 7.965381643084069e-07, "loss": 0.8691, "step": 319 },
    { "epoch": 2.64, "learning_rate": 7.612046748871327e-07, "loss": 0.7841, "step": 320 },
    { "epoch": 2.65, "learning_rate": 7.266417254642966e-07, "loss": 0.7674, "step": 321 },
    { "epoch": 2.66, "learning_rate": 6.928521986814196e-07, "loss": 0.7693, "step": 322 },
    { "epoch": 2.67, "learning_rate": 6.598389126745209e-07, "loss": 0.8653, "step": 323 },
    { "epoch": 2.68, "learning_rate": 6.276046208390873e-07, "loss": 0.8795, "step": 324 },
    { "epoch": 2.69, "learning_rate": 5.961520116004326e-07, "loss": 0.8787, "step": 325 },
    { "epoch": 2.69, "learning_rate": 5.654837081894626e-07, "loss": 0.8687, "step": 326 },
    { "epoch": 2.7, "learning_rate": 5.35602268423906e-07, "loss": 0.8541, "step": 327 },
    { "epoch": 2.71, "learning_rate": 5.065101844949793e-07, "loss": 0.9161, "step": 328 },
    { "epoch": 2.72, "learning_rate": 4.782098827595305e-07, "loss": 0.7652, "step": 329 },
    { "epoch": 2.73, "learning_rate": 4.5070372353767543e-07, "loss": 0.8382, "step": 330 },
    { "epoch": 2.74, "learning_rate": 4.2399400091594154e-07, "loss": 0.9081, "step": 331 },
    { "epoch": 2.74, "learning_rate": 3.9808294255593293e-07, "loss": 0.8119, "step": 332 },
    { "epoch": 2.75, "learning_rate": 3.7297270950854224e-07, "loss": 1.0043, "step": 333 },
    { "epoch": 2.76, "learning_rate": 3.486653960337061e-07, "loss": 0.8723, "step": 334 },
    { "epoch": 2.77, "learning_rate": 3.2516302942574794e-07, "loss": 0.7953, "step": 335 },
    { "epoch": 2.78, "learning_rate": 3.024675698442858e-07, "loss": 0.8223, "step": 336 },
    { "epoch": 2.79, "learning_rate": 2.805809101507539e-07, "loss": 0.9616, "step": 337 },
    { "epoch": 2.79, "learning_rate": 2.595048757505392e-07, "loss": 0.7757, "step": 338 },
    { "epoch": 2.8, "learning_rate": 2.392412244407294e-07, "loss": 0.833, "step": 339 },
    { "epoch": 2.81, "learning_rate": 2.1979164626350745e-07, "loss": 0.8236, "step": 340 },
    { "epoch": 2.82, "learning_rate": 2.0115776336520622e-07, "loss": 0.8654, "step": 341 },
    { "epoch": 2.83, "learning_rate": 1.8334112986100994e-07, "loss": 0.818, "step": 342 },
    { "epoch": 2.83, "learning_rate": 1.6634323170533928e-07, "loss": 0.8791, "step": 343 },
    { "epoch": 2.84, "learning_rate": 1.5016548656791697e-07, "loss": 0.9477, "step": 344 },
    { "epoch": 2.85, "learning_rate": 1.348092437155346e-07, "loss": 0.8957, "step": 345 },
    { "epoch": 2.86, "learning_rate": 1.20275783899515e-07, "loss": 0.8467, "step": 346 },
    { "epoch": 2.87, "learning_rate": 1.0656631924889749e-07, "loss": 0.8196, "step": 347 },
    { "epoch": 2.88, "learning_rate": 9.368199316934446e-08, "loss": 0.7764, "step": 348 },
    { "epoch": 2.88, "learning_rate": 8.162388024777202e-08, "loss": 0.8987, "step": 349 },
    { "epoch": 2.89, "learning_rate": 7.039298616273393e-08, "loss": 0.9182, "step": 350 },
    { "epoch": 2.9, "learning_rate": 5.999024760054095e-08, "loss": 0.8248, "step": 351 },
    { "epoch": 2.91, "learning_rate": 5.041653217713993e-08, "loss": 0.8595, "step": 352 },
    { "epoch": 2.92, "learning_rate": 4.167263836575286e-08, "loss": 0.6834, "step": 353 },
    { "epoch": 2.93, "learning_rate": 3.3759295430281226e-08, "loss": 0.6846, "step": 354 },
    { "epoch": 2.93, "learning_rate": 2.667716336448356e-08, "loss": 0.8627, "step": 355 },
    { "epoch": 2.94, "learning_rate": 2.0426832836930588e-08, "loss": 0.8074, "step": 356 },
    { "epoch": 2.95, "learning_rate": 1.50088251417424e-08, "loss": 0.8568, "step": 357 },
    { "epoch": 2.96, "learning_rate": 1.0423592155108798e-08, "loss": 0.8512, "step": 358 },
    { "epoch": 2.97, "learning_rate": 6.671516297606095e-09, "loss": 0.8249, "step": 359 },
    { "epoch": 2.98, "learning_rate": 3.7529105022970915e-09, "loss": 0.8835, "step": 360 },
    { "epoch": 2.98, "learning_rate": 1.6680181886352676e-09, "loss": 0.8837, "step": 361 },
    { "epoch": 2.99, "learning_rate": 4.170132421610351e-10, "loss": 0.8092, "step": 362 },
    { "epoch": 3.0, "learning_rate": 0.0, "loss": 0.8638, "step": 363 },
    { "epoch": 3.0, "step": 363, "total_flos": 457885308682240.0, "train_loss": 0.9454823605942003, "train_runtime": 6836.4148, "train_samples_per_second": 5.088, "train_steps_per_second": 0.053 }
  ],
  "logging_steps": 1.0,
  "max_steps": 363,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 457885308682240.0,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}
|
|