{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.004201680672269,
  "eval_steps": 500,
  "global_step": 240,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004201680672268907,
      "grad_norm": 1.2855414152145386,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.5024,
      "step": 1
    },
    {
      "epoch": 0.008403361344537815,
      "grad_norm": 1.2932429313659668,
      "learning_rate": 4.800000000000001e-06,
      "loss": 2.5834,
      "step": 2
    },
    {
      "epoch": 0.012605042016806723,
      "grad_norm": 1.3544902801513672,
      "learning_rate": 7.2e-06,
      "loss": 2.4398,
      "step": 3
    },
    {
      "epoch": 0.01680672268907563,
      "grad_norm": 1.6224277019500732,
      "learning_rate": 9.600000000000001e-06,
      "loss": 2.6242,
      "step": 4
    },
    {
      "epoch": 0.02100840336134454,
      "grad_norm": 1.5453084707260132,
      "learning_rate": 1.2e-05,
      "loss": 2.2337,
      "step": 5
    },
    {
      "epoch": 0.025210084033613446,
      "grad_norm": 1.8252912759780884,
      "learning_rate": 1.44e-05,
      "loss": 2.3317,
      "step": 6
    },
    {
      "epoch": 0.029411764705882353,
      "grad_norm": 1.2351248264312744,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 2.156,
      "step": 7
    },
    {
      "epoch": 0.03361344537815126,
      "grad_norm": 0.6219255924224854,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 2.0386,
      "step": 8
    },
    {
      "epoch": 0.037815126050420166,
      "grad_norm": 0.23430386185646057,
      "learning_rate": 2.16e-05,
      "loss": 1.9042,
      "step": 9
    },
    {
      "epoch": 0.04201680672268908,
      "grad_norm": 0.5495060086250305,
      "learning_rate": 2.4e-05,
      "loss": 2.1308,
      "step": 10
    },
    {
      "epoch": 0.046218487394957986,
      "grad_norm": 0.7401595115661621,
      "learning_rate": 2.64e-05,
      "loss": 2.1082,
      "step": 11
    },
    {
      "epoch": 0.05042016806722689,
      "grad_norm": 1.3093836307525635,
      "learning_rate": 2.88e-05,
      "loss": 1.9264,
      "step": 12
    },
    {
      "epoch": 0.0546218487394958,
      "grad_norm": 1.1299002170562744,
      "learning_rate": 3.12e-05,
      "loss": 2.004,
      "step": 13
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 1.1077698469161987,
      "learning_rate": 3.3600000000000004e-05,
      "loss": 2.1267,
      "step": 14
    },
    {
      "epoch": 0.06302521008403361,
      "grad_norm": 0.813391923904419,
      "learning_rate": 3.6e-05,
      "loss": 1.9106,
      "step": 15
    },
    {
      "epoch": 0.06722689075630252,
      "grad_norm": 0.48726755380630493,
      "learning_rate": 3.8400000000000005e-05,
      "loss": 2.055,
      "step": 16
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 0.35943350195884705,
      "learning_rate": 4.08e-05,
      "loss": 2.0779,
      "step": 17
    },
    {
      "epoch": 0.07563025210084033,
      "grad_norm": 0.27760088443756104,
      "learning_rate": 4.32e-05,
      "loss": 2.0189,
      "step": 18
    },
    {
      "epoch": 0.07983193277310924,
      "grad_norm": 0.2671177089214325,
      "learning_rate": 4.5600000000000004e-05,
      "loss": 1.9943,
      "step": 19
    },
    {
      "epoch": 0.08403361344537816,
      "grad_norm": 0.2401943951845169,
      "learning_rate": 4.8e-05,
      "loss": 2.032,
      "step": 20
    },
    {
      "epoch": 0.08823529411764706,
      "grad_norm": 0.18362487852573395,
      "learning_rate": 5.04e-05,
      "loss": 1.9384,
      "step": 21
    },
    {
      "epoch": 0.09243697478991597,
      "grad_norm": 0.18155354261398315,
      "learning_rate": 5.28e-05,
      "loss": 1.9294,
      "step": 22
    },
    {
      "epoch": 0.09663865546218488,
      "grad_norm": 0.311981201171875,
      "learning_rate": 5.520000000000001e-05,
      "loss": 2.1202,
      "step": 23
    },
    {
      "epoch": 0.10084033613445378,
      "grad_norm": 0.46018195152282715,
      "learning_rate": 5.76e-05,
      "loss": 2.0634,
      "step": 24
    },
    {
      "epoch": 0.10504201680672269,
      "grad_norm": 0.725566565990448,
      "learning_rate": 6e-05,
      "loss": 2.0021,
      "step": 25
    },
    {
      "epoch": 0.1092436974789916,
      "grad_norm": 0.3912893235683441,
      "learning_rate": 5.999927215958923e-05,
      "loss": 2.0414,
      "step": 26
    },
    {
      "epoch": 0.1134453781512605,
      "grad_norm": 0.3835509121417999,
      "learning_rate": 5.999708867367372e-05,
      "loss": 2.0237,
      "step": 27
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.39111143350601196,
      "learning_rate": 5.999344964820206e-05,
      "loss": 2.0386,
      "step": 28
    },
    {
      "epoch": 0.12184873949579832,
      "grad_norm": 0.31010451912879944,
      "learning_rate": 5.998835525974959e-05,
      "loss": 2.0222,
      "step": 29
    },
    {
      "epoch": 0.12605042016806722,
      "grad_norm": 0.36277830600738525,
      "learning_rate": 5.9981805755509766e-05,
      "loss": 1.9205,
      "step": 30
    },
    {
      "epoch": 0.13025210084033614,
      "grad_norm": 0.24080567061901093,
      "learning_rate": 5.9973801453282154e-05,
      "loss": 1.8759,
      "step": 31
    },
    {
      "epoch": 0.13445378151260504,
      "grad_norm": 0.22599194943904877,
      "learning_rate": 5.9964342741457086e-05,
      "loss": 1.9547,
      "step": 32
    },
    {
      "epoch": 0.13865546218487396,
      "grad_norm": 0.33472496271133423,
      "learning_rate": 5.9953430078996735e-05,
      "loss": 1.7979,
      "step": 33
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.275795578956604,
      "learning_rate": 5.994106399541288e-05,
      "loss": 1.9714,
      "step": 34
    },
    {
      "epoch": 0.14705882352941177,
      "grad_norm": 0.1729828417301178,
      "learning_rate": 5.992724509074121e-05,
      "loss": 2.0898,
      "step": 35
    },
    {
      "epoch": 0.15126050420168066,
      "grad_norm": 0.11706062406301498,
      "learning_rate": 5.991197403551221e-05,
      "loss": 1.8647,
      "step": 36
    },
    {
      "epoch": 0.15546218487394958,
      "grad_norm": 0.18395456671714783,
      "learning_rate": 5.989525157071863e-05,
      "loss": 1.9258,
      "step": 37
    },
    {
      "epoch": 0.15966386554621848,
      "grad_norm": 0.14259740710258484,
      "learning_rate": 5.98770785077795e-05,
      "loss": 1.8656,
      "step": 38
    },
    {
      "epoch": 0.1638655462184874,
      "grad_norm": 0.15977182984352112,
      "learning_rate": 5.98574557285008e-05,
      "loss": 2.0456,
      "step": 39
    },
    {
      "epoch": 0.16806722689075632,
      "grad_norm": 0.1527746468782425,
      "learning_rate": 5.9836384185032645e-05,
      "loss": 1.8477,
      "step": 40
    },
    {
      "epoch": 0.1722689075630252,
      "grad_norm": 0.18175554275512695,
      "learning_rate": 5.9813864899823085e-05,
      "loss": 1.8427,
      "step": 41
    },
    {
      "epoch": 0.17647058823529413,
      "grad_norm": 0.19280438125133514,
      "learning_rate": 5.978989896556851e-05,
      "loss": 1.9003,
      "step": 42
    },
    {
      "epoch": 0.18067226890756302,
      "grad_norm": 0.1241818442940712,
      "learning_rate": 5.976448754516063e-05,
      "loss": 1.7676,
      "step": 43
    },
    {
      "epoch": 0.18487394957983194,
      "grad_norm": 0.23351380228996277,
      "learning_rate": 5.973763187163e-05,
      "loss": 1.8321,
      "step": 44
    },
    {
      "epoch": 0.18907563025210083,
      "grad_norm": 0.11710195988416672,
      "learning_rate": 5.970933324808626e-05,
      "loss": 1.9762,
      "step": 45
    },
    {
      "epoch": 0.19327731092436976,
      "grad_norm": 0.1551787108182907,
      "learning_rate": 5.967959304765487e-05,
      "loss": 2.003,
      "step": 46
    },
    {
      "epoch": 0.19747899159663865,
      "grad_norm": 0.12680837512016296,
      "learning_rate": 5.964841271341046e-05,
      "loss": 1.909,
      "step": 47
    },
    {
      "epoch": 0.20168067226890757,
      "grad_norm": 0.14685605466365814,
      "learning_rate": 5.961579375830686e-05,
      "loss": 1.9564,
      "step": 48
    },
    {
      "epoch": 0.20588235294117646,
      "grad_norm": 0.2090328335762024,
      "learning_rate": 5.958173776510365e-05,
      "loss": 2.0247,
      "step": 49
    },
    {
      "epoch": 0.21008403361344538,
      "grad_norm": 0.15236154198646545,
      "learning_rate": 5.9546246386289364e-05,
      "loss": 1.8914,
      "step": 50
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 0.13934959471225739,
      "learning_rate": 5.950932134400132e-05,
      "loss": 1.9527,
      "step": 51
    },
    {
      "epoch": 0.2184873949579832,
      "grad_norm": 0.14951136708259583,
      "learning_rate": 5.947096442994205e-05,
      "loss": 2.0099,
      "step": 52
    },
    {
      "epoch": 0.22268907563025211,
      "grad_norm": 0.16193124651908875,
      "learning_rate": 5.9431177505292346e-05,
      "loss": 1.9779,
      "step": 53
    },
    {
      "epoch": 0.226890756302521,
      "grad_norm": 0.12261511385440826,
      "learning_rate": 5.9389962500621e-05,
      "loss": 1.7994,
      "step": 54
    },
    {
      "epoch": 0.23109243697478993,
      "grad_norm": 0.13601715862751007,
      "learning_rate": 5.934732141579106e-05,
      "loss": 1.8998,
      "step": 55
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.16675348579883575,
      "learning_rate": 5.930325631986285e-05,
      "loss": 1.9909,
      "step": 56
    },
    {
      "epoch": 0.23949579831932774,
      "grad_norm": 0.16077667474746704,
      "learning_rate": 5.925776935099353e-05,
      "loss": 1.7902,
      "step": 57
    },
    {
      "epoch": 0.24369747899159663,
      "grad_norm": 0.12847718596458435,
      "learning_rate": 5.9210862716333373e-05,
      "loss": 1.8161,
      "step": 58
    },
    {
      "epoch": 0.24789915966386555,
      "grad_norm": 0.15590892732143402,
      "learning_rate": 5.916253869191867e-05,
      "loss": 2.0115,
      "step": 59
    },
    {
      "epoch": 0.25210084033613445,
      "grad_norm": 0.16642436385154724,
      "learning_rate": 5.911279962256126e-05,
      "loss": 2.0116,
      "step": 60
    },
    {
      "epoch": 0.25630252100840334,
      "grad_norm": 0.12253208458423615,
      "learning_rate": 5.9061647921734794e-05,
      "loss": 1.9297,
      "step": 61
    },
    {
      "epoch": 0.2605042016806723,
      "grad_norm": 0.13511379063129425,
      "learning_rate": 5.900908607145761e-05,
      "loss": 1.8262,
      "step": 62
    },
    {
      "epoch": 0.2647058823529412,
      "grad_norm": 0.1308983564376831,
      "learning_rate": 5.8955116622172274e-05,
      "loss": 1.9066,
      "step": 63
    },
    {
      "epoch": 0.2689075630252101,
      "grad_norm": 0.1284906417131424,
      "learning_rate": 5.8899742192621875e-05,
      "loss": 1.8297,
      "step": 64
    },
    {
      "epoch": 0.27310924369747897,
      "grad_norm": 0.14260078966617584,
      "learning_rate": 5.88429654697229e-05,
      "loss": 2.0597,
      "step": 65
    },
    {
      "epoch": 0.2773109243697479,
      "grad_norm": 0.13550403714179993,
      "learning_rate": 5.878478920843492e-05,
      "loss": 2.1711,
      "step": 66
    },
    {
      "epoch": 0.2815126050420168,
      "grad_norm": 0.1789373755455017,
      "learning_rate": 5.872521623162686e-05,
      "loss": 1.6575,
      "step": 67
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.12788023054599762,
      "learning_rate": 5.866424942994004e-05,
      "loss": 1.9221,
      "step": 68
    },
    {
      "epoch": 0.28991596638655465,
      "grad_norm": 0.18658475577831268,
      "learning_rate": 5.8601891761647914e-05,
      "loss": 1.9013,
      "step": 69
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 0.18295790255069733,
      "learning_rate": 5.853814625251257e-05,
      "loss": 1.9291,
      "step": 70
    },
    {
      "epoch": 0.29831932773109243,
      "grad_norm": 0.12854939699172974,
      "learning_rate": 5.847301599563783e-05,
      "loss": 2.0076,
      "step": 71
    },
    {
      "epoch": 0.3025210084033613,
      "grad_norm": 0.17280517518520355,
      "learning_rate": 5.840650415131922e-05,
      "loss": 1.8706,
      "step": 72
    },
    {
      "epoch": 0.3067226890756303,
      "grad_norm": 0.15926142036914825,
      "learning_rate": 5.8338613946890617e-05,
      "loss": 1.9854,
      "step": 73
    },
    {
      "epoch": 0.31092436974789917,
      "grad_norm": 0.2687736451625824,
      "learning_rate": 5.826934867656765e-05,
      "loss": 1.6384,
      "step": 74
    },
    {
      "epoch": 0.31512605042016806,
      "grad_norm": 0.11397445946931839,
      "learning_rate": 5.819871170128781e-05,
      "loss": 1.8928,
      "step": 75
    },
    {
      "epoch": 0.31932773109243695,
      "grad_norm": 0.12926487624645233,
      "learning_rate": 5.812670644854745e-05,
      "loss": 1.8773,
      "step": 76
    },
    {
      "epoch": 0.3235294117647059,
      "grad_norm": 0.14260263741016388,
      "learning_rate": 5.8053336412235445e-05,
      "loss": 2.0524,
      "step": 77
    },
    {
      "epoch": 0.3277310924369748,
      "grad_norm": 0.14220871031284332,
      "learning_rate": 5.7978605152463594e-05,
      "loss": 1.7581,
      "step": 78
    },
    {
      "epoch": 0.3319327731092437,
      "grad_norm": 0.1228056475520134,
      "learning_rate": 5.7902516295393945e-05,
      "loss": 1.9749,
      "step": 79
    },
    {
      "epoch": 0.33613445378151263,
      "grad_norm": 0.12861187756061554,
      "learning_rate": 5.782507353306285e-05,
      "loss": 1.7885,
      "step": 80
    },
    {
      "epoch": 0.3403361344537815,
      "grad_norm": 0.12451905757188797,
      "learning_rate": 5.774628062320175e-05,
      "loss": 1.899,
      "step": 81
    },
    {
      "epoch": 0.3445378151260504,
      "grad_norm": 0.1419980823993683,
      "learning_rate": 5.7666141389054923e-05,
      "loss": 1.8726,
      "step": 82
    },
    {
      "epoch": 0.3487394957983193,
      "grad_norm": 0.18138334155082703,
      "learning_rate": 5.75846597191939e-05,
      "loss": 1.8988,
      "step": 83
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 0.2072378247976303,
      "learning_rate": 5.750183956732882e-05,
      "loss": 1.8585,
      "step": 84
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.15298499166965485,
      "learning_rate": 5.741768495211658e-05,
      "loss": 1.9546,
      "step": 85
    },
    {
      "epoch": 0.36134453781512604,
      "grad_norm": 0.15661871433258057,
      "learning_rate": 5.733219995696581e-05,
      "loss": 1.8402,
      "step": 86
    },
    {
      "epoch": 0.36554621848739494,
      "grad_norm": 0.13822637498378754,
      "learning_rate": 5.7245388729838796e-05,
      "loss": 2.0029,
      "step": 87
    },
    {
      "epoch": 0.3697478991596639,
      "grad_norm": 0.14563272893428802,
      "learning_rate": 5.715725548305014e-05,
      "loss": 2.0999,
      "step": 88
    },
    {
      "epoch": 0.3739495798319328,
      "grad_norm": 0.15243449807167053,
      "learning_rate": 5.706780449306241e-05,
      "loss": 2.1257,
      "step": 89
    },
    {
      "epoch": 0.37815126050420167,
      "grad_norm": 0.16341632604599,
      "learning_rate": 5.697704010027864e-05,
      "loss": 2.0414,
      "step": 90
    },
    {
      "epoch": 0.38235294117647056,
      "grad_norm": 0.15627144277095795,
      "learning_rate": 5.688496670883167e-05,
      "loss": 1.9314,
      "step": 91
    },
    {
      "epoch": 0.3865546218487395,
      "grad_norm": 0.12584370374679565,
      "learning_rate": 5.6791588786370515e-05,
      "loss": 1.796,
      "step": 92
    },
    {
      "epoch": 0.3907563025210084,
      "grad_norm": 0.15590286254882812,
      "learning_rate": 5.6696910863843546e-05,
      "loss": 1.9411,
      "step": 93
    },
    {
      "epoch": 0.3949579831932773,
      "grad_norm": 0.12517981231212616,
      "learning_rate": 5.660093753527861e-05,
      "loss": 1.9865,
      "step": 94
    },
    {
      "epoch": 0.39915966386554624,
      "grad_norm": 0.1500655859708786,
      "learning_rate": 5.650367345756019e-05,
      "loss": 1.7765,
      "step": 95
    },
    {
      "epoch": 0.40336134453781514,
      "grad_norm": 0.1287829726934433,
      "learning_rate": 5.640512335020336e-05,
      "loss": 1.9496,
      "step": 96
    },
    {
      "epoch": 0.40756302521008403,
      "grad_norm": 0.18750888109207153,
      "learning_rate": 5.6305291995124827e-05,
      "loss": 1.7406,
      "step": 97
    },
    {
      "epoch": 0.4117647058823529,
      "grad_norm": 0.12096212059259415,
      "learning_rate": 5.620418423641089e-05,
      "loss": 2.0335,
      "step": 98
    },
    {
      "epoch": 0.41596638655462187,
      "grad_norm": 0.13212282955646515,
      "learning_rate": 5.610180498008239e-05,
      "loss": 1.8856,
      "step": 99
    },
    {
      "epoch": 0.42016806722689076,
      "grad_norm": 0.14193859696388245,
      "learning_rate": 5.599815919385666e-05,
      "loss": 1.9243,
      "step": 100
    },
    {
      "epoch": 0.42436974789915966,
      "grad_norm": 0.144227996468544,
      "learning_rate": 5.589325190690648e-05,
      "loss": 1.9885,
      "step": 101
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.14424113929271698,
      "learning_rate": 5.578708820961604e-05,
      "loss": 1.8761,
      "step": 102
    },
    {
      "epoch": 0.4327731092436975,
      "grad_norm": 0.14241594076156616,
      "learning_rate": 5.567967325333393e-05,
      "loss": 1.8844,
      "step": 103
    },
    {
      "epoch": 0.4369747899159664,
      "grad_norm": 0.23358209431171417,
      "learning_rate": 5.557101225012321e-05,
      "loss": 1.7688,
      "step": 104
    },
    {
      "epoch": 0.4411764705882353,
      "grad_norm": 0.13762815296649933,
      "learning_rate": 5.546111047250852e-05,
      "loss": 1.9983,
      "step": 105
    },
    {
      "epoch": 0.44537815126050423,
      "grad_norm": 0.15425845980644226,
      "learning_rate": 5.5349973253220164e-05,
      "loss": 1.6622,
      "step": 106
    },
    {
      "epoch": 0.4495798319327731,
      "grad_norm": 0.142578125,
      "learning_rate": 5.5237605984935435e-05,
      "loss": 1.9803,
      "step": 107
    },
    {
      "epoch": 0.453781512605042,
      "grad_norm": 0.13661649823188782,
      "learning_rate": 5.512401412001692e-05,
      "loss": 1.8166,
      "step": 108
    },
    {
      "epoch": 0.4579831932773109,
      "grad_norm": 0.1451268047094345,
      "learning_rate": 5.5009203170247934e-05,
      "loss": 1.7331,
      "step": 109
    },
    {
      "epoch": 0.46218487394957986,
      "grad_norm": 0.14962796866893768,
      "learning_rate": 5.4893178706565053e-05,
      "loss": 1.9284,
      "step": 110
    },
    {
      "epoch": 0.46638655462184875,
      "grad_norm": 0.14141930639743805,
      "learning_rate": 5.477594635878783e-05,
      "loss": 2.0571,
      "step": 111
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.1254950761795044,
      "learning_rate": 5.465751181534562e-05,
      "loss": 1.9862,
      "step": 112
    },
    {
      "epoch": 0.47478991596638653,
      "grad_norm": 0.1633477658033371,
      "learning_rate": 5.4537880823001535e-05,
      "loss": 1.8533,
      "step": 113
    },
    {
      "epoch": 0.4789915966386555,
      "grad_norm": 0.1374650001525879,
      "learning_rate": 5.4417059186573615e-05,
      "loss": 1.9262,
      "step": 114
    },
    {
      "epoch": 0.4831932773109244,
      "grad_norm": 0.16950736939907074,
      "learning_rate": 5.429505276865315e-05,
      "loss": 1.9775,
      "step": 115
    },
    {
      "epoch": 0.48739495798319327,
      "grad_norm": 0.14926834404468536,
      "learning_rate": 5.417186748932024e-05,
      "loss": 1.7784,
      "step": 116
    },
    {
      "epoch": 0.49159663865546216,
      "grad_norm": 0.16016586124897003,
      "learning_rate": 5.4047509325856514e-05,
      "loss": 1.9489,
      "step": 117
    },
    {
      "epoch": 0.4957983193277311,
      "grad_norm": 0.14702598750591278,
      "learning_rate": 5.392198431245507e-05,
      "loss": 1.717,
      "step": 118
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.13934609293937683,
      "learning_rate": 5.379529853992774e-05,
      "loss": 1.9416,
      "step": 119
    },
    {
      "epoch": 0.5042016806722689,
      "grad_norm": 0.1452355682849884,
      "learning_rate": 5.3667458155409505e-05,
      "loss": 1.927,
      "step": 120
    },
    {
      "epoch": 0.5084033613445378,
      "grad_norm": 0.14766626060009003,
      "learning_rate": 5.3538469362060226e-05,
      "loss": 1.7578,
      "step": 121
    },
    {
      "epoch": 0.5126050420168067,
      "grad_norm": 0.19430850446224213,
      "learning_rate": 5.340833841876367e-05,
      "loss": 1.8113,
      "step": 122
    },
    {
      "epoch": 0.5168067226890757,
      "grad_norm": 0.16308219730854034,
      "learning_rate": 5.327707163982377e-05,
      "loss": 1.7603,
      "step": 123
    },
    {
      "epoch": 0.5210084033613446,
      "grad_norm": 0.1342669427394867,
      "learning_rate": 5.314467539465829e-05,
      "loss": 2.0409,
      "step": 124
    },
    {
      "epoch": 0.5252100840336135,
      "grad_norm": 0.12464125454425812,
      "learning_rate": 5.3011156107489726e-05,
      "loss": 1.92,
      "step": 125
    },
    {
      "epoch": 0.5294117647058824,
      "grad_norm": 0.1477716565132141,
      "learning_rate": 5.2876520257033594e-05,
      "loss": 1.7391,
      "step": 126
    },
    {
      "epoch": 0.5336134453781513,
      "grad_norm": 0.15375854074954987,
      "learning_rate": 5.274077437618408e-05,
      "loss": 1.9802,
      "step": 127
    },
    {
      "epoch": 0.5378151260504201,
      "grad_norm": 0.17319805920124054,
      "learning_rate": 5.260392505169702e-05,
      "loss": 1.9145,
      "step": 128
    },
    {
      "epoch": 0.542016806722689,
      "grad_norm": 0.13694263994693756,
      "learning_rate": 5.246597892387033e-05,
      "loss": 1.8611,
      "step": 129
    },
    {
      "epoch": 0.5462184873949579,
      "grad_norm": 0.14227637648582458,
      "learning_rate": 5.2326942686221754e-05,
      "loss": 1.9588,
      "step": 130
    },
    {
      "epoch": 0.5504201680672269,
      "grad_norm": 0.13238848745822906,
      "learning_rate": 5.218682308516413e-05,
      "loss": 2.0077,
      "step": 131
    },
    {
      "epoch": 0.5546218487394958,
      "grad_norm": 0.1516510248184204,
      "learning_rate": 5.2045626919677975e-05,
      "loss": 2.0937,
      "step": 132
    },
    {
      "epoch": 0.5588235294117647,
      "grad_norm": 0.15112727880477905,
      "learning_rate": 5.1903361040981624e-05,
      "loss": 1.8754,
      "step": 133
    },
    {
      "epoch": 0.5630252100840336,
      "grad_norm": 0.1264156699180603,
      "learning_rate": 5.17600323521988e-05,
      "loss": 1.8644,
      "step": 134
    },
    {
      "epoch": 0.5672268907563025,
      "grad_norm": 0.19539165496826172,
      "learning_rate": 5.161564780802361e-05,
      "loss": 1.9111,
      "step": 135
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.1472083181142807,
      "learning_rate": 5.147021441438312e-05,
      "loss": 1.8196,
      "step": 136
    },
    {
      "epoch": 0.5756302521008403,
      "grad_norm": 0.16398820281028748,
      "learning_rate": 5.132373922809738e-05,
      "loss": 1.9675,
      "step": 137
    },
    {
      "epoch": 0.5798319327731093,
      "grad_norm": 0.17623396217823029,
      "learning_rate": 5.117622935653706e-05,
      "loss": 1.8302,
      "step": 138
    },
    {
      "epoch": 0.5840336134453782,
      "grad_norm": 0.1819780021905899,
      "learning_rate": 5.1027691957278515e-05,
      "loss": 1.6874,
      "step": 139
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 0.1527121514081955,
      "learning_rate": 5.087813423775653e-05,
      "loss": 1.7856,
      "step": 140
    },
    {
      "epoch": 0.592436974789916,
      "grad_norm": 0.18883730471134186,
      "learning_rate": 5.0727563454914566e-05,
      "loss": 2.0023,
      "step": 141
    },
    {
      "epoch": 0.5966386554621849,
      "grad_norm": 0.1411568820476532,
      "learning_rate": 5.057598691485265e-05,
      "loss": 1.939,
      "step": 142
    },
    {
      "epoch": 0.6008403361344538,
      "grad_norm": 0.1455598771572113,
      "learning_rate": 5.042341197247288e-05,
      "loss": 1.9601,
      "step": 143
    },
    {
      "epoch": 0.6050420168067226,
      "grad_norm": 0.191257044672966,
      "learning_rate": 5.026984603112248e-05,
      "loss": 1.8474,
      "step": 144
    },
    {
      "epoch": 0.6092436974789915,
      "grad_norm": 0.1409291923046112,
      "learning_rate": 5.011529654223465e-05,
      "loss": 1.9115,
      "step": 145
    },
    {
      "epoch": 0.6134453781512605,
      "grad_norm": 0.1498507708311081,
      "learning_rate": 4.995977100496696e-05,
      "loss": 1.8116,
      "step": 146
    },
    {
      "epoch": 0.6176470588235294,
      "grad_norm": 0.1509602963924408,
      "learning_rate": 4.980327696583746e-05,
      "loss": 1.9574,
      "step": 147
    },
    {
      "epoch": 0.6218487394957983,
      "grad_norm": 0.15841971337795258,
      "learning_rate": 4.964582201835856e-05,
      "loss": 1.9305,
      "step": 148
    },
    {
      "epoch": 0.6260504201680672,
      "grad_norm": 0.154954195022583,
      "learning_rate": 4.948741380266846e-05,
      "loss": 1.6991,
      "step": 149
    },
    {
      "epoch": 0.6302521008403361,
      "grad_norm": 0.1550845056772232,
      "learning_rate": 4.9328060005160575e-05,
      "loss": 1.8016,
      "step": 150
    },
    {
      "epoch": 0.634453781512605,
      "grad_norm": 0.15357814729213715,
      "learning_rate": 4.916776835811046e-05,
      "loss": 2.1041,
      "step": 151
    },
    {
      "epoch": 0.6386554621848739,
      "grad_norm": 0.16955037415027618,
      "learning_rate": 4.9006546639300656e-05,
      "loss": 2.0618,
      "step": 152
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 0.1576966941356659,
      "learning_rate": 4.884440267164331e-05,
      "loss": 1.7933,
      "step": 153
    },
    {
      "epoch": 0.6470588235294118,
      "grad_norm": 0.15891976654529572,
      "learning_rate": 4.868134432280056e-05,
      "loss": 1.7879,
      "step": 154
    },
    {
      "epoch": 0.6512605042016807,
      "grad_norm": 0.16660089790821075,
      "learning_rate": 4.851737950480276e-05,
      "loss": 1.8625,
      "step": 155
    },
    {
      "epoch": 0.6554621848739496,
      "grad_norm": 0.23937979340553284,
      "learning_rate": 4.835251617366463e-05,
      "loss": 1.8112,
      "step": 156
    },
    {
      "epoch": 0.6596638655462185,
      "grad_norm": 0.1381472498178482,
      "learning_rate": 4.8186762328999144e-05,
      "loss": 1.9289,
      "step": 157
    },
    {
      "epoch": 0.6638655462184874,
      "grad_norm": 0.1760849952697754,
      "learning_rate": 4.802012601362938e-05,
      "loss": 1.9384,
      "step": 158
    },
    {
      "epoch": 0.6680672268907563,
      "grad_norm": 0.18892566859722137,
      "learning_rate": 4.785261531319829e-05,
      "loss": 1.8544,
      "step": 159
    },
    {
      "epoch": 0.6722689075630253,
      "grad_norm": 0.18952800333499908,
      "learning_rate": 4.7684238355776366e-05,
      "loss": 1.7216,
      "step": 160
    },
    {
      "epoch": 0.6764705882352942,
      "grad_norm": 0.1533077359199524,
      "learning_rate": 4.7515003311467164e-05,
      "loss": 1.924,
      "step": 161
    },
    {
      "epoch": 0.680672268907563,
      "grad_norm": 0.20900940895080566,
      "learning_rate": 4.7344918392010985e-05,
      "loss": 1.8922,
      "step": 162
    },
    {
      "epoch": 0.6848739495798319,
      "grad_norm": 0.22550180554389954,
      "learning_rate": 4.7173991850386325e-05,
      "loss": 1.9899,
      "step": 163
    },
    {
      "epoch": 0.6890756302521008,
      "grad_norm": 0.15734222531318665,
      "learning_rate": 4.7002231980409466e-05,
      "loss": 1.9437,
      "step": 164
    },
    {
      "epoch": 0.6932773109243697,
      "grad_norm": 0.15735475718975067,
      "learning_rate": 4.6829647116332045e-05,
      "loss": 1.9653,
      "step": 165
    },
    {
      "epoch": 0.6974789915966386,
      "grad_norm": 0.19688263535499573,
      "learning_rate": 4.665624563243661e-05,
      "loss": 1.9832,
      "step": 166
    },
    {
      "epoch": 0.7016806722689075,
      "grad_norm": 0.1678629070520401,
      "learning_rate": 4.648203594263031e-05,
      "loss": 1.7721,
      "step": 167
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.16972413659095764,
      "learning_rate": 4.630702650003664e-05,
      "loss": 1.8245,
      "step": 168
    },
    {
      "epoch": 0.7100840336134454,
      "grad_norm": 0.17227919399738312,
      "learning_rate": 4.613122579658522e-05,
      "loss": 1.8539,
      "step": 169
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.20971490442752838,
      "learning_rate": 4.595464236259979e-05,
      "loss": 1.7229,
      "step": 170
    },
    {
      "epoch": 0.7184873949579832,
      "grad_norm": 0.2913236618041992,
      "learning_rate": 4.577728476638432e-05,
      "loss": 1.8346,
      "step": 171
    },
    {
      "epoch": 0.7226890756302521,
      "grad_norm": 0.16503480076789856,
      "learning_rate": 4.559916161380718e-05,
      "loss": 1.7839,
      "step": 172
    },
    {
      "epoch": 0.726890756302521,
      "grad_norm": 0.2223767340183258,
      "learning_rate": 4.542028154788359e-05,
      "loss": 1.8485,
      "step": 173
    },
    {
      "epoch": 0.7310924369747899,
      "grad_norm": 0.3713437616825104,
      "learning_rate": 4.5240653248356274e-05,
      "loss": 1.8596,
      "step": 174
    },
    {
      "epoch": 0.7352941176470589,
      "grad_norm": 0.1753188520669937,
      "learning_rate": 4.506028543127425e-05,
      "loss": 1.7406,
      "step": 175
    },
    {
      "epoch": 0.7394957983193278,
      "grad_norm": 0.15702295303344727,
      "learning_rate": 4.4879186848569916e-05,
      "loss": 2.1146,
      "step": 176
    },
    {
      "epoch": 0.7436974789915967,
      "grad_norm": 0.17345549166202545,
      "learning_rate": 4.469736628763441e-05,
      "loss": 1.9998,
      "step": 177
    },
    {
      "epoch": 0.7478991596638656,
      "grad_norm": 0.3005940914154053,
      "learning_rate": 4.4514832570891164e-05,
      "loss": 1.8081,
      "step": 178
    },
    {
      "epoch": 0.7521008403361344,
      "grad_norm": 0.19223691523075104,
      "learning_rate": 4.433159455536789e-05,
      "loss": 1.7473,
      "step": 179
    },
    {
      "epoch": 0.7563025210084033,
      "grad_norm": 0.15088941156864166,
      "learning_rate": 4.414766113226673e-05,
      "loss": 1.8934,
      "step": 180
    },
    {
      "epoch": 0.7605042016806722,
      "grad_norm": 0.28544843196868896,
      "learning_rate": 4.396304122653292e-05,
      "loss": 1.7362,
      "step": 181
    },
    {
      "epoch": 0.7647058823529411,
      "grad_norm": 0.27440345287323,
      "learning_rate": 4.377774379642165e-05,
      "loss": 1.7409,
      "step": 182
    },
    {
      "epoch": 0.7689075630252101,
      "grad_norm": 0.16212020814418793,
      "learning_rate": 4.359177783306344e-05,
      "loss": 1.9367,
      "step": 183
    },
    {
      "epoch": 0.773109243697479,
      "grad_norm": 0.15055981278419495,
      "learning_rate": 4.3405152360027815e-05,
      "loss": 1.9787,
      "step": 184
    },
    {
      "epoch": 0.7773109243697479,
      "grad_norm": 0.28331828117370605,
      "learning_rate": 4.3217876432885525e-05,
      "loss": 1.6631,
      "step": 185
    },
    {
      "epoch": 0.7815126050420168,
      "grad_norm": 0.18612366914749146,
      "learning_rate": 4.302995913876907e-05,
      "loss": 2.0595,
      "step": 186
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 0.16861191391944885,
      "learning_rate": 4.284140959593184e-05,
      "loss": 1.6634,
      "step": 187
    },
    {
      "epoch": 0.7899159663865546,
      "grad_norm": 0.20200952887535095,
      "learning_rate": 4.265223695330559e-05,
      "loss": 1.8512,
      "step": 188
    },
    {
      "epoch": 0.7941176470588235,
      "grad_norm": 0.17656445503234863,
      "learning_rate": 4.24624503900566e-05,
      "loss": 1.8023,
      "step": 189
    },
    {
      "epoch": 0.7983193277310925,
      "grad_norm": 0.15888820588588715,
      "learning_rate": 4.22720591151402e-05,
      "loss": 1.9317,
      "step": 190
    },
    {
      "epoch": 0.8025210084033614,
      "grad_norm": 0.19560733437538147,
      "learning_rate": 4.2081072366853984e-05,
      "loss": 1.527,
      "step": 191
    },
    {
      "epoch": 0.8067226890756303,
      "grad_norm": 0.19005230069160461,
      "learning_rate": 4.18894994123895e-05,
      "loss": 1.9566,
      "step": 192
    },
    {
      "epoch": 0.8109243697478992,
      "grad_norm": 0.162759929895401,
      "learning_rate": 4.169734954738261e-05,
      "loss": 2.0595,
      "step": 193
    },
    {
      "epoch": 0.8151260504201681,
      "grad_norm": 0.26847419142723083,
      "learning_rate": 4.150463209546243e-05,
      "loss": 1.7949,
      "step": 194
    },
    {
      "epoch": 0.819327731092437,
      "grad_norm": 0.18686293065547943,
      "learning_rate": 4.13113564077989e-05,
      "loss": 1.6174,
      "step": 195
    },
    {
      "epoch": 0.8235294117647058,
      "grad_norm": 0.15110686421394348,
      "learning_rate": 4.1117531862649096e-05,
      "loss": 2.0025,
      "step": 196
    },
    {
      "epoch": 0.8277310924369747,
      "grad_norm": 0.17056453227996826,
      "learning_rate": 4.092316786490213e-05,
      "loss": 1.9113,
      "step": 197
    },
    {
      "epoch": 0.8319327731092437,
      "grad_norm": 0.33667296171188354,
      "learning_rate": 4.072827384562277e-05,
      "loss": 1.808,
      "step": 198
    },
    {
      "epoch": 0.8361344537815126,
      "grad_norm": 0.19670777022838593,
      "learning_rate": 4.0532859261593916e-05,
      "loss": 2.0879,
      "step": 199
    },
    {
      "epoch": 0.8403361344537815,
      "grad_norm": 0.19230350852012634,
      "learning_rate": 4.033693359485763e-05,
      "loss": 2.0093,
      "step": 200
    },
    {
      "epoch": 0.8445378151260504,
      "grad_norm": 0.14327159523963928,
      "learning_rate": 4.014050635225508e-05,
      "loss": 1.8698,
      "step": 201
    },
    {
      "epoch": 0.8487394957983193,
      "grad_norm": 0.16874836385250092,
      "learning_rate": 3.994358706496529e-05,
      "loss": 2.1746,
      "step": 202
    },
    {
      "epoch": 0.8529411764705882,
      "grad_norm": 0.2902648448944092,
      "learning_rate": 3.9746185288042574e-05,
      "loss": 1.6873,
      "step": 203
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.14672698080539703,
      "learning_rate": 3.954831059995296e-05,
      "loss": 2.0229,
      "step": 204
    },
    {
      "epoch": 0.8613445378151261,
      "grad_norm": 0.17595812678337097,
      "learning_rate": 3.9349972602109404e-05,
      "loss": 1.8638,
      "step": 205
    },
    {
      "epoch": 0.865546218487395,
      "grad_norm": 0.17806874215602875,
      "learning_rate": 3.9151180918405886e-05,
      "loss": 1.8911,
      "step": 206
    },
    {
      "epoch": 0.8697478991596639,
      "grad_norm": 0.16666433215141296,
      "learning_rate": 3.895194519475046e-05,
      "loss": 1.912,
      "step": 207
    },
    {
      "epoch": 0.8739495798319328,
      "grad_norm": 0.1936957836151123,
      "learning_rate": 3.8752275098597186e-05,
      "loss": 1.8171,
      "step": 208
    },
    {
      "epoch": 0.8781512605042017,
      "grad_norm": 0.1712755411863327,
      "learning_rate": 3.8552180318477055e-05,
      "loss": 2.0602,
      "step": 209
    },
    {
      "epoch": 0.8823529411764706,
      "grad_norm": 0.16776567697525024,
      "learning_rate": 3.8351670563527854e-05,
      "loss": 1.6852,
      "step": 210
    },
    {
      "epoch": 0.8865546218487395,
      "grad_norm": 0.15940265357494354,
      "learning_rate": 3.815075556302309e-05,
      "loss": 1.7701,
      "step": 211
    },
    {
      "epoch": 0.8907563025210085,
      "grad_norm": 0.2017056941986084,
      "learning_rate": 3.794944506589986e-05,
      "loss": 1.6285,
      "step": 212
    },
    {
      "epoch": 0.8949579831932774,
      "grad_norm": 0.1665181964635849,
      "learning_rate": 3.774774884028582e-05,
      "loss": 1.8832,
      "step": 213
    },
    {
      "epoch": 0.8991596638655462,
      "grad_norm": 0.1938188076019287,
      "learning_rate": 3.754567667302521e-05,
      "loss": 1.7577,
      "step": 214
    },
    {
      "epoch": 0.9033613445378151,
      "grad_norm": 0.1959097981452942,
      "learning_rate": 3.7343238369203985e-05,
      "loss": 1.7827,
      "step": 215
    },
    {
      "epoch": 0.907563025210084,
      "grad_norm": 0.15705636143684387,
      "learning_rate": 3.714044375167403e-05,
      "loss": 1.8884,
      "step": 216
    },
    {
      "epoch": 0.9117647058823529,
      "grad_norm": 0.21525953710079193,
      "learning_rate": 3.693730266057653e-05,
      "loss": 1.9235,
      "step": 217
    },
    {
      "epoch": 0.9159663865546218,
      "grad_norm": 0.16254454851150513,
      "learning_rate": 3.67338249528645e-05,
      "loss": 1.9587,
      "step": 218
    },
    {
      "epoch": 0.9201680672268907,
      "grad_norm": 0.13950613141059875,
      "learning_rate": 3.6530020501824465e-05,
      "loss": 2.0398,
      "step": 219
    },
    {
      "epoch": 0.9243697478991597,
      "grad_norm": 0.171157568693161,
      "learning_rate": 3.6325899196597493e-05,
      "loss": 1.8841,
      "step": 220
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.1359075903892517,
      "learning_rate": 3.612147094169921e-05,
      "loss": 2.1739,
      "step": 221
    },
    {
      "epoch": 0.9327731092436975,
      "grad_norm": 0.22331862151622772,
      "learning_rate": 3.591674565653926e-05,
      "loss": 2.0195,
      "step": 222
    },
    {
      "epoch": 0.9369747899159664,
      "grad_norm": 0.16365067660808563,
      "learning_rate": 3.571173327494005e-05,
      "loss": 1.9375,
      "step": 223
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.24077700078487396,
      "learning_rate": 3.5506443744654644e-05,
      "loss": 1.8117,
      "step": 224
    },
    {
      "epoch": 0.9453781512605042,
      "grad_norm": 0.1790030151605606,
      "learning_rate": 3.53008870268841e-05,
      "loss": 1.5849,
      "step": 225
    },
    {
      "epoch": 0.9495798319327731,
      "grad_norm": 0.20200079679489136,
      "learning_rate": 3.5095073095794156e-05,
      "loss": 2.0211,
      "step": 226
    },
    {
      "epoch": 0.9537815126050421,
      "grad_norm": 0.17911580204963684,
      "learning_rate": 3.4889011938031216e-05,
      "loss": 1.8153,
      "step": 227
    },
    {
      "epoch": 0.957983193277311,
      "grad_norm": 0.23249390721321106,
      "learning_rate": 3.4682713552237795e-05,
      "loss": 1.74,
      "step": 228
    },
    {
      "epoch": 0.9621848739495799,
      "grad_norm": 0.17647947371006012,
      "learning_rate": 3.447618794856735e-05,
      "loss": 1.8374,
      "step": 229
    },
    {
      "epoch": 0.9663865546218487,
      "grad_norm": 0.1574244350194931,
      "learning_rate": 3.426944514819856e-05,
      "loss": 2.0606,
      "step": 230
    },
    {
      "epoch": 0.9705882352941176,
      "grad_norm": 0.17625559866428375,
      "learning_rate": 3.406249518284907e-05,
      "loss": 1.841,
      "step": 231
    },
    {
      "epoch": 0.9747899159663865,
      "grad_norm": 0.16222096979618073,
      "learning_rate": 3.385534809428875e-05,
      "loss": 1.9827,
      "step": 232
    },
    {
      "epoch": 0.9789915966386554,
      "grad_norm": 0.18522681295871735,
      "learning_rate": 3.3648013933852384e-05,
      "loss": 1.8832,
      "step": 233
    },
    {
      "epoch": 0.9831932773109243,
      "grad_norm": 0.1793953776359558,
      "learning_rate": 3.3440502761952016e-05,
      "loss": 1.9176,
      "step": 234
    },
    {
      "epoch": 0.9873949579831933,
      "grad_norm": 0.16444647312164307,
      "learning_rate": 3.323282464758874e-05,
      "loss": 1.8963,
      "step": 235
    },
    {
      "epoch": 0.9915966386554622,
      "grad_norm": 0.1859455704689026,
      "learning_rate": 3.302498966786418e-05,
      "loss": 1.9036,
      "step": 236
    },
    {
      "epoch": 0.9957983193277311,
      "grad_norm": 0.16911397874355316,
      "learning_rate": 3.281700790749145e-05,
      "loss": 1.7915,
      "step": 237
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.1729690283536911,
      "learning_rate": 3.260888945830589e-05,
      "loss": 1.8512,
      "step": 238
    },
    {
      "epoch": 1.004201680672269,
      "grad_norm": 0.19748295843601227,
      "learning_rate": 3.240064441877533e-05,
      "loss": 2.0865,
      "step": 239
    },
    {
      "epoch": 1.004201680672269,
      "grad_norm": 0.2490590363740921,
      "learning_rate": 3.219228289351011e-05,
      "loss": 1.6512,
      "step": 240
    }
  ],
  "logging_steps": 1,
  "max_steps": 476,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 60,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4225750429270016e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}