{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 387,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002583979328165375,
      "grad_norm": 62.7838249206543,
      "learning_rate": 2.564102564102564e-07,
      "loss": 1.8986,
      "step": 1
    },
    {
      "epoch": 0.00516795865633075,
      "grad_norm": 64.10289001464844,
      "learning_rate": 5.128205128205128e-07,
      "loss": 1.8868,
      "step": 2
    },
    {
      "epoch": 0.007751937984496124,
      "grad_norm": 64.08220672607422,
      "learning_rate": 7.692307692307694e-07,
      "loss": 1.9136,
      "step": 3
    },
    {
      "epoch": 0.0103359173126615,
      "grad_norm": 57.040557861328125,
      "learning_rate": 1.0256410256410257e-06,
      "loss": 1.7739,
      "step": 4
    },
    {
      "epoch": 0.012919896640826873,
      "grad_norm": 49.75514602661133,
      "learning_rate": 1.282051282051282e-06,
      "loss": 1.5788,
      "step": 5
    },
    {
      "epoch": 0.015503875968992248,
      "grad_norm": 55.35374450683594,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 1.1322,
      "step": 6
    },
    {
      "epoch": 0.01808785529715762,
      "grad_norm": 40.152645111083984,
      "learning_rate": 1.794871794871795e-06,
      "loss": 0.8061,
      "step": 7
    },
    {
      "epoch": 0.020671834625323,
      "grad_norm": 38.397464752197266,
      "learning_rate": 2.0512820512820513e-06,
      "loss": 0.5469,
      "step": 8
    },
    {
      "epoch": 0.023255813953488372,
      "grad_norm": 18.86681365966797,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.4388,
      "step": 9
    },
    {
      "epoch": 0.025839793281653745,
      "grad_norm": 48.16230773925781,
      "learning_rate": 2.564102564102564e-06,
      "loss": 0.4077,
      "step": 10
    },
    {
      "epoch": 0.028423772609819122,
      "grad_norm": 38.27500534057617,
      "learning_rate": 2.8205128205128207e-06,
      "loss": 0.374,
      "step": 11
    },
    {
      "epoch": 0.031007751937984496,
      "grad_norm": 10.813103675842285,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.3252,
      "step": 12
    },
    {
      "epoch": 0.03359173126614987,
      "grad_norm": 32.890769958496094,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.3145,
      "step": 13
    },
    {
      "epoch": 0.03617571059431524,
      "grad_norm": 20.16728973388672,
      "learning_rate": 3.58974358974359e-06,
      "loss": 0.3279,
      "step": 14
    },
    {
      "epoch": 0.03875968992248062,
      "grad_norm": 37.158111572265625,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.3358,
      "step": 15
    },
    {
      "epoch": 0.041343669250646,
      "grad_norm": 10.882681846618652,
      "learning_rate": 4.102564102564103e-06,
      "loss": 0.2804,
      "step": 16
    },
    {
      "epoch": 0.04392764857881137,
      "grad_norm": 57.284584045410156,
      "learning_rate": 4.358974358974359e-06,
      "loss": 0.3743,
      "step": 17
    },
    {
      "epoch": 0.046511627906976744,
      "grad_norm": 93.74513244628906,
      "learning_rate": 4.615384615384616e-06,
      "loss": 0.2806,
      "step": 18
    },
    {
      "epoch": 0.04909560723514212,
      "grad_norm": 123.45682525634766,
      "learning_rate": 4.871794871794872e-06,
      "loss": 0.4188,
      "step": 19
    },
    {
      "epoch": 0.05167958656330749,
      "grad_norm": 35.84373474121094,
      "learning_rate": 5.128205128205128e-06,
      "loss": 0.4117,
      "step": 20
    },
    {
      "epoch": 0.05426356589147287,
      "grad_norm": 49.38902282714844,
      "learning_rate": 5.384615384615385e-06,
      "loss": 0.3442,
      "step": 21
    },
    {
      "epoch": 0.056847545219638244,
      "grad_norm": 19.53158187866211,
      "learning_rate": 5.641025641025641e-06,
      "loss": 0.2894,
      "step": 22
    },
    {
      "epoch": 0.059431524547803614,
      "grad_norm": 12.666043281555176,
      "learning_rate": 5.897435897435898e-06,
      "loss": 0.3197,
      "step": 23
    },
    {
      "epoch": 0.06201550387596899,
      "grad_norm": 52.884456634521484,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.3199,
      "step": 24
    },
    {
      "epoch": 0.06459948320413436,
      "grad_norm": 5.781566619873047,
      "learning_rate": 6.410256410256412e-06,
      "loss": 0.2521,
      "step": 25
    },
    {
      "epoch": 0.06718346253229975,
      "grad_norm": 3.7365846633911133,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.2781,
      "step": 26
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 4.0676984786987305,
      "learning_rate": 6.923076923076923e-06,
      "loss": 0.2917,
      "step": 27
    },
    {
      "epoch": 0.07235142118863049,
      "grad_norm": 5.059348106384277,
      "learning_rate": 7.17948717948718e-06,
      "loss": 0.2666,
      "step": 28
    },
    {
      "epoch": 0.07493540051679587,
      "grad_norm": 3.9822773933410645,
      "learning_rate": 7.435897435897437e-06,
      "loss": 0.2698,
      "step": 29
    },
    {
      "epoch": 0.07751937984496124,
      "grad_norm": 3.6662659645080566,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.2721,
      "step": 30
    },
    {
      "epoch": 0.08010335917312661,
      "grad_norm": 3.566708564758301,
      "learning_rate": 7.948717948717949e-06,
      "loss": 0.2911,
      "step": 31
    },
    {
      "epoch": 0.082687338501292,
      "grad_norm": 3.4506027698516846,
      "learning_rate": 8.205128205128205e-06,
      "loss": 0.2726,
      "step": 32
    },
    {
      "epoch": 0.08527131782945736,
      "grad_norm": 2.4874958992004395,
      "learning_rate": 8.461538461538462e-06,
      "loss": 0.2268,
      "step": 33
    },
    {
      "epoch": 0.08785529715762273,
      "grad_norm": 4.3625593185424805,
      "learning_rate": 8.717948717948719e-06,
      "loss": 0.2766,
      "step": 34
    },
    {
      "epoch": 0.09043927648578812,
      "grad_norm": 3.773474931716919,
      "learning_rate": 8.974358974358976e-06,
      "loss": 0.2716,
      "step": 35
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 2.7108590602874756,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.2393,
      "step": 36
    },
    {
      "epoch": 0.09560723514211886,
      "grad_norm": 2.9571542739868164,
      "learning_rate": 9.487179487179487e-06,
      "loss": 0.2368,
      "step": 37
    },
    {
      "epoch": 0.09819121447028424,
      "grad_norm": 3.408618688583374,
      "learning_rate": 9.743589743589744e-06,
      "loss": 0.2746,
      "step": 38
    },
    {
      "epoch": 0.10077519379844961,
      "grad_norm": 3.4680657386779785,
      "learning_rate": 1e-05,
      "loss": 0.309,
      "step": 39
    },
    {
      "epoch": 0.10335917312661498,
      "grad_norm": 2.9482579231262207,
      "learning_rate": 9.999796259054765e-06,
      "loss": 0.2705,
      "step": 40
    },
    {
      "epoch": 0.10594315245478036,
      "grad_norm": 2.75227427482605,
      "learning_rate": 9.999185052823207e-06,
      "loss": 0.2693,
      "step": 41
    },
    {
      "epoch": 0.10852713178294573,
      "grad_norm": 3.2529563903808594,
      "learning_rate": 9.998166431116421e-06,
      "loss": 0.2803,
      "step": 42
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 3.7899768352508545,
      "learning_rate": 9.996740476948386e-06,
      "loss": 0.2532,
      "step": 43
    },
    {
      "epoch": 0.11369509043927649,
      "grad_norm": 3.1697609424591064,
      "learning_rate": 9.994907306529203e-06,
      "loss": 0.305,
      "step": 44
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 11.274734497070312,
      "learning_rate": 9.99266706925562e-06,
      "loss": 0.2741,
      "step": 45
    },
    {
      "epoch": 0.11886304909560723,
      "grad_norm": 2.9524729251861572,
      "learning_rate": 9.990019947698864e-06,
      "loss": 0.2557,
      "step": 46
    },
    {
      "epoch": 0.12144702842377261,
      "grad_norm": 2.668808698654175,
      "learning_rate": 9.986966157589751e-06,
      "loss": 0.2625,
      "step": 47
    },
    {
      "epoch": 0.12403100775193798,
      "grad_norm": 2.914754629135132,
      "learning_rate": 9.983505947801115e-06,
      "loss": 0.2967,
      "step": 48
    },
    {
      "epoch": 0.12661498708010335,
      "grad_norm": 33.38725662231445,
      "learning_rate": 9.979639600327522e-06,
      "loss": 0.3238,
      "step": 49
    },
    {
      "epoch": 0.12919896640826872,
      "grad_norm": 6.390991687774658,
      "learning_rate": 9.975367430262289e-06,
      "loss": 0.32,
      "step": 50
    },
    {
      "epoch": 0.13178294573643412,
      "grad_norm": 3.03119158744812,
      "learning_rate": 9.970689785771798e-06,
      "loss": 0.2721,
      "step": 51
    },
    {
      "epoch": 0.1343669250645995,
      "grad_norm": 2.6381173133850098,
      "learning_rate": 9.965607048067138e-06,
      "loss": 0.2948,
      "step": 52
    },
    {
      "epoch": 0.13695090439276486,
      "grad_norm": 2.643310308456421,
      "learning_rate": 9.960119631373023e-06,
      "loss": 0.2734,
      "step": 53
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 2.6558730602264404,
      "learning_rate": 9.954227982894034e-06,
      "loss": 0.24,
      "step": 54
    },
    {
      "epoch": 0.1421188630490956,
      "grad_norm": 2.789214849472046,
      "learning_rate": 9.947932582778188e-06,
      "loss": 0.2775,
      "step": 55
    },
    {
      "epoch": 0.14470284237726097,
      "grad_norm": 2.645108699798584,
      "learning_rate": 9.941233944077789e-06,
      "loss": 0.2572,
      "step": 56
    },
    {
      "epoch": 0.14728682170542637,
      "grad_norm": 3.7391746044158936,
      "learning_rate": 9.934132612707631e-06,
      "loss": 0.2821,
      "step": 57
    },
    {
      "epoch": 0.14987080103359174,
      "grad_norm": 2.756499767303467,
      "learning_rate": 9.9266291674005e-06,
      "loss": 0.3018,
      "step": 58
    },
    {
      "epoch": 0.1524547803617571,
      "grad_norm": 2.3780715465545654,
      "learning_rate": 9.918724219660013e-06,
      "loss": 0.2512,
      "step": 59
    },
    {
      "epoch": 0.15503875968992248,
      "grad_norm": 2.6783838272094727,
      "learning_rate": 9.91041841371078e-06,
      "loss": 0.2781,
      "step": 60
    },
    {
      "epoch": 0.15762273901808785,
      "grad_norm": 2.859645366668701,
      "learning_rate": 9.901712426445901e-06,
      "loss": 0.2558,
      "step": 61
    },
    {
      "epoch": 0.16020671834625322,
      "grad_norm": 2.2829370498657227,
      "learning_rate": 9.892606967371808e-06,
      "loss": 0.2794,
      "step": 62
    },
    {
      "epoch": 0.16279069767441862,
      "grad_norm": 2.7090861797332764,
      "learning_rate": 9.883102778550434e-06,
      "loss": 0.2788,
      "step": 63
    },
    {
      "epoch": 0.165374677002584,
      "grad_norm": 2.501798391342163,
      "learning_rate": 9.873200634538746e-06,
      "loss": 0.2796,
      "step": 64
    },
    {
      "epoch": 0.16795865633074936,
      "grad_norm": 3.149674654006958,
      "learning_rate": 9.862901342325617e-06,
      "loss": 0.2729,
      "step": 65
    },
    {
      "epoch": 0.17054263565891473,
      "grad_norm": 2.2722795009613037,
      "learning_rate": 9.852205741266058e-06,
      "loss": 0.2346,
      "step": 66
    },
    {
      "epoch": 0.1731266149870801,
      "grad_norm": 2.7709732055664062,
      "learning_rate": 9.841114703012817e-06,
      "loss": 0.2748,
      "step": 67
    },
    {
      "epoch": 0.17571059431524547,
      "grad_norm": 3.9952242374420166,
      "learning_rate": 9.829629131445342e-06,
      "loss": 0.2411,
      "step": 68
    },
    {
      "epoch": 0.17829457364341086,
      "grad_norm": 2.783984661102295,
      "learning_rate": 9.817749962596115e-06,
      "loss": 0.2729,
      "step": 69
    },
    {
      "epoch": 0.18087855297157623,
      "grad_norm": 12.138313293457031,
      "learning_rate": 9.805478164574374e-06,
      "loss": 0.2857,
      "step": 70
    },
    {
      "epoch": 0.1834625322997416,
      "grad_norm": 2.2798404693603516,
      "learning_rate": 9.792814737487207e-06,
      "loss": 0.2451,
      "step": 71
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 3.0709733963012695,
      "learning_rate": 9.77976071335806e-06,
      "loss": 0.2786,
      "step": 72
    },
    {
      "epoch": 0.18863049095607234,
      "grad_norm": 4.135012626647949,
      "learning_rate": 9.766317156042615e-06,
      "loss": 0.277,
      "step": 73
    },
    {
      "epoch": 0.19121447028423771,
      "grad_norm": 2.4989020824432373,
      "learning_rate": 9.752485161142103e-06,
      "loss": 0.2699,
      "step": 74
    },
    {
      "epoch": 0.1937984496124031,
      "grad_norm": 2.8741238117218018,
      "learning_rate": 9.738265855914014e-06,
      "loss": 0.2795,
      "step": 75
    },
    {
      "epoch": 0.19638242894056848,
      "grad_norm": 2.3612139225006104,
      "learning_rate": 9.723660399180216e-06,
      "loss": 0.2463,
      "step": 76
    },
    {
      "epoch": 0.19896640826873385,
      "grad_norm": 2.699559450149536,
      "learning_rate": 9.708669981232542e-06,
      "loss": 0.2465,
      "step": 77
    },
    {
      "epoch": 0.20155038759689922,
      "grad_norm": 39.061222076416016,
      "learning_rate": 9.693295823735754e-06,
      "loss": 0.3251,
      "step": 78
    },
    {
      "epoch": 0.2041343669250646,
      "grad_norm": 2.360377311706543,
      "learning_rate": 9.677539179628005e-06,
      "loss": 0.2518,
      "step": 79
    },
    {
      "epoch": 0.20671834625322996,
      "grad_norm": 2.389756679534912,
      "learning_rate": 9.661401333018725e-06,
      "loss": 0.2622,
      "step": 80
    },
    {
      "epoch": 0.20930232558139536,
      "grad_norm": 2.4635751247406006,
      "learning_rate": 9.644883599083959e-06,
      "loss": 0.2455,
      "step": 81
    },
    {
      "epoch": 0.21188630490956073,
      "grad_norm": 2.5251641273498535,
      "learning_rate": 9.627987323959195e-06,
      "loss": 0.2401,
      "step": 82
    },
    {
      "epoch": 0.2144702842377261,
      "grad_norm": 2.14884090423584,
      "learning_rate": 9.610713884629667e-06,
      "loss": 0.2406,
      "step": 83
    },
    {
      "epoch": 0.21705426356589147,
      "grad_norm": 2.0660107135772705,
      "learning_rate": 9.59306468881811e-06,
      "loss": 0.2491,
      "step": 84
    },
    {
      "epoch": 0.21963824289405684,
      "grad_norm": 2.663794994354248,
      "learning_rate": 9.575041174870062e-06,
      "loss": 0.2831,
      "step": 85
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.469634532928467,
      "learning_rate": 9.556644811636628e-06,
      "loss": 0.2507,
      "step": 86
    },
    {
      "epoch": 0.2248062015503876,
      "grad_norm": 2.5127339363098145,
      "learning_rate": 9.537877098354787e-06,
      "loss": 0.2913,
      "step": 87
    },
    {
      "epoch": 0.22739018087855298,
      "grad_norm": 2.2336297035217285,
      "learning_rate": 9.51873956452519e-06,
      "loss": 0.2717,
      "step": 88
    },
    {
      "epoch": 0.22997416020671835,
      "grad_norm": 2.478989601135254,
      "learning_rate": 9.499233769787534e-06,
      "loss": 0.2641,
      "step": 89
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 2.26320219039917,
      "learning_rate": 9.479361303793441e-06,
      "loss": 0.244,
      "step": 90
    },
    {
      "epoch": 0.2351421188630491,
      "grad_norm": 2.4783589839935303,
      "learning_rate": 9.459123786076911e-06,
      "loss": 0.2624,
      "step": 91
    },
    {
      "epoch": 0.23772609819121446,
      "grad_norm": 2.497413158416748,
      "learning_rate": 9.438522865922344e-06,
      "loss": 0.2582,
      "step": 92
    },
    {
      "epoch": 0.24031007751937986,
      "grad_norm": 2.4520232677459717,
      "learning_rate": 9.417560222230115e-06,
      "loss": 0.2624,
      "step": 93
    },
    {
      "epoch": 0.24289405684754523,
      "grad_norm": 2.1241888999938965,
      "learning_rate": 9.396237563379761e-06,
      "loss": 0.2234,
      "step": 94
    },
    {
      "epoch": 0.2454780361757106,
      "grad_norm": 2.104245185852051,
      "learning_rate": 9.374556627090749e-06,
      "loss": 0.2348,
      "step": 95
    },
    {
      "epoch": 0.24806201550387597,
      "grad_norm": 2.5402724742889404,
      "learning_rate": 9.352519180280862e-06,
      "loss": 0.2638,
      "step": 96
    },
    {
      "epoch": 0.25064599483204136,
      "grad_norm": 2.360741376876831,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.2612,
      "step": 97
    },
    {
      "epoch": 0.2532299741602067,
      "grad_norm": 3.073049545288086,
      "learning_rate": 9.307381967894798e-06,
      "loss": 0.2675,
      "step": 98
    },
    {
      "epoch": 0.2558139534883721,
      "grad_norm": 2.8394110202789307,
      "learning_rate": 9.284285880837947e-06,
      "loss": 0.2323,
      "step": 99
    },
    {
      "epoch": 0.25839793281653745,
      "grad_norm": 2.1594583988189697,
      "learning_rate": 9.26084063999909e-06,
      "loss": 0.2501,
      "step": 100
    },
    {
      "epoch": 0.26098191214470284,
      "grad_norm": 2.01662278175354,
      "learning_rate": 9.237048156080433e-06,
      "loss": 0.2424,
      "step": 101
    },
    {
      "epoch": 0.26356589147286824,
      "grad_norm": 1.9394747018814087,
      "learning_rate": 9.212910368083246e-06,
      "loss": 0.224,
      "step": 102
    },
    {
      "epoch": 0.2661498708010336,
      "grad_norm": 2.2723138332366943,
      "learning_rate": 9.188429243149824e-06,
      "loss": 0.2295,
      "step": 103
    },
    {
      "epoch": 0.268733850129199,
      "grad_norm": 2.6535990238189697,
      "learning_rate": 9.163606776403182e-06,
      "loss": 0.2661,
      "step": 104
    },
    {
      "epoch": 0.2713178294573643,
      "grad_norm": 2.0946295261383057,
      "learning_rate": 9.138444990784455e-06,
      "loss": 0.2572,
      "step": 105
    },
    {
      "epoch": 0.2739018087855297,
      "grad_norm": 1.9430915117263794,
      "learning_rate": 9.112945936888034e-06,
      "loss": 0.2388,
      "step": 106
    },
    {
      "epoch": 0.27648578811369506,
      "grad_norm": 1.6468437910079956,
      "learning_rate": 9.08711169279446e-06,
      "loss": 0.2026,
      "step": 107
    },
    {
      "epoch": 0.27906976744186046,
      "grad_norm": 1.9926129579544067,
      "learning_rate": 9.060944363901057e-06,
      "loss": 0.2297,
      "step": 108
    },
    {
      "epoch": 0.28165374677002586,
      "grad_norm": 2.1874048709869385,
      "learning_rate": 9.034446082750352e-06,
      "loss": 0.2474,
      "step": 109
    },
    {
      "epoch": 0.2842377260981912,
      "grad_norm": 1.8041729927062988,
      "learning_rate": 9.007619008856287e-06,
      "loss": 0.233,
      "step": 110
    },
    {
      "epoch": 0.2868217054263566,
      "grad_norm": 4.950301170349121,
      "learning_rate": 8.98046532852822e-06,
      "loss": 0.2134,
      "step": 111
    },
    {
      "epoch": 0.28940568475452194,
      "grad_norm": 2.078723430633545,
      "learning_rate": 8.952987254692746e-06,
      "loss": 0.2547,
      "step": 112
    },
    {
      "epoch": 0.29198966408268734,
      "grad_norm": 2.2332022190093994,
      "learning_rate": 8.925187026713363e-06,
      "loss": 0.2343,
      "step": 113
    },
    {
      "epoch": 0.29457364341085274,
      "grad_norm": 1.7842631340026855,
      "learning_rate": 8.897066910207958e-06,
      "loss": 0.235,
      "step": 114
    },
    {
      "epoch": 0.2971576227390181,
      "grad_norm": 2.2905609607696533,
      "learning_rate": 8.868629196864182e-06,
      "loss": 0.2287,
      "step": 115
    },
    {
      "epoch": 0.2997416020671835,
      "grad_norm": 2.1392345428466797,
      "learning_rate": 8.83987620425267e-06,
      "loss": 0.2607,
      "step": 116
    },
    {
      "epoch": 0.3023255813953488,
      "grad_norm": 2.5281567573547363,
      "learning_rate": 8.810810275638183e-06,
      "loss": 0.2643,
      "step": 117
    },
    {
      "epoch": 0.3049095607235142,
      "grad_norm": 2.3782424926757812,
      "learning_rate": 8.781433779788627e-06,
      "loss": 0.2215,
      "step": 118
    },
    {
      "epoch": 0.30749354005167956,
      "grad_norm": 1.8404749631881714,
      "learning_rate": 8.751749110782013e-06,
      "loss": 0.2332,
      "step": 119
    },
    {
      "epoch": 0.31007751937984496,
      "grad_norm": 2.1314375400543213,
      "learning_rate": 8.721758687811353e-06,
      "loss": 0.2445,
      "step": 120
    },
    {
      "epoch": 0.31266149870801035,
      "grad_norm": 2.1510751247406006,
      "learning_rate": 8.691464954987494e-06,
      "loss": 0.2373,
      "step": 121
    },
    {
      "epoch": 0.3152454780361757,
      "grad_norm": 2.0221781730651855,
      "learning_rate": 8.660870381139944e-06,
      "loss": 0.2311,
      "step": 122
    },
    {
      "epoch": 0.3178294573643411,
      "grad_norm": 2.068140745162964,
      "learning_rate": 8.629977459615655e-06,
      "loss": 0.2271,
      "step": 123
    },
    {
      "epoch": 0.32041343669250644,
      "grad_norm": 1.9292054176330566,
      "learning_rate": 8.598788708075844e-06,
      "loss": 0.2438,
      "step": 124
    },
    {
      "epoch": 0.32299741602067183,
      "grad_norm": 1.9059464931488037,
      "learning_rate": 8.567306668290801e-06,
      "loss": 0.2437,
      "step": 125
    },
    {
      "epoch": 0.32558139534883723,
      "grad_norm": 1.87433922290802,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.2348,
      "step": 126
    },
    {
      "epoch": 0.3281653746770026,
      "grad_norm": 1.9836807250976562,
      "learning_rate": 8.503473010366713e-06,
      "loss": 0.239,
      "step": 127
    },
    {
      "epoch": 0.330749354005168,
      "grad_norm": 1.831213355064392,
      "learning_rate": 8.471126594439591e-06,
      "loss": 0.2364,
      "step": 128
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 1.7371547222137451,
      "learning_rate": 8.438497294267117e-06,
      "loss": 0.2135,
      "step": 129
    },
    {
      "epoch": 0.3359173126614987,
      "grad_norm": 2.7316882610321045,
      "learning_rate": 8.405587769019072e-06,
      "loss": 0.2562,
      "step": 130
    },
    {
      "epoch": 0.3385012919896641,
      "grad_norm": 2.244509696960449,
      "learning_rate": 8.372400700702569e-06,
      "loss": 0.2545,
      "step": 131
    },
    {
      "epoch": 0.34108527131782945,
      "grad_norm": 1.8742812871932983,
      "learning_rate": 8.338938793943478e-06,
      "loss": 0.2288,
      "step": 132
    },
    {
      "epoch": 0.34366925064599485,
      "grad_norm": 2.042801856994629,
      "learning_rate": 8.305204775766003e-06,
      "loss": 0.2363,
      "step": 133
    },
    {
      "epoch": 0.3462532299741602,
      "grad_norm": 1.7261626720428467,
      "learning_rate": 8.27120139537044e-06,
      "loss": 0.237,
      "step": 134
    },
    {
      "epoch": 0.3488372093023256,
      "grad_norm": 1.798540711402893,
      "learning_rate": 8.23693142390914e-06,
      "loss": 0.2275,
      "step": 135
    },
    {
      "epoch": 0.35142118863049093,
      "grad_norm": 1.6685022115707397,
      "learning_rate": 8.202397654260649e-06,
      "loss": 0.2061,
      "step": 136
    },
    {
      "epoch": 0.35400516795865633,
      "grad_norm": 2.218613386154175,
      "learning_rate": 8.167602900802121e-06,
      "loss": 0.2377,
      "step": 137
    },
    {
      "epoch": 0.35658914728682173,
      "grad_norm": 2.245936632156372,
      "learning_rate": 8.132549999179934e-06,
      "loss": 0.2208,
      "step": 138
    },
    {
      "epoch": 0.35917312661498707,
      "grad_norm": 2.624891757965088,
      "learning_rate": 8.097241806078616e-06,
      "loss": 0.2476,
      "step": 139
    },
    {
      "epoch": 0.36175710594315247,
      "grad_norm": 2.2246923446655273,
      "learning_rate": 8.06168119898802e-06,
      "loss": 0.2399,
      "step": 140
    },
    {
      "epoch": 0.3643410852713178,
      "grad_norm": 1.6137202978134155,
      "learning_rate": 8.025871075968828e-06,
      "loss": 0.2179,
      "step": 141
    },
    {
      "epoch": 0.3669250645994832,
      "grad_norm": 2.3189852237701416,
      "learning_rate": 7.989814355416362e-06,
      "loss": 0.2258,
      "step": 142
    },
    {
      "epoch": 0.3695090439276486,
      "grad_norm": 1.66120183467865,
      "learning_rate": 7.953513975822755e-06,
      "loss": 0.2179,
      "step": 143
    },
    {
      "epoch": 0.37209302325581395,
      "grad_norm": 1.8575221300125122,
      "learning_rate": 7.916972895537471e-06,
      "loss": 0.2385,
      "step": 144
    },
    {
      "epoch": 0.37467700258397935,
      "grad_norm": 1.6913485527038574,
      "learning_rate": 7.8801940925262e-06,
      "loss": 0.2315,
      "step": 145
    },
    {
      "epoch": 0.3772609819121447,
      "grad_norm": 2.0804011821746826,
      "learning_rate": 7.843180564128178e-06,
      "loss": 0.2329,
      "step": 146
    },
    {
      "epoch": 0.3798449612403101,
      "grad_norm": 1.9570668935775757,
      "learning_rate": 7.805935326811913e-06,
      "loss": 0.236,
      "step": 147
    },
    {
      "epoch": 0.38242894056847543,
      "grad_norm": 2.2063801288604736,
      "learning_rate": 7.768461415929344e-06,
      "loss": 0.2221,
      "step": 148
    },
    {
      "epoch": 0.3850129198966408,
      "grad_norm": 1.6152676343917847,
      "learning_rate": 7.730761885468486e-06,
      "loss": 0.2238,
      "step": 149
    },
    {
      "epoch": 0.3875968992248062,
      "grad_norm": 1.9125603437423706,
      "learning_rate": 7.692839807804522e-06,
      "loss": 0.2287,
      "step": 150
    },
    {
      "epoch": 0.39018087855297157,
      "grad_norm": 1.910693645477295,
      "learning_rate": 7.654698273449435e-06,
      "loss": 0.2225,
      "step": 151
    },
    {
      "epoch": 0.39276485788113696,
      "grad_norm": 1.5631076097488403,
      "learning_rate": 7.616340390800127e-06,
      "loss": 0.2376,
      "step": 152
    },
    {
      "epoch": 0.3953488372093023,
      "grad_norm": 1.5215353965759277,
      "learning_rate": 7.57776928588511e-06,
      "loss": 0.212,
      "step": 153
    },
    {
      "epoch": 0.3979328165374677,
      "grad_norm": 1.9194774627685547,
      "learning_rate": 7.538988102109728e-06,
      "loss": 0.2247,
      "step": 154
    },
    {
      "epoch": 0.4005167958656331,
      "grad_norm": 2.0770885944366455,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.2527,
      "step": 155
    },
    {
      "epoch": 0.40310077519379844,
      "grad_norm": 1.776774525642395,
      "learning_rate": 7.4608081569450365e-06,
      "loss": 0.243,
      "step": 156
    },
    {
      "epoch": 0.40568475452196384,
      "grad_norm": 1.8917075395584106,
      "learning_rate": 7.421415766938098e-06,
      "loss": 0.2319,
      "step": 157
    },
    {
      "epoch": 0.4082687338501292,
      "grad_norm": 1.8015702962875366,
      "learning_rate": 7.381826040316294e-06,
      "loss": 0.2115,
      "step": 158
    },
    {
      "epoch": 0.4108527131782946,
      "grad_norm": 2.221393346786499,
      "learning_rate": 7.342042203498952e-06,
      "loss": 0.2334,
      "step": 159
    },
    {
      "epoch": 0.4134366925064599,
      "grad_norm": 1.9779021739959717,
      "learning_rate": 7.302067498724681e-06,
      "loss": 0.2206,
      "step": 160
    },
    {
      "epoch": 0.4160206718346253,
      "grad_norm": 1.6825449466705322,
      "learning_rate": 7.261905183787136e-06,
      "loss": 0.2068,
      "step": 161
    },
    {
      "epoch": 0.4186046511627907,
      "grad_norm": 1.9611059427261353,
      "learning_rate": 7.221558531769519e-06,
      "loss": 0.2454,
      "step": 162
    },
    {
      "epoch": 0.42118863049095606,
      "grad_norm": 1.7121902704238892,
      "learning_rate": 7.181030830777838e-06,
      "loss": 0.2397,
      "step": 163
    },
    {
      "epoch": 0.42377260981912146,
      "grad_norm": 2.4492485523223877,
      "learning_rate": 7.140325383672938e-06,
      "loss": 0.2288,
      "step": 164
    },
    {
      "epoch": 0.4263565891472868,
      "grad_norm": 1.4285128116607666,
      "learning_rate": 7.099445507801324e-06,
      "loss": 0.1875,
      "step": 165
    },
    {
      "epoch": 0.4289405684754522,
      "grad_norm": 1.9082492589950562,
      "learning_rate": 7.058394534724819e-06,
      "loss": 0.2208,
      "step": 166
    },
    {
      "epoch": 0.4315245478036176,
      "grad_norm": 1.6619027853012085,
      "learning_rate": 7.017175809949044e-06,
      "loss": 0.2062,
      "step": 167
    },
    {
      "epoch": 0.43410852713178294,
      "grad_norm": 1.8407310247421265,
      "learning_rate": 6.975792692650778e-06,
      "loss": 0.2406,
      "step": 168
    },
    {
      "epoch": 0.43669250645994834,
      "grad_norm": 1.7827914953231812,
      "learning_rate": 6.934248555404197e-06,
      "loss": 0.2258,
      "step": 169
    },
    {
      "epoch": 0.4392764857881137,
      "grad_norm": 1.6271895170211792,
      "learning_rate": 6.892546783906016e-06,
      "loss": 0.2222,
      "step": 170
    },
    {
      "epoch": 0.4418604651162791,
      "grad_norm": 1.9836379289627075,
      "learning_rate": 6.850690776699574e-06,
      "loss": 0.2307,
      "step": 171
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.6630229949951172,
      "learning_rate": 6.808683944897856e-06,
      "loss": 0.2057,
      "step": 172
    },
    {
      "epoch": 0.4470284237726098,
      "grad_norm": 1.7780696153640747,
      "learning_rate": 6.766529711905513e-06,
      "loss": 0.2034,
      "step": 173
    },
    {
      "epoch": 0.4496124031007752,
      "grad_norm": 1.984447717666626,
      "learning_rate": 6.724231513139853e-06,
      "loss": 0.2303,
      "step": 174
    },
    {
      "epoch": 0.45219638242894056,
      "grad_norm": 2.17633056640625,
      "learning_rate": 6.681792795750876e-06,
      "loss": 0.2373,
      "step": 175
    },
    {
      "epoch": 0.45478036175710596,
      "grad_norm": 1.460951566696167,
      "learning_rate": 6.639217018340342e-06,
      "loss": 0.2015,
      "step": 176
    },
    {
      "epoch": 0.4573643410852713,
      "grad_norm": 1.790872573852539,
      "learning_rate": 6.5965076506799e-06,
      "loss": 0.2244,
      "step": 177
    },
    {
      "epoch": 0.4599483204134367,
      "grad_norm": 1.4559892416000366,
      "learning_rate": 6.553668173428329e-06,
      "loss": 0.2045,
      "step": 178
    },
    {
      "epoch": 0.4625322997416021,
      "grad_norm": 2.018108367919922,
      "learning_rate": 6.510702077847864e-06,
      "loss": 0.2079,
      "step": 179
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 1.6619304418563843,
      "learning_rate": 6.467612865519674e-06,
      "loss": 0.2031,
      "step": 180
    },
    {
      "epoch": 0.46770025839793283,
      "grad_norm": 1.7696534395217896,
      "learning_rate": 6.424404048058501e-06,
      "loss": 0.2104,
      "step": 181
    },
    {
      "epoch": 0.4702842377260982,
      "grad_norm": 1.6792902946472168,
      "learning_rate": 6.3810791468264654e-06,
      "loss": 0.1949,
      "step": 182
    },
    {
      "epoch": 0.4728682170542636,
      "grad_norm": 1.7121378183364868,
      "learning_rate": 6.337641692646106e-06,
      "loss": 0.2261,
      "step": 183
    },
    {
      "epoch": 0.4754521963824289,
      "grad_norm": 1.758552074432373,
      "learning_rate": 6.294095225512604e-06,
      "loss": 0.2272,
      "step": 184
    },
    {
      "epoch": 0.4780361757105943,
      "grad_norm": 1.5094135999679565,
      "learning_rate": 6.250443294305315e-06,
      "loss": 0.2059,
      "step": 185
    },
    {
      "epoch": 0.4806201550387597,
      "grad_norm": 1.5085089206695557,
      "learning_rate": 6.206689456498529e-06,
      "loss": 0.2173,
      "step": 186
    },
    {
      "epoch": 0.48320413436692505,
      "grad_norm": 1.4562510251998901,
      "learning_rate": 6.162837277871553e-06,
      "loss": 0.2185,
      "step": 187
    },
    {
      "epoch": 0.48578811369509045,
      "grad_norm": 1.507325530052185,
      "learning_rate": 6.118890332218117e-06,
      "loss": 0.2139,
      "step": 188
    },
    {
      "epoch": 0.4883720930232558,
      "grad_norm": 1.7156720161437988,
      "learning_rate": 6.074852201055121e-06,
      "loss": 0.2161,
      "step": 189
    },
    {
      "epoch": 0.4909560723514212,
      "grad_norm": 1.659424901008606,
      "learning_rate": 6.0307264733307515e-06,
      "loss": 0.2083,
      "step": 190
    },
    {
      "epoch": 0.4935400516795866,
      "grad_norm": 1.4655615091323853,
      "learning_rate": 5.986516745132e-06,
      "loss": 0.2129,
      "step": 191
    },
    {
      "epoch": 0.49612403100775193,
      "grad_norm": 1.5427219867706299,
      "learning_rate": 5.942226619391592e-06,
      "loss": 0.2064,
      "step": 192
    },
    {
      "epoch": 0.49870801033591733,
      "grad_norm": 1.3408644199371338,
      "learning_rate": 5.8978597055943585e-06,
      "loss": 0.1982,
      "step": 193
    },
    {
      "epoch": 0.5012919896640827,
      "grad_norm": 1.9902156591415405,
      "learning_rate": 5.853419619483083e-06,
      "loss": 0.226,
      "step": 194
    },
    {
      "epoch": 0.5038759689922481,
      "grad_norm": 1.3126060962677002,
      "learning_rate": 5.808909982763825e-06,
      "loss": 0.1991,
      "step": 195
    },
    {
      "epoch": 0.5064599483204134,
      "grad_norm": 1.8132307529449463,
      "learning_rate": 5.764334422810767e-06,
      "loss": 0.2271,
      "step": 196
    },
    {
      "epoch": 0.5090439276485789,
      "grad_norm": 1.8984651565551758,
      "learning_rate": 5.719696572370596e-06,
      "loss": 0.2363,
      "step": 197
    },
    {
      "epoch": 0.5116279069767442,
      "grad_norm": 1.817488670349121,
      "learning_rate": 5.675000069266451e-06,
      "loss": 0.2281,
      "step": 198
    },
    {
      "epoch": 0.5142118863049095,
      "grad_norm": 2.4053938388824463,
      "learning_rate": 5.630248556101448e-06,
      "loss": 0.2349,
      "step": 199
    },
    {
      "epoch": 0.5167958656330749,
      "grad_norm": 1.4642032384872437,
      "learning_rate": 5.585445679961823e-06,
      "loss": 0.1984,
      "step": 200
    },
    {
      "epoch": 0.5193798449612403,
      "grad_norm": 2.2405009269714355,
      "learning_rate": 5.540595092119709e-06,
      "loss": 0.2101,
      "step": 201
    },
    {
      "epoch": 0.5219638242894057,
      "grad_norm": 1.7168304920196533,
      "learning_rate": 5.495700447735572e-06,
      "loss": 0.2272,
      "step": 202
    },
    {
      "epoch": 0.524547803617571,
      "grad_norm": 1.5339411497116089,
      "learning_rate": 5.450765405560328e-06,
      "loss": 0.2225,
      "step": 203
    },
    {
      "epoch": 0.5271317829457365,
      "grad_norm": 1.3848717212677002,
      "learning_rate": 5.405793627637157e-06,
      "loss": 0.1989,
      "step": 204
    },
    {
      "epoch": 0.5297157622739018,
      "grad_norm": 1.5923454761505127,
      "learning_rate": 5.360788779003082e-06,
      "loss": 0.2101,
      "step": 205
    },
    {
      "epoch": 0.5322997416020672,
      "grad_norm": 1.538506269454956,
      "learning_rate": 5.3157545273902585e-06,
      "loss": 0.2203,
      "step": 206
    },
    {
      "epoch": 0.5348837209302325,
      "grad_norm": 1.3902374505996704,
      "learning_rate": 5.270694542927089e-06,
      "loss": 0.2042,
      "step": 207
    },
    {
      "epoch": 0.537467700258398,
      "grad_norm": 2.0255801677703857,
      "learning_rate": 5.225612497839099e-06,
      "loss": 0.2059,
      "step": 208
    },
    {
      "epoch": 0.5400516795865633,
      "grad_norm": 1.9409769773483276,
      "learning_rate": 5.180512066149682e-06,
      "loss": 0.2154,
      "step": 209
    },
    {
      "epoch": 0.5426356589147286,
      "grad_norm": 1.5798600912094116,
      "learning_rate": 5.1353969233806735e-06,
      "loss": 0.2116,
      "step": 210
    },
    {
      "epoch": 0.5452196382428941,
      "grad_norm": 1.9305166006088257,
      "learning_rate": 5.090270746252803e-06,
      "loss": 0.1892,
      "step": 211
    },
    {
      "epoch": 0.5478036175710594,
      "grad_norm": 2.8123254776000977,
      "learning_rate": 5.045137212386065e-06,
      "loss": 0.2128,
      "step": 212
    },
    {
      "epoch": 0.5503875968992248,
      "grad_norm": 1.9373772144317627,
      "learning_rate": 5e-06,
      "loss": 0.2199,
      "step": 213
    },
    {
      "epoch": 0.5529715762273901,
      "grad_norm": 1.5903531312942505,
      "learning_rate": 4.954862787613937e-06,
      "loss": 0.2148,
      "step": 214
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.604992389678955,
      "learning_rate": 4.909729253747197e-06,
      "loss": 0.2236,
      "step": 215
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 2.290006399154663,
      "learning_rate": 4.864603076619329e-06,
      "loss": 0.2175,
      "step": 216
    },
    {
      "epoch": 0.5607235142118863,
      "grad_norm": 1.6406919956207275,
      "learning_rate": 4.819487933850319e-06,
      "loss": 0.2272,
      "step": 217
    },
    {
      "epoch": 0.5633074935400517,
      "grad_norm": 1.8373345136642456,
      "learning_rate": 4.774387502160902e-06,
      "loss": 0.2245,
      "step": 218
    },
    {
      "epoch": 0.5658914728682171,
      "grad_norm": 1.6454440355300903,
      "learning_rate": 4.729305457072913e-06,
      "loss": 0.203,
      "step": 219
    },
    {
      "epoch": 0.5684754521963824,
      "grad_norm": 1.6277472972869873,
      "learning_rate": 4.684245472609743e-06,
      "loss": 0.2325,
      "step": 220
    },
    {
      "epoch": 0.5710594315245479,
      "grad_norm": 1.4459894895553589,
      "learning_rate": 4.6392112209969205e-06,
      "loss": 0.2095,
      "step": 221
    },
    {
      "epoch": 0.5736434108527132,
      "grad_norm": 1.5780946016311646,
      "learning_rate": 4.594206372362845e-06,
      "loss": 0.2033,
      "step": 222
    },
    {
      "epoch": 0.5762273901808785,
      "grad_norm": 1.4234222173690796,
      "learning_rate": 4.549234594439674e-06,
      "loss": 0.2014,
      "step": 223
    },
    {
      "epoch": 0.5788113695090439,
      "grad_norm": 1.2753453254699707,
      "learning_rate": 4.504299552264428e-06,
      "loss": 0.1729,
      "step": 224
    },
    {
      "epoch": 0.5813953488372093,
      "grad_norm": 1.4043508768081665,
      "learning_rate": 4.459404907880293e-06,
      "loss": 0.2015,
      "step": 225
    },
    {
      "epoch": 0.5839793281653747,
      "grad_norm": 1.548924446105957,
      "learning_rate": 4.414554320038179e-06,
      "loss": 0.2153,
      "step": 226
    },
    {
      "epoch": 0.58656330749354,
      "grad_norm": 1.4754077196121216,
      "learning_rate": 4.369751443898554e-06,
      "loss": 0.2023,
      "step": 227
    },
    {
      "epoch": 0.5891472868217055,
      "grad_norm": 1.2993725538253784,
      "learning_rate": 4.32499993073355e-06,
      "loss": 0.1763,
      "step": 228
    },
    {
      "epoch": 0.5917312661498708,
      "grad_norm": 1.3788522481918335,
      "learning_rate": 4.280303427629404e-06,
      "loss": 0.1941,
      "step": 229
    },
    {
      "epoch": 0.5943152454780362,
      "grad_norm": 2.100757122039795,
      "learning_rate": 4.2356655771892355e-06,
      "loss": 0.227,
      "step": 230
    },
    {
      "epoch": 0.5968992248062015,
      "grad_norm": 1.526432991027832,
      "learning_rate": 4.191090017236177e-06,
      "loss": 0.2028,
      "step": 231
    },
    {
      "epoch": 0.599483204134367,
      "grad_norm": 1.5663360357284546,
      "learning_rate": 4.146580380516918e-06,
      "loss": 0.2094,
      "step": 232
    },
    {
      "epoch": 0.6020671834625323,
      "grad_norm": 1.3962419033050537,
      "learning_rate": 4.1021402944056415e-06,
      "loss": 0.1891,
      "step": 233
    },
    {
      "epoch": 0.6046511627906976,
      "grad_norm": 1.3490815162658691,
      "learning_rate": 4.057773380608411e-06,
      "loss": 0.1682,
      "step": 234
    },
    {
      "epoch": 0.6072351421188631,
      "grad_norm": 1.5363324880599976,
      "learning_rate": 4.013483254868001e-06,
      "loss": 0.1997,
      "step": 235
    },
    {
      "epoch": 0.6098191214470284,
      "grad_norm": 1.261446237564087,
      "learning_rate": 3.969273526669249e-06,
      "loss": 0.1766,
      "step": 236
    },
    {
      "epoch": 0.6124031007751938,
      "grad_norm": 1.8413023948669434,
      "learning_rate": 3.92514779894488e-06,
      "loss": 0.2168,
      "step": 237
    },
    {
      "epoch": 0.6149870801033591,
      "grad_norm": 1.784358263015747,
      "learning_rate": 3.881109667781884e-06,
      "loss": 0.194,
      "step": 238
    },
    {
      "epoch": 0.6175710594315246,
      "grad_norm": 1.5789978504180908,
      "learning_rate": 3.8371627221284495e-06,
      "loss": 0.1961,
      "step": 239
    },
    {
      "epoch": 0.6201550387596899,
      "grad_norm": 1.3455345630645752,
      "learning_rate": 3.7933105435014727e-06,
      "loss": 0.1929,
      "step": 240
    },
    {
      "epoch": 0.6227390180878553,
      "grad_norm": 1.4323828220367432,
      "learning_rate": 3.7495567056946856e-06,
      "loss": 0.2066,
      "step": 241
    },
    {
      "epoch": 0.6253229974160207,
      "grad_norm": 1.5241978168487549,
      "learning_rate": 3.705904774487396e-06,
      "loss": 0.2147,
      "step": 242
    },
    {
      "epoch": 0.627906976744186,
      "grad_norm": 1.6744216680526733,
      "learning_rate": 3.662358307353897e-06,
      "loss": 0.2254,
      "step": 243
    },
    {
      "epoch": 0.6304909560723514,
      "grad_norm": 2.2153306007385254,
      "learning_rate": 3.6189208531735354e-06,
      "loss": 0.1895,
      "step": 244
    },
    {
      "epoch": 0.6330749354005168,
      "grad_norm": 1.4624733924865723,
      "learning_rate": 3.5755959519415008e-06,
      "loss": 0.1873,
      "step": 245
    },
    {
      "epoch": 0.6356589147286822,
      "grad_norm": 1.6091656684875488,
      "learning_rate": 3.532387134480327e-06,
      "loss": 0.198,
      "step": 246
    },
    {
      "epoch": 0.6382428940568475,
      "grad_norm": 1.208809733390808,
      "learning_rate": 3.489297922152136e-06,
      "loss": 0.1866,
      "step": 247
    },
    {
      "epoch": 0.6408268733850129,
      "grad_norm": 1.3647226095199585,
      "learning_rate": 3.446331826571672e-06,
      "loss": 0.1981,
      "step": 248
    },
    {
      "epoch": 0.6434108527131783,
      "grad_norm": 1.6208391189575195,
      "learning_rate": 3.403492349320101e-06,
      "loss": 0.2192,
      "step": 249
    },
    {
      "epoch": 0.6459948320413437,
      "grad_norm": 1.5029717683792114,
      "learning_rate": 3.360782981659659e-06,
      "loss": 0.1784,
      "step": 250
    },
    {
      "epoch": 0.648578811369509,
      "grad_norm": 1.6115533113479614,
      "learning_rate": 3.3182072042491244e-06,
      "loss": 0.2231,
      "step": 251
    },
    {
      "epoch": 0.6511627906976745,
      "grad_norm": 1.621906042098999,
      "learning_rate": 3.275768486860149e-06,
      "loss": 0.1729,
      "step": 252
    },
    {
      "epoch": 0.6537467700258398,
      "grad_norm": 1.356162190437317,
      "learning_rate": 3.233470288094489e-06,
      "loss": 0.1873,
      "step": 253
    },
    {
      "epoch": 0.6563307493540051,
      "grad_norm": 1.4512073993682861,
      "learning_rate": 3.191316055102146e-06,
      "loss": 0.1955,
      "step": 254
    },
    {
      "epoch": 0.6589147286821705,
      "grad_norm": 1.5930757522583008,
      "learning_rate": 3.149309223300428e-06,
      "loss": 0.1773,
      "step": 255
    },
    {
      "epoch": 0.661498708010336,
      "grad_norm": 1.3301100730895996,
      "learning_rate": 3.107453216093985e-06,
      "loss": 0.1838,
      "step": 256
    },
    {
      "epoch": 0.6640826873385013,
      "grad_norm": 1.2428103685379028,
      "learning_rate": 3.0657514445958055e-06,
      "loss": 0.1795,
      "step": 257
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.2732970714569092,
      "learning_rate": 3.0242073073492238e-06,
      "loss": 0.1907,
      "step": 258
    },
    {
      "epoch": 0.6692506459948321,
      "grad_norm": 1.7173457145690918,
      "learning_rate": 2.982824190050958e-06,
      "loss": 0.1962,
      "step": 259
    },
    {
      "epoch": 0.6718346253229974,
      "grad_norm": 1.1723400354385376,
      "learning_rate": 2.9416054652751834e-06,
      "loss": 0.1553,
      "step": 260
    },
    {
      "epoch": 0.6744186046511628,
      "grad_norm": 1.2737828493118286,
      "learning_rate": 2.9005544921986774e-06,
      "loss": 0.1809,
      "step": 261
    },
    {
      "epoch": 0.6770025839793282,
      "grad_norm": 1.6005208492279053,
      "learning_rate": 2.8596746163270646e-06,
      "loss": 0.1876,
      "step": 262
    },
    {
      "epoch": 0.6795865633074936,
      "grad_norm": 1.6597061157226562,
      "learning_rate": 2.8189691692221627e-06,
      "loss": 0.214,
      "step": 263
    },
    {
      "epoch": 0.6821705426356589,
      "grad_norm": 1.4774705171585083,
      "learning_rate": 2.778441468230483e-06,
      "loss": 0.1851,
      "step": 264
    },
    {
      "epoch": 0.6847545219638242,
      "grad_norm": 1.409745454788208,
      "learning_rate": 2.738094816212866e-06,
      "loss": 0.1734,
      "step": 265
    },
    {
      "epoch": 0.6873385012919897,
      "grad_norm": 1.40264093875885,
      "learning_rate": 2.6979325012753214e-06,
      "loss": 0.175,
      "step": 266
    },
    {
      "epoch": 0.689922480620155,
      "grad_norm": 1.2759225368499756,
      "learning_rate": 2.65795779650105e-06,
      "loss": 0.173,
      "step": 267
    },
    {
      "epoch": 0.6925064599483204,
      "grad_norm": 1.3376514911651611,
      "learning_rate": 2.6181739596837075e-06,
      "loss": 0.1669,
      "step": 268
    },
    {
      "epoch": 0.6950904392764858,
      "grad_norm": 1.813323974609375,
      "learning_rate": 2.5785842330619038e-06,
      "loss": 0.1915,
      "step": 269
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 1.4839708805084229,
      "learning_rate": 2.5391918430549635e-06,
      "loss": 0.2038,
      "step": 270
    },
    {
      "epoch": 0.7002583979328165,
      "grad_norm": 1.2945070266723633,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.1685,
      "step": 271
    },
    {
      "epoch": 0.7028423772609819,
      "grad_norm": 1.5902131795883179,
      "learning_rate": 2.4610118978902732e-06,
      "loss": 0.1874,
      "step": 272
    },
    {
      "epoch": 0.7054263565891473,
      "grad_norm": 1.3686803579330444,
      "learning_rate": 2.422230714114891e-06,
      "loss": 0.1701,
      "step": 273
    },
    {
      "epoch": 0.7080103359173127,
      "grad_norm": 1.3523751497268677,
      "learning_rate": 2.383659609199873e-06,
      "loss": 0.1679,
      "step": 274
    },
    {
      "epoch": 0.710594315245478,
      "grad_norm": 1.5173090696334839,
      "learning_rate": 2.345301726550567e-06,
      "loss": 0.203,
      "step": 275
    },
    {
      "epoch": 0.7131782945736435,
      "grad_norm": 1.1696375608444214,
      "learning_rate": 2.3071601921954797e-06,
      "loss": 0.1645,
      "step": 276
    },
    {
      "epoch": 0.7157622739018088,
      "grad_norm": 1.5028029680252075,
      "learning_rate": 2.269238114531515e-06,
      "loss": 0.1902,
      "step": 277
    },
    {
      "epoch": 0.7183462532299741,
      "grad_norm": 1.4014875888824463,
      "learning_rate": 2.2315385840706548e-06,
      "loss": 0.1817,
      "step": 278
    },
    {
      "epoch": 0.7209302325581395,
      "grad_norm": 1.1939343214035034,
      "learning_rate": 2.1940646731880887e-06,
      "loss": 0.1574,
      "step": 279
    },
    {
      "epoch": 0.7235142118863049,
      "grad_norm": 1.4427356719970703,
      "learning_rate": 2.156819435871824e-06,
      "loss": 0.1815,
      "step": 280
    },
    {
      "epoch": 0.7260981912144703,
      "grad_norm": 1.421738624572754,
      "learning_rate": 2.1198059074738027e-06,
      "loss": 0.1996,
      "step": 281
    },
    {
      "epoch": 0.7286821705426356,
      "grad_norm": 1.4071861505508423,
      "learning_rate": 2.08302710446253e-06,
      "loss": 0.1882,
      "step": 282
    },
    {
      "epoch": 0.7312661498708011,
      "grad_norm": 1.2895996570587158,
      "learning_rate": 2.0464860241772454e-06,
      "loss": 0.1664,
      "step": 283
    },
    {
      "epoch": 0.7338501291989664,
      "grad_norm": 1.6300632953643799,
      "learning_rate": 2.010185644583641e-06,
      "loss": 0.1849,
      "step": 284
    },
    {
      "epoch": 0.7364341085271318,
      "grad_norm": 1.4193930625915527,
      "learning_rate": 1.9741289240311757e-06,
      "loss": 0.1903,
      "step": 285
    },
    {
      "epoch": 0.7390180878552972,
      "grad_norm": 1.3659586906433105,
      "learning_rate": 1.9383188010119818e-06,
      "loss": 0.1891,
      "step": 286
    },
    {
      "epoch": 0.7416020671834626,
      "grad_norm": 1.3775144815444946,
      "learning_rate": 1.9027581939213852e-06,
      "loss": 0.1934,
      "step": 287
    },
    {
      "epoch": 0.7441860465116279,
      "grad_norm": 1.3561066389083862,
      "learning_rate": 1.8674500008200675e-06,
      "loss": 0.193,
      "step": 288
    },
    {
      "epoch": 0.7467700258397932,
      "grad_norm": 1.3804134130477905,
      "learning_rate": 1.8323970991978823e-06,
      "loss": 0.1801,
      "step": 289
    },
    {
      "epoch": 0.7493540051679587,
      "grad_norm": 1.3752516508102417,
      "learning_rate": 1.797602345739352e-06,
      "loss": 0.1784,
      "step": 290
    },
    {
      "epoch": 0.751937984496124,
      "grad_norm": 1.4992294311523438,
      "learning_rate": 1.7630685760908623e-06,
      "loss": 0.1908,
      "step": 291
    },
    {
      "epoch": 0.7545219638242894,
      "grad_norm": 1.555408000946045,
      "learning_rate": 1.7287986046295597e-06,
      "loss": 0.1756,
      "step": 292
    },
    {
      "epoch": 0.7571059431524548,
      "grad_norm": 1.603082299232483,
      "learning_rate": 1.6947952242339993e-06,
      "loss": 0.157,
      "step": 293
    },
    {
      "epoch": 0.7596899224806202,
      "grad_norm": 1.503893256187439,
      "learning_rate": 1.6610612060565235e-06,
      "loss": 0.1929,
      "step": 294
    },
    {
      "epoch": 0.7622739018087855,
      "grad_norm": 1.3251415491104126,
      "learning_rate": 1.627599299297431e-06,
      "loss": 0.1776,
      "step": 295
    },
    {
      "epoch": 0.7648578811369509,
      "grad_norm": 1.4177522659301758,
      "learning_rate": 1.594412230980928e-06,
      "loss": 0.1726,
      "step": 296
    },
    {
      "epoch": 0.7674418604651163,
      "grad_norm": 1.4148153066635132,
      "learning_rate": 1.561502705732883e-06,
      "loss": 0.1823,
      "step": 297
    },
    {
      "epoch": 0.7700258397932817,
      "grad_norm": 1.3199900388717651,
      "learning_rate": 1.5288734055604087e-06,
      "loss": 0.1809,
      "step": 298
    },
    {
      "epoch": 0.772609819121447,
      "grad_norm": 1.4778473377227783,
      "learning_rate": 1.4965269896332884e-06,
      "loss": 0.1853,
      "step": 299
    },
    {
      "epoch": 0.7751937984496124,
      "grad_norm": 1.3273617029190063,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.1676,
      "step": 300
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.4746593236923218,
      "learning_rate": 1.4326933317092e-06,
      "loss": 0.1803,
      "step": 301
    },
    {
      "epoch": 0.7803617571059431,
      "grad_norm": 1.204885721206665,
      "learning_rate": 1.4012112919241562e-06,
      "loss": 0.1617,
      "step": 302
    },
    {
      "epoch": 0.7829457364341085,
      "grad_norm": 1.4356775283813477,
      "learning_rate": 1.370022540384347e-06,
      "loss": 0.1723,
      "step": 303
    },
    {
      "epoch": 0.7855297157622739,
      "grad_norm": 1.6779110431671143,
      "learning_rate": 1.3391296188600594e-06,
      "loss": 0.1768,
      "step": 304
    },
    {
      "epoch": 0.7881136950904393,
      "grad_norm": 1.3418086767196655,
      "learning_rate": 1.3085350450125073e-06,
      "loss": 0.1807,
      "step": 305
    },
    {
      "epoch": 0.7906976744186046,
      "grad_norm": 1.301296591758728,
      "learning_rate": 1.2782413121886483e-06,
      "loss": 0.1497,
      "step": 306
    },
    {
      "epoch": 0.7932816537467701,
      "grad_norm": 1.1588480472564697,
      "learning_rate": 1.2482508892179884e-06,
      "loss": 0.1582,
      "step": 307
    },
    {
      "epoch": 0.7958656330749354,
      "grad_norm": 1.2679500579833984,
      "learning_rate": 1.2185662202113764e-06,
      "loss": 0.1527,
      "step": 308
    },
    {
      "epoch": 0.7984496124031008,
      "grad_norm": 1.13987398147583,
      "learning_rate": 1.1891897243618184e-06,
      "loss": 0.1582,
      "step": 309
    },
    {
      "epoch": 0.8010335917312662,
      "grad_norm": 1.3159266710281372,
      "learning_rate": 1.16012379574733e-06,
      "loss": 0.1717,
      "step": 310
    },
    {
      "epoch": 0.8036175710594315,
      "grad_norm": 1.5035459995269775,
      "learning_rate": 1.1313708031358183e-06,
      "loss": 0.1672,
      "step": 311
    },
    {
      "epoch": 0.8062015503875969,
      "grad_norm": 1.5732388496398926,
      "learning_rate": 1.102933089792042e-06,
      "loss": 0.1763,
      "step": 312
    },
    {
      "epoch": 0.8087855297157622,
      "grad_norm": 1.2379496097564697,
      "learning_rate": 1.0748129732866391e-06,
      "loss": 0.1521,
      "step": 313
    },
    {
      "epoch": 0.8113695090439277,
      "grad_norm": 1.425968050956726,
      "learning_rate": 1.047012745307255e-06,
      "loss": 0.182,
      "step": 314
    },
    {
      "epoch": 0.813953488372093,
      "grad_norm": 1.199998378753662,
      "learning_rate": 1.0195346714717813e-06,
      "loss": 0.1522,
      "step": 315
    },
    {
      "epoch": 0.8165374677002584,
      "grad_norm": 1.3904505968093872,
      "learning_rate": 9.92380991143712e-07,
      "loss": 0.1705,
      "step": 316
    },
    {
      "epoch": 0.8191214470284238,
      "grad_norm": 1.4871166944503784,
      "learning_rate": 9.65553917249648e-07,
      "loss": 0.1799,
      "step": 317
    },
    {
      "epoch": 0.8217054263565892,
      "grad_norm": 1.431360125541687,
      "learning_rate": 9.39055636098945e-07,
      "loss": 0.1779,
      "step": 318
    },
    {
      "epoch": 0.8242894056847545,
      "grad_norm": 1.1474868059158325,
      "learning_rate": 9.128883072055411e-07,
      "loss": 0.1473,
      "step": 319
    },
    {
      "epoch": 0.8268733850129198,
      "grad_norm": 1.3604931831359863,
      "learning_rate": 8.870540631119667e-07,
      "loss": 0.1658,
      "step": 320
    },
    {
      "epoch": 0.8294573643410853,
      "grad_norm": 1.319649338722229,
      "learning_rate": 8.615550092155478e-07,
      "loss": 0.1635,
      "step": 321
    },
    {
      "epoch": 0.8320413436692506,
      "grad_norm": 1.6052929162979126,
      "learning_rate": 8.363932235968198e-07,
      "loss": 0.1862,
      "step": 322
    },
    {
      "epoch": 0.834625322997416,
      "grad_norm": 1.2895382642745972,
      "learning_rate": 8.115707568501768e-07,
      "loss": 0.1603,
      "step": 323
    },
    {
      "epoch": 0.8372093023255814,
      "grad_norm": 1.3937253952026367,
      "learning_rate": 7.870896319167548e-07,
      "loss": 0.1637,
      "step": 324
    },
    {
      "epoch": 0.8397932816537468,
      "grad_norm": 1.549171805381775,
      "learning_rate": 7.629518439195671e-07,
      "loss": 0.1911,
      "step": 325
    },
    {
      "epoch": 0.8423772609819121,
      "grad_norm": 1.5424485206604004,
      "learning_rate": 7.391593600009123e-07,
      "loss": 0.1835,
      "step": 326
    },
    {
      "epoch": 0.8449612403100775,
      "grad_norm": 1.322595477104187,
      "learning_rate": 7.157141191620548e-07,
      "loss": 0.1589,
      "step": 327
    },
    {
      "epoch": 0.8475452196382429,
      "grad_norm": 1.6589468717575073,
      "learning_rate": 6.926180321052045e-07,
      "loss": 0.1819,
      "step": 328
    },
    {
      "epoch": 0.8501291989664083,
      "grad_norm": 1.3434414863586426,
      "learning_rate": 6.698729810778065e-07,
      "loss": 0.1683,
      "step": 329
    },
    {
      "epoch": 0.8527131782945736,
      "grad_norm": 1.3851432800292969,
      "learning_rate": 6.474808197191401e-07,
      "loss": 0.1754,
      "step": 330
    },
    {
      "epoch": 0.8552971576227391,
      "grad_norm": 1.1711254119873047,
      "learning_rate": 6.254433729092518e-07,
      "loss": 0.1552,
      "step": 331
    },
    {
      "epoch": 0.8578811369509044,
      "grad_norm": 1.3657231330871582,
      "learning_rate": 6.037624366202405e-07,
      "loss": 0.1644,
      "step": 332
    },
    {
      "epoch": 0.8604651162790697,
      "grad_norm": 1.3525211811065674,
      "learning_rate": 5.824397777698859e-07,
      "loss": 0.173,
      "step": 333
    },
    {
      "epoch": 0.8630490956072352,
      "grad_norm": 1.3109862804412842,
      "learning_rate": 5.614771340776559e-07,
      "loss": 0.1692,
      "step": 334
    },
    {
      "epoch": 0.8656330749354005,
      "grad_norm": 1.399511694908142,
      "learning_rate": 5.408762139230889e-07,
      "loss": 0.1602,
      "step": 335
    },
    {
      "epoch": 0.8682170542635659,
      "grad_norm": 1.5356361865997314,
      "learning_rate": 5.206386962065601e-07,
      "loss": 0.181,
      "step": 336
    },
    {
      "epoch": 0.8708010335917312,
      "grad_norm": 1.3737794160842896,
      "learning_rate": 5.007662302124671e-07,
      "loss": 0.1687,
      "step": 337
    },
    {
      "epoch": 0.8733850129198967,
      "grad_norm": 1.4284069538116455,
      "learning_rate": 4.812604354748107e-07,
      "loss": 0.1673,
      "step": 338
    },
    {
      "epoch": 0.875968992248062,
      "grad_norm": 1.4897751808166504,
      "learning_rate": 4.6212290164521554e-07,
      "loss": 0.1727,
      "step": 339
    },
    {
      "epoch": 0.8785529715762274,
      "grad_norm": 1.7438304424285889,
      "learning_rate": 4.433551883633719e-07,
      "loss": 0.1734,
      "step": 340
    },
    {
      "epoch": 0.8811369509043928,
      "grad_norm": 1.7176605463027954,
      "learning_rate": 4.2495882512993913e-07,
      "loss": 0.1899,
      "step": 341
    },
    {
      "epoch": 0.8837209302325582,
      "grad_norm": 1.2422411441802979,
      "learning_rate": 4.069353111818913e-07,
      "loss": 0.1701,
      "step": 342
    },
    {
      "epoch": 0.8863049095607235,
      "grad_norm": 1.387807011604309,
      "learning_rate": 3.8928611537033424e-07,
      "loss": 0.1755,
      "step": 343
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.3703078031539917,
      "learning_rate": 3.7201267604080436e-07,
      "loss": 0.1632,
      "step": 344
    },
    {
      "epoch": 0.8914728682170543,
      "grad_norm": 1.3891550302505493,
      "learning_rate": 3.5511640091604293e-07,
      "loss": 0.1707,
      "step": 345
    },
    {
      "epoch": 0.8940568475452196,
      "grad_norm": 1.5258495807647705,
      "learning_rate": 3.385986669812769e-07,
      "loss": 0.1719,
      "step": 346
    },
    {
      "epoch": 0.896640826873385,
      "grad_norm": 1.3084255456924438,
      "learning_rate": 3.224608203719953e-07,
      "loss": 0.1639,
      "step": 347
    },
    {
      "epoch": 0.8992248062015504,
      "grad_norm": 1.3888843059539795,
      "learning_rate": 3.067041762642475e-07,
      "loss": 0.1721,
      "step": 348
    },
    {
      "epoch": 0.9018087855297158,
      "grad_norm": 1.3244494199752808,
      "learning_rate": 2.9133001876746004e-07,
      "loss": 0.1704,
      "step": 349
    },
    {
      "epoch": 0.9043927648578811,
      "grad_norm": 1.3492217063903809,
      "learning_rate": 2.763396008197833e-07,
      "loss": 0.1789,
      "step": 350
    },
    {
      "epoch": 0.9069767441860465,
      "grad_norm": 1.2785379886627197,
      "learning_rate": 2.617341440859883e-07,
      "loss": 0.1639,
      "step": 351
    },
    {
      "epoch": 0.9095607235142119,
      "grad_norm": 1.225648045539856,
      "learning_rate": 2.475148388578974e-07,
      "loss": 0.1525,
      "step": 352
    },
    {
      "epoch": 0.9121447028423773,
      "grad_norm": 1.4005565643310547,
      "learning_rate": 2.3368284395738684e-07,
      "loss": 0.1734,
      "step": 353
    },
    {
      "epoch": 0.9147286821705426,
      "grad_norm": 1.3667218685150146,
      "learning_rate": 2.2023928664194229e-07,
      "loss": 0.1598,
      "step": 354
    },
    {
      "epoch": 0.917312661498708,
      "grad_norm": 1.2286065816879272,
      "learning_rate": 2.0718526251279346e-07,
      "loss": 0.1533,
      "step": 355
    },
    {
      "epoch": 0.9198966408268734,
      "grad_norm": 1.3071902990341187,
      "learning_rate": 1.9452183542562785e-07,
      "loss": 0.1612,
      "step": 356
    },
    {
      "epoch": 0.9224806201550387,
      "grad_norm": 1.4558244943618774,
      "learning_rate": 1.8225003740388546e-07,
      "loss": 0.1624,
      "step": 357
    },
    {
      "epoch": 0.9250645994832042,
      "grad_norm": 1.3047966957092285,
      "learning_rate": 1.7037086855465902e-07,
      "loss": 0.1555,
      "step": 358
    },
    {
      "epoch": 0.9276485788113695,
      "grad_norm": 1.2621203660964966,
      "learning_rate": 1.5888529698718347e-07,
      "loss": 0.1514,
      "step": 359
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 1.2364767789840698,
      "learning_rate": 1.477942587339426e-07,
      "loss": 0.1648,
      "step": 360
    },
    {
      "epoch": 0.9328165374677002,
      "grad_norm": 1.6543505191802979,
      "learning_rate": 1.3709865767438434e-07,
      "loss": 0.1732,
      "step": 361
    },
    {
      "epoch": 0.9354005167958657,
      "grad_norm": 1.1805888414382935,
      "learning_rate": 1.2679936546125483e-07,
      "loss": 0.1489,
      "step": 362
    },
    {
      "epoch": 0.937984496124031,
      "grad_norm": 1.443524956703186,
      "learning_rate": 1.1689722144956672e-07,
      "loss": 0.1772,
      "step": 363
    },
    {
      "epoch": 0.9405684754521964,
      "grad_norm": 1.4192824363708496,
      "learning_rate": 1.0739303262819301e-07,
      "loss": 0.1672,
      "step": 364
    },
    {
      "epoch": 0.9431524547803618,
      "grad_norm": 1.573648452758789,
      "learning_rate": 9.82875735540989e-08,
      "loss": 0.1635,
      "step": 365
    },
    {
      "epoch": 0.9457364341085271,
      "grad_norm": 1.3067245483398438,
      "learning_rate": 8.95815862892202e-08,
      "loss": 0.1664,
      "step": 366
    },
    {
      "epoch": 0.9483204134366925,
      "grad_norm": 1.5704214572906494,
      "learning_rate": 8.127578033998663e-08,
      "loss": 0.1985,
      "step": 367
    },
    {
      "epoch": 0.9509043927648578,
      "grad_norm": 1.3045734167099,
      "learning_rate": 7.337083259949918e-08,
      "loss": 0.1687,
      "step": 368
    },
    {
      "epoch": 0.9534883720930233,
      "grad_norm": 1.2257012128829956,
      "learning_rate": 6.58673872923693e-08,
      "loss": 0.1668,
      "step": 369
    },
    {
      "epoch": 0.9560723514211886,
      "grad_norm": 1.3284975290298462,
      "learning_rate": 5.8766055922211965e-08,
      "loss": 0.1604,
      "step": 370
    },
    {
      "epoch": 0.958656330749354,
      "grad_norm": 1.494699478149414,
|
"learning_rate": 5.206741722181385e-08, |
|
"loss": 0.1802, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.9612403100775194, |
|
"grad_norm": 1.6620988845825195, |
|
"learning_rate": 4.577201710596613e-08, |
|
"loss": 0.1794, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.9638242894056848, |
|
"grad_norm": 1.295282244682312, |
|
"learning_rate": 3.98803686269783e-08, |
|
"loss": 0.1607, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.9664082687338501, |
|
"grad_norm": 1.3023289442062378, |
|
"learning_rate": 3.439295193286174e-08, |
|
"loss": 0.1539, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.9689922480620154, |
|
"grad_norm": 1.3331167697906494, |
|
"learning_rate": 2.9310214228202016e-08, |
|
"loss": 0.1695, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.9715762273901809, |
|
"grad_norm": 1.3956187963485718, |
|
"learning_rate": 2.463256973771311e-08, |
|
"loss": 0.1647, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.9741602067183462, |
|
"grad_norm": 1.1754802465438843, |
|
"learning_rate": 2.0360399672478826e-08, |
|
"loss": 0.1489, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.9767441860465116, |
|
"grad_norm": 1.3734043836593628, |
|
"learning_rate": 1.6494052198886557e-08, |
|
"loss": 0.1675, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.979328165374677, |
|
"grad_norm": 1.3153494596481323, |
|
"learning_rate": 1.3033842410251074e-08, |
|
"loss": 0.1568, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.9819121447028424, |
|
"grad_norm": 1.3614672422409058, |
|
"learning_rate": 9.980052301137854e-09, |
|
"loss": 0.1564, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.9844961240310077, |
|
"grad_norm": 1.2049131393432617, |
|
"learning_rate": 7.332930744380906e-09, |
|
"loss": 0.1575, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.9870801033591732, |
|
"grad_norm": 1.2012826204299927, |
|
"learning_rate": 5.092693470798438e-09, |
|
"loss": 0.1483, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.9896640826873385, |
|
"grad_norm": 1.3296922445297241, |
|
"learning_rate": 3.2595230516152543e-09, |
|
"loss": 0.1578, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.9922480620155039, |
|
"grad_norm": 1.3670560121536255, |
|
"learning_rate": 1.8335688835802169e-09, |
|
"loss": 0.1673, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.9948320413436692, |
|
"grad_norm": 1.4654539823532104, |
|
"learning_rate": 8.149471767937567e-10, |
|
"loss": 0.1594, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.9974160206718347, |
|
"grad_norm": 1.3917102813720703, |
|
"learning_rate": 2.0374094523600841e-10, |
|
"loss": 0.1723, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.2807058095932007, |
|
"learning_rate": 0.0, |
|
"loss": 0.1694, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.3819105327129364, |
|
"eval_runtime": 3.48, |
|
"eval_samples_per_second": 66.091, |
|
"eval_steps_per_second": 2.874, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 387, |
|
"total_flos": 1.3654500629387674e+17, |
|
"train_loss": 0.24298929205544542, |
|
"train_runtime": 1239.3737, |
|
"train_samples_per_second": 14.982, |
|
"train_steps_per_second": 0.312 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 387, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 800, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.3654500629387674e+17, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |