{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9952038369304557,
  "eval_steps": 500,
  "global_step": 208,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009592326139088728,
      "grad_norm": 2.6591761112213135,
      "learning_rate": 9.523809523809525e-07,
      "loss": 0.7408,
      "step": 1
    },
    {
      "epoch": 0.019184652278177457,
      "grad_norm": 2.5914506912231445,
      "learning_rate": 1.904761904761905e-06,
      "loss": 0.7838,
      "step": 2
    },
    {
      "epoch": 0.02877697841726619,
      "grad_norm": 2.4312615394592285,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.7863,
      "step": 3
    },
    {
      "epoch": 0.03836930455635491,
      "grad_norm": 2.2820498943328857,
      "learning_rate": 3.80952380952381e-06,
      "loss": 0.775,
      "step": 4
    },
    {
      "epoch": 0.047961630695443645,
      "grad_norm": 1.923662543296814,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.7394,
      "step": 5
    },
    {
      "epoch": 0.05755395683453238,
      "grad_norm": 1.4304736852645874,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.7485,
      "step": 6
    },
    {
      "epoch": 0.0671462829736211,
      "grad_norm": 1.3127758502960205,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.7022,
      "step": 7
    },
    {
      "epoch": 0.07673860911270983,
      "grad_norm": 1.3539044857025146,
      "learning_rate": 7.61904761904762e-06,
      "loss": 0.6913,
      "step": 8
    },
    {
      "epoch": 0.08633093525179857,
      "grad_norm": 1.3196394443511963,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.7041,
      "step": 9
    },
    {
      "epoch": 0.09592326139088729,
      "grad_norm": 1.1037274599075317,
      "learning_rate": 9.523809523809525e-06,
      "loss": 0.6186,
      "step": 10
    },
    {
      "epoch": 0.10551558752997602,
      "grad_norm": 1.3987900018692017,
      "learning_rate": 1.0476190476190477e-05,
      "loss": 0.6306,
      "step": 11
    },
    {
      "epoch": 0.11510791366906475,
      "grad_norm": 0.9192888736724854,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.6073,
      "step": 12
    },
    {
      "epoch": 0.12470023980815348,
      "grad_norm": 0.7664029002189636,
      "learning_rate": 1.2380952380952383e-05,
      "loss": 0.6209,
      "step": 13
    },
    {
      "epoch": 0.1342925659472422,
      "grad_norm": 1.2668217420578003,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6077,
      "step": 14
    },
    {
      "epoch": 0.14388489208633093,
      "grad_norm": 1.1923428773880005,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.5864,
      "step": 15
    },
    {
      "epoch": 0.15347721822541965,
      "grad_norm": 0.8919103145599365,
      "learning_rate": 1.523809523809524e-05,
      "loss": 0.5837,
      "step": 16
    },
    {
      "epoch": 0.1630695443645084,
      "grad_norm": 0.8827866911888123,
      "learning_rate": 1.6190476190476193e-05,
      "loss": 0.5701,
      "step": 17
    },
    {
      "epoch": 0.17266187050359713,
      "grad_norm": 0.9107728600502014,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.5569,
      "step": 18
    },
    {
      "epoch": 0.18225419664268586,
      "grad_norm": 0.8279157876968384,
      "learning_rate": 1.8095238095238097e-05,
      "loss": 0.5824,
      "step": 19
    },
    {
      "epoch": 0.19184652278177458,
      "grad_norm": 0.5704746842384338,
      "learning_rate": 1.904761904761905e-05,
      "loss": 0.5305,
      "step": 20
    },
    {
      "epoch": 0.2014388489208633,
      "grad_norm": 0.788601815700531,
      "learning_rate": 2e-05,
      "loss": 0.5451,
      "step": 21
    },
    {
      "epoch": 0.21103117505995203,
      "grad_norm": 0.9100444912910461,
      "learning_rate": 1.9998588839790777e-05,
      "loss": 0.5611,
      "step": 22
    },
    {
      "epoch": 0.22062350119904076,
      "grad_norm": 0.529228925704956,
      "learning_rate": 1.999435575743774e-05,
      "loss": 0.5204,
      "step": 23
    },
    {
      "epoch": 0.2302158273381295,
      "grad_norm": 0.5239225625991821,
      "learning_rate": 1.9987301947652354e-05,
      "loss": 0.5459,
      "step": 24
    },
    {
      "epoch": 0.23980815347721823,
      "grad_norm": 0.6502991914749146,
      "learning_rate": 1.9977429401245764e-05,
      "loss": 0.5501,
      "step": 25
    },
    {
      "epoch": 0.24940047961630696,
      "grad_norm": 0.5559574365615845,
      "learning_rate": 1.9964740904566903e-05,
      "loss": 0.5219,
      "step": 26
    },
    {
      "epoch": 0.2589928057553957,
      "grad_norm": 0.5296733379364014,
      "learning_rate": 1.9949240038716092e-05,
      "loss": 0.5043,
      "step": 27
    },
    {
      "epoch": 0.2685851318944844,
      "grad_norm": 0.4866749942302704,
      "learning_rate": 1.9930931178534353e-05,
      "loss": 0.5106,
      "step": 28
    },
    {
      "epoch": 0.27817745803357313,
      "grad_norm": 1.6436631679534912,
      "learning_rate": 1.9909819491368677e-05,
      "loss": 0.5059,
      "step": 29
    },
    {
      "epoch": 0.28776978417266186,
      "grad_norm": 0.5561839938163757,
      "learning_rate": 1.988591093561364e-05,
      "loss": 0.5209,
      "step": 30
    },
    {
      "epoch": 0.2973621103117506,
      "grad_norm": 0.4857185482978821,
      "learning_rate": 1.985921225902975e-05,
      "loss": 0.5173,
      "step": 31
    },
    {
      "epoch": 0.3069544364508393,
      "grad_norm": 0.41804176568984985,
      "learning_rate": 1.982973099683902e-05,
      "loss": 0.4928,
      "step": 32
    },
    {
      "epoch": 0.31654676258992803,
      "grad_norm": 0.49216267466545105,
      "learning_rate": 1.9797475469598267e-05,
      "loss": 0.4993,
      "step": 33
    },
    {
      "epoch": 0.3261390887290168,
      "grad_norm": 0.4702392518520355,
      "learning_rate": 1.9762454780850807e-05,
      "loss": 0.4934,
      "step": 34
    },
    {
      "epoch": 0.33573141486810554,
      "grad_norm": 0.4079640507698059,
      "learning_rate": 1.972467881455713e-05,
      "loss": 0.5058,
      "step": 35
    },
    {
      "epoch": 0.34532374100719426,
      "grad_norm": 0.46476152539253235,
      "learning_rate": 1.968415823230534e-05,
      "loss": 0.4971,
      "step": 36
    },
    {
      "epoch": 0.354916067146283,
      "grad_norm": 0.43748095631599426,
      "learning_rate": 1.96409044703021e-05,
      "loss": 0.4743,
      "step": 37
    },
    {
      "epoch": 0.3645083932853717,
      "grad_norm": 0.46373656392097473,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.5153,
      "step": 38
    },
    {
      "epoch": 0.37410071942446044,
      "grad_norm": 0.42808473110198975,
      "learning_rate": 1.9546247005377065e-05,
      "loss": 0.4818,
      "step": 39
    },
    {
      "epoch": 0.38369304556354916,
      "grad_norm": 0.4117816388607025,
      "learning_rate": 1.9494870017824877e-05,
      "loss": 0.4673,
      "step": 40
    },
    {
      "epoch": 0.3932853717026379,
      "grad_norm": 0.43123477697372437,
      "learning_rate": 1.9440813273720504e-05,
      "loss": 0.4622,
      "step": 41
    },
    {
      "epoch": 0.4028776978417266,
      "grad_norm": 0.36057785153388977,
      "learning_rate": 1.938409202960922e-05,
      "loss": 0.459,
      "step": 42
    },
    {
      "epoch": 0.41247002398081534,
      "grad_norm": 0.3770412802696228,
      "learning_rate": 1.932472229404356e-05,
      "loss": 0.4919,
      "step": 43
    },
    {
      "epoch": 0.42206235011990406,
      "grad_norm": 0.4013001620769501,
      "learning_rate": 1.9262720823065217e-05,
      "loss": 0.4957,
      "step": 44
    },
    {
      "epoch": 0.4316546762589928,
      "grad_norm": 0.3589963912963867,
      "learning_rate": 1.9198105115475946e-05,
      "loss": 0.4775,
      "step": 45
    },
    {
      "epoch": 0.4412470023980815,
      "grad_norm": 0.3630225658416748,
      "learning_rate": 1.9130893407898834e-05,
      "loss": 0.4828,
      "step": 46
    },
    {
      "epoch": 0.45083932853717024,
      "grad_norm": 0.32825136184692383,
      "learning_rate": 1.9061104669631343e-05,
      "loss": 0.4686,
      "step": 47
    },
    {
      "epoch": 0.460431654676259,
      "grad_norm": 0.34000617265701294,
      "learning_rate": 1.8988758597291577e-05,
      "loss": 0.4886,
      "step": 48
    },
    {
      "epoch": 0.47002398081534774,
      "grad_norm": 0.3201892673969269,
      "learning_rate": 1.8913875609259246e-05,
      "loss": 0.4587,
      "step": 49
    },
    {
      "epoch": 0.47961630695443647,
      "grad_norm": 0.3151942789554596,
      "learning_rate": 1.8836476839912967e-05,
      "loss": 0.4727,
      "step": 50
    },
    {
      "epoch": 0.4892086330935252,
      "grad_norm": 0.3298013210296631,
      "learning_rate": 1.8756584133665447e-05,
      "loss": 0.4888,
      "step": 51
    },
    {
      "epoch": 0.4988009592326139,
      "grad_norm": 0.3365343511104584,
      "learning_rate": 1.86742200387983e-05,
      "loss": 0.4842,
      "step": 52
    },
    {
      "epoch": 0.5083932853717026,
      "grad_norm": 0.3066977560520172,
      "learning_rate": 1.8589407801098192e-05,
      "loss": 0.4366,
      "step": 53
    },
    {
      "epoch": 0.5179856115107914,
      "grad_norm": 0.3741927444934845,
      "learning_rate": 1.8502171357296144e-05,
      "loss": 0.47,
      "step": 54
    },
    {
      "epoch": 0.5275779376498801,
      "grad_norm": 0.32977259159088135,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4816,
      "step": 55
    },
    {
      "epoch": 0.5371702637889688,
      "grad_norm": 0.313314288854599,
      "learning_rate": 1.8320525012304685e-05,
      "loss": 0.4529,
      "step": 56
    },
    {
      "epoch": 0.5467625899280576,
      "grad_norm": 0.3619432747364044,
      "learning_rate": 1.8226166377534113e-05,
      "loss": 0.4583,
      "step": 57
    },
    {
      "epoch": 0.5563549160671463,
      "grad_norm": 0.2958439886569977,
      "learning_rate": 1.8129486055030255e-05,
      "loss": 0.4861,
      "step": 58
    },
    {
      "epoch": 0.565947242206235,
      "grad_norm": 0.3760446012020111,
      "learning_rate": 1.8030511331077945e-05,
      "loss": 0.4245,
      "step": 59
    },
    {
      "epoch": 0.5755395683453237,
      "grad_norm": 0.3982815146446228,
      "learning_rate": 1.7929270139515606e-05,
      "loss": 0.458,
      "step": 60
    },
    {
      "epoch": 0.5851318944844125,
      "grad_norm": 0.31815674901008606,
      "learning_rate": 1.782579105385145e-05,
      "loss": 0.4383,
      "step": 61
    },
    {
      "epoch": 0.5947242206235012,
      "grad_norm": 0.3115115463733673,
      "learning_rate": 1.772010327919912e-05,
      "loss": 0.4809,
      "step": 62
    },
    {
      "epoch": 0.60431654676259,
      "grad_norm": 0.3637670576572418,
      "learning_rate": 1.761223664403505e-05,
      "loss": 0.4685,
      "step": 63
    },
    {
      "epoch": 0.6139088729016786,
      "grad_norm": 0.3954397141933441,
      "learning_rate": 1.7502221591779932e-05,
      "loss": 0.4591,
      "step": 64
    },
    {
      "epoch": 0.6235011990407674,
      "grad_norm": 0.30222615599632263,
      "learning_rate": 1.7390089172206594e-05,
      "loss": 0.4747,
      "step": 65
    },
    {
      "epoch": 0.6330935251798561,
      "grad_norm": 0.3727622926235199,
      "learning_rate": 1.727587103267677e-05,
      "loss": 0.4706,
      "step": 66
    },
    {
      "epoch": 0.6426858513189448,
      "grad_norm": 0.35147586464881897,
      "learning_rate": 1.7159599409209194e-05,
      "loss": 0.5017,
      "step": 67
    },
    {
      "epoch": 0.6522781774580336,
      "grad_norm": 0.3155067265033722,
      "learning_rate": 1.704130711738157e-05,
      "loss": 0.4193,
      "step": 68
    },
    {
      "epoch": 0.6618705035971223,
      "grad_norm": 0.328908234834671,
      "learning_rate": 1.692102754306895e-05,
      "loss": 0.4606,
      "step": 69
    },
    {
      "epoch": 0.6714628297362111,
      "grad_norm": 0.3115673363208771,
      "learning_rate": 1.6798794633021192e-05,
      "loss": 0.4637,
      "step": 70
    },
    {
      "epoch": 0.6810551558752997,
      "grad_norm": 0.34590572118759155,
      "learning_rate": 1.667464288528207e-05,
      "loss": 0.4672,
      "step": 71
    },
    {
      "epoch": 0.6906474820143885,
      "grad_norm": 0.32452234625816345,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4525,
      "step": 72
    },
    {
      "epoch": 0.7002398081534772,
      "grad_norm": 0.35198819637298584,
      "learning_rate": 1.6420723566802982e-05,
      "loss": 0.4512,
      "step": 73
    },
    {
      "epoch": 0.709832134292566,
      "grad_norm": 0.3182852268218994,
      "learning_rate": 1.6291027660230735e-05,
      "loss": 0.4645,
      "step": 74
    },
    {
      "epoch": 0.7194244604316546,
      "grad_norm": 0.34308016300201416,
      "learning_rate": 1.6159556224076637e-05,
      "loss": 0.4418,
      "step": 75
    },
    {
      "epoch": 0.7290167865707434,
      "grad_norm": 0.36044055223464966,
      "learning_rate": 1.6026346363792565e-05,
      "loss": 0.4761,
      "step": 76
    },
    {
      "epoch": 0.7386091127098321,
      "grad_norm": 0.3420026898384094,
      "learning_rate": 1.5891435675469376e-05,
      "loss": 0.4759,
      "step": 77
    },
    {
      "epoch": 0.7482014388489209,
      "grad_norm": 0.3534936010837555,
      "learning_rate": 1.57548622352261e-05,
      "loss": 0.4734,
      "step": 78
    },
    {
      "epoch": 0.7577937649880095,
      "grad_norm": 0.3484748601913452,
      "learning_rate": 1.561666458846365e-05,
      "loss": 0.4592,
      "step": 79
    },
    {
      "epoch": 0.7673860911270983,
      "grad_norm": 0.32589343190193176,
      "learning_rate": 1.5476881738986037e-05,
      "loss": 0.4715,
      "step": 80
    },
    {
      "epoch": 0.7769784172661871,
      "grad_norm": 0.30610567331314087,
      "learning_rate": 1.5335553137992286e-05,
      "loss": 0.425,
      "step": 81
    },
    {
      "epoch": 0.7865707434052758,
      "grad_norm": 0.3320309817790985,
      "learning_rate": 1.519271867294203e-05,
      "loss": 0.4441,
      "step": 82
    },
    {
      "epoch": 0.7961630695443646,
      "grad_norm": 0.31670084595680237,
      "learning_rate": 1.504841865629799e-05,
      "loss": 0.4902,
      "step": 83
    },
    {
      "epoch": 0.8057553956834532,
      "grad_norm": 0.3276488184928894,
      "learning_rate": 1.490269381414849e-05,
      "loss": 0.4918,
      "step": 84
    },
    {
      "epoch": 0.815347721822542,
      "grad_norm": 0.35845503211021423,
      "learning_rate": 1.4755585274713289e-05,
      "loss": 0.4449,
      "step": 85
    },
    {
      "epoch": 0.8249400479616307,
      "grad_norm": 0.30946382880210876,
      "learning_rate": 1.4607134556735836e-05,
      "loss": 0.4239,
      "step": 86
    },
    {
      "epoch": 0.8345323741007195,
      "grad_norm": 0.35152146220207214,
      "learning_rate": 1.4457383557765385e-05,
      "loss": 0.4795,
      "step": 87
    },
    {
      "epoch": 0.8441247002398081,
      "grad_norm": 0.3176645338535309,
      "learning_rate": 1.4306374542332141e-05,
      "loss": 0.459,
      "step": 88
    },
    {
      "epoch": 0.8537170263788969,
      "grad_norm": 0.31331124901771545,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.4471,
      "step": 89
    },
    {
      "epoch": 0.8633093525179856,
      "grad_norm": 0.32980385422706604,
      "learning_rate": 1.4000753283432267e-05,
      "loss": 0.4765,
      "step": 90
    },
    {
      "epoch": 0.8729016786570744,
      "grad_norm": 0.3267308175563812,
      "learning_rate": 1.3846227296077568e-05,
      "loss": 0.464,
      "step": 91
    },
    {
      "epoch": 0.882494004796163,
      "grad_norm": 0.30493155121803284,
      "learning_rate": 1.3690615780139703e-05,
      "loss": 0.4371,
      "step": 92
    },
    {
      "epoch": 0.8920863309352518,
      "grad_norm": 0.28876054286956787,
      "learning_rate": 1.3533962654174542e-05,
      "loss": 0.4609,
      "step": 93
    },
    {
      "epoch": 0.9016786570743405,
      "grad_norm": 0.2889139652252197,
      "learning_rate": 1.337631213071369e-05,
      "loss": 0.4466,
      "step": 94
    },
    {
      "epoch": 0.9112709832134293,
      "grad_norm": 0.3536216914653778,
      "learning_rate": 1.321770870378628e-05,
      "loss": 0.4531,
      "step": 95
    },
    {
      "epoch": 0.920863309352518,
      "grad_norm": 0.3439409136772156,
      "learning_rate": 1.3058197136361344e-05,
      "loss": 0.4522,
      "step": 96
    },
    {
      "epoch": 0.9304556354916067,
      "grad_norm": 0.3083202540874481,
      "learning_rate": 1.2897822447714247e-05,
      "loss": 0.4605,
      "step": 97
    },
    {
      "epoch": 0.9400479616306955,
      "grad_norm": 0.3410809636116028,
      "learning_rate": 1.2736629900720832e-05,
      "loss": 0.4389,
      "step": 98
    },
    {
      "epoch": 0.9496402877697842,
      "grad_norm": 0.28464362025260925,
      "learning_rate": 1.257466498908276e-05,
      "loss": 0.4509,
      "step": 99
    },
    {
      "epoch": 0.9592326139088729,
      "grad_norm": 0.279031366109848,
      "learning_rate": 1.2411973424487751e-05,
      "loss": 0.4728,
      "step": 100
    },
    {
      "epoch": 0.9688249400479616,
      "grad_norm": 0.29481759667396545,
      "learning_rate": 1.2248601123708279e-05,
      "loss": 0.4602,
      "step": 101
    },
    {
      "epoch": 0.9784172661870504,
      "grad_norm": 0.30976414680480957,
      "learning_rate": 1.2084594195642367e-05,
      "loss": 0.4558,
      "step": 102
    },
    {
      "epoch": 0.988009592326139,
      "grad_norm": 0.31227511167526245,
      "learning_rate": 1.1919998928300203e-05,
      "loss": 0.4542,
      "step": 103
    },
    {
      "epoch": 0.9976019184652278,
      "grad_norm": 0.30893808603286743,
      "learning_rate": 1.1754861775740163e-05,
      "loss": 0.4797,
      "step": 104
    },
    {
      "epoch": 1.0071942446043165,
      "grad_norm": 0.6754066348075867,
      "learning_rate": 1.1589229344958e-05,
      "loss": 0.7123,
      "step": 105
    },
    {
      "epoch": 1.0167865707434052,
      "grad_norm": 0.2993621528148651,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.3439,
      "step": 106
    },
    {
      "epoch": 1.026378896882494,
      "grad_norm": 0.37703534960746765,
      "learning_rate": 1.1256665762433798e-05,
      "loss": 0.3908,
      "step": 107
    },
    {
      "epoch": 1.0359712230215827,
      "grad_norm": 0.3754074275493622,
      "learning_rate": 1.1089828470790694e-05,
      "loss": 0.3997,
      "step": 108
    },
    {
      "epoch": 1.0455635491606714,
      "grad_norm": 0.3877393901348114,
      "learning_rate": 1.092268359463302e-05,
      "loss": 0.4586,
      "step": 109
    },
    {
      "epoch": 1.0551558752997603,
      "grad_norm": 0.40832141041755676,
      "learning_rate": 1.0755278307600459e-05,
      "loss": 0.4046,
      "step": 110
    },
    {
      "epoch": 1.064748201438849,
      "grad_norm": 0.3649183213710785,
      "learning_rate": 1.058765985682898e-05,
      "loss": 0.374,
      "step": 111
    },
    {
      "epoch": 1.0743405275779376,
      "grad_norm": 0.3570377826690674,
      "learning_rate": 1.0419875549616196e-05,
      "loss": 0.3692,
      "step": 112
    },
    {
      "epoch": 1.0839328537170263,
      "grad_norm": 0.4568242132663727,
      "learning_rate": 1.0251972740069724e-05,
      "loss": 0.4327,
      "step": 113
    },
    {
      "epoch": 1.0935251798561152,
      "grad_norm": 0.3146553039550781,
      "learning_rate": 1.0083998815742335e-05,
      "loss": 0.3766,
      "step": 114
    },
    {
      "epoch": 1.1031175059952039,
      "grad_norm": 0.29733189940452576,
      "learning_rate": 9.916001184257668e-06,
      "loss": 0.3503,
      "step": 115
    },
    {
      "epoch": 1.1127098321342925,
      "grad_norm": 0.3751276433467865,
      "learning_rate": 9.748027259930276e-06,
      "loss": 0.4089,
      "step": 116
    },
    {
      "epoch": 1.1223021582733812,
      "grad_norm": 0.38529419898986816,
      "learning_rate": 9.580124450383804e-06,
      "loss": 0.4489,
      "step": 117
    },
    {
      "epoch": 1.13189448441247,
      "grad_norm": 0.32772964239120483,
      "learning_rate": 9.412340143171025e-06,
      "loss": 0.3736,
      "step": 118
    },
    {
      "epoch": 1.1414868105515588,
      "grad_norm": 0.32223913073539734,
      "learning_rate": 9.244721692399545e-06,
      "loss": 0.3961,
      "step": 119
    },
    {
      "epoch": 1.1510791366906474,
      "grad_norm": 0.3014141917228699,
      "learning_rate": 9.07731640536698e-06,
      "loss": 0.3863,
      "step": 120
    },
    {
      "epoch": 1.160671462829736,
      "grad_norm": 0.3261420726776123,
      "learning_rate": 8.910171529209306e-06,
      "loss": 0.3797,
      "step": 121
    },
    {
      "epoch": 1.170263788968825,
      "grad_norm": 0.30444827675819397,
      "learning_rate": 8.743334237566202e-06,
      "loss": 0.4264,
      "step": 122
    },
    {
      "epoch": 1.1798561151079137,
      "grad_norm": 0.26329320669174194,
      "learning_rate": 8.576851617267151e-06,
      "loss": 0.388,
      "step": 123
    },
    {
      "epoch": 1.1894484412470023,
      "grad_norm": 0.28534045815467834,
      "learning_rate": 8.410770655042003e-06,
      "loss": 0.3899,
      "step": 124
    },
    {
      "epoch": 1.1990407673860912,
      "grad_norm": 0.30171987414360046,
      "learning_rate": 8.24513822425984e-06,
      "loss": 0.431,
      "step": 125
    },
    {
      "epoch": 1.20863309352518,
      "grad_norm": 0.27923017740249634,
      "learning_rate": 8.0800010716998e-06,
      "loss": 0.383,
      "step": 126
    },
    {
      "epoch": 1.2182254196642686,
      "grad_norm": 0.24267299473285675,
      "learning_rate": 7.915405804357632e-06,
      "loss": 0.3746,
      "step": 127
    },
    {
      "epoch": 1.2278177458033572,
      "grad_norm": 0.2610694169998169,
      "learning_rate": 7.751398876291725e-06,
      "loss": 0.3792,
      "step": 128
    },
    {
      "epoch": 1.2374100719424461,
      "grad_norm": 0.29287707805633545,
      "learning_rate": 7.58802657551225e-06,
      "loss": 0.4404,
      "step": 129
    },
    {
      "epoch": 1.2470023980815348,
      "grad_norm": 0.33182862401008606,
      "learning_rate": 7.425335010917244e-06,
      "loss": 0.3757,
      "step": 130
    },
    {
      "epoch": 1.2565947242206235,
      "grad_norm": 0.3336862325668335,
      "learning_rate": 7.263370099279173e-06,
      "loss": 0.474,
      "step": 131
    },
    {
      "epoch": 1.2661870503597124,
      "grad_norm": 0.26169589161872864,
      "learning_rate": 7.102177552285753e-06,
      "loss": 0.3462,
      "step": 132
    },
    {
      "epoch": 1.275779376498801,
      "grad_norm": 0.29609155654907227,
      "learning_rate": 6.9418028636386595e-06,
      "loss": 0.3856,
      "step": 133
    },
    {
      "epoch": 1.2853717026378897,
      "grad_norm": 0.293792188167572,
      "learning_rate": 6.7822912962137225e-06,
      "loss": 0.3923,
      "step": 134
    },
    {
      "epoch": 1.2949640287769784,
      "grad_norm": 0.2877916395664215,
      "learning_rate": 6.623687869286314e-06,
      "loss": 0.4054,
      "step": 135
    },
    {
      "epoch": 1.304556354916067,
      "grad_norm": 0.26534056663513184,
      "learning_rate": 6.466037345825462e-06,
      "loss": 0.3854,
      "step": 136
    },
    {
      "epoch": 1.314148681055156,
      "grad_norm": 0.2668323218822479,
      "learning_rate": 6.3093842198603014e-06,
      "loss": 0.3847,
      "step": 137
    },
    {
      "epoch": 1.3237410071942446,
      "grad_norm": 0.2445860207080841,
      "learning_rate": 6.153772703922434e-06,
      "loss": 0.3721,
      "step": 138
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.27340319752693176,
      "learning_rate": 5.999246716567737e-06,
      "loss": 0.3854,
      "step": 139
    },
    {
      "epoch": 1.3429256594724222,
      "grad_norm": 0.2549520432949066,
      "learning_rate": 5.845849869981137e-06,
      "loss": 0.3819,
      "step": 140
    },
    {
      "epoch": 1.3525179856115108,
      "grad_norm": 0.21786105632781982,
      "learning_rate": 5.693625457667862e-06,
      "loss": 0.3562,
      "step": 141
    },
    {
      "epoch": 1.3621103117505995,
      "grad_norm": 0.2585865259170532,
      "learning_rate": 5.542616442234618e-06,
      "loss": 0.4148,
      "step": 142
    },
    {
      "epoch": 1.3717026378896882,
      "grad_norm": 0.25335797667503357,
      "learning_rate": 5.392865443264164e-06,
      "loss": 0.4044,
      "step": 143
    },
    {
      "epoch": 1.381294964028777,
      "grad_norm": 0.23658272624015808,
      "learning_rate": 5.244414725286717e-06,
      "loss": 0.404,
      "step": 144
    },
    {
      "epoch": 1.3908872901678657,
      "grad_norm": 0.2405097484588623,
      "learning_rate": 5.097306185851515e-06,
      "loss": 0.405,
      "step": 145
    },
    {
      "epoch": 1.4004796163069544,
      "grad_norm": 0.22226805984973907,
      "learning_rate": 4.951581343702014e-06,
      "loss": 0.3787,
      "step": 146
    },
    {
      "epoch": 1.4100719424460433,
      "grad_norm": 0.2628573179244995,
      "learning_rate": 4.807281327057972e-06,
      "loss": 0.3969,
      "step": 147
    },
    {
      "epoch": 1.419664268585132,
      "grad_norm": 0.2247915416955948,
      "learning_rate": 4.664446862007718e-06,
      "loss": 0.3686,
      "step": 148
    },
    {
      "epoch": 1.4292565947242206,
      "grad_norm": 0.2676323652267456,
      "learning_rate": 4.523118261013969e-06,
      "loss": 0.4435,
      "step": 149
    },
    {
      "epoch": 1.4388489208633093,
      "grad_norm": 0.25832346081733704,
      "learning_rate": 4.383335411536357e-06,
      "loss": 0.3731,
      "step": 150
    },
    {
      "epoch": 1.448441247002398,
      "grad_norm": 0.24996225535869598,
      "learning_rate": 4.245137764773899e-06,
      "loss": 0.3882,
      "step": 151
    },
    {
      "epoch": 1.4580335731414868,
      "grad_norm": 0.21546132862567902,
      "learning_rate": 4.108564324530626e-06,
      "loss": 0.3697,
      "step": 152
    },
    {
      "epoch": 1.4676258992805755,
      "grad_norm": 0.25346457958221436,
      "learning_rate": 3.973653636207437e-06,
      "loss": 0.4264,
      "step": 153
    },
    {
      "epoch": 1.4772182254196642,
      "grad_norm": 0.22166943550109863,
      "learning_rate": 3.840443775923365e-06,
      "loss": 0.3594,
      "step": 154
    },
    {
      "epoch": 1.486810551558753,
      "grad_norm": 0.22984443604946136,
      "learning_rate": 3.70897233976927e-06,
      "loss": 0.3914,
      "step": 155
    },
    {
      "epoch": 1.4964028776978417,
      "grad_norm": 0.24873270094394684,
      "learning_rate": 3.5792764331970187e-06,
      "loss": 0.3922,
      "step": 156
    },
    {
      "epoch": 1.5059952038369304,
      "grad_norm": 0.23538856208324432,
      "learning_rate": 3.4513926605471504e-06,
      "loss": 0.3665,
      "step": 157
    },
    {
      "epoch": 1.515587529976019,
      "grad_norm": 0.2180040031671524,
      "learning_rate": 3.3253571147179333e-06,
      "loss": 0.3381,
      "step": 158
    },
    {
      "epoch": 1.5251798561151078,
      "grad_norm": 0.24928021430969238,
      "learning_rate": 3.2012053669788136e-06,
      "loss": 0.4613,
      "step": 159
    },
    {
      "epoch": 1.5347721822541966,
      "grad_norm": 0.2552678883075714,
      "learning_rate": 3.0789724569310532e-06,
      "loss": 0.4428,
      "step": 160
    },
    {
      "epoch": 1.5443645083932853,
      "grad_norm": 0.19574448466300964,
      "learning_rate": 2.9586928826184323e-06,
      "loss": 0.3357,
      "step": 161
    },
    {
      "epoch": 1.5539568345323742,
      "grad_norm": 0.22724831104278564,
      "learning_rate": 2.8404005907908083e-06,
      "loss": 0.4203,
      "step": 162
    },
    {
      "epoch": 1.5635491606714629,
      "grad_norm": 0.21780751645565033,
      "learning_rate": 2.724128967323234e-06,
      "loss": 0.3881,
      "step": 163
    },
    {
      "epoch": 1.5731414868105515,
      "grad_norm": 0.24764929711818695,
      "learning_rate": 2.6099108277934105e-06,
      "loss": 0.4334,
      "step": 164
    },
    {
      "epoch": 1.5827338129496402,
      "grad_norm": 0.23750008642673492,
      "learning_rate": 2.4977784082200728e-06,
      "loss": 0.3885,
      "step": 165
    },
    {
      "epoch": 1.5923261390887289,
      "grad_norm": 0.24234673380851746,
      "learning_rate": 2.3877633559649505e-06,
      "loss": 0.3901,
      "step": 166
    },
    {
      "epoch": 1.6019184652278178,
      "grad_norm": 0.22565028071403503,
      "learning_rate": 2.2798967208008806e-06,
      "loss": 0.3769,
      "step": 167
    },
    {
      "epoch": 1.6115107913669064,
      "grad_norm": 0.2320602983236313,
      "learning_rate": 2.1742089461485504e-06,
      "loss": 0.404,
      "step": 168
    },
    {
      "epoch": 1.6211031175059953,
      "grad_norm": 0.21093355119228363,
      "learning_rate": 2.0707298604843964e-06,
      "loss": 0.3879,
      "step": 169
    },
    {
      "epoch": 1.630695443645084,
      "grad_norm": 0.23324111104011536,
      "learning_rate": 1.9694886689220592e-06,
      "loss": 0.3728,
      "step": 170
    },
    {
      "epoch": 1.6402877697841727,
      "grad_norm": 0.23098240792751312,
      "learning_rate": 1.870513944969743e-06,
      "loss": 0.4245,
      "step": 171
    },
    {
      "epoch": 1.6498800959232613,
      "grad_norm": 0.21313826739788055,
      "learning_rate": 1.773833622465888e-06,
      "loss": 0.3685,
      "step": 172
    },
    {
      "epoch": 1.65947242206235,
      "grad_norm": 0.2202357053756714,
      "learning_rate": 1.6794749876953187e-06,
      "loss": 0.385,
      "step": 173
    },
    {
      "epoch": 1.6690647482014387,
      "grad_norm": 0.24201858043670654,
      "learning_rate": 1.587464671688187e-06,
      "loss": 0.4342,
      "step": 174
    },
    {
      "epoch": 1.6786570743405276,
      "grad_norm": 0.22546477615833282,
      "learning_rate": 1.4978286427038602e-06,
      "loss": 0.3845,
      "step": 175
    },
    {
      "epoch": 1.6882494004796165,
      "grad_norm": 0.2069396823644638,
      "learning_rate": 1.4105921989018112e-06,
      "loss": 0.3671,
      "step": 176
    },
    {
      "epoch": 1.6978417266187051,
      "grad_norm": 0.22818851470947266,
      "learning_rate": 1.325779961201703e-06,
      "loss": 0.4138,
      "step": 177
    },
    {
      "epoch": 1.7074340527577938,
      "grad_norm": 0.1912079006433487,
      "learning_rate": 1.2434158663345553e-06,
      "loss": 0.3297,
      "step": 178
    },
    {
      "epoch": 1.7170263788968825,
      "grad_norm": 0.2530308961868286,
      "learning_rate": 1.1635231600870334e-06,
      "loss": 0.4104,
      "step": 179
    },
    {
      "epoch": 1.7266187050359711,
      "grad_norm": 0.21219122409820557,
      "learning_rate": 1.086124390740757e-06,
      "loss": 0.3553,
      "step": 180
    },
    {
      "epoch": 1.7362110311750598,
      "grad_norm": 0.23752814531326294,
      "learning_rate": 1.0112414027084262e-06,
      "loss": 0.3862,
      "step": 181
    },
    {
      "epoch": 1.7458033573141487,
      "grad_norm": 0.2403864711523056,
      "learning_rate": 9.388953303686587e-07,
      "loss": 0.4316,
      "step": 182
    },
    {
      "epoch": 1.7553956834532374,
      "grad_norm": 0.20274099707603455,
      "learning_rate": 8.691065921011687e-07,
      "loss": 0.3422,
      "step": 183
    },
    {
      "epoch": 1.7649880095923263,
      "grad_norm": 0.21107341349124908,
      "learning_rate": 8.018948845240538e-07,
      "loss": 0.3584,
      "step": 184
    },
    {
      "epoch": 1.774580335731415,
      "grad_norm": 0.21734651923179626,
      "learning_rate": 7.372791769347843e-07,
      "loss": 0.4099,
      "step": 185
    },
    {
      "epoch": 1.7841726618705036,
      "grad_norm": 0.2155366688966751,
      "learning_rate": 6.752777059564431e-07,
      "loss": 0.3968,
      "step": 186
    },
    {
      "epoch": 1.7937649880095923,
      "grad_norm": 0.20627352595329285,
      "learning_rate": 6.159079703907823e-07,
      "loss": 0.34,
      "step": 187
    },
    {
      "epoch": 1.803357314148681,
      "grad_norm": 0.2195385843515396,
      "learning_rate": 5.591867262794969e-07,
      "loss": 0.3729,
      "step": 188
    },
    {
      "epoch": 1.8129496402877698,
      "grad_norm": 0.23355402052402496,
      "learning_rate": 5.051299821751254e-07,
      "loss": 0.3928,
      "step": 189
    },
    {
      "epoch": 1.8225419664268585,
      "grad_norm": 0.22403055429458618,
      "learning_rate": 4.537529946229369e-07,
      "loss": 0.3818,
      "step": 190
    },
    {
      "epoch": 1.8321342925659474,
      "grad_norm": 0.21041440963745117,
      "learning_rate": 4.0507026385502747e-07,
      "loss": 0.3448,
      "step": 191
    },
    {
      "epoch": 1.841726618705036,
      "grad_norm": 0.22846421599388123,
      "learning_rate": 3.5909552969790376e-07,
      "loss": 0.4311,
      "step": 192
    },
    {
      "epoch": 1.8513189448441247,
      "grad_norm": 0.22553186118602753,
      "learning_rate": 3.158417676946635e-07,
      "loss": 0.3701,
      "step": 193
    },
    {
      "epoch": 1.8609112709832134,
      "grad_norm": 0.20845209062099457,
      "learning_rate": 2.753211854428728e-07,
      "loss": 0.3833,
      "step": 194
    },
    {
      "epoch": 1.870503597122302,
      "grad_norm": 0.23942551016807556,
      "learning_rate": 2.375452191491967e-07,
      "loss": 0.4594,
      "step": 195
    },
    {
      "epoch": 1.8800959232613907,
      "grad_norm": 0.21887589991092682,
      "learning_rate": 2.0252453040173646e-07,
      "loss": 0.3805,
      "step": 196
    },
    {
      "epoch": 1.8896882494004796,
      "grad_norm": 0.21494026482105255,
      "learning_rate": 1.7026900316098217e-07,
      "loss": 0.3504,
      "step": 197
    },
    {
      "epoch": 1.8992805755395683,
      "grad_norm": 0.21897700428962708,
      "learning_rate": 1.407877409702496e-07,
      "loss": 0.3396,
      "step": 198
    },
    {
      "epoch": 1.9088729016786572,
      "grad_norm": 0.2428821474313736,
      "learning_rate": 1.1408906438636236e-07,
      "loss": 0.4681,
      "step": 199
    },
    {
      "epoch": 1.9184652278177459,
      "grad_norm": 0.20344102382659912,
      "learning_rate": 9.018050863132566e-08,
      "loss": 0.3943,
      "step": 200
    },
    {
      "epoch": 1.9280575539568345,
      "grad_norm": 0.19937625527381897,
      "learning_rate": 6.906882146565097e-08,
      "loss": 0.3736,
      "step": 201
    },
    {
      "epoch": 1.9376498800959232,
      "grad_norm": 0.3866899311542511,
      "learning_rate": 5.0759961283911584e-08,
      "loss": 0.4146,
      "step": 202
    },
    {
      "epoch": 1.9472422062350119,
      "grad_norm": 0.1998918056488037,
      "learning_rate": 3.525909543310002e-08,
      "loss": 0.3537,
      "step": 203
    },
    {
      "epoch": 1.9568345323741008,
      "grad_norm": 0.19235286116600037,
      "learning_rate": 2.257059875423795e-08,
      "loss": 0.3812,
      "step": 204
    },
    {
      "epoch": 1.9664268585131894,
      "grad_norm": 0.21340519189834595,
      "learning_rate": 1.2698052347649426e-08,
      "loss": 0.3976,
      "step": 205
    },
    {
      "epoch": 1.9760191846522783,
      "grad_norm": 0.21041817963123322,
      "learning_rate": 5.644242562264923e-09,
      "loss": 0.3976,
      "step": 206
    },
    {
      "epoch": 1.985611510791367,
      "grad_norm": 0.22794745862483978,
      "learning_rate": 1.4111602092226062e-09,
      "loss": 0.3929,
      "step": 207
    },
    {
      "epoch": 1.9952038369304557,
      "grad_norm": 0.22788451611995697,
      "learning_rate": 0.0,
      "loss": 0.4285,
      "step": 208
    },
    {
      "epoch": 1.9952038369304557,
      "step": 208,
      "total_flos": 716120051679232.0,
      "train_loss": 0.4522540926073606,
      "train_runtime": 71993.7308,
      "train_samples_per_second": 0.278,
      "train_steps_per_second": 0.003
    }
  ],
  "logging_steps": 1,
  "max_steps": 208,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 716120051679232.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}