|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 20.0, |
|
"eval_steps": 500, |
|
"global_step": 760, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.2041, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.27, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.5031, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.5115, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2e-05, |
|
"loss": 0.6283, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.4e-05, |
|
"loss": 0.327, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.8e-05, |
|
"loss": 0.3308, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 0.4163, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.6e-05, |
|
"loss": 0.4515, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4e-05, |
|
"loss": 0.3857, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.999982454062275e-05, |
|
"loss": 0.3679, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.9999298165569614e-05, |
|
"loss": 0.486, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.999842088407633e-05, |
|
"loss": 0.4837, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.999719271153561e-05, |
|
"loss": 0.4925, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.999561366949691e-05, |
|
"loss": 0.361, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.9993683785666e-05, |
|
"loss": 0.3462, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.9991403093904505e-05, |
|
"loss": 0.4642, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.998877163422929e-05, |
|
"loss": 0.5412, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 3.998578945281179e-05, |
|
"loss": 0.6091, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.998245660197717e-05, |
|
"loss": 0.3604, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.997877314020343e-05, |
|
"loss": 0.492, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.997473913212036e-05, |
|
"loss": 0.335, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.99703546485084e-05, |
|
"loss": 0.4276, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.996561976629744e-05, |
|
"loss": 0.2509, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9960534568565436e-05, |
|
"loss": 0.3847, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.995509914453694e-05, |
|
"loss": 0.4315, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.9949313589581555e-05, |
|
"loss": 0.2665, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.994317800521228e-05, |
|
"loss": 0.3356, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.9936692499083696e-05, |
|
"loss": 0.4021, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.992985718499009e-05, |
|
"loss": 0.5412, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.9922672182863456e-05, |
|
"loss": 0.3859, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.9915137618771386e-05, |
|
"loss": 0.4645, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.990725362491488e-05, |
|
"loss": 0.1969, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.989902033962601e-05, |
|
"loss": 0.3058, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.989043790736547e-05, |
|
"loss": 0.4659, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.9881506478720095e-05, |
|
"loss": 0.5754, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.987222621040017e-05, |
|
"loss": 0.4268, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.986259726523671e-05, |
|
"loss": 0.313, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.985261981217858e-05, |
|
"loss": 0.5001, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.9842294026289565e-05, |
|
"loss": 0.5115, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.9831620088745236e-05, |
|
"loss": 0.3916, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.982059818682986e-05, |
|
"loss": 0.2339, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.980922851393303e-05, |
|
"loss": 0.4039, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.979751126954632e-05, |
|
"loss": 0.3582, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.978544665925977e-05, |
|
"loss": 0.6056, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.977303489475828e-05, |
|
"loss": 0.3198, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.976027619381791e-05, |
|
"loss": 0.3763, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.974717078030201e-05, |
|
"loss": 0.3211, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.973371888415736e-05, |
|
"loss": 0.3219, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.97199207414101e-05, |
|
"loss": 0.2872, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 3.970577659416158e-05, |
|
"loss": 0.113, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.969128669058411e-05, |
|
"loss": 0.31, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.967645128491666e-05, |
|
"loss": 0.3789, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.966127063746031e-05, |
|
"loss": 0.4233, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.964574501457378e-05, |
|
"loss": 0.2903, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.962987468866866e-05, |
|
"loss": 0.2413, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.961365993820471e-05, |
|
"loss": 0.2886, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.959710104768494e-05, |
|
"loss": 0.2111, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.95801983076506e-05, |
|
"loss": 0.1582, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.9562952014676116e-05, |
|
"loss": 0.3719, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.954536247136387e-05, |
|
"loss": 0.258, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.95274299863389e-05, |
|
"loss": 0.3562, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.9509154874243466e-05, |
|
"loss": 0.3451, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.949053745573155e-05, |
|
"loss": 0.3462, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.9471578057463206e-05, |
|
"loss": 0.2999, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.9452277012098875e-05, |
|
"loss": 0.3487, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.943263465829348e-05, |
|
"loss": 0.3086, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.941265134069055e-05, |
|
"loss": 0.2246, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.939232740991612e-05, |
|
"loss": 0.4354, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.9371663222572625e-05, |
|
"loss": 0.3244, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.93506591412326e-05, |
|
"loss": 0.2683, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.932931553443235e-05, |
|
"loss": 0.2599, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.930763277666548e-05, |
|
"loss": 0.2989, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.92856112483763e-05, |
|
"loss": 0.316, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.926325133595317e-05, |
|
"loss": 0.213, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.924055343172172e-05, |
|
"loss": 0.4325, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.9217517933937974e-05, |
|
"loss": 0.441, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.9194145246781336e-05, |
|
"loss": 0.3685, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.917043578034752e-05, |
|
"loss": 0.2307, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.9146389950641345e-05, |
|
"loss": 0.2507, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.912200817956945e-05, |
|
"loss": 0.1716, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.9097290894932866e-05, |
|
"loss": 0.2377, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.9072238530419525e-05, |
|
"loss": 0.4068, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9046851525596656e-05, |
|
"loss": 0.1846, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.9021130325903076e-05, |
|
"loss": 0.1923, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.899507538264134e-05, |
|
"loss": 0.3062, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.896868715296987e-05, |
|
"loss": 0.2967, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.894196609989489e-05, |
|
"loss": 0.1879, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.891491269226234e-05, |
|
"loss": 0.2129, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.888752740474962e-05, |
|
"loss": 0.3089, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.8859810717857296e-05, |
|
"loss": 0.237, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.8831763117900605e-05, |
|
"loss": 0.3648, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.880338509700101e-05, |
|
"loss": 0.264, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.8774677153077485e-05, |
|
"loss": 0.3951, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.874563978983784e-05, |
|
"loss": 0.3565, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.871627351676982e-05, |
|
"loss": 0.3928, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 3.8686578849132244e-05, |
|
"loss": 0.2163, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.8656556307945894e-05, |
|
"loss": 0.2344, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.862620641998441e-05, |
|
"loss": 0.2295, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 3.859552971776503e-05, |
|
"loss": 0.2395, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 3.8564526739539266e-05, |
|
"loss": 0.1045, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 3.853319802928345e-05, |
|
"loss": 0.3289, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 3.850154413668916e-05, |
|
"loss": 0.2843, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.8469565617153646e-05, |
|
"loss": 0.194, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.8437263031770015e-05, |
|
"loss": 0.2817, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.840463694731741e-05, |
|
"loss": 0.0673, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.8371687936251085e-05, |
|
"loss": 0.2645, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.8338416576692335e-05, |
|
"loss": 0.2102, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.830482345241835e-05, |
|
"loss": 0.2504, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.827090915285202e-05, |
|
"loss": 0.2484, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.823667427305152e-05, |
|
"loss": 0.4732, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.8202119413699914e-05, |
|
"loss": 0.3838, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.816724518109463e-05, |
|
"loss": 0.2097, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.813205218713676e-05, |
|
"loss": 0.2695, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.809654104932039e-05, |
|
"loss": 0.2515, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.806071239072175e-05, |
|
"loss": 0.1516, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.802456683998823e-05, |
|
"loss": 0.1934, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.798810503132742e-05, |
|
"loss": 0.2033, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.7951327604495957e-05, |
|
"loss": 0.2215, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.791423520478826e-05, |
|
"loss": 0.2253, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.787682848302528e-05, |
|
"loss": 0.3373, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.7839108095543016e-05, |
|
"loss": 0.3479, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.780107470418105e-05, |
|
"loss": 0.2841, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.7762728976270897e-05, |
|
"loss": 0.1959, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.7724071584624296e-05, |
|
"loss": 0.1712, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.768510320752145e-05, |
|
"loss": 0.2421, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.764582452869907e-05, |
|
"loss": 0.194, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.7606236237338406e-05, |
|
"loss": 0.1826, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.756633902805316e-05, |
|
"loss": 0.1903, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.7526133600877275e-05, |
|
"loss": 0.1759, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.7485620661252676e-05, |
|
"loss": 0.383, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.7444800920016875e-05, |
|
"loss": 0.1564, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.740367509339052e-05, |
|
"loss": 0.3796, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.736224390296479e-05, |
|
"loss": 0.2463, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.732050807568878e-05, |
|
"loss": 0.1915, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.727846834385671e-05, |
|
"loss": 0.2983, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.72361254450951e-05, |
|
"loss": 0.1864, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.7193480122349824e-05, |
|
"loss": 0.2377, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.715053312387305e-05, |
|
"loss": 0.1939, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.710728520321014e-05, |
|
"loss": 0.2322, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.706373711918641e-05, |
|
"loss": 0.125, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.701988963589384e-05, |
|
"loss": 0.1978, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.697574352267764e-05, |
|
"loss": 0.2727, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.6931299554122754e-05, |
|
"loss": 0.2192, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.6886558510040305e-05, |
|
"loss": 0.2627, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.684152117545385e-05, |
|
"loss": 0.1493, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.679618834058566e-05, |
|
"loss": 0.2359, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.675056080084284e-05, |
|
"loss": 0.2793, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.670463935680335e-05, |
|
"loss": 0.3028, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.665842481420199e-05, |
|
"loss": 0.2128, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.661191798391626e-05, |
|
"loss": 0.2562, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.6565119681952086e-05, |
|
"loss": 0.159, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.651803072942957e-05, |
|
"loss": 0.2481, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 3.647065195256855e-05, |
|
"loss": 0.1613, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.642298418267408e-05, |
|
"loss": 0.2321, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.637502825612189e-05, |
|
"loss": 0.1113, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.632678501434368e-05, |
|
"loss": 0.2206, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.6278255303812366e-05, |
|
"loss": 0.193, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.622943997602722e-05, |
|
"loss": 0.0902, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.6180339887498953e-05, |
|
"loss": 0.3044, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.613095589973465e-05, |
|
"loss": 0.0721, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.6081288879222696e-05, |
|
"loss": 0.2974, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.6031339697417535e-05, |
|
"loss": 0.0796, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.59811092307244e-05, |
|
"loss": 0.152, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.5930598360483926e-05, |
|
"loss": 0.2259, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.587980797295671e-05, |
|
"loss": 0.2136, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.5828738959307715e-05, |
|
"loss": 0.2821, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.577739221559069e-05, |
|
"loss": 0.2553, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.572576864273238e-05, |
|
"loss": 0.0768, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.56738691465168e-05, |
|
"loss": 0.2053, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.562169463756927e-05, |
|
"loss": 0.2989, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.5569246031340474e-05, |
|
"loss": 0.1637, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.551652424809039e-05, |
|
"loss": 0.3084, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.5463530212872145e-05, |
|
"loss": 0.2403, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.541026485551579e-05, |
|
"loss": 0.433, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.535672911061196e-05, |
|
"loss": 0.22, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.530292391749549e-05, |
|
"loss": 0.1963, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.524885022022896e-05, |
|
"loss": 0.1768, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.51945089675861e-05, |
|
"loss": 0.2105, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 3.513990111303513e-05, |
|
"loss": 0.1311, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 3.508502761472208e-05, |
|
"loss": 0.1729, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.5029889435453924e-05, |
|
"loss": 0.152, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.497448754268173e-05, |
|
"loss": 0.2048, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.4918822908483645e-05, |
|
"loss": 0.2281, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.4862896509547886e-05, |
|
"loss": 0.1982, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 3.4806709327155564e-05, |
|
"loss": 0.2202, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 3.475026234716348e-05, |
|
"loss": 0.2121, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.469355655998683e-05, |
|
"loss": 0.1315, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.4636592960581825e-05, |
|
"loss": 0.217, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.457937254842823e-05, |
|
"loss": 0.1935, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 3.4521896327511836e-05, |
|
"loss": 0.4058, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 3.4464165306306845e-05, |
|
"loss": 0.085, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 3.440618049775814e-05, |
|
"loss": 0.1464, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 3.434794291926358e-05, |
|
"loss": 0.1512, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 3.428945359265607e-05, |
|
"loss": 0.1294, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.423071354418571e-05, |
|
"loss": 0.1927, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 3.417172380450172e-05, |
|
"loss": 0.1937, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 3.411248540863442e-05, |
|
"loss": 0.1565, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 3.405299939597699e-05, |
|
"loss": 0.2907, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 3.399326681026731e-05, |
|
"loss": 0.119, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 3.393328869956962e-05, |
|
"loss": 0.1828, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.38730661162561e-05, |
|
"loss": 0.1624, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.381260011698846e-05, |
|
"loss": 0.2448, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 3.375189176269935e-05, |
|
"loss": 0.1497, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.369094211857378e-05, |
|
"loss": 0.1374, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 3.36297522540304e-05, |
|
"loss": 0.1482, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 3.356832324270277e-05, |
|
"loss": 0.1697, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 3.350665616242049e-05, |
|
"loss": 0.1699, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 3.3444752095190326e-05, |
|
"loss": 0.1813, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 3.3382612127177166e-05, |
|
"loss": 0.2908, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 3.332023734868504e-05, |
|
"loss": 0.1347, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.325762885413791e-05, |
|
"loss": 0.1451, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 3.319478774206053e-05, |
|
"loss": 0.1993, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 3.3131715115059134e-05, |
|
"loss": 0.1173, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.3068412079802114e-05, |
|
"loss": 0.212, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 3.300487974700058e-05, |
|
"loss": 0.2662, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 3.294111923138889e-05, |
|
"loss": 0.1773, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 3.287713165170508e-05, |
|
"loss": 0.1837, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 3.281291813067123e-05, |
|
"loss": 0.1091, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 3.27484797949738e-05, |
|
"loss": 0.0838, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 3.2683817775243795e-05, |
|
"loss": 0.1391, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 3.2618933206036994e-05, |
|
"loss": 0.2812, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 3.255382722581401e-05, |
|
"loss": 0.1579, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 3.248850097692032e-05, |
|
"loss": 0.2081, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 3.242295560556621e-05, |
|
"loss": 0.1195, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 3.235719226180669e-05, |
|
"loss": 0.209, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 3.229121209952129e-05, |
|
"loss": 0.3294, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.222501627639384e-05, |
|
"loss": 0.1801, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.215860595389211e-05, |
|
"loss": 0.2359, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.2091982297247505e-05, |
|
"loss": 0.0893, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 3.202514647543454e-05, |
|
"loss": 0.1252, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 3.195809966115038e-05, |
|
"loss": 0.1317, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 3.189084303079427e-05, |
|
"loss": 0.2122, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.182337776444684e-05, |
|
"loss": 0.0898, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 3.1755705045849465e-05, |
|
"loss": 0.2762, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.1687826062383444e-05, |
|
"loss": 0.0969, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.1619742005049204e-05, |
|
"loss": 0.1663, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.155145406844535e-05, |
|
"loss": 0.1702, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 3.148296345074779e-05, |
|
"loss": 0.1498, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.141427135368864e-05, |
|
"loss": 0.2292, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 3.134537898253514e-05, |
|
"loss": 0.1987, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.1276287546068536e-05, |
|
"loss": 0.1855, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.1206998256562894e-05, |
|
"loss": 0.225, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 3.113751232976376e-05, |
|
"loss": 0.1875, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 3.106783098486688e-05, |
|
"loss": 0.1781, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 3.09979554444968e-05, |
|
"loss": 0.1794, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.092788693468539e-05, |
|
"loss": 0.1698, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.0857626684850355e-05, |
|
"loss": 0.1616, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.078717592777367e-05, |
|
"loss": 0.1897, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.0716535899579936e-05, |
|
"loss": 0.1124, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.064570783971468e-05, |
|
"loss": 0.1512, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 3.057469299092264e-05, |
|
"loss": 0.0991, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.0503492599225918e-05, |
|
"loss": 0.0772, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.0432107913902162e-05, |
|
"loss": 0.193, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 3.036054018746261e-05, |
|
"loss": 0.1688, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 3.028879067563013e-05, |
|
"loss": 0.1048, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.02168606373172e-05, |
|
"loss": 0.3042, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 3.014475133460378e-05, |
|
"loss": 0.1962, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.007246403271522e-05, |
|
"loss": 0.105, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.303, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 2.992736050790754e-05, |
|
"loss": 0.2243, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 2.9854546830965833e-05, |
|
"loss": 0.1246, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 2.978156024675913e-05, |
|
"loss": 0.1287, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 2.970840203590548e-05, |
|
"loss": 0.1729, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 2.9635073482034307e-05, |
|
"loss": 0.0749, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 2.956157587176385e-05, |
|
"loss": 0.1655, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 2.9487910494678588e-05, |
|
"loss": 0.1562, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 2.941407864330666e-05, |
|
"loss": 0.0815, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 2.934008161309711e-05, |
|
"loss": 0.3197, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 2.926592070239724e-05, |
|
"loss": 0.1408, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 2.9191597212429763e-05, |
|
"loss": 0.2064, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 2.9117112447270007e-05, |
|
"loss": 0.1036, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 2.9042467713823015e-05, |
|
"loss": 0.1134, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 2.8967664321800653e-05, |
|
"loss": 0.1781, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 2.8892703583698553e-05, |
|
"loss": 0.1843, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 2.8817586814773174e-05, |
|
"loss": 0.2199, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 2.874231533301866e-05, |
|
"loss": 0.1608, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 2.8666890459143748e-05, |
|
"loss": 0.1271, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.8591313516548566e-05, |
|
"loss": 0.0874, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 2.8515585831301456e-05, |
|
"loss": 0.2306, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 2.8439708732115662e-05, |
|
"loss": 0.1641, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 2.8363683550326028e-05, |
|
"loss": 0.1308, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 2.8287511619865687e-05, |
|
"loss": 0.1435, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 2.8211194277242563e-05, |
|
"loss": 0.1696, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 2.813473286151601e-05, |
|
"loss": 0.1705, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 2.8058128714273257e-05, |
|
"loss": 0.2127, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 2.798138317960591e-05, |
|
"loss": 0.1692, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 2.7904497604086333e-05, |
|
"loss": 0.1731, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.7827473336744054e-05, |
|
"loss": 0.2061, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 2.7750311729042062e-05, |
|
"loss": 0.0872, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 2.767301413485313e-05, |
|
"loss": 0.1252, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 2.759558191043603e-05, |
|
"loss": 0.2098, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 2.7518016414411737e-05, |
|
"loss": 0.1598, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 2.7440319007739632e-05, |
|
"loss": 0.0742, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 2.7362491053693564e-05, |
|
"loss": 0.0892, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.728453391783797e-05, |
|
"loss": 0.0556, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 2.7206448968003898e-05, |
|
"loss": 0.0662, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 2.7128237574265014e-05, |
|
"loss": 0.1099, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 2.7049901108913573e-05, |
|
"loss": 0.1318, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 2.6971440946436306e-05, |
|
"loss": 0.1283, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 2.689285846349034e-05, |
|
"loss": 0.1493, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.681415503887904e-05, |
|
"loss": 0.2968, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.6735332053527768e-05, |
|
"loss": 0.2015, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 2.6656390890459737e-05, |
|
"loss": 0.1143, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 2.6577332934771667e-05, |
|
"loss": 0.1593, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 2.649815957360953e-05, |
|
"loss": 0.1453, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 2.641887219614419e-05, |
|
"loss": 0.088, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 2.633947219354704e-05, |
|
"loss": 0.0893, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 2.6259960958965566e-05, |
|
"loss": 0.1731, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 2.618033988749895e-05, |
|
"loss": 0.1283, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.6100610376173555e-05, |
|
"loss": 0.1619, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 2.6020773823918414e-05, |
|
"loss": 0.1354, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.59408316315407e-05, |
|
"loss": 0.1543, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 2.5860785201701147e-05, |
|
"loss": 0.1447, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 2.5780635938889433e-05, |
|
"loss": 0.105, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 2.5700385249399525e-05, |
|
"loss": 0.1646, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.5620034541305026e-05, |
|
"loss": 0.1084, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 2.5539585224434448e-05, |
|
"loss": 0.2339, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 2.5459038710346507e-05, |
|
"loss": 0.1193, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 2.5378396412305315e-05, |
|
"loss": 0.2211, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 2.5297659745255618e-05, |
|
"loss": 0.1543, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 2.5216830125797943e-05, |
|
"loss": 0.0845, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 2.513590897216376e-05, |
|
"loss": 0.1312, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 2.505489770419059e-05, |
|
"loss": 0.0971, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 2.4973797743297103e-05, |
|
"loss": 0.0927, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 2.489261051245815e-05, |
|
"loss": 0.1674, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 2.4811337436179835e-05, |
|
"loss": 0.2524, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 2.47299799404745e-05, |
|
"loss": 0.1002, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 2.4648539452835682e-05, |
|
"loss": 0.1017, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 2.4567017402213118e-05, |
|
"loss": 0.176, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 2.4485415218987628e-05, |
|
"loss": 0.1903, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 2.440373433494603e-05, |
|
"loss": 0.2088, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 2.4321976183256045e-05, |
|
"loss": 0.2211, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 2.42401421984411e-05, |
|
"loss": 0.1466, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 2.4158233816355185e-05, |
|
"loss": 0.071, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 2.40762524741577e-05, |
|
"loss": 0.1354, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 2.3994199610288142e-05, |
|
"loss": 0.2116, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 2.3912076664440967e-05, |
|
"loss": 0.0774, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 2.382988507754026e-05, |
|
"loss": 0.1502, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 2.37476262917145e-05, |
|
"loss": 0.0959, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 2.3665301750271198e-05, |
|
"loss": 0.1015, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 2.358291289767165e-05, |
|
"loss": 0.1029, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 2.3500461179505526e-05, |
|
"loss": 0.1072, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 2.3417948042465536e-05, |
|
"loss": 0.0998, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 2.333537493432205e-05, |
|
"loss": 0.0975, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 2.3252743303897677e-05, |
|
"loss": 0.0774, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 2.317005460104186e-05, |
|
"loss": 0.1363, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 2.3087310276605428e-05, |
|
"loss": 0.1714, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 2.3004511782415145e-05, |
|
"loss": 0.1232, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 2.2921660571248237e-05, |
|
"loss": 0.1653, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 2.283875809680689e-05, |
|
"loss": 0.1097, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 2.275580581369276e-05, |
|
"loss": 0.1705, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 2.2672805177381453e-05, |
|
"loss": 0.0928, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 2.258975764419694e-05, |
|
"loss": 0.2008, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 2.2506664671286087e-05, |
|
"loss": 0.1516, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 2.2423527716593014e-05, |
|
"loss": 0.2086, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 2.2340348238833555e-05, |
|
"loss": 0.1769, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 2.2257127697469634e-05, |
|
"loss": 0.0859, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 2.2173867552683707e-05, |
|
"loss": 0.0945, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.209056926535307e-05, |
|
"loss": 0.2011, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 2.2007234297024298e-05, |
|
"loss": 0.1887, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 2.1923864109887556e-05, |
|
"loss": 0.1048, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 2.1840460166750947e-05, |
|
"loss": 0.1283, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 2.175702393101487e-05, |
|
"loss": 0.1383, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 2.167355686664632e-05, |
|
"loss": 0.1413, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 2.15900604381532e-05, |
|
"loss": 0.1823, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 2.1506536110558657e-05, |
|
"loss": 0.1718, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 2.142298534937534e-05, |
|
"loss": 0.1991, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 2.1339409620579704e-05, |
|
"loss": 0.0877, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 2.125581039058627e-05, |
|
"loss": 0.0812, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 2.117218912622193e-05, |
|
"loss": 0.1335, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 2.1088547294700182e-05, |
|
"loss": 0.1302, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.1004886363595392e-05, |
|
"loss": 0.1708, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 2.0921207800817045e-05, |
|
"loss": 0.0997, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 2.0837513074583993e-05, |
|
"loss": 0.116, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 2.0753803653398697e-05, |
|
"loss": 0.1274, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 2.067008100602143e-05, |
|
"loss": 0.0965, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 2.0586346601444573e-05, |
|
"loss": 0.0682, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 2.0502601908866754e-05, |
|
"loss": 0.1405, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 2.0418848397667142e-05, |
|
"loss": 0.0438, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 2.0335087537379632e-05, |
|
"loss": 0.1818, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 2.0251320797667056e-05, |
|
"loss": 0.1675, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 2.0167549648295413e-05, |
|
"loss": 0.1699, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 2.0083775559108082e-05, |
|
"loss": 0.0459, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 2e-05, |
|
"loss": 0.1314, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 1.9916224440891928e-05, |
|
"loss": 0.1383, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 1.983245035170459e-05, |
|
"loss": 0.0661, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 1.9748679202332948e-05, |
|
"loss": 0.1153, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 1.9664912462620378e-05, |
|
"loss": 0.0413, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 1.9581151602332865e-05, |
|
"loss": 0.0914, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 1.9497398091133253e-05, |
|
"loss": 0.1503, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 1.9413653398555437e-05, |
|
"loss": 0.0606, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 1.9329918993978573e-05, |
|
"loss": 0.1149, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 1.924619634660131e-05, |
|
"loss": 0.1304, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 1.9162486925416014e-05, |
|
"loss": 0.1002, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 1.9078792199182954e-05, |
|
"loss": 0.1102, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 1.899511363640461e-05, |
|
"loss": 0.0848, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 1.891145270529982e-05, |
|
"loss": 0.1889, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 1.882781087377807e-05, |
|
"loss": 0.0923, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 1.8744189609413733e-05, |
|
"loss": 0.0859, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 1.8660590379420306e-05, |
|
"loss": 0.1308, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 1.857701465062467e-05, |
|
"loss": 0.1854, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 1.849346388944135e-05, |
|
"loss": 0.0932, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 1.8409939561846808e-05, |
|
"loss": 0.1364, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 1.8326443133353695e-05, |
|
"loss": 0.2408, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 1.8242976068985137e-05, |
|
"loss": 0.1179, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 1.815953983324906e-05, |
|
"loss": 0.1073, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 1.8076135890112457e-05, |
|
"loss": 0.0655, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 1.7992765702975702e-05, |
|
"loss": 0.1538, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 1.7909430734646936e-05, |
|
"loss": 0.0903, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 1.7826132447316303e-05, |
|
"loss": 0.1715, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 1.7742872302530366e-05, |
|
"loss": 0.2522, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 1.7659651761166455e-05, |
|
"loss": 0.0746, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 1.7576472283406996e-05, |
|
"loss": 0.0989, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 1.7493335328713913e-05, |
|
"loss": 0.1917, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 1.7410242355803064e-05, |
|
"loss": 0.1217, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 1.7327194822618557e-05, |
|
"loss": 0.1232, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 1.724419418630724e-05, |
|
"loss": 0.0488, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 1.7161241903193112e-05, |
|
"loss": 0.162, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 1.707833942875177e-05, |
|
"loss": 0.1006, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 1.699548821758486e-05, |
|
"loss": 0.1032, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 1.691268972339458e-05, |
|
"loss": 0.049, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 1.6829945398958152e-05, |
|
"loss": 0.1613, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 1.674725669610233e-05, |
|
"loss": 0.1608, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 1.6664625065677957e-05, |
|
"loss": 0.1634, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 1.658205195753447e-05, |
|
"loss": 0.1551, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 1.6499538820494477e-05, |
|
"loss": 0.1071, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 1.6417087102328356e-05, |
|
"loss": 0.0577, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 1.6334698249728812e-05, |
|
"loss": 0.0964, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"learning_rate": 1.6252373708285505e-05, |
|
"loss": 0.0499, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 1.617011492245974e-05, |
|
"loss": 0.1025, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 1.608792333555904e-05, |
|
"loss": 0.0622, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 1.600580038971186e-05, |
|
"loss": 0.2109, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 1.5923747525842306e-05, |
|
"loss": 0.1395, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 1.584176618364482e-05, |
|
"loss": 0.0408, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 1.5759857801558913e-05, |
|
"loss": 0.0673, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 1.567802381674396e-05, |
|
"loss": 0.1096, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 1.5596265665053972e-05, |
|
"loss": 0.1686, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 1.5514584781012382e-05, |
|
"loss": 0.1421, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 1.5432982597786886e-05, |
|
"loss": 0.1843, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 1.535146054716432e-05, |
|
"loss": 0.1022, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 1.527002005952551e-05, |
|
"loss": 0.0865, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 1.5188662563820165e-05, |
|
"loss": 0.091, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 1.5107389487541856e-05, |
|
"loss": 0.122, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 1.5026202256702909e-05, |
|
"loss": 0.0798, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 1.4945102295809415e-05, |
|
"loss": 0.2063, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 1.4864091027836245e-05, |
|
"loss": 0.0819, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 1.4783169874202067e-05, |
|
"loss": 0.0524, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 1.4702340254744382e-05, |
|
"loss": 0.1523, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 1.4621603587694688e-05, |
|
"loss": 0.119, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 1.45409612896535e-05, |
|
"loss": 0.1453, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 1.4460414775565555e-05, |
|
"loss": 0.1735, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"learning_rate": 1.4379965458694982e-05, |
|
"loss": 0.1292, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 1.4299614750600478e-05, |
|
"loss": 0.0797, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 1.4219364061110565e-05, |
|
"loss": 0.0866, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 1.4139214798298854e-05, |
|
"loss": 0.0711, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 1.4059168368459307e-05, |
|
"loss": 0.0812, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 1.3979226176081593e-05, |
|
"loss": 0.1358, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 1.3899389623826451e-05, |
|
"loss": 0.0931, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 1.3819660112501054e-05, |
|
"loss": 0.0724, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 1.3740039041034434e-05, |
|
"loss": 0.0854, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 1.3660527806452965e-05, |
|
"loss": 0.1068, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 1.3581127803855814e-05, |
|
"loss": 0.0703, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 1.3501840426390476e-05, |
|
"loss": 0.1606, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 1.3422667065228336e-05, |
|
"loss": 0.1303, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 1.334360910954027e-05, |
|
"loss": 0.0243, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 1.326466794647224e-05, |
|
"loss": 0.0781, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 12.32, |
|
"learning_rate": 1.3185844961120969e-05, |
|
"loss": 0.1115, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 1.3107141536509662e-05, |
|
"loss": 0.1732, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 1.3028559053563701e-05, |
|
"loss": 0.066, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 1.295009889108643e-05, |
|
"loss": 0.0896, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 1.2871762425734989e-05, |
|
"loss": 0.0801, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 1.279355103199611e-05, |
|
"loss": 0.0992, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 1.2715466082162036e-05, |
|
"loss": 0.0625, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 1.2637508946306443e-05, |
|
"loss": 0.0779, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 1.255968099226038e-05, |
|
"loss": 0.1522, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 1.2481983585588266e-05, |
|
"loss": 0.0678, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 1.2404418089563982e-05, |
|
"loss": 0.1706, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 1.2326985865146877e-05, |
|
"loss": 0.1179, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 1.2249688270957942e-05, |
|
"loss": 0.1971, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 1.2172526663255953e-05, |
|
"loss": 0.1416, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"learning_rate": 1.2095502395913676e-05, |
|
"loss": 0.2722, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 1.2018616820394096e-05, |
|
"loss": 0.0472, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 1.194187128572675e-05, |
|
"loss": 0.0911, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 1.1865267138484e-05, |
|
"loss": 0.0896, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 1.1788805722757442e-05, |
|
"loss": 0.1136, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 1.171248838013432e-05, |
|
"loss": 0.0675, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 1.1636316449673974e-05, |
|
"loss": 0.1055, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 1.1560291267884346e-05, |
|
"loss": 0.0841, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 1.1484414168698547e-05, |
|
"loss": 0.1203, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"learning_rate": 1.1408686483451439e-05, |
|
"loss": 0.1151, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"learning_rate": 1.1333109540856257e-05, |
|
"loss": 0.208, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 1.1257684666981348e-05, |
|
"loss": 0.0836, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 1.1182413185226833e-05, |
|
"loss": 0.0852, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 1.1107296416301456e-05, |
|
"loss": 0.0536, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 1.1032335678199359e-05, |
|
"loss": 0.1039, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 13.08, |
|
"learning_rate": 1.0957532286176983e-05, |
|
"loss": 0.0261, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"learning_rate": 1.0882887552730006e-05, |
|
"loss": 0.1051, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"learning_rate": 1.0808402787570245e-05, |
|
"loss": 0.073, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 1.0734079297602772e-05, |
|
"loss": 0.0993, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 1.0659918386902897e-05, |
|
"loss": 0.056, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 1.0585921356693349e-05, |
|
"loss": 0.0802, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 1.0512089505321419e-05, |
|
"loss": 0.1188, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 1.0438424128236157e-05, |
|
"loss": 0.1029, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 1.0364926517965693e-05, |
|
"loss": 0.1154, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"learning_rate": 1.0291597964094522e-05, |
|
"loss": 0.121, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 1.0218439753240883e-05, |
|
"loss": 0.1288, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 1.0145453169034172e-05, |
|
"loss": 0.0903, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 1.0072639492092463e-05, |
|
"loss": 0.1255, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.1045, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"learning_rate": 9.927535967284785e-06, |
|
"loss": 0.1246, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 9.855248665396218e-06, |
|
"loss": 0.1372, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 9.783139362682806e-06, |
|
"loss": 0.1386, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 13.53, |
|
"learning_rate": 9.71120932436987e-06, |
|
"loss": 0.1093, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"learning_rate": 9.6394598125374e-06, |
|
"loss": 0.069, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 9.567892086097845e-06, |
|
"loss": 0.1175, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 9.496507400774085e-06, |
|
"loss": 0.0853, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 9.425307009077368e-06, |
|
"loss": 0.1196, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 9.354292160285328e-06, |
|
"loss": 0.0623, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"learning_rate": 9.283464100420064e-06, |
|
"loss": 0.1087, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 13.71, |
|
"learning_rate": 9.212824072226332e-06, |
|
"loss": 0.0789, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 9.142373315149655e-06, |
|
"loss": 0.0835, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 9.07211306531462e-06, |
|
"loss": 0.0487, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 9.002044555503202e-06, |
|
"loss": 0.1229, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 8.93216901513312e-06, |
|
"loss": 0.2623, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 8.862487670236249e-06, |
|
"loss": 0.1368, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 8.793001743437111e-06, |
|
"loss": 0.0802, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"learning_rate": 8.723712453931465e-06, |
|
"loss": 0.1322, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 13.92, |
|
"learning_rate": 8.654621017464875e-06, |
|
"loss": 0.0543, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 13.95, |
|
"learning_rate": 8.585728646311368e-06, |
|
"loss": 0.0793, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 8.517036549252206e-06, |
|
"loss": 0.0843, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 8.448545931554652e-06, |
|
"loss": 0.1853, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 8.380257994950805e-06, |
|
"loss": 0.0544, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"learning_rate": 8.31217393761656e-06, |
|
"loss": 0.1099, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"learning_rate": 8.24429495415054e-06, |
|
"loss": 0.0563, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 8.17662223555316e-06, |
|
"loss": 0.0584, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 8.109156969205739e-06, |
|
"loss": 0.181, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 8.04190033884963e-06, |
|
"loss": 0.1617, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 7.974853524565467e-06, |
|
"loss": 0.0997, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 7.908017702752504e-06, |
|
"loss": 0.1279, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 7.841394046107897e-06, |
|
"loss": 0.0704, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 7.774983723606169e-06, |
|
"loss": 0.1566, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 7.708787900478711e-06, |
|
"loss": 0.054, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"learning_rate": 7.642807738193316e-06, |
|
"loss": 0.1328, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 14.34, |
|
"learning_rate": 7.577044394433795e-06, |
|
"loss": 0.1146, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"learning_rate": 7.511499023079689e-06, |
|
"loss": 0.015, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 7.446172774185991e-06, |
|
"loss": 0.1477, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"learning_rate": 7.381066793963006e-06, |
|
"loss": 0.1047, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 7.316182224756212e-06, |
|
"loss": 0.0971, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 7.251520205026206e-06, |
|
"loss": 0.2071, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 7.187081869328767e-06, |
|
"loss": 0.0662, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 7.122868348294927e-06, |
|
"loss": 0.0698, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 14.55, |
|
"learning_rate": 7.058880768611118e-06, |
|
"loss": 0.083, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 14.58, |
|
"learning_rate": 6.995120252999419e-06, |
|
"loss": 0.1451, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 6.931587920197891e-06, |
|
"loss": 0.1383, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 6.868284884940875e-06, |
|
"loss": 0.1219, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 6.805212257939479e-06, |
|
"loss": 0.0751, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 6.742371145862095e-06, |
|
"loss": 0.1379, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"learning_rate": 6.679762651314969e-06, |
|
"loss": 0.0863, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"learning_rate": 6.617387872822842e-06, |
|
"loss": 0.0955, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 14.76, |
|
"learning_rate": 6.555247904809683e-06, |
|
"loss": 0.0517, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 14.79, |
|
"learning_rate": 6.493343837579511e-06, |
|
"loss": 0.0688, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"learning_rate": 6.431676757297241e-06, |
|
"loss": 0.1223, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"learning_rate": 6.3702477459696065e-06, |
|
"loss": 0.0739, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"learning_rate": 6.3090578814262256e-06, |
|
"loss": 0.049, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 6.248108237300654e-06, |
|
"loss": 0.0715, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"learning_rate": 6.1873998830115425e-06, |
|
"loss": 0.0415, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 6.126933883743904e-06, |
|
"loss": 0.0945, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 6.066711300430386e-06, |
|
"loss": 0.1233, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 6.00673318973269e-06, |
|
"loss": 0.1101, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 5.947000604023019e-06, |
|
"loss": 0.0602, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"learning_rate": 5.887514591365593e-06, |
|
"loss": 0.0807, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"learning_rate": 5.828276195498275e-06, |
|
"loss": 0.0495, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 5.769286455814294e-06, |
|
"loss": 0.1486, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 15.13, |
|
"learning_rate": 5.710546407343938e-06, |
|
"loss": 0.0822, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 15.16, |
|
"learning_rate": 5.6520570807364306e-06, |
|
"loss": 0.1102, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 15.18, |
|
"learning_rate": 5.593819502241862e-06, |
|
"loss": 0.069, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 15.21, |
|
"learning_rate": 5.535834693693163e-06, |
|
"loss": 0.0928, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 15.24, |
|
"learning_rate": 5.478103672488162e-06, |
|
"loss": 0.1181, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"learning_rate": 5.4206274515717735e-06, |
|
"loss": 0.1228, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"learning_rate": 5.3634070394181785e-06, |
|
"loss": 0.032, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"learning_rate": 5.306443440013171e-06, |
|
"loss": 0.1551, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"learning_rate": 5.249737652836524e-06, |
|
"loss": 0.0463, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"learning_rate": 5.193290672844438e-06, |
|
"loss": 0.0718, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 15.39, |
|
"learning_rate": 5.137103490452113e-06, |
|
"loss": 0.0779, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 5.081177091516359e-06, |
|
"loss": 0.1025, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"learning_rate": 5.025512457318282e-06, |
|
"loss": 0.0925, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 4.97011056454608e-06, |
|
"loss": 0.0603, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 4.914972385277923e-06, |
|
"loss": 0.0896, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"learning_rate": 4.8600988869648745e-06, |
|
"loss": 0.0878, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 15.55, |
|
"learning_rate": 4.805491032413912e-06, |
|
"loss": 0.1842, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 15.58, |
|
"learning_rate": 4.75114977977104e-06, |
|
"loss": 0.1667, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 15.61, |
|
"learning_rate": 4.697076082504517e-06, |
|
"loss": 0.0798, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 15.63, |
|
"learning_rate": 4.643270889388056e-06, |
|
"loss": 0.1233, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"learning_rate": 4.589735144484217e-06, |
|
"loss": 0.0603, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 4.536469787127855e-06, |
|
"loss": 0.034, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 4.483475751909616e-06, |
|
"loss": 0.0596, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 4.430753968659534e-06, |
|
"loss": 0.0392, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 4.378305362430735e-06, |
|
"loss": 0.1675, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 4.326130853483206e-06, |
|
"loss": 0.225, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 4.2742313572676216e-06, |
|
"loss": 0.0895, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 15.84, |
|
"learning_rate": 4.2226077844093205e-06, |
|
"loss": 0.0729, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 15.87, |
|
"learning_rate": 4.171261040692287e-06, |
|
"loss": 0.126, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 4.120192027043293e-06, |
|
"loss": 0.0663, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"learning_rate": 4.069401639516078e-06, |
|
"loss": 0.1151, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"learning_rate": 4.01889076927561e-06, |
|
"loss": 0.141, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 15.97, |
|
"learning_rate": 3.968660302582466e-06, |
|
"loss": 0.0802, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 3.918711120777308e-06, |
|
"loss": 0.0763, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 3.8690441002653534e-06, |
|
"loss": 0.0632, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"learning_rate": 3.819660112501053e-06, |
|
"loss": 0.0998, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"learning_rate": 3.7705600239727825e-06, |
|
"loss": 0.1182, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"learning_rate": 3.7217446961876413e-06, |
|
"loss": 0.0694, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 16.13, |
|
"learning_rate": 3.6732149856563217e-06, |
|
"loss": 0.1351, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"learning_rate": 3.624971743878112e-06, |
|
"loss": 0.0701, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 16.18, |
|
"learning_rate": 3.5770158173259195e-06, |
|
"loss": 0.0379, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 3.529348047431451e-06, |
|
"loss": 0.0998, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 3.481969270570431e-06, |
|
"loss": 0.1696, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"learning_rate": 3.4348803180479174e-06, |
|
"loss": 0.026, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"learning_rate": 3.3880820160837447e-06, |
|
"loss": 0.0809, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 16.32, |
|
"learning_rate": 3.3415751857980118e-06, |
|
"loss": 0.0466, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"learning_rate": 3.295360643196659e-06, |
|
"loss": 0.1624, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"learning_rate": 3.249439199157167e-06, |
|
"loss": 0.0896, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 16.39, |
|
"learning_rate": 3.203811659414342e-06, |
|
"loss": 0.1517, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"learning_rate": 3.158478824546156e-06, |
|
"loss": 0.0994, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 16.45, |
|
"learning_rate": 3.1134414899597033e-06, |
|
"loss": 0.039, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"learning_rate": 3.0687004458772417e-06, |
|
"loss": 0.0873, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 3.0242564773223646e-06, |
|
"loss": 0.066, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 16.53, |
|
"learning_rate": 2.980110364106168e-06, |
|
"loss": 0.0433, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"learning_rate": 2.936262880813596e-06, |
|
"loss": 0.0749, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 2.892714796789868e-06, |
|
"loss": 0.097, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 2.8494668761269585e-06, |
|
"loss": 0.0544, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"learning_rate": 2.80651987765018e-06, |
|
"loss": 0.1061, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 16.66, |
|
"learning_rate": 2.763874554904902e-06, |
|
"loss": 0.0918, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 16.68, |
|
"learning_rate": 2.721531656143295e-06, |
|
"loss": 0.0321, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"learning_rate": 2.679491924311226e-06, |
|
"loss": 0.0685, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"learning_rate": 2.6377560970352178e-06, |
|
"loss": 0.0529, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"learning_rate": 2.5963249066094863e-06, |
|
"loss": 0.0937, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"learning_rate": 2.555199079983124e-06, |
|
"loss": 0.1792, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"learning_rate": 2.514379338747328e-06, |
|
"loss": 0.2075, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 2.473866399122733e-06, |
|
"loss": 0.0688, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"learning_rate": 2.4336609719468453e-06, |
|
"loss": 0.156, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 2.393763762661596e-06, |
|
"loss": 0.0705, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"learning_rate": 2.3541754713009367e-06, |
|
"loss": 0.1292, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 2.3148967924785536e-06, |
|
"loss": 0.106, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"learning_rate": 2.2759284153757056e-06, |
|
"loss": 0.1244, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 2.2372710237291105e-06, |
|
"loss": 0.1092, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 2.1989252958189498e-06, |
|
"loss": 0.1082, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"learning_rate": 2.1608919044569855e-06, |
|
"loss": 0.0604, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 2.1231715169747247e-06, |
|
"loss": 0.0299, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"learning_rate": 2.085764795211742e-06, |
|
"loss": 0.0707, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"learning_rate": 2.04867239550405e-06, |
|
"loss": 0.1801, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 2.0118949686725786e-06, |
|
"loss": 0.0691, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"learning_rate": 1.975433160011775e-06, |
|
"loss": 0.0734, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"learning_rate": 1.9392876092782576e-06, |
|
"loss": 0.0439, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 1.903458950679613e-06, |
|
"loss": 0.1925, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"learning_rate": 1.8679478128632466e-06, |
|
"loss": 0.0711, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"learning_rate": 1.832754818905378e-06, |
|
"loss": 0.1071, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 17.32, |
|
"learning_rate": 1.797880586300087e-06, |
|
"loss": 0.1135, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 1.7633257269484883e-06, |
|
"loss": 0.0911, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 1.7290908471479805e-06, |
|
"loss": 0.0903, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"learning_rate": 1.6951765475816495e-06, |
|
"loss": 0.0518, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 17.42, |
|
"learning_rate": 1.6615834233076756e-06, |
|
"loss": 0.0985, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"learning_rate": 1.6283120637489202e-06, |
|
"loss": 0.1171, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 1.5953630526825925e-06, |
|
"loss": 0.0638, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 1.562736968229992e-06, |
|
"loss": 0.1235, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 1.5304343828463553e-06, |
|
"loss": 0.0286, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"learning_rate": 1.4984558633108414e-06, |
|
"loss": 0.0285, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 17.58, |
|
"learning_rate": 1.4668019707165581e-06, |
|
"loss": 0.1211, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 17.61, |
|
"learning_rate": 1.4354732604607335e-06, |
|
"loss": 0.0782, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"learning_rate": 1.4044702822349731e-06, |
|
"loss": 0.2467, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 1.3737935800155944e-06, |
|
"loss": 0.0766, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"learning_rate": 1.3434436920541072e-06, |
|
"loss": 0.168, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 1.3134211508677597e-06, |
|
"loss": 0.0623, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 1.2837264832301854e-06, |
|
"loss": 0.0662, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"learning_rate": 1.2543602101621711e-06, |
|
"loss": 0.0444, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"learning_rate": 1.2253228469225186e-06, |
|
"loss": 0.1193, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 17.82, |
|
"learning_rate": 1.196614902998996e-06, |
|
"loss": 0.088, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"learning_rate": 1.1682368820993983e-06, |
|
"loss": 0.1111, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"learning_rate": 1.1401892821427096e-06, |
|
"loss": 0.0715, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"learning_rate": 1.1124725952503801e-06, |
|
"loss": 0.0733, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"learning_rate": 1.0850873077376645e-06, |
|
"loss": 0.0469, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"learning_rate": 1.0580339001051153e-06, |
|
"loss": 0.1719, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 1.0313128470301371e-06, |
|
"loss": 0.0748, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 1.0049246173586646e-06, |
|
"loss": 0.0982, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 18.03, |
|
"learning_rate": 9.788696740969295e-07, |
|
"loss": 0.1122, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"learning_rate": 9.53148474403347e-07, |
|
"loss": 0.0359, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 18.08, |
|
"learning_rate": 9.277614695804816e-07, |
|
"loss": 0.0863, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 9.027091050671411e-07, |
|
"loss": 0.0659, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 18.13, |
|
"learning_rate": 8.779918204305549e-07, |
|
"loss": 0.0897, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 8.536100493586552e-07, |
|
"loss": 0.0758, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 18.18, |
|
"learning_rate": 8.295642196524811e-07, |
|
"loss": 0.0927, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 18.21, |
|
"learning_rate": 8.058547532186667e-07, |
|
"loss": 0.1275, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 7.824820660620314e-07, |
|
"loss": 0.139, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"learning_rate": 7.594465682782815e-07, |
|
"loss": 0.1121, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 7.36748664046838e-07, |
|
"loss": 0.1089, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"learning_rate": 7.143887516237092e-07, |
|
"loss": 0.129, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 18.34, |
|
"learning_rate": 6.923672233345225e-07, |
|
"loss": 0.0639, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 6.706844655676481e-07, |
|
"loss": 0.1534, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 18.39, |
|
"learning_rate": 6.493408587674022e-07, |
|
"loss": 0.0447, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 18.42, |
|
"learning_rate": 6.283367774273785e-07, |
|
"loss": 0.0438, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 18.45, |
|
"learning_rate": 6.076725900838809e-07, |
|
"loss": 0.0469, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 5.873486593094546e-07, |
|
"loss": 0.0529, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 5.673653417065206e-07, |
|
"loss": 0.0582, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 5.477229879011315e-07, |
|
"loss": 0.1588, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 18.55, |
|
"learning_rate": 5.284219425367943e-07, |
|
"loss": 0.0666, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 18.58, |
|
"learning_rate": 5.094625442684554e-07, |
|
"loss": 0.0431, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 4.908451257565383e-07, |
|
"loss": 0.1, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"learning_rate": 4.7257001366110445e-07, |
|
"loss": 0.1169, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 18.66, |
|
"learning_rate": 4.546375286361304e-07, |
|
"loss": 0.1282, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 18.68, |
|
"learning_rate": 4.3704798532388624e-07, |
|
"loss": 0.0536, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"learning_rate": 4.1980169234940415e-07, |
|
"loss": 0.0453, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"learning_rate": 4.028989523150628e-07, |
|
"loss": 0.0781, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 18.76, |
|
"learning_rate": 3.863400617952873e-07, |
|
"loss": 0.0336, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 3.701253113313419e-07, |
|
"loss": 0.1494, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"learning_rate": 3.5425498542622784e-07, |
|
"loss": 0.1187, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 3.3872936253969147e-07, |
|
"loss": 0.0926, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 3.2354871508334826e-07, |
|
"loss": 0.1389, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 3.087133094158934e-07, |
|
"loss": 0.22, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 2.942234058384297e-07, |
|
"loss": 0.1183, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"learning_rate": 2.800792585899026e-07, |
|
"loss": 0.0282, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 18.97, |
|
"learning_rate": 2.662811158426393e-07, |
|
"loss": 0.053, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"learning_rate": 2.52829219697992e-07, |
|
"loss": 0.1222, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 2.3972380618209723e-07, |
|
"loss": 0.1408, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 19.05, |
|
"learning_rate": 2.26965105241721e-07, |
|
"loss": 0.0968, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 19.08, |
|
"learning_rate": 2.1455334074023336e-07, |
|
"loss": 0.1141, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 2.0248873045368488e-07, |
|
"loss": 0.0991, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 1.9077148606697627e-07, |
|
"loss": 0.0805, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"learning_rate": 1.7940181317014583e-07, |
|
"loss": 0.1016, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 19.18, |
|
"learning_rate": 1.6837991125476572e-07, |
|
"loss": 0.0752, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 19.21, |
|
"learning_rate": 1.577059737104447e-07, |
|
"loss": 0.0746, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 1.4738018782141984e-07, |
|
"loss": 0.0761, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"learning_rate": 1.3740273476329224e-07, |
|
"loss": 0.1207, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 1.2777378959983212e-07, |
|
"loss": 0.075, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"learning_rate": 1.1849352127990765e-07, |
|
"loss": 0.101, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 1.0956209263453421e-07, |
|
"loss": 0.1104, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 1.0097966037399654e-07, |
|
"loss": 0.071, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 9.274637508512207e-08, |
|
"loss": 0.1179, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 19.42, |
|
"learning_rate": 8.486238122861867e-08, |
|
"loss": 0.0489, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"learning_rate": 7.73278171365499e-08, |
|
"loss": 0.0312, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 19.47, |
|
"learning_rate": 7.01428150099126e-08, |
|
"loss": 0.1374, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 6.330750091630533e-08, |
|
"loss": 0.2082, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 5.682199478772133e-08, |
|
"loss": 0.1154, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"learning_rate": 5.0686410418450124e-08, |
|
"loss": 0.1482, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"learning_rate": 4.4900855463068104e-08, |
|
"loss": 0.1017, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 19.61, |
|
"learning_rate": 3.946543143456882e-08, |
|
"loss": 0.1066, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 3.438023370256005e-08, |
|
"loss": 0.0421, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"learning_rate": 2.964535149160286e-08, |
|
"loss": 0.0545, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 19.68, |
|
"learning_rate": 2.5260867879650652e-08, |
|
"loss": 0.0766, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 19.71, |
|
"learning_rate": 2.1226859796574794e-08, |
|
"loss": 0.0287, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 1.7543398022832337e-08, |
|
"loss": 0.1082, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"learning_rate": 1.4210547188215906e-08, |
|
"loss": 0.0621, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 1.1228365770714622e-08, |
|
"loss": 0.0609, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 8.596906095499347e-09, |
|
"loss": 0.1291, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 19.84, |
|
"learning_rate": 6.316214334001203e-09, |
|
"loss": 0.0485, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"learning_rate": 4.386330503090008e-09, |
|
"loss": 0.1391, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"learning_rate": 2.807288464392599e-09, |
|
"loss": 0.0522, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 19.92, |
|
"learning_rate": 1.5791159236777654e-09, |
|
"loss": 0.0769, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"learning_rate": 7.018344303877378e-10, |
|
"loss": 0.1479, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"learning_rate": 1.7545937724738894e-10, |
|
"loss": 0.0599, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0559, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 760, |
|
"total_flos": 1.23586643755008e+17, |
|
"train_loss": 0.1618304312400716, |
|
"train_runtime": 6994.2721, |
|
"train_samples_per_second": 0.869, |
|
"train_steps_per_second": 0.109 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 760, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 100, |
|
"total_flos": 1.23586643755008e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|