|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 1149, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.347826086956522e-07, |
|
"loss": 1.4878, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.695652173913044e-07, |
|
"loss": 1.48, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3043478260869566e-06, |
|
"loss": 1.4873, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.7391304347826088e-06, |
|
"loss": 1.4863, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.173913043478261e-06, |
|
"loss": 1.4839, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.6086956521739132e-06, |
|
"loss": 1.4336, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.043478260869566e-06, |
|
"loss": 1.4375, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4782608695652175e-06, |
|
"loss": 1.3418, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.91304347826087e-06, |
|
"loss": 1.2988, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.347826086956522e-06, |
|
"loss": 1.269, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.782608695652174e-06, |
|
"loss": 1.1343, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.2173913043478265e-06, |
|
"loss": 1.1025, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.652173913043479e-06, |
|
"loss": 1.0938, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.086956521739132e-06, |
|
"loss": 1.2368, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.521739130434783e-06, |
|
"loss": 1.04, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.956521739130435e-06, |
|
"loss": 0.9915, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.391304347826087e-06, |
|
"loss": 0.9907, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.82608695652174e-06, |
|
"loss": 0.9685, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.260869565217392e-06, |
|
"loss": 0.946, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.695652173913044e-06, |
|
"loss": 0.9382, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.130434782608697e-06, |
|
"loss": 0.9329, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.565217391304349e-06, |
|
"loss": 0.9294, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9084, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0434782608695653e-05, |
|
"loss": 0.9158, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.0869565217391305e-05, |
|
"loss": 0.8955, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1304347826086957e-05, |
|
"loss": 0.8843, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1739130434782611e-05, |
|
"loss": 0.8794, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2173913043478263e-05, |
|
"loss": 0.8689, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2608695652173915e-05, |
|
"loss": 0.8572, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3043478260869566e-05, |
|
"loss": 0.8567, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3478260869565218e-05, |
|
"loss": 0.8525, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.391304347826087e-05, |
|
"loss": 0.8464, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4347826086956522e-05, |
|
"loss": 0.8352, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4782608695652174e-05, |
|
"loss": 0.8245, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.5217391304347828e-05, |
|
"loss": 0.8237, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.565217391304348e-05, |
|
"loss": 0.8154, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6086956521739132e-05, |
|
"loss": 0.8105, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6521739130434785e-05, |
|
"loss": 0.8108, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6956521739130437e-05, |
|
"loss": 0.8083, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.739130434782609e-05, |
|
"loss": 0.802, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.782608695652174e-05, |
|
"loss": 0.792, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8260869565217393e-05, |
|
"loss": 0.7983, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8695652173913045e-05, |
|
"loss": 0.791, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9130434782608697e-05, |
|
"loss": 0.7961, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.956521739130435e-05, |
|
"loss": 0.7847, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2e-05, |
|
"loss": 0.7769, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9999959438086812e-05, |
|
"loss": 0.7751, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999837752676295e-05, |
|
"loss": 0.771, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999963494475561e-05, |
|
"loss": 0.7703, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999935101597001e-05, |
|
"loss": 0.7681, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9998985968622838e-05, |
|
"loss": 0.7747, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9998539805675495e-05, |
|
"loss": 0.772, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.999801253074743e-05, |
|
"loss": 0.7712, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9997404148116088e-05, |
|
"loss": 0.7637, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9996714662716914e-05, |
|
"loss": 0.7637, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999594408014327e-05, |
|
"loss": 0.7542, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9995092406646422e-05, |
|
"loss": 0.7603, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999415964913547e-05, |
|
"loss": 0.7556, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999314581517729e-05, |
|
"loss": 0.7524, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9992050912996507e-05, |
|
"loss": 0.7527, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9990874951475377e-05, |
|
"loss": 0.7439, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9989617940153753e-05, |
|
"loss": 0.7449, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9988279889228987e-05, |
|
"loss": 0.751, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9986860809555867e-05, |
|
"loss": 0.7473, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9985360712646502e-05, |
|
"loss": 0.7412, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998377961067026e-05, |
|
"loss": 0.7468, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.998211751645364e-05, |
|
"loss": 0.7585, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.998037444348019e-05, |
|
"loss": 0.7439, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9978550405890384e-05, |
|
"loss": 0.7412, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.997664541848151e-05, |
|
"loss": 0.7324, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.997465949670756e-05, |
|
"loss": 0.7429, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9972592656679085e-05, |
|
"loss": 0.7288, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.997044491516309e-05, |
|
"loss": 0.7349, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9968216289582865e-05, |
|
"loss": 0.7344, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9965906798017883e-05, |
|
"loss": 0.739, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.996351645920362e-05, |
|
"loss": 0.7317, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.996104529253142e-05, |
|
"loss": 0.7292, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9958493318048332e-05, |
|
"loss": 0.7358, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9955860556456947e-05, |
|
"loss": 0.7349, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.995314702911524e-05, |
|
"loss": 0.7239, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9950352758036378e-05, |
|
"loss": 0.7322, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.994747776588856e-05, |
|
"loss": 0.7378, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9944522075994822e-05, |
|
"loss": 0.7314, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.994148571233285e-05, |
|
"loss": 0.729, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9938368699534788e-05, |
|
"loss": 0.7334, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9935171062887035e-05, |
|
"loss": 0.7173, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.993189282833005e-05, |
|
"loss": 0.7307, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9928534022458117e-05, |
|
"loss": 0.7231, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.992509467251916e-05, |
|
"loss": 0.7271, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.99215748064145e-05, |
|
"loss": 0.7258, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.991797445269864e-05, |
|
"loss": 0.7307, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9914293640579025e-05, |
|
"loss": 0.7297, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.991053239991581e-05, |
|
"loss": 0.7231, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.990669076122162e-05, |
|
"loss": 0.7217, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.99027687556613e-05, |
|
"loss": 0.7256, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9898766415051654e-05, |
|
"loss": 0.7134, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9894683771861207e-05, |
|
"loss": 0.7214, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.989052085920992e-05, |
|
"loss": 0.7324, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988627771086893e-05, |
|
"loss": 0.7173, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.988195436126029e-05, |
|
"loss": 0.7239, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9877550845456654e-05, |
|
"loss": 0.7188, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9873067199181032e-05, |
|
"loss": 0.719, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.986850345880648e-05, |
|
"loss": 0.7195, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.986385966135581e-05, |
|
"loss": 0.7283, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9859135844501274e-05, |
|
"loss": 0.7217, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.985433204656429e-05, |
|
"loss": 0.7112, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9849448306515096e-05, |
|
"loss": 0.719, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.984448466397247e-05, |
|
"loss": 0.7229, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9839441159203374e-05, |
|
"loss": 0.7134, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.983431783312265e-05, |
|
"loss": 0.7124, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.982911472729268e-05, |
|
"loss": 0.7288, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9823831883923044e-05, |
|
"loss": 0.7126, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.98184693458702e-05, |
|
"loss": 0.7131, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9813027156637102e-05, |
|
"loss": 0.717, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9807505360372868e-05, |
|
"loss": 0.7119, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9801904001872426e-05, |
|
"loss": 0.7246, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.979622312657614e-05, |
|
"loss": 0.7202, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9790462780569442e-05, |
|
"loss": 0.7207, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.978462301058247e-05, |
|
"loss": 0.7153, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.977870386398966e-05, |
|
"loss": 0.7107, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9772705388809402e-05, |
|
"loss": 0.7168, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9766627633703622e-05, |
|
"loss": 0.7078, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9760470647977395e-05, |
|
"loss": 0.717, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9754234481578544e-05, |
|
"loss": 0.7097, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9747919185097238e-05, |
|
"loss": 0.709, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9741524809765574e-05, |
|
"loss": 0.7085, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.973505140745718e-05, |
|
"loss": 0.7217, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.972849903068676e-05, |
|
"loss": 0.7126, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9721867732609714e-05, |
|
"loss": 0.7134, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9715157567021663e-05, |
|
"loss": 0.7004, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.970836858835804e-05, |
|
"loss": 0.7188, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9701500851693635e-05, |
|
"loss": 0.7114, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.969455441274216e-05, |
|
"loss": 0.7085, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.968752932785578e-05, |
|
"loss": 0.7166, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.968042565402468e-05, |
|
"loss": 0.7087, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.967324344887657e-05, |
|
"loss": 0.7256, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9665982770676256e-05, |
|
"loss": 0.71, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.965864367832513e-05, |
|
"loss": 0.7073, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9651226231360724e-05, |
|
"loss": 0.7195, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9643730489956198e-05, |
|
"loss": 0.709, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9636156514919883e-05, |
|
"loss": 0.7092, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9628504367694754e-05, |
|
"loss": 0.7073, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9620774110357967e-05, |
|
"loss": 0.7031, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9612965805620318e-05, |
|
"loss": 0.7126, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.960507951682577e-05, |
|
"loss": 0.7034, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9597115307950914e-05, |
|
"loss": 0.7107, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9589073243604454e-05, |
|
"loss": 0.7026, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.95809533890267e-05, |
|
"loss": 0.7197, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9572755810089016e-05, |
|
"loss": 0.7104, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9564480573293302e-05, |
|
"loss": 0.6936, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9556127745771437e-05, |
|
"loss": 0.7031, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9547697395284765e-05, |
|
"loss": 0.6987, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9539189590223512e-05, |
|
"loss": 0.7087, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.953060439960624e-05, |
|
"loss": 0.708, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9521941893079314e-05, |
|
"loss": 0.7095, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9513202140916284e-05, |
|
"loss": 0.7002, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.950438521401738e-05, |
|
"loss": 0.7004, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.949549118390888e-05, |
|
"loss": 0.707, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9486520122742557e-05, |
|
"loss": 0.698, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9477472103295097e-05, |
|
"loss": 0.7061, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9468347198967494e-05, |
|
"loss": 0.7, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.945914548378446e-05, |
|
"loss": 0.7053, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9449867032393834e-05, |
|
"loss": 0.7034, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9440511920065962e-05, |
|
"loss": 0.7075, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9431080222693096e-05, |
|
"loss": 0.7085, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9421572016788773e-05, |
|
"loss": 0.6951, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9411987379487197e-05, |
|
"loss": 0.7053, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9402326388542615e-05, |
|
"loss": 0.6926, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9392589122328682e-05, |
|
"loss": 0.6978, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9382775659837824e-05, |
|
"loss": 0.7024, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9372886080680607e-05, |
|
"loss": 0.7, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9362920465085078e-05, |
|
"loss": 0.7073, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.935287889389613e-05, |
|
"loss": 0.6953, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9342761448574826e-05, |
|
"loss": 0.7083, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9332568211197756e-05, |
|
"loss": 0.7075, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9322299264456358e-05, |
|
"loss": 0.7039, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9311954691656264e-05, |
|
"loss": 0.7026, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.93015345767166e-05, |
|
"loss": 0.6985, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9291039004169332e-05, |
|
"loss": 0.7117, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.928046805915855e-05, |
|
"loss": 0.7053, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9269821827439822e-05, |
|
"loss": 0.7039, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9259100395379435e-05, |
|
"loss": 0.7097, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.924830384995376e-05, |
|
"loss": 0.7012, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9237432278748498e-05, |
|
"loss": 0.7107, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9226485769958e-05, |
|
"loss": 0.7009, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.921546441238453e-05, |
|
"loss": 0.7097, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.920436829543756e-05, |
|
"loss": 0.697, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9193197509133035e-05, |
|
"loss": 0.71, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.918195214409265e-05, |
|
"loss": 0.6982, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9170632291543106e-05, |
|
"loss": 0.7036, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9159238043315383e-05, |
|
"loss": 0.707, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9147769491843977e-05, |
|
"loss": 0.7056, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9136226730166168e-05, |
|
"loss": 0.6914, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9124609851921257e-05, |
|
"loss": 0.7029, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9112918951349805e-05, |
|
"loss": 0.6963, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9101154123292866e-05, |
|
"loss": 0.6924, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9089315463191234e-05, |
|
"loss": 0.6904, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9077403067084646e-05, |
|
"loss": 0.7058, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9065417031611014e-05, |
|
"loss": 0.6975, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9053357454005654e-05, |
|
"loss": 0.6982, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9041224432100464e-05, |
|
"loss": 0.6985, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9029018064323167e-05, |
|
"loss": 0.697, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9016738449696484e-05, |
|
"loss": 0.6963, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9004385687837346e-05, |
|
"loss": 0.6946, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8991959878956096e-05, |
|
"loss": 0.6985, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8979461123855634e-05, |
|
"loss": 0.6858, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8966889523930657e-05, |
|
"loss": 0.6931, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8954245181166787e-05, |
|
"loss": 0.6909, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.894152819813977e-05, |
|
"loss": 0.6909, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8928738678014646e-05, |
|
"loss": 0.6948, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8915876724544886e-05, |
|
"loss": 0.6921, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.890294244207158e-05, |
|
"loss": 0.6948, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8889935935522587e-05, |
|
"loss": 0.6997, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.887685731041165e-05, |
|
"loss": 0.6987, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8863706672837593e-05, |
|
"loss": 0.697, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.885048412948341e-05, |
|
"loss": 0.696, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.883718978761544e-05, |
|
"loss": 0.6921, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8823823755082463e-05, |
|
"loss": 0.6855, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8810386140314857e-05, |
|
"loss": 0.6926, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.879687705232369e-05, |
|
"loss": 0.6951, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8783296600699858e-05, |
|
"loss": 0.6958, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8769644895613175e-05, |
|
"loss": 0.688, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.87559220478115e-05, |
|
"loss": 0.6975, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8742128168619822e-05, |
|
"loss": 0.6892, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8728263369939373e-05, |
|
"loss": 0.6929, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8714327764246697e-05, |
|
"loss": 0.6863, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8700321464592766e-05, |
|
"loss": 0.6936, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.868624458460204e-05, |
|
"loss": 0.6897, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8672097238471554e-05, |
|
"loss": 0.6897, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8657879540969995e-05, |
|
"loss": 0.6934, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8643591607436767e-05, |
|
"loss": 0.7053, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8629233553781052e-05, |
|
"loss": 0.6902, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8614805496480875e-05, |
|
"loss": 0.6948, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8600307552582158e-05, |
|
"loss": 0.6907, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8585739839697768e-05, |
|
"loss": 0.6973, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.857110247600657e-05, |
|
"loss": 0.696, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8556395580252457e-05, |
|
"loss": 0.6887, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8541619271743393e-05, |
|
"loss": 0.6946, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8526773670350446e-05, |
|
"loss": 0.696, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.851185889650682e-05, |
|
"loss": 0.6868, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8496875071206862e-05, |
|
"loss": 0.6836, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.84818223160051e-05, |
|
"loss": 0.6877, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8466700753015242e-05, |
|
"loss": 0.6985, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8451510504909193e-05, |
|
"loss": 0.6978, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.843625169491606e-05, |
|
"loss": 0.688, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8420924446821146e-05, |
|
"loss": 0.6882, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.840552888496495e-05, |
|
"loss": 0.6792, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.839006513424217e-05, |
|
"loss": 0.6943, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8374533320100658e-05, |
|
"loss": 0.6951, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.835893356854044e-05, |
|
"loss": 0.6851, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8343266006112665e-05, |
|
"loss": 0.6907, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.83275307599186e-05, |
|
"loss": 0.6895, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.831172795760858e-05, |
|
"loss": 0.6941, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8295857727380983e-05, |
|
"loss": 0.6855, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8279920197981194e-05, |
|
"loss": 0.6951, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8263915498700542e-05, |
|
"loss": 0.7004, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.824784375937528e-05, |
|
"loss": 0.6948, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8231705110385497e-05, |
|
"loss": 0.6865, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8215499682654097e-05, |
|
"loss": 0.687, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8199227607645713e-05, |
|
"loss": 0.6873, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8182889017365637e-05, |
|
"loss": 0.6887, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8166484044358762e-05, |
|
"loss": 0.6943, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.815001282170852e-05, |
|
"loss": 0.6873, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8133475483035758e-05, |
|
"loss": 0.6924, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.81168721624977e-05, |
|
"loss": 0.6975, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8100202994786836e-05, |
|
"loss": 0.6958, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8083468115129833e-05, |
|
"loss": 0.6885, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8066667659286435e-05, |
|
"loss": 0.687, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8049801763548374e-05, |
|
"loss": 0.7009, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8032870564738244e-05, |
|
"loss": 0.6956, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8015874200208414e-05, |
|
"loss": 0.6912, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7998812807839893e-05, |
|
"loss": 0.6785, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7981686526041226e-05, |
|
"loss": 0.6819, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7964495493747357e-05, |
|
"loss": 0.6819, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7947239850418524e-05, |
|
"loss": 0.6978, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7929919736039113e-05, |
|
"loss": 0.6814, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7912535291116507e-05, |
|
"loss": 0.6882, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.789508665667999e-05, |
|
"loss": 0.6821, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.787757397427955e-05, |
|
"loss": 0.6914, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.785999738598477e-05, |
|
"loss": 0.689, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7842357034383662e-05, |
|
"loss": 0.679, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.78246530625815e-05, |
|
"loss": 0.6858, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7806885614199693e-05, |
|
"loss": 0.6943, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.778905483337457e-05, |
|
"loss": 0.6885, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.777116086475625e-05, |
|
"loss": 0.6851, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.775320385350745e-05, |
|
"loss": 0.689, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7735183945302324e-05, |
|
"loss": 0.6919, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7717101286325254e-05, |
|
"loss": 0.6868, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7698956023269693e-05, |
|
"loss": 0.6934, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.768074830333696e-05, |
|
"loss": 0.6914, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.766247827423504e-05, |
|
"loss": 0.6904, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7644146084177408e-05, |
|
"loss": 0.6931, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7625751881881796e-05, |
|
"loss": 0.6855, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7607295816569017e-05, |
|
"loss": 0.6794, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7588778037961733e-05, |
|
"loss": 0.6721, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.757019869628325e-05, |
|
"loss": 0.6799, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7551557942256295e-05, |
|
"loss": 0.6863, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7532855927101796e-05, |
|
"loss": 0.6948, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7514092802537663e-05, |
|
"loss": 0.6819, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.749526872077754e-05, |
|
"loss": 0.687, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7476383834529577e-05, |
|
"loss": 0.6736, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7457438296995197e-05, |
|
"loss": 0.6904, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7438432261867855e-05, |
|
"loss": 0.6794, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7419365883331774e-05, |
|
"loss": 0.6921, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.740023931606072e-05, |
|
"loss": 0.679, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7381052715216715e-05, |
|
"loss": 0.6821, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7361806236448817e-05, |
|
"loss": 0.6841, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.734250003589182e-05, |
|
"loss": 0.689, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.732313427016501e-05, |
|
"loss": 0.6843, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7303709096370895e-05, |
|
"loss": 0.6907, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.728422467209391e-05, |
|
"loss": 0.689, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7264681155399167e-05, |
|
"loss": 0.6743, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7245078704831144e-05, |
|
"loss": 0.6826, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7225417479412424e-05, |
|
"loss": 0.6904, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.720569763864239e-05, |
|
"loss": 0.6765, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7185919342495942e-05, |
|
"loss": 0.677, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7166082751422178e-05, |
|
"loss": 0.688, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.714618802634312e-05, |
|
"loss": 0.6733, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7126235328652386e-05, |
|
"loss": 0.6746, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.71062248202139e-05, |
|
"loss": 0.6846, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.708615666336056e-05, |
|
"loss": 0.6843, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.7066031020892935e-05, |
|
"loss": 0.6863, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.704584805607793e-05, |
|
"loss": 0.6848, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.702560793264749e-05, |
|
"loss": 0.6816, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.7005310814797233e-05, |
|
"loss": 0.6907, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.698495686718515e-05, |
|
"loss": 0.6816, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6964546254930248e-05, |
|
"loss": 0.6833, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6944079143611226e-05, |
|
"loss": 0.6887, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.692355569926512e-05, |
|
"loss": 0.6863, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.690297608838597e-05, |
|
"loss": 0.6887, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6882340477923445e-05, |
|
"loss": 0.6755, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.686164903528152e-05, |
|
"loss": 0.6697, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.684090192831709e-05, |
|
"loss": 0.6838, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.682009932533863e-05, |
|
"loss": 0.6816, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6799241395104816e-05, |
|
"loss": 0.6821, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.677832830682315e-05, |
|
"loss": 0.6873, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6757360230148617e-05, |
|
"loss": 0.6782, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6736337335182276e-05, |
|
"loss": 0.6851, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.671525979246989e-05, |
|
"loss": 0.6775, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6694127773000554e-05, |
|
"loss": 0.6797, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.667294144820529e-05, |
|
"loss": 0.6873, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6651700989955685e-05, |
|
"loss": 0.6833, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6630406570562453e-05, |
|
"loss": 0.6816, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6609058362774072e-05, |
|
"loss": 0.6797, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6587656539775374e-05, |
|
"loss": 0.6763, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.656620127518614e-05, |
|
"loss": 0.6809, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6544692743059686e-05, |
|
"loss": 0.689, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6523131117881446e-05, |
|
"loss": 0.6826, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6501516574567583e-05, |
|
"loss": 0.6809, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6479849288463536e-05, |
|
"loss": 0.6709, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6458129435342626e-05, |
|
"loss": 0.686, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.643635719140461e-05, |
|
"loss": 0.6792, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6414532733274265e-05, |
|
"loss": 0.6768, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.639265623799994e-05, |
|
"loss": 0.6814, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.637072788305214e-05, |
|
"loss": 0.6829, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.634874784632207e-05, |
|
"loss": 0.6775, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6326716306120195e-05, |
|
"loss": 0.6858, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6304633441174803e-05, |
|
"loss": 0.6782, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6282499430630543e-05, |
|
"loss": 0.6753, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6260314454046982e-05, |
|
"loss": 0.6821, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6238078691397134e-05, |
|
"loss": 0.6858, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6215792323066013e-05, |
|
"loss": 0.6841, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6193455529849168e-05, |
|
"loss": 0.6826, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6171068492951218e-05, |
|
"loss": 0.6753, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6148631393984363e-05, |
|
"loss": 0.6755, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.612614441496694e-05, |
|
"loss": 0.6851, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.610360773832193e-05, |
|
"loss": 0.6807, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.608102154687547e-05, |
|
"loss": 0.6809, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6058386023855392e-05, |
|
"loss": 0.6848, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.603570135288972e-05, |
|
"loss": 0.6873, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6012967718005185e-05, |
|
"loss": 0.6809, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.599018530362573e-05, |
|
"loss": 0.676, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5967354294571018e-05, |
|
"loss": 0.686, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.594447487605493e-05, |
|
"loss": 0.6787, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5921547233684073e-05, |
|
"loss": 0.6826, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.589857155345624e-05, |
|
"loss": 0.6833, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.587554802175895e-05, |
|
"loss": 0.6787, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5852476825367896e-05, |
|
"loss": 0.6792, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.582935815144546e-05, |
|
"loss": 0.6794, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.580619218753916e-05, |
|
"loss": 0.6895, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.578297912158017e-05, |
|
"loss": 0.678, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5759719141881752e-05, |
|
"loss": 0.6819, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.573641243713777e-05, |
|
"loss": 0.6848, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5713059196421125e-05, |
|
"loss": 0.645, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5689659609182248e-05, |
|
"loss": 0.6482, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5666213865247542e-05, |
|
"loss": 0.6362, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5642722154817848e-05, |
|
"loss": 0.6406, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5619184668466917e-05, |
|
"loss": 0.6396, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.559560159713984e-05, |
|
"loss": 0.6477, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5571973132151518e-05, |
|
"loss": 0.6492, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5548299465185096e-05, |
|
"loss": 0.6379, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5524580788290425e-05, |
|
"loss": 0.6436, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.550081729388248e-05, |
|
"loss": 0.6421, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.547700917473983e-05, |
|
"loss": 0.6384, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.545315662400304e-05, |
|
"loss": 0.6411, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5429259835173133e-05, |
|
"loss": 0.6379, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.540531900211e-05, |
|
"loss": 0.6299, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5381334319030844e-05, |
|
"loss": 0.6414, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5357305980508582e-05, |
|
"loss": 0.636, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.53332341814703e-05, |
|
"loss": 0.6506, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5309119117195642e-05, |
|
"loss": 0.6394, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.528496098331523e-05, |
|
"loss": 0.6409, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.52607599758091e-05, |
|
"loss": 0.6465, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5236516291005075e-05, |
|
"loss": 0.6375, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5212230125577207e-05, |
|
"loss": 0.6406, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5187901676544162e-05, |
|
"loss": 0.6375, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.516353114126763e-05, |
|
"loss": 0.6421, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.5139118717450714e-05, |
|
"loss": 0.6484, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.511466460313634e-05, |
|
"loss": 0.644, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.5090168996705643e-05, |
|
"loss": 0.6353, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.506563209687635e-05, |
|
"loss": 0.6411, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.5041054102701184e-05, |
|
"loss": 0.6409, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.5016435213566239e-05, |
|
"loss": 0.6421, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.499177562918936e-05, |
|
"loss": 0.645, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4967075549618535e-05, |
|
"loss": 0.6423, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4942335175230258e-05, |
|
"loss": 0.6416, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4917554706727915e-05, |
|
"loss": 0.6387, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.489273434514015e-05, |
|
"loss": 0.6421, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4867874291819226e-05, |
|
"loss": 0.6409, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4842974748439416e-05, |
|
"loss": 0.6462, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.481803591699534e-05, |
|
"loss": 0.6411, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4793057999800335e-05, |
|
"loss": 0.635, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4768041199484837e-05, |
|
"loss": 0.6475, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4742985718994693e-05, |
|
"loss": 0.6375, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.471789176158955e-05, |
|
"loss": 0.6431, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.469275953084119e-05, |
|
"loss": 0.6387, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.466758923063189e-05, |
|
"loss": 0.6382, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4642381065152748e-05, |
|
"loss": 0.6401, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.461713523890206e-05, |
|
"loss": 0.644, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4591851956683618e-05, |
|
"loss": 0.6487, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.456653142360509e-05, |
|
"loss": 0.6426, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4541173845076323e-05, |
|
"loss": 0.6389, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.45157794268077e-05, |
|
"loss": 0.646, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4490348374808454e-05, |
|
"loss": 0.6411, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4464880895385018e-05, |
|
"loss": 0.6404, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4439377195139321e-05, |
|
"loss": 0.6458, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4413837480967146e-05, |
|
"loss": 0.637, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.438826196005642e-05, |
|
"loss": 0.6506, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4362650839885558e-05, |
|
"loss": 0.6436, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4337004328221768e-05, |
|
"loss": 0.6489, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4311322633119367e-05, |
|
"loss": 0.6404, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4285605962918086e-05, |
|
"loss": 0.6335, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4259854526241399e-05, |
|
"loss": 0.6389, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4234068531994812e-05, |
|
"loss": 0.6375, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.420824818936418e-05, |
|
"loss": 0.6372, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4182393707813998e-05, |
|
"loss": 0.6372, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.4156505297085715e-05, |
|
"loss": 0.6401, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.4130583167196027e-05, |
|
"loss": 0.6409, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.4104627528435166e-05, |
|
"loss": 0.6475, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.4078638591365205e-05, |
|
"loss": 0.6379, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.4052616566818355e-05, |
|
"loss": 0.6372, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.4026561665895225e-05, |
|
"loss": 0.6411, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.4000474099963145e-05, |
|
"loss": 0.644, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3974354080654435e-05, |
|
"loss": 0.647, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3948201819864685e-05, |
|
"loss": 0.6287, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3922017529751036e-05, |
|
"loss": 0.6418, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3895801422730472e-05, |
|
"loss": 0.6433, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3869553711478087e-05, |
|
"loss": 0.6326, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3843274608925356e-05, |
|
"loss": 0.6404, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3816964328258418e-05, |
|
"loss": 0.6411, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3790623082916326e-05, |
|
"loss": 0.6431, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3764251086589353e-05, |
|
"loss": 0.6338, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3737848553217222e-05, |
|
"loss": 0.6484, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3711415696987378e-05, |
|
"loss": 0.6414, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3684952732333274e-05, |
|
"loss": 0.6348, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3658459873932604e-05, |
|
"loss": 0.6477, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3631937336705568e-05, |
|
"loss": 0.6355, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3605385335813142e-05, |
|
"loss": 0.6406, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3578804086655311e-05, |
|
"loss": 0.6453, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3552193804869343e-05, |
|
"loss": 0.6421, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3525554706328026e-05, |
|
"loss": 0.6311, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3498887007137918e-05, |
|
"loss": 0.6348, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3472190923637603e-05, |
|
"loss": 0.6375, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3445466672395923e-05, |
|
"loss": 0.6433, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3418714470210228e-05, |
|
"loss": 0.6404, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3391934534104625e-05, |
|
"loss": 0.6443, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3365127081328193e-05, |
|
"loss": 0.6477, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3338292329353253e-05, |
|
"loss": 0.6389, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.331143049587358e-05, |
|
"loss": 0.6426, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3284541798802644e-05, |
|
"loss": 0.6406, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3257626456271849e-05, |
|
"loss": 0.6418, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3230684686628746e-05, |
|
"loss": 0.645, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3203716708435281e-05, |
|
"loss": 0.6448, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3176722740466015e-05, |
|
"loss": 0.6333, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.3149703001706341e-05, |
|
"loss": 0.6382, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.3122657711350722e-05, |
|
"loss": 0.6479, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.3095587088800903e-05, |
|
"loss": 0.6504, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.306849135366413e-05, |
|
"loss": 0.6377, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.304137072575137e-05, |
|
"loss": 0.6377, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.3014225425075544e-05, |
|
"loss": 0.6477, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2987055671849708e-05, |
|
"loss": 0.6306, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2959861686485304e-05, |
|
"loss": 0.6382, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2932643689590342e-05, |
|
"loss": 0.6387, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.290540190196763e-05, |
|
"loss": 0.6321, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.287813654461297e-05, |
|
"loss": 0.6438, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2850847838713375e-05, |
|
"loss": 0.6433, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2823536005645272e-05, |
|
"loss": 0.6472, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2796201266972698e-05, |
|
"loss": 0.6379, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2768843844445514e-05, |
|
"loss": 0.6433, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2741463959997598e-05, |
|
"loss": 0.6335, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"eval_loss": 0.666928231716156, |
|
"eval_runtime": 558.9408, |
|
"eval_samples_per_second": 3.58, |
|
"eval_steps_per_second": 0.896, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2714061835745048e-05, |
|
"loss": 0.6367, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2686637693984384e-05, |
|
"loss": 0.6357, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2659191757190739e-05, |
|
"loss": 0.6377, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.263172424801605e-05, |
|
"loss": 0.646, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.260423538928726e-05, |
|
"loss": 0.6377, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2576725404004517e-05, |
|
"loss": 0.6392, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2549194515339345e-05, |
|
"loss": 0.6387, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2521642946632841e-05, |
|
"loss": 0.6406, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2494070921393879e-05, |
|
"loss": 0.6416, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2466478663297279e-05, |
|
"loss": 0.6304, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2438866396181988e-05, |
|
"loss": 0.645, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2411234344049293e-05, |
|
"loss": 0.6443, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2383582731060966e-05, |
|
"loss": 0.6431, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2355911781537478e-05, |
|
"loss": 0.6387, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2328221719956154e-05, |
|
"loss": 0.6338, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2300512770949373e-05, |
|
"loss": 0.6401, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.227278515930273e-05, |
|
"loss": 0.6328, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2245039109953219e-05, |
|
"loss": 0.6379, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.221727484798741e-05, |
|
"loss": 0.6431, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2189492598639623e-05, |
|
"loss": 0.6304, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2161692587290093e-05, |
|
"loss": 0.6499, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2133875039463147e-05, |
|
"loss": 0.6362, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.210604018082538e-05, |
|
"loss": 0.6377, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2078188237183817e-05, |
|
"loss": 0.6331, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2050319434484077e-05, |
|
"loss": 0.6428, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2022433998808557e-05, |
|
"loss": 0.6472, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1994532156374573e-05, |
|
"loss": 0.6431, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1966614133532559e-05, |
|
"loss": 0.6355, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1938680156764189e-05, |
|
"loss": 0.6499, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1910730452680575e-05, |
|
"loss": 0.6506, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1882765248020412e-05, |
|
"loss": 0.6379, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1854784769648137e-05, |
|
"loss": 0.6357, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.18267892445521e-05, |
|
"loss": 0.6423, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1798778899842712e-05, |
|
"loss": 0.6399, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1770753962750608e-05, |
|
"loss": 0.6406, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1742714660624799e-05, |
|
"loss": 0.6355, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1714661220930834e-05, |
|
"loss": 0.6416, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1686593871248952e-05, |
|
"loss": 0.6372, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1658512839272231e-05, |
|
"loss": 0.6472, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.163041835280475e-05, |
|
"loss": 0.636, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1602310639759724e-05, |
|
"loss": 0.6396, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1574189928157689e-05, |
|
"loss": 0.6318, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1546056446124612e-05, |
|
"loss": 0.6433, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.151791042189006e-05, |
|
"loss": 0.6389, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.148975208378536e-05, |
|
"loss": 0.6348, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1461581660241717e-05, |
|
"loss": 0.6326, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1433399379788388e-05, |
|
"loss": 0.6609, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1405205471050822e-05, |
|
"loss": 0.6367, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1377000162748786e-05, |
|
"loss": 0.6409, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1348783683694535e-05, |
|
"loss": 0.6445, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1320556262790951e-05, |
|
"loss": 0.6418, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1292318129029665e-05, |
|
"loss": 0.6338, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1264069511489225e-05, |
|
"loss": 0.6353, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1235810639333234e-05, |
|
"loss": 0.6401, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1207541741808461e-05, |
|
"loss": 0.6409, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.117926304824303e-05, |
|
"loss": 0.6311, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1150974788044522e-05, |
|
"loss": 0.646, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.1122677190698123e-05, |
|
"loss": 0.6375, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.1094370485764775e-05, |
|
"loss": 0.6343, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.10660549028793e-05, |
|
"loss": 0.6438, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.1037730671748538e-05, |
|
"loss": 0.6458, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.1009398022149496e-05, |
|
"loss": 0.6467, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.098105718392746e-05, |
|
"loss": 0.635, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0952708386994159e-05, |
|
"loss": 0.6399, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0924351861325882e-05, |
|
"loss": 0.6299, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0895987836961608e-05, |
|
"loss": 0.6309, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0867616544001165e-05, |
|
"loss": 0.6443, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0839238212603335e-05, |
|
"loss": 0.6345, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0810853072984e-05, |
|
"loss": 0.6277, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0782461355414273e-05, |
|
"loss": 0.6392, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0754063290218632e-05, |
|
"loss": 0.6343, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0725659107773046e-05, |
|
"loss": 0.6333, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0697249038503115e-05, |
|
"loss": 0.6331, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0668833312882187e-05, |
|
"loss": 0.646, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0640412161429506e-05, |
|
"loss": 0.6316, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0611985814708325e-05, |
|
"loss": 0.6404, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0583554503324046e-05, |
|
"loss": 0.6338, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0555118457922345e-05, |
|
"loss": 0.6379, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0526677909187306e-05, |
|
"loss": 0.6299, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0498233087839538e-05, |
|
"loss": 0.6348, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.046978422463432e-05, |
|
"loss": 0.6318, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0441331550359713e-05, |
|
"loss": 0.636, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.04128752958347e-05, |
|
"loss": 0.6389, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0384415691907304e-05, |
|
"loss": 0.6379, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0355952969452728e-05, |
|
"loss": 0.6343, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0327487359371458e-05, |
|
"loss": 0.6445, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0299019092587421e-05, |
|
"loss": 0.6331, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0270548400046087e-05, |
|
"loss": 0.636, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0242075512712612e-05, |
|
"loss": 0.6355, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0213600661569952e-05, |
|
"loss": 0.6382, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.018512407761699e-05, |
|
"loss": 0.6423, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0156645991866679e-05, |
|
"loss": 0.6379, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0128166635344141e-05, |
|
"loss": 0.6379, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0099686239084813e-05, |
|
"loss": 0.6418, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0071205034132575e-05, |
|
"loss": 0.6428, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.0042723251537849e-05, |
|
"loss": 0.6392, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.0014241122355763e-05, |
|
"loss": 0.6357, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.985758877644242e-06, |
|
"loss": 0.6367, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.957276748462154e-06, |
|
"loss": 0.6377, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.92879496586743e-06, |
|
"loss": 0.6309, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.900313760915188e-06, |
|
"loss": 0.6443, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.871833364655866e-06, |
|
"loss": 0.6399, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.843354008133326e-06, |
|
"loss": 0.6453, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.814875922383013e-06, |
|
"loss": 0.6455, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.786399338430051e-06, |
|
"loss": 0.6328, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.757924487287388e-06, |
|
"loss": 0.6431, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.729451599953916e-06, |
|
"loss": 0.6372, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.70098090741258e-06, |
|
"loss": 0.6348, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.672512640628547e-06, |
|
"loss": 0.6335, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.644047030547277e-06, |
|
"loss": 0.6277, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.615584308092698e-06, |
|
"loss": 0.6357, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.587124704165302e-06, |
|
"loss": 0.6343, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.55866844964029e-06, |
|
"loss": 0.6396, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.530215775365683e-06, |
|
"loss": 0.6409, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.501766912160468e-06, |
|
"loss": 0.6243, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.473322090812697e-06, |
|
"loss": 0.6323, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.44488154207766e-06, |
|
"loss": 0.6311, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.416445496675958e-06, |
|
"loss": 0.6284, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.38801418529168e-06, |
|
"loss": 0.6375, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.359587838570496e-06, |
|
"loss": 0.6353, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.331166687117816e-06, |
|
"loss": 0.6311, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.302750961496889e-06, |
|
"loss": 0.6313, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.274340892226954e-06, |
|
"loss": 0.6304, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.245936709781371e-06, |
|
"loss": 0.6331, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.217538644585727e-06, |
|
"loss": 0.6328, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.189146927016004e-06, |
|
"loss": 0.646, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.160761787396665e-06, |
|
"loss": 0.6406, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.132383455998836e-06, |
|
"loss": 0.6316, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.104012163038393e-06, |
|
"loss": 0.6365, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.075648138674123e-06, |
|
"loss": 0.623, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.047291613005844e-06, |
|
"loss": 0.6321, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.018942816072544e-06, |
|
"loss": 0.6355, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.990601977850507e-06, |
|
"loss": 0.6426, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.962269328251465e-06, |
|
"loss": 0.6318, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.933945097120704e-06, |
|
"loss": 0.6355, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.90562951423523e-06, |
|
"loss": 0.6328, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.87732280930188e-06, |
|
"loss": 0.6326, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.849025211955482e-06, |
|
"loss": 0.6345, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.820736951756972e-06, |
|
"loss": 0.636, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.79245825819154e-06, |
|
"loss": 0.6367, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.764189360666771e-06, |
|
"loss": 0.6399, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.735930488510773e-06, |
|
"loss": 0.6355, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.70768187097034e-06, |
|
"loss": 0.636, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.67944373720905e-06, |
|
"loss": 0.6311, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.651216316305467e-06, |
|
"loss": 0.6389, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.622999837251219e-06, |
|
"loss": 0.6345, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.594794528949183e-06, |
|
"loss": 0.6345, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.566600620211614e-06, |
|
"loss": 0.6301, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.538418339758287e-06, |
|
"loss": 0.6375, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.510247916214645e-06, |
|
"loss": 0.6345, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.482089578109945e-06, |
|
"loss": 0.636, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.453943553875393e-06, |
|
"loss": 0.6326, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.425810071842316e-06, |
|
"loss": 0.6404, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.397689360240279e-06, |
|
"loss": 0.6333, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.369581647195252e-06, |
|
"loss": 0.6248, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.341487160727772e-06, |
|
"loss": 0.6418, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.31340612875105e-06, |
|
"loss": 0.6328, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.285338779069167e-06, |
|
"loss": 0.6282, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.257285339375203e-06, |
|
"loss": 0.6245, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.229246037249395e-06, |
|
"loss": 0.6318, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.20122110015729e-06, |
|
"loss": 0.6199, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.173210755447905e-06, |
|
"loss": 0.6313, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.145215230351864e-06, |
|
"loss": 0.6282, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.117234751979595e-06, |
|
"loss": 0.6296, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.089269547319428e-06, |
|
"loss": 0.6299, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.061319843235816e-06, |
|
"loss": 0.6321, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.033385866467445e-06, |
|
"loss": 0.6372, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.005467843625428e-06, |
|
"loss": 0.6353, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.977566001191447e-06, |
|
"loss": 0.6316, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.949680565515924e-06, |
|
"loss": 0.6384, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.921811762816186e-06, |
|
"loss": 0.6328, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.893959819174619e-06, |
|
"loss": 0.6318, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.866124960536856e-06, |
|
"loss": 0.6294, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.838307412709909e-06, |
|
"loss": 0.6355, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.810507401360379e-06, |
|
"loss": 0.6404, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.782725152012591e-06, |
|
"loss": 0.6345, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.754960890046785e-06, |
|
"loss": 0.6333, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.727214840697272e-06, |
|
"loss": 0.6335, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.69948722905063e-06, |
|
"loss": 0.6306, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.671778280043847e-06, |
|
"loss": 0.635, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.644088218462527e-06, |
|
"loss": 0.6294, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.616417268939037e-06, |
|
"loss": 0.6289, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.588765655950712e-06, |
|
"loss": 0.6384, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.561133603818014e-06, |
|
"loss": 0.6287, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.533521336702727e-06, |
|
"loss": 0.6279, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.505929078606123e-06, |
|
"loss": 0.6396, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.47835705336716e-06, |
|
"loss": 0.6282, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.450805484660659e-06, |
|
"loss": 0.6296, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.423274595995482e-06, |
|
"loss": 0.6387, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.3957646107127415e-06, |
|
"loss": 0.6335, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.368275751983951e-06, |
|
"loss": 0.6382, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.3408082428092656e-06, |
|
"loss": 0.6438, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.313362306015618e-06, |
|
"loss": 0.6279, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.285938164254955e-06, |
|
"loss": 0.6309, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.258536040002406e-06, |
|
"loss": 0.634, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.23115615555449e-06, |
|
"loss": 0.6355, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.203798733027304e-06, |
|
"loss": 0.6265, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.176463994354732e-06, |
|
"loss": 0.6355, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.1491521612866255e-06, |
|
"loss": 0.6316, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.121863455387034e-06, |
|
"loss": 0.6284, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.094598098032375e-06, |
|
"loss": 0.6343, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.067356310409659e-06, |
|
"loss": 0.6326, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.0401383135147e-06, |
|
"loss": 0.6323, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.0129443281502925e-06, |
|
"loss": 0.6343, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.98577457492446e-06, |
|
"loss": 0.6348, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.9586292742486294e-06, |
|
"loss": 0.6274, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.931508646335874e-06, |
|
"loss": 0.6362, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.904412911199099e-06, |
|
"loss": 0.6301, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.87734228864928e-06, |
|
"loss": 0.6223, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.850296998293661e-06, |
|
"loss": 0.6301, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.823277259533989e-06, |
|
"loss": 0.6311, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.796283291564723e-06, |
|
"loss": 0.6323, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.7693153133712585e-06, |
|
"loss": 0.6289, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.742373543728155e-06, |
|
"loss": 0.6328, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.715458201197358e-06, |
|
"loss": 0.6387, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.688569504126421e-06, |
|
"loss": 0.6216, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.661707670646751e-06, |
|
"loss": 0.6296, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.634872918671811e-06, |
|
"loss": 0.6313, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.608065465895377e-06, |
|
"loss": 0.6313, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.581285529789774e-06, |
|
"loss": 0.6255, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.554533327604079e-06, |
|
"loss": 0.6382, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.5278090763623995e-06, |
|
"loss": 0.6313, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.501112992862082e-06, |
|
"loss": 0.6323, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.474445293671977e-06, |
|
"loss": 0.6289, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.447806195130659e-06, |
|
"loss": 0.636, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.4211959133446945e-06, |
|
"loss": 0.6328, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.394614664186863e-06, |
|
"loss": 0.6274, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.368062663294436e-06, |
|
"loss": 0.6316, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.3415401260674e-06, |
|
"loss": 0.6301, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.315047267666729e-06, |
|
"loss": 0.6238, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.288584303012624e-06, |
|
"loss": 0.6262, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.262151446782785e-06, |
|
"loss": 0.6243, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.2357489134106485e-06, |
|
"loss": 0.6328, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.2093769170836735e-06, |
|
"loss": 0.635, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.183035671741586e-06, |
|
"loss": 0.6272, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.156725391074642e-06, |
|
"loss": 0.6355, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.1304462885219154e-06, |
|
"loss": 0.6357, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.104198577269527e-06, |
|
"loss": 0.6367, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.077982470248968e-06, |
|
"loss": 0.6304, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.0517981801353194e-06, |
|
"loss": 0.6318, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.025645919345569e-06, |
|
"loss": 0.6265, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.9995259000368555e-06, |
|
"loss": 0.6223, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.973438334104779e-06, |
|
"loss": 0.6414, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.947383433181648e-06, |
|
"loss": 0.6321, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.921361408634798e-06, |
|
"loss": 0.6299, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.895372471564837e-06, |
|
"loss": 0.6243, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.86941683280398e-06, |
|
"loss": 0.6306, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.843494702914289e-06, |
|
"loss": 0.6296, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.817606292186006e-06, |
|
"loss": 0.6367, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.791751810635824e-06, |
|
"loss": 0.6228, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.765931468005189e-06, |
|
"loss": 0.6257, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.740145473758606e-06, |
|
"loss": 0.6208, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.714394037081918e-06, |
|
"loss": 0.6277, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.688677366880637e-06, |
|
"loss": 0.6243, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.6629956717782334e-06, |
|
"loss": 0.6248, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.637349160114445e-06, |
|
"loss": 0.6404, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.6117380399435824e-06, |
|
"loss": 0.6252, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.586162519032857e-06, |
|
"loss": 0.6301, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.560622804860679e-06, |
|
"loss": 0.6321, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.535119104614988e-06, |
|
"loss": 0.6309, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.509651625191547e-06, |
|
"loss": 0.6252, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.484220573192307e-06, |
|
"loss": 0.6316, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.458826154923682e-06, |
|
"loss": 0.6362, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.433468576394914e-06, |
|
"loss": 0.6228, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.408148043316381e-06, |
|
"loss": 0.6289, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.382864761097941e-06, |
|
"loss": 0.6299, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.357618934847253e-06, |
|
"loss": 0.5867, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.332410769368115e-06, |
|
"loss": 0.6025, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.307240469158815e-06, |
|
"loss": 0.5852, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.28210823841045e-06, |
|
"loss": 0.5879, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.25701428100531e-06, |
|
"loss": 0.6008, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.231958800515164e-06, |
|
"loss": 0.6025, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.206942000199666e-06, |
|
"loss": 0.5933, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.1819640830046646e-06, |
|
"loss": 0.5913, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.157025251560588e-06, |
|
"loss": 0.5947, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.1321257081807755e-06, |
|
"loss": 0.5894, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.107265654859856e-06, |
|
"loss": 0.5845, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.082445293272087e-06, |
|
"loss": 0.5869, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.057664824769746e-06, |
|
"loss": 0.5881, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.032924450381469e-06, |
|
"loss": 0.5994, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.0082243708106426e-06, |
|
"loss": 0.595, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.983564786433763e-06, |
|
"loss": 0.5864, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.9589458972988155e-06, |
|
"loss": 0.5828, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.934367903123652e-06, |
|
"loss": 0.5911, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.90983100329436e-06, |
|
"loss": 0.5852, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.88533539686366e-06, |
|
"loss": 0.5859, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.860881282549286e-06, |
|
"loss": 0.5867, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.836468858732374e-06, |
|
"loss": 0.5959, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.812098323455839e-06, |
|
"loss": 0.5879, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.787769874422798e-06, |
|
"loss": 0.593, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.763483708994931e-06, |
|
"loss": 0.6033, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.739240024190904e-06, |
|
"loss": 0.5889, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.715039016684769e-06, |
|
"loss": 0.5916, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.690880882804363e-06, |
|
"loss": 0.5891, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.666765818529702e-06, |
|
"loss": 0.594, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.642694019491423e-06, |
|
"loss": 0.5957, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.618665680969164e-06, |
|
"loss": 0.5793, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.5946809978899985e-06, |
|
"loss": 0.5857, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.570740164826869e-06, |
|
"loss": 0.5864, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.546843375996959e-06, |
|
"loss": 0.5886, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.522990825260172e-06, |
|
"loss": 0.5864, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.49918270611752e-06, |
|
"loss": 0.5911, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.475419211709576e-06, |
|
"loss": 0.5852, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.451700534814903e-06, |
|
"loss": 0.5928, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.428026867848486e-06, |
|
"loss": 0.5896, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.4043984028601624e-06, |
|
"loss": 0.5867, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.380815331533088e-06, |
|
"loss": 0.5896, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.357277845182155e-06, |
|
"loss": 0.5828, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.333786134752462e-06, |
|
"loss": 0.5879, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.310340390817753e-06, |
|
"loss": 0.5989, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.286940803578879e-06, |
|
"loss": 0.5916, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.263587562862234e-06, |
|
"loss": 0.5906, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.240280858118254e-06, |
|
"loss": 0.584, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.217020878419834e-06, |
|
"loss": 0.5862, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.19380781246084e-06, |
|
"loss": 0.5962, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.170641848554543e-06, |
|
"loss": 0.5884, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.1475231746321035e-06, |
|
"loss": 0.5952, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.124451978241054e-06, |
|
"loss": 0.5864, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.101428446543762e-06, |
|
"loss": 0.5884, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.07845276631593e-06, |
|
"loss": 0.5891, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.055525123945068e-06, |
|
"loss": 0.5979, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.032645705428984e-06, |
|
"loss": 0.594, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.0098146963742734e-06, |
|
"loss": 0.5835, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.987032281994821e-06, |
|
"loss": 0.5854, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.964298647110282e-06, |
|
"loss": 0.5784, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.941613976144612e-06, |
|
"loss": 0.5879, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.918978453124533e-06, |
|
"loss": 0.5945, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.896392261678078e-06, |
|
"loss": 0.5854, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.8738555850330635e-06, |
|
"loss": 0.5898, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.85136860601564e-06, |
|
"loss": 0.5881, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.828931507048786e-06, |
|
"loss": 0.5991, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.8065444701508312e-06, |
|
"loss": 0.5847, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.78420767693399e-06, |
|
"loss": 0.5942, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.7619213086028695e-06, |
|
"loss": 0.5847, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.7396855459530225e-06, |
|
"loss": 0.5864, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.7175005693694587e-06, |
|
"loss": 0.5852, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6953665588251984e-06, |
|
"loss": 0.592, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6732836938798077e-06, |
|
"loss": 0.5876, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6512521536779364e-06, |
|
"loss": 0.595, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6292721169478652e-06, |
|
"loss": 0.5947, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.607343762000063e-06, |
|
"loss": 0.5852, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5854672667257373e-06, |
|
"loss": 0.5796, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.563642808595392e-06, |
|
"loss": 0.592, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5418705646573758e-06, |
|
"loss": 0.5901, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.5201507115364685e-06, |
|
"loss": 0.5898, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.4984834254324217e-06, |
|
"loss": 0.5903, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.476868882118557e-06, |
|
"loss": 0.5913, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.4553072569403178e-06, |
|
"loss": 0.5889, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.4337987248138594e-06, |
|
"loss": 0.5889, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.4123434602246277e-06, |
|
"loss": 0.5996, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3909416372259305e-06, |
|
"loss": 0.5872, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3695934294375544e-06, |
|
"loss": 0.5911, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.3482990100443157e-06, |
|
"loss": 0.5811, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.3270585517947096e-06, |
|
"loss": 0.5854, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.305872226999449e-06, |
|
"loss": 0.5947, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.284740207530115e-06, |
|
"loss": 0.5928, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2636626648177284e-06, |
|
"loss": 0.5801, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2426397698513836e-06, |
|
"loss": 0.5947, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2216716931768487e-06, |
|
"loss": 0.582, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.200758604895189e-06, |
|
"loss": 0.5896, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.1799006746613713e-06, |
|
"loss": 0.5867, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.15909807168291e-06, |
|
"loss": 0.592, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.138350964718485e-06, |
|
"loss": 0.585, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.1176595220765584e-06, |
|
"loss": 0.5952, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0970239116140343e-06, |
|
"loss": 0.5874, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0764443007348808e-06, |
|
"loss": 0.5916, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.055920856388779e-06, |
|
"loss": 0.5913, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.035453745069755e-06, |
|
"loss": 0.582, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.0150431328148534e-06, |
|
"loss": 0.5898, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.994689185202767e-06, |
|
"loss": 0.594, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9743920673525126e-06, |
|
"loss": 0.5828, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.95415194392207e-06, |
|
"loss": 0.5957, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.9339689791070713e-06, |
|
"loss": 0.5879, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.913843336639444e-06, |
|
"loss": 0.5837, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8937751797861025e-06, |
|
"loss": 0.5896, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.873764671347614e-06, |
|
"loss": 0.5911, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.853811973656885e-06, |
|
"loss": 0.5876, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8339172485778243e-06, |
|
"loss": 0.5869, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.8140806575040603e-06, |
|
"loss": 0.5825, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7943023613576127e-06, |
|
"loss": 0.5935, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7745825205875764e-06, |
|
"loss": 0.5845, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7549212951688598e-06, |
|
"loss": 0.5857, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.7353188446008374e-06, |
|
"loss": 0.5881, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.7157753279060926e-06, |
|
"loss": 0.5884, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6962909036291075e-06, |
|
"loss": 0.5903, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6768657298349897e-06, |
|
"loss": 0.584, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.657499964108181e-06, |
|
"loss": 0.5867, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.6381937635511854e-06, |
|
"loss": 0.5867, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.6189472847832853e-06, |
|
"loss": 0.5876, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5997606839392866e-06, |
|
"loss": 0.5916, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5806341166682294e-06, |
|
"loss": 0.5854, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5615677381321492e-06, |
|
"loss": 0.5879, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5425617030048045e-06, |
|
"loss": 0.5925, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.523616165470425e-06, |
|
"loss": 0.5928, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.504731279222463e-06, |
|
"loss": 0.5916, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.485907197462337e-06, |
|
"loss": 0.5869, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.467144072898202e-06, |
|
"loss": 0.5874, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.448442057743706e-06, |
|
"loss": 0.5918, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.4298013037167544e-06, |
|
"loss": 0.5833, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.411221962038269e-06, |
|
"loss": 0.5864, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3927041834309873e-06, |
|
"loss": 0.5928, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3742481181182063e-06, |
|
"loss": 0.5999, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3558539158225956e-06, |
|
"loss": 0.5828, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3375217257649605e-06, |
|
"loss": 0.5918, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.319251696663044e-06, |
|
"loss": 0.5867, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.3010439767303084e-06, |
|
"loss": 0.5938, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2828987136747503e-06, |
|
"loss": 0.5942, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2648160546976785e-06, |
|
"loss": 0.5803, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2467961464925514e-06, |
|
"loss": 0.5886, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.228839135243753e-06, |
|
"loss": 0.594, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.210945166625431e-06, |
|
"loss": 0.5808, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.193114385800309e-06, |
|
"loss": 0.5913, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1753469374184987e-06, |
|
"loss": 0.5913, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.157642965616341e-06, |
|
"loss": 0.5862, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.140002614015232e-06, |
|
"loss": 0.5898, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.122426025720454e-06, |
|
"loss": 0.5789, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.104913343320013e-06, |
|
"loss": 0.5886, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.087464708883494e-06, |
|
"loss": 0.6033, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.070080263960892e-06, |
|
"loss": 0.5898, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0527601495814773e-06, |
|
"loss": 0.5894, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0355045062526457e-06, |
|
"loss": 0.5945, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.0183134739587806e-06, |
|
"loss": 0.5847, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.00118719216011e-06, |
|
"loss": 0.592, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9841257997915865e-06, |
|
"loss": 0.5845, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9671294352617543e-06, |
|
"loss": 0.5879, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9501982364516248e-06, |
|
"loss": 0.592, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9333323407135653e-06, |
|
"loss": 0.5903, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9165318848701686e-06, |
|
"loss": 0.5872, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8997970052131665e-06, |
|
"loss": 0.5825, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.883127837502302e-06, |
|
"loss": 0.5959, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8665245169642443e-06, |
|
"loss": 0.5803, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8499871782914824e-06, |
|
"loss": 0.5918, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8335159556412385e-06, |
|
"loss": 0.592, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8171109826343669e-06, |
|
"loss": 0.582, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8007723923542919e-06, |
|
"loss": 0.5908, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7845003173459007e-06, |
|
"loss": 0.5925, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7682948896145036e-06, |
|
"loss": 0.5864, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7521562406247239e-06, |
|
"loss": 0.5891, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7360845012994609e-06, |
|
"loss": 0.5762, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7200798020188103e-06, |
|
"loss": 0.5813, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.7041422726190182e-06, |
|
"loss": 0.5906, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6882720423914211e-06, |
|
"loss": 0.5957, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6724692400814002e-06, |
|
"loss": 0.5825, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6567339938873362e-06, |
|
"loss": 0.5884, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6410664314595625e-06, |
|
"loss": 0.5913, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6254666798993458e-06, |
|
"loss": 0.592, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.609934865757835e-06, |
|
"loss": 0.594, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.59447111503505e-06, |
|
"loss": 0.5874, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5790755531788571e-06, |
|
"loss": 0.5811, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.563748305083944e-06, |
|
"loss": 0.5952, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5484894950908091e-06, |
|
"loss": 0.5884, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.5332992469847597e-06, |
|
"loss": 0.5901, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.5181776839949014e-06, |
|
"loss": 0.5925, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.5031249287931403e-06, |
|
"loss": 0.5811, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4881411034931825e-06, |
|
"loss": 0.5813, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4732263296495564e-06, |
|
"loss": 0.5928, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.458380728256611e-06, |
|
"loss": 0.5913, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4436044197475463e-06, |
|
"loss": 0.5908, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4288975239934312e-06, |
|
"loss": 0.5898, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.414260160302231e-06, |
|
"loss": 0.5867, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3996924474178443e-06, |
|
"loss": 0.5854, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3851945035191272e-06, |
|
"loss": 0.5903, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3707664462189518e-06, |
|
"loss": 0.5957, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3564083925632333e-06, |
|
"loss": 0.5867, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3421204590300064e-06, |
|
"loss": 0.5835, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.327902761528448e-06, |
|
"loss": 0.5869, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3137554153979648e-06, |
|
"loss": 0.5825, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2996785354072371e-06, |
|
"loss": 0.5862, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2856722357533058e-06, |
|
"loss": 0.5867, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2717366300606303e-06, |
|
"loss": 0.5854, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2578718313801796e-06, |
|
"loss": 0.5955, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2440779521885027e-06, |
|
"loss": 0.5886, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2303551043868278e-06, |
|
"loss": 0.5972, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2167033993001443e-06, |
|
"loss": 0.5884, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.2031229476763074e-06, |
|
"loss": 0.5786, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1896138596851436e-06, |
|
"loss": 0.5854, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1761762449175362e-06, |
|
"loss": 0.5854, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.162810212384564e-06, |
|
"loss": 0.5894, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.149515870516592e-06, |
|
"loss": 0.5833, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.13629332716241e-06, |
|
"loss": 0.5837, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1231426895883502e-06, |
|
"loss": 0.5781, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1100640644774175e-06, |
|
"loss": 0.5959, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0970575579284192e-06, |
|
"loss": 0.5859, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.084123275455119e-06, |
|
"loss": 0.594, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0712613219853585e-06, |
|
"loss": 0.594, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0584718018602314e-06, |
|
"loss": 0.5854, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0457548188332155e-06, |
|
"loss": 0.5864, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0331104760693455e-06, |
|
"loss": 0.592, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0205388761443668e-06, |
|
"loss": 0.5864, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0080401210439073e-06, |
|
"loss": 0.5908, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.956143121626526e-07, |
|
"loss": 0.5938, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.832615503035182e-07, |
|
"loss": 0.5825, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.709819356768368e-07, |
|
"loss": 0.5828, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.58775567899538e-07, |
|
"loss": 0.5928, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.466425459943496e-07, |
|
"loss": 0.5833, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.345829683889873e-07, |
|
"loss": 0.5801, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.225969329153572e-07, |
|
"loss": 0.5847, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.106845368087691e-07, |
|
"loss": 0.5889, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.988458767071373e-07, |
|
"loss": 0.5916, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.870810486502002e-07, |
|
"loss": 0.5889, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"eval_loss": 0.655534565448761, |
|
"eval_runtime": 556.664, |
|
"eval_samples_per_second": 3.595, |
|
"eval_steps_per_second": 0.9, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.753901480787474e-07, |
|
"loss": 0.5916, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.637732698338352e-07, |
|
"loss": 0.5886, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.522305081560267e-07, |
|
"loss": 0.5896, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.407619566846203e-07, |
|
"loss": 0.595, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.293677084568963e-07, |
|
"loss": 0.594, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.180478559073524e-07, |
|
"loss": 0.5813, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.068024908669658e-07, |
|
"loss": 0.583, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.956317045624406e-07, |
|
"loss": 0.5767, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.845355876154703e-07, |
|
"loss": 0.5884, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.735142300420018e-07, |
|
"loss": 0.5876, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.625677212515025e-07, |
|
"loss": 0.5791, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.516961500462438e-07, |
|
"loss": 0.5857, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.408996046205674e-07, |
|
"loss": 0.5898, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.30178172560182e-07, |
|
"loss": 0.5933, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.19531940841447e-07, |
|
"loss": 0.5793, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.089609958306709e-07, |
|
"loss": 0.5894, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.984654232833999e-07, |
|
"loss": 0.585, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.880453083437388e-07, |
|
"loss": 0.5793, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.777007355436405e-07, |
|
"loss": 0.5867, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.674317888022452e-07, |
|
"loss": 0.5925, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.572385514251744e-07, |
|
"loss": 0.5803, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.471211061038696e-07, |
|
"loss": 0.5959, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.370795349149228e-07, |
|
"loss": 0.5823, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.271139193193954e-07, |
|
"loss": 0.5901, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.17224340162178e-07, |
|
"loss": 0.5771, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.074108776713206e-07, |
|
"loss": 0.5791, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.976736114573867e-07, |
|
"loss": 0.582, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.880126205128033e-07, |
|
"loss": 0.5801, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.784279832112294e-07, |
|
"loss": 0.592, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.689197773069055e-07, |
|
"loss": 0.5918, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.594880799340385e-07, |
|
"loss": 0.5845, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.501329676061662e-07, |
|
"loss": 0.5837, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.408545162155421e-07, |
|
"loss": 0.5916, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.316528010325084e-07, |
|
"loss": 0.5847, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.225278967049042e-07, |
|
"loss": 0.5864, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.134798772574434e-07, |
|
"loss": 0.5857, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.045088160911226e-07, |
|
"loss": 0.5796, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.95614785982621e-07, |
|
"loss": 0.5911, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.867978590837152e-07, |
|
"loss": 0.5837, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.780581069206913e-07, |
|
"loss": 0.5786, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.693956003937605e-07, |
|
"loss": 0.5906, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6081040977649094e-07, |
|
"loss": 0.585, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.523026047152346e-07, |
|
"loss": 0.5881, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.438722542285623e-07, |
|
"loss": 0.5906, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.355194267067009e-07, |
|
"loss": 0.5852, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.272441899109858e-07, |
|
"loss": 0.5864, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.1904661097330043e-07, |
|
"loss": 0.5886, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.1092675639554657e-07, |
|
"loss": 0.5896, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.028846920490892e-07, |
|
"loss": 0.5884, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.949204831742315e-07, |
|
"loss": 0.5898, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.870341943796829e-07, |
|
"loss": 0.5852, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.792258896420353e-07, |
|
"loss": 0.5718, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.7149563230524653e-07, |
|
"loss": 0.5894, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.638434850801198e-07, |
|
"loss": 0.583, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.5626951004380407e-07, |
|
"loss": 0.5874, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.4877376863928004e-07, |
|
"loss": 0.584, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.4135632167487274e-07, |
|
"loss": 0.5828, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.3401722932374534e-07, |
|
"loss": 0.5864, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.267565511234305e-07, |
|
"loss": 0.5859, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.19574345975322e-07, |
|
"loss": 0.5823, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.1247067214422054e-07, |
|
"loss": 0.5886, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.0544558725784213e-07, |
|
"loss": 0.584, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.9849914830636595e-07, |
|
"loss": 0.593, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.916314116419616e-07, |
|
"loss": 0.5891, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.8484243297833945e-07, |
|
"loss": 0.5847, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.781322673902886e-07, |
|
"loss": 0.5867, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.71500969313242e-07, |
|
"loss": 0.5864, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.649485925428252e-07, |
|
"loss": 0.5884, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.584751902344285e-07, |
|
"loss": 0.585, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.520808149027654e-07, |
|
"loss": 0.5789, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.457655184214569e-07, |
|
"loss": 0.5815, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.395293520226061e-07, |
|
"loss": 0.5791, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3337236629637827e-07, |
|
"loss": 0.5891, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2729461119059826e-07, |
|
"loss": 0.5891, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.212961360103416e-07, |
|
"loss": 0.5945, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.1537698941753504e-07, |
|
"loss": 0.5808, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0953721943055784e-07, |
|
"loss": 0.584, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.037768734238621e-07, |
|
"loss": 0.5879, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9809599812757652e-07, |
|
"loss": 0.5942, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9249463962713432e-07, |
|
"loss": 0.5803, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8697284336290145e-07, |
|
"loss": 0.5918, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.8153065412980232e-07, |
|
"loss": 0.5798, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.761681160769557e-07, |
|
"loss": 0.5835, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.70885272707324e-07, |
|
"loss": 0.5913, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.656821668773534e-07, |
|
"loss": 0.592, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.6055884079662742e-07, |
|
"loss": 0.5815, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.55515336027533e-07, |
|
"loss": 0.5874, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.5055169348490496e-07, |
|
"loss": 0.5833, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4566795343571528e-07, |
|
"loss": 0.585, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4086415549872777e-07, |
|
"loss": 0.5857, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3614033864419284e-07, |
|
"loss": 0.5867, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3149654119351874e-07, |
|
"loss": 0.5923, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2693280081896853e-07, |
|
"loss": 0.5808, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2244915454334815e-07, |
|
"loss": 0.5906, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1804563873971442e-07, |
|
"loss": 0.5886, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1372228913106965e-07, |
|
"loss": 0.5923, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.0947914079008192e-07, |
|
"loss": 0.5833, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0531622813879427e-07, |
|
"loss": 0.5864, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0123358494834701e-07, |
|
"loss": 0.5918, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.723124433870357e-08, |
|
"loss": 0.5876, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.330923877838182e-08, |
|
"loss": 0.5879, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.946760008419208e-08, |
|
"loss": 0.5894, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.570635942097727e-08, |
|
"loss": 0.585, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.202554730136203e-08, |
|
"loss": 0.5857, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.842519358550071e-08, |
|
"loss": 0.593, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.4905327480842e-08, |
|
"loss": 0.5872, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.146597754188578e-08, |
|
"loss": 0.5981, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.81071716699544e-08, |
|
"loss": 0.5896, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.482893711296623e-08, |
|
"loss": 0.5825, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.163130046521582e-08, |
|
"loss": 0.5886, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.851428766715406e-08, |
|
"loss": 0.592, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.54779240051817e-08, |
|
"loss": 0.5881, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.252223411144286e-08, |
|
"loss": 0.5781, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.9647241963624023e-08, |
|
"loss": 0.5947, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.685297088476204e-08, |
|
"loss": 0.5823, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.4139443543053106e-08, |
|
"loss": 0.5823, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.1506681951669624e-08, |
|
"loss": 0.5859, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.8954707468581435e-08, |
|
"loss": 0.592, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.648354079638039e-08, |
|
"loss": 0.5876, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.40932019821183e-08, |
|
"loss": 0.5894, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.17837104171359e-08, |
|
"loss": 0.5889, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9555084836914162e-08, |
|
"loss": 0.5903, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.7407343320916546e-08, |
|
"loss": 0.5815, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.534050329244253e-08, |
|
"loss": 0.5889, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.3354581518491014e-08, |
|
"loss": 0.5908, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1449594109618223e-08, |
|
"loss": 0.5876, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.962555651981113e-08, |
|
"loss": 0.5903, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.7882483546360907e-08, |
|
"loss": 0.584, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6220389329741903e-08, |
|
"loss": 0.5891, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.463928735349951e-08, |
|
"loss": 0.5828, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.3139190444136917e-08, |
|
"loss": 0.5891, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1720110771015203e-08, |
|
"loss": 0.5964, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.038205984625007e-08, |
|
"loss": 0.5869, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.125048524624147e-09, |
|
"loss": 0.5835, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.949087003493727e-09, |
|
"loss": 0.5869, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.8541848227077215e-09, |
|
"loss": 0.5769, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.840350864533273e-09, |
|
"loss": 0.5908, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.9075933535791545e-09, |
|
"loss": 0.5854, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.055919856730262e-09, |
|
"loss": 0.5889, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.285337283087664e-09, |
|
"loss": 0.5896, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.595851883913092e-09, |
|
"loss": 0.5806, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.9874692525745365e-09, |
|
"loss": 0.5835, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.4601943245062812e-09, |
|
"loss": 0.5886, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.0140313771644927e-09, |
|
"loss": 0.5823, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 6.489840299916949e-10, |
|
"loss": 0.5896, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.6505524439345295e-10, |
|
"loss": 0.5828, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.6224732370728747e-10, |
|
"loss": 0.5818, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.056191319046221e-11, |
|
"loss": 0.5854, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.5864, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1149, |
|
"total_flos": 1498288100474880.0, |
|
"train_loss": 0.6545881863984987, |
|
"train_runtime": 105511.587, |
|
"train_samples_per_second": 2.787, |
|
"train_steps_per_second": 0.011 |
|
} |
|
], |
|
"max_steps": 1149, |
|
"num_train_epochs": 3, |
|
"total_flos": 1498288100474880.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|