{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 216,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004629629629629629,
      "grad_norm": 0.490234375,
      "learning_rate": 9.953703703703704e-06,
      "loss": 1.7276,
      "step": 1
    },
    {
      "epoch": 0.009259259259259259,
      "grad_norm": 0.498046875,
      "learning_rate": 9.907407407407408e-06,
      "loss": 1.7761,
      "step": 2
    },
    {
      "epoch": 0.013888888888888888,
      "grad_norm": 0.5078125,
      "learning_rate": 9.861111111111112e-06,
      "loss": 1.8375,
      "step": 3
    },
    {
      "epoch": 0.018518518518518517,
      "grad_norm": 0.478515625,
      "learning_rate": 9.814814814814815e-06,
      "loss": 1.7143,
      "step": 4
    },
    {
      "epoch": 0.023148148148148147,
      "grad_norm": 0.443359375,
      "learning_rate": 9.768518518518519e-06,
      "loss": 1.7013,
      "step": 5
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 0.451171875,
      "learning_rate": 9.722222222222223e-06,
      "loss": 1.7835,
      "step": 6
    },
    {
      "epoch": 0.032407407407407406,
      "grad_norm": 0.3984375,
      "learning_rate": 9.675925925925926e-06,
      "loss": 1.7326,
      "step": 7
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 0.376953125,
      "learning_rate": 9.62962962962963e-06,
      "loss": 1.6947,
      "step": 8
    },
    {
      "epoch": 0.041666666666666664,
      "grad_norm": 0.345703125,
      "learning_rate": 9.583333333333335e-06,
      "loss": 1.7215,
      "step": 9
    },
    {
      "epoch": 0.046296296296296294,
      "grad_norm": 0.31640625,
      "learning_rate": 9.537037037037037e-06,
      "loss": 1.7046,
      "step": 10
    },
    {
      "epoch": 0.05092592592592592,
      "grad_norm": 0.2890625,
      "learning_rate": 9.490740740740741e-06,
      "loss": 1.6468,
      "step": 11
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 0.30859375,
      "learning_rate": 9.444444444444445e-06,
      "loss": 1.7174,
      "step": 12
    },
    {
      "epoch": 0.06018518518518518,
      "grad_norm": 0.265625,
      "learning_rate": 9.398148148148148e-06,
      "loss": 1.6456,
      "step": 13
    },
    {
      "epoch": 0.06481481481481481,
      "grad_norm": 0.265625,
      "learning_rate": 9.351851851851854e-06,
      "loss": 1.6182,
      "step": 14
    },
    {
      "epoch": 0.06944444444444445,
      "grad_norm": 0.265625,
      "learning_rate": 9.305555555555557e-06,
      "loss": 1.6217,
      "step": 15
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 0.25390625,
      "learning_rate": 9.25925925925926e-06,
      "loss": 1.6597,
      "step": 16
    },
    {
      "epoch": 0.0787037037037037,
      "grad_norm": 0.25390625,
      "learning_rate": 9.212962962962963e-06,
      "loss": 1.6162,
      "step": 17
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 0.265625,
      "learning_rate": 9.166666666666666e-06,
      "loss": 1.6341,
      "step": 18
    },
    {
      "epoch": 0.08796296296296297,
      "grad_norm": 0.2275390625,
      "learning_rate": 9.120370370370372e-06,
      "loss": 1.5764,
      "step": 19
    },
    {
      "epoch": 0.09259259259259259,
      "grad_norm": 0.2060546875,
      "learning_rate": 9.074074074074075e-06,
      "loss": 1.5642,
      "step": 20
    },
    {
      "epoch": 0.09722222222222222,
      "grad_norm": 0.21484375,
      "learning_rate": 9.027777777777779e-06,
      "loss": 1.6045,
      "step": 21
    },
    {
      "epoch": 0.10185185185185185,
      "grad_norm": 0.181640625,
      "learning_rate": 8.981481481481483e-06,
      "loss": 1.5553,
      "step": 22
    },
    {
      "epoch": 0.10648148148148148,
      "grad_norm": 0.193359375,
      "learning_rate": 8.935185185185186e-06,
      "loss": 1.572,
      "step": 23
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 0.177734375,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.5114,
      "step": 24
    },
    {
      "epoch": 0.11574074074074074,
      "grad_norm": 0.3125,
      "learning_rate": 8.842592592592594e-06,
      "loss": 1.8281,
      "step": 25
    },
    {
      "epoch": 0.12037037037037036,
      "grad_norm": 0.1708984375,
      "learning_rate": 8.796296296296297e-06,
      "loss": 1.4867,
      "step": 26
    },
    {
      "epoch": 0.125,
      "grad_norm": 0.1748046875,
      "learning_rate": 8.750000000000001e-06,
      "loss": 1.5723,
      "step": 27
    },
    {
      "epoch": 0.12962962962962962,
      "grad_norm": 0.177734375,
      "learning_rate": 8.703703703703705e-06,
      "loss": 1.5523,
      "step": 28
    },
    {
      "epoch": 0.13425925925925927,
      "grad_norm": 0.1728515625,
      "learning_rate": 8.657407407407408e-06,
      "loss": 1.5341,
      "step": 29
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 0.1611328125,
      "learning_rate": 8.611111111111112e-06,
      "loss": 1.5378,
      "step": 30
    },
    {
      "epoch": 0.14351851851851852,
      "grad_norm": 0.173828125,
      "learning_rate": 8.564814814814816e-06,
      "loss": 1.4981,
      "step": 31
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 0.1865234375,
      "learning_rate": 8.518518518518519e-06,
      "loss": 1.5476,
      "step": 32
    },
    {
      "epoch": 0.1527777777777778,
      "grad_norm": 0.1650390625,
      "learning_rate": 8.472222222222223e-06,
      "loss": 1.5159,
      "step": 33
    },
    {
      "epoch": 0.1574074074074074,
      "grad_norm": 0.173828125,
      "learning_rate": 8.425925925925926e-06,
      "loss": 1.5192,
      "step": 34
    },
    {
      "epoch": 0.16203703703703703,
      "grad_norm": 0.1611328125,
      "learning_rate": 8.37962962962963e-06,
      "loss": 1.4858,
      "step": 35
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.1552734375,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.5132,
      "step": 36
    },
    {
      "epoch": 0.1712962962962963,
      "grad_norm": 0.1494140625,
      "learning_rate": 8.287037037037037e-06,
      "loss": 1.4807,
      "step": 37
    },
    {
      "epoch": 0.17592592592592593,
      "grad_norm": 0.14453125,
      "learning_rate": 8.240740740740741e-06,
      "loss": 1.4736,
      "step": 38
    },
    {
      "epoch": 0.18055555555555555,
      "grad_norm": 0.16015625,
      "learning_rate": 8.194444444444445e-06,
      "loss": 1.4961,
      "step": 39
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 0.134765625,
      "learning_rate": 8.148148148148148e-06,
      "loss": 1.4433,
      "step": 40
    },
    {
      "epoch": 0.18981481481481483,
      "grad_norm": 0.150390625,
      "learning_rate": 8.101851851851854e-06,
      "loss": 1.5109,
      "step": 41
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 0.142578125,
      "learning_rate": 8.055555555555557e-06,
      "loss": 1.4705,
      "step": 42
    },
    {
      "epoch": 0.19907407407407407,
      "grad_norm": 0.1455078125,
      "learning_rate": 8.00925925925926e-06,
      "loss": 1.4605,
      "step": 43
    },
    {
      "epoch": 0.2037037037037037,
      "grad_norm": 0.134765625,
      "learning_rate": 7.962962962962963e-06,
      "loss": 1.4427,
      "step": 44
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 0.1435546875,
      "learning_rate": 7.916666666666667e-06,
      "loss": 1.4488,
      "step": 45
    },
    {
      "epoch": 0.21296296296296297,
      "grad_norm": 0.1484375,
      "learning_rate": 7.870370370370372e-06,
      "loss": 1.481,
      "step": 46
    },
    {
      "epoch": 0.2175925925925926,
      "grad_norm": 0.134765625,
      "learning_rate": 7.824074074074076e-06,
      "loss": 1.4406,
      "step": 47
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.134765625,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.4429,
      "step": 48
    },
    {
      "epoch": 0.22685185185185186,
      "grad_norm": 0.1640625,
      "learning_rate": 7.731481481481483e-06,
      "loss": 1.4763,
      "step": 49
    },
    {
      "epoch": 0.23148148148148148,
      "grad_norm": 0.1513671875,
      "learning_rate": 7.685185185185185e-06,
      "loss": 1.506,
      "step": 50
    },
    {
      "epoch": 0.2361111111111111,
      "grad_norm": 0.1826171875,
      "learning_rate": 7.638888888888888e-06,
      "loss": 1.4969,
      "step": 51
    },
    {
      "epoch": 0.24074074074074073,
      "grad_norm": 0.125,
      "learning_rate": 7.592592592592594e-06,
      "loss": 1.4319,
      "step": 52
    },
    {
      "epoch": 0.24537037037037038,
      "grad_norm": 0.203125,
      "learning_rate": 7.546296296296297e-06,
      "loss": 1.5133,
      "step": 53
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.146484375,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.3933,
      "step": 54
    },
    {
      "epoch": 0.25462962962962965,
      "grad_norm": 0.15625,
      "learning_rate": 7.453703703703704e-06,
      "loss": 1.4242,
      "step": 55
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 0.1298828125,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 1.4213,
      "step": 56
    },
    {
      "epoch": 0.2638888888888889,
      "grad_norm": 0.1298828125,
      "learning_rate": 7.361111111111112e-06,
      "loss": 1.4084,
      "step": 57
    },
    {
      "epoch": 0.26851851851851855,
      "grad_norm": 0.142578125,
      "learning_rate": 7.314814814814816e-06,
      "loss": 1.4043,
      "step": 58
    },
    {
      "epoch": 0.27314814814814814,
      "grad_norm": 0.1357421875,
      "learning_rate": 7.268518518518519e-06,
      "loss": 1.4564,
      "step": 59
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 0.12890625,
      "learning_rate": 7.222222222222223e-06,
      "loss": 1.3909,
      "step": 60
    },
    {
      "epoch": 0.2824074074074074,
      "grad_norm": 0.126953125,
      "learning_rate": 7.1759259259259266e-06,
      "loss": 1.3785,
      "step": 61
    },
    {
      "epoch": 0.28703703703703703,
      "grad_norm": 0.140625,
      "learning_rate": 7.129629629629629e-06,
      "loss": 1.4236,
      "step": 62
    },
    {
      "epoch": 0.2916666666666667,
      "grad_norm": 0.119140625,
      "learning_rate": 7.083333333333335e-06,
      "loss": 1.4272,
      "step": 63
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 0.126953125,
      "learning_rate": 7.0370370370370375e-06,
      "loss": 1.4143,
      "step": 64
    },
    {
      "epoch": 0.30092592592592593,
      "grad_norm": 0.130859375,
      "learning_rate": 6.990740740740741e-06,
      "loss": 1.4178,
      "step": 65
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 0.142578125,
      "learning_rate": 6.944444444444445e-06,
      "loss": 1.4412,
      "step": 66
    },
    {
      "epoch": 0.3101851851851852,
      "grad_norm": 0.15234375,
      "learning_rate": 6.898148148148148e-06,
      "loss": 1.3966,
      "step": 67
    },
    {
      "epoch": 0.3148148148148148,
      "grad_norm": 0.1328125,
      "learning_rate": 6.851851851851853e-06,
      "loss": 1.3747,
      "step": 68
    },
    {
      "epoch": 0.3194444444444444,
      "grad_norm": 0.12109375,
      "learning_rate": 6.8055555555555566e-06,
      "loss": 1.3808,
      "step": 69
    },
    {
      "epoch": 0.32407407407407407,
      "grad_norm": 0.12060546875,
      "learning_rate": 6.75925925925926e-06,
      "loss": 1.3998,
      "step": 70
    },
    {
      "epoch": 0.3287037037037037,
      "grad_norm": 0.125,
      "learning_rate": 6.712962962962963e-06,
      "loss": 1.3376,
      "step": 71
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.125,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.3627,
      "step": 72
    },
    {
      "epoch": 0.33796296296296297,
      "grad_norm": 0.11376953125,
      "learning_rate": 6.620370370370371e-06,
      "loss": 1.3424,
      "step": 73
    },
    {
      "epoch": 0.3425925925925926,
      "grad_norm": 0.12109375,
      "learning_rate": 6.574074074074075e-06,
      "loss": 1.383,
      "step": 74
    },
    {
      "epoch": 0.3472222222222222,
      "grad_norm": 0.107421875,
      "learning_rate": 6.5277777777777784e-06,
      "loss": 1.3362,
      "step": 75
    },
    {
      "epoch": 0.35185185185185186,
      "grad_norm": 0.123046875,
      "learning_rate": 6.481481481481482e-06,
      "loss": 1.3672,
      "step": 76
    },
    {
      "epoch": 0.35648148148148145,
      "grad_norm": 0.10791015625,
      "learning_rate": 6.435185185185186e-06,
      "loss": 1.3552,
      "step": 77
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 0.123046875,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 1.3901,
      "step": 78
    },
    {
      "epoch": 0.36574074074074076,
      "grad_norm": 0.1083984375,
      "learning_rate": 6.342592592592594e-06,
      "loss": 1.3756,
      "step": 79
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 0.11962890625,
      "learning_rate": 6.296296296296297e-06,
      "loss": 1.4091,
      "step": 80
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.11865234375,
      "learning_rate": 6.25e-06,
      "loss": 1.3801,
      "step": 81
    },
    {
      "epoch": 0.37962962962962965,
      "grad_norm": 0.12060546875,
      "learning_rate": 6.203703703703704e-06,
      "loss": 1.3784,
      "step": 82
    },
    {
      "epoch": 0.38425925925925924,
      "grad_norm": 0.1044921875,
      "learning_rate": 6.157407407407408e-06,
      "loss": 1.3578,
      "step": 83
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 0.11865234375,
      "learning_rate": 6.111111111111112e-06,
      "loss": 1.3502,
      "step": 84
    },
    {
      "epoch": 0.39351851851851855,
      "grad_norm": 0.1123046875,
      "learning_rate": 6.064814814814816e-06,
      "loss": 1.3592,
      "step": 85
    },
    {
      "epoch": 0.39814814814814814,
      "grad_norm": 0.1123046875,
      "learning_rate": 6.018518518518519e-06,
      "loss": 1.3566,
      "step": 86
    },
    {
      "epoch": 0.4027777777777778,
      "grad_norm": 0.11181640625,
      "learning_rate": 5.972222222222222e-06,
      "loss": 1.358,
      "step": 87
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 0.1337890625,
      "learning_rate": 5.925925925925926e-06,
      "loss": 1.3644,
      "step": 88
    },
    {
      "epoch": 0.41203703703703703,
      "grad_norm": 0.1279296875,
      "learning_rate": 5.8796296296296295e-06,
      "loss": 1.3555,
      "step": 89
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.10693359375,
      "learning_rate": 5.833333333333334e-06,
      "loss": 1.3633,
      "step": 90
    },
    {
      "epoch": 0.4212962962962963,
      "grad_norm": 0.11083984375,
      "learning_rate": 5.787037037037038e-06,
      "loss": 1.3404,
      "step": 91
    },
    {
      "epoch": 0.42592592592592593,
      "grad_norm": 0.10888671875,
      "learning_rate": 5.740740740740741e-06,
      "loss": 1.3373,
      "step": 92
    },
    {
      "epoch": 0.4305555555555556,
      "grad_norm": 0.1357421875,
      "learning_rate": 5.694444444444445e-06,
      "loss": 1.4147,
      "step": 93
    },
    {
      "epoch": 0.4351851851851852,
      "grad_norm": 0.119140625,
      "learning_rate": 5.6481481481481485e-06,
      "loss": 1.3617,
      "step": 94
    },
    {
      "epoch": 0.4398148148148148,
      "grad_norm": 0.11572265625,
      "learning_rate": 5.601851851851853e-06,
      "loss": 1.3697,
      "step": 95
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.1064453125,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.3574,
      "step": 96
    },
    {
      "epoch": 0.44907407407407407,
      "grad_norm": 0.10546875,
      "learning_rate": 5.5092592592592595e-06,
      "loss": 1.3114,
      "step": 97
    },
    {
      "epoch": 0.4537037037037037,
      "grad_norm": 0.109375,
      "learning_rate": 5.462962962962963e-06,
      "loss": 1.3247,
      "step": 98
    },
    {
      "epoch": 0.4583333333333333,
      "grad_norm": 0.1240234375,
      "learning_rate": 5.416666666666667e-06,
      "loss": 1.391,
      "step": 99
    },
    {
      "epoch": 0.46296296296296297,
      "grad_norm": 0.12060546875,
      "learning_rate": 5.370370370370371e-06,
      "loss": 1.3282,
      "step": 100
    },
    {
      "epoch": 0.4675925925925926,
      "grad_norm": 0.1337890625,
      "learning_rate": 5.324074074074075e-06,
      "loss": 1.3572,
      "step": 101
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 0.14453125,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 1.344,
      "step": 102
    },
    {
      "epoch": 0.47685185185185186,
      "grad_norm": 0.1171875,
      "learning_rate": 5.231481481481482e-06,
      "loss": 1.3269,
      "step": 103
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 0.1201171875,
      "learning_rate": 5.185185185185185e-06,
      "loss": 1.4097,
      "step": 104
    },
    {
      "epoch": 0.4861111111111111,
      "grad_norm": 0.1259765625,
      "learning_rate": 5.138888888888889e-06,
      "loss": 1.3737,
      "step": 105
    },
    {
      "epoch": 0.49074074074074076,
      "grad_norm": 0.12255859375,
      "learning_rate": 5.092592592592593e-06,
      "loss": 1.3586,
      "step": 106
    },
    {
      "epoch": 0.49537037037037035,
      "grad_norm": 0.1416015625,
      "learning_rate": 5.046296296296297e-06,
      "loss": 1.3287,
      "step": 107
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.12353515625,
      "learning_rate": 5e-06,
      "loss": 1.3559,
      "step": 108
    },
    {
      "epoch": 0.5046296296296297,
      "grad_norm": 0.11865234375,
      "learning_rate": 4.953703703703704e-06,
      "loss": 1.3248,
      "step": 109
    },
    {
      "epoch": 0.5092592592592593,
      "grad_norm": 0.12451171875,
      "learning_rate": 4.907407407407408e-06,
      "loss": 1.3586,
      "step": 110
    },
    {
      "epoch": 0.5138888888888888,
      "grad_norm": 0.12353515625,
      "learning_rate": 4.861111111111111e-06,
      "loss": 1.3474,
      "step": 111
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 0.1025390625,
      "learning_rate": 4.814814814814815e-06,
      "loss": 1.3546,
      "step": 112
    },
    {
      "epoch": 0.5231481481481481,
      "grad_norm": 0.12451171875,
      "learning_rate": 4.768518518518519e-06,
      "loss": 1.3404,
      "step": 113
    },
    {
      "epoch": 0.5277777777777778,
      "grad_norm": 0.107421875,
      "learning_rate": 4.722222222222222e-06,
      "loss": 1.3424,
      "step": 114
    },
    {
      "epoch": 0.5324074074074074,
      "grad_norm": 0.119140625,
      "learning_rate": 4.675925925925927e-06,
      "loss": 1.3039,
      "step": 115
    },
    {
      "epoch": 0.5370370370370371,
      "grad_norm": 0.10888671875,
      "learning_rate": 4.62962962962963e-06,
      "loss": 1.3523,
      "step": 116
    },
    {
      "epoch": 0.5416666666666666,
      "grad_norm": 0.1259765625,
      "learning_rate": 4.583333333333333e-06,
      "loss": 1.3284,
      "step": 117
    },
    {
      "epoch": 0.5462962962962963,
      "grad_norm": 0.10888671875,
      "learning_rate": 4.537037037037038e-06,
      "loss": 1.3411,
      "step": 118
    },
    {
      "epoch": 0.5509259259259259,
      "grad_norm": 0.10400390625,
      "learning_rate": 4.490740740740741e-06,
      "loss": 1.3177,
      "step": 119
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.10302734375,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.3192,
      "step": 120
    },
    {
      "epoch": 0.5601851851851852,
      "grad_norm": 0.12255859375,
      "learning_rate": 4.398148148148149e-06,
      "loss": 1.3198,
      "step": 121
    },
    {
      "epoch": 0.5648148148148148,
      "grad_norm": 0.134765625,
      "learning_rate": 4.351851851851852e-06,
      "loss": 1.2787,
      "step": 122
    },
    {
      "epoch": 0.5694444444444444,
      "grad_norm": 0.12890625,
      "learning_rate": 4.305555555555556e-06,
      "loss": 1.3156,
      "step": 123
    },
    {
      "epoch": 0.5740740740740741,
      "grad_norm": 0.18359375,
      "learning_rate": 4.2592592592592596e-06,
      "loss": 1.3766,
      "step": 124
    },
    {
      "epoch": 0.5787037037037037,
      "grad_norm": 0.125,
      "learning_rate": 4.212962962962963e-06,
      "loss": 1.3406,
      "step": 125
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 0.1142578125,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.3472,
      "step": 126
    },
    {
      "epoch": 0.5879629629629629,
      "grad_norm": 0.10693359375,
      "learning_rate": 4.1203703703703705e-06,
      "loss": 1.323,
      "step": 127
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 0.109375,
      "learning_rate": 4.074074074074074e-06,
      "loss": 1.3155,
      "step": 128
    },
    {
      "epoch": 0.5972222222222222,
      "grad_norm": 0.138671875,
      "learning_rate": 4.027777777777779e-06,
      "loss": 1.2893,
      "step": 129
    },
    {
      "epoch": 0.6018518518518519,
      "grad_norm": 0.11083984375,
      "learning_rate": 3.9814814814814814e-06,
      "loss": 1.3401,
      "step": 130
    },
    {
      "epoch": 0.6064814814814815,
      "grad_norm": 0.10791015625,
      "learning_rate": 3.935185185185186e-06,
      "loss": 1.2977,
      "step": 131
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 0.1279296875,
      "learning_rate": 3.88888888888889e-06,
      "loss": 1.3258,
      "step": 132
    },
    {
      "epoch": 0.6157407407407407,
      "grad_norm": 0.1123046875,
      "learning_rate": 3.842592592592592e-06,
      "loss": 1.319,
      "step": 133
    },
    {
      "epoch": 0.6203703703703703,
      "grad_norm": 0.12890625,
      "learning_rate": 3.796296296296297e-06,
      "loss": 1.2834,
      "step": 134
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.103515625,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 1.2919,
      "step": 135
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 0.1279296875,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 1.3069,
      "step": 136
    },
    {
      "epoch": 0.6342592592592593,
      "grad_norm": 0.10546875,
      "learning_rate": 3.657407407407408e-06,
      "loss": 1.3049,
      "step": 137
    },
    {
      "epoch": 0.6388888888888888,
      "grad_norm": 0.1259765625,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 1.3314,
      "step": 138
    },
    {
      "epoch": 0.6435185185185185,
      "grad_norm": 0.11767578125,
      "learning_rate": 3.5648148148148147e-06,
      "loss": 1.299,
      "step": 139
    },
    {
      "epoch": 0.6481481481481481,
      "grad_norm": 0.1328125,
      "learning_rate": 3.5185185185185187e-06,
      "loss": 1.3155,
      "step": 140
    },
    {
      "epoch": 0.6527777777777778,
      "grad_norm": 0.1005859375,
      "learning_rate": 3.4722222222222224e-06,
      "loss": 1.2978,
      "step": 141
    },
    {
      "epoch": 0.6574074074074074,
      "grad_norm": 0.10888671875,
      "learning_rate": 3.4259259259259265e-06,
      "loss": 1.3267,
      "step": 142
    },
    {
      "epoch": 0.6620370370370371,
      "grad_norm": 0.197265625,
      "learning_rate": 3.37962962962963e-06,
      "loss": 1.331,
      "step": 143
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.1708984375,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.3204,
      "step": 144
    },
    {
      "epoch": 0.6712962962962963,
      "grad_norm": 0.1455078125,
      "learning_rate": 3.2870370370370374e-06,
      "loss": 1.3048,
      "step": 145
    },
    {
      "epoch": 0.6759259259259259,
      "grad_norm": 0.12060546875,
      "learning_rate": 3.240740740740741e-06,
      "loss": 1.3547,
      "step": 146
    },
    {
      "epoch": 0.6805555555555556,
      "grad_norm": 0.11328125,
      "learning_rate": 3.1944444444444443e-06,
      "loss": 1.2981,
      "step": 147
    },
    {
      "epoch": 0.6851851851851852,
      "grad_norm": 0.138671875,
      "learning_rate": 3.1481481481481483e-06,
      "loss": 1.3497,
      "step": 148
    },
    {
      "epoch": 0.6898148148148148,
      "grad_norm": 0.1181640625,
      "learning_rate": 3.101851851851852e-06,
      "loss": 1.329,
      "step": 149
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 0.1572265625,
      "learning_rate": 3.055555555555556e-06,
      "loss": 1.3209,
      "step": 150
    },
    {
      "epoch": 0.6990740740740741,
      "grad_norm": 0.115234375,
      "learning_rate": 3.0092592592592597e-06,
      "loss": 1.2906,
      "step": 151
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 0.1337890625,
      "learning_rate": 2.962962962962963e-06,
      "loss": 1.317,
      "step": 152
    },
    {
      "epoch": 0.7083333333333334,
      "grad_norm": 0.11669921875,
      "learning_rate": 2.916666666666667e-06,
      "loss": 1.3041,
      "step": 153
    },
    {
      "epoch": 0.7129629629629629,
      "grad_norm": 0.1240234375,
      "learning_rate": 2.8703703703703706e-06,
      "loss": 1.3197,
      "step": 154
    },
    {
      "epoch": 0.7175925925925926,
      "grad_norm": 0.1259765625,
      "learning_rate": 2.8240740740740743e-06,
      "loss": 1.3043,
      "step": 155
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 0.1416015625,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 1.364,
      "step": 156
    },
    {
      "epoch": 0.7268518518518519,
      "grad_norm": 0.11962890625,
      "learning_rate": 2.7314814814814816e-06,
      "loss": 1.2892,
      "step": 157
    },
    {
      "epoch": 0.7314814814814815,
      "grad_norm": 0.10546875,
      "learning_rate": 2.6851851851851856e-06,
      "loss": 1.3311,
      "step": 158
    },
    {
      "epoch": 0.7361111111111112,
      "grad_norm": 0.119140625,
      "learning_rate": 2.6388888888888893e-06,
      "loss": 1.2989,
      "step": 159
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 0.1064453125,
      "learning_rate": 2.5925925925925925e-06,
      "loss": 1.2537,
      "step": 160
    },
    {
      "epoch": 0.7453703703703703,
      "grad_norm": 0.12109375,
      "learning_rate": 2.5462962962962966e-06,
      "loss": 1.2893,
      "step": 161
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.150390625,
      "learning_rate": 2.5e-06,
      "loss": 1.3127,
      "step": 162
    },
    {
      "epoch": 0.7546296296296297,
      "grad_norm": 0.10791015625,
      "learning_rate": 2.453703703703704e-06,
      "loss": 1.3022,
      "step": 163
    },
    {
      "epoch": 0.7592592592592593,
      "grad_norm": 0.1171875,
      "learning_rate": 2.4074074074074075e-06,
      "loss": 1.3068,
      "step": 164
    },
    {
      "epoch": 0.7638888888888888,
      "grad_norm": 0.2041015625,
      "learning_rate": 2.361111111111111e-06,
      "loss": 1.4497,
      "step": 165
    },
    {
      "epoch": 0.7685185185185185,
      "grad_norm": 0.10693359375,
      "learning_rate": 2.314814814814815e-06,
      "loss": 1.3118,
      "step": 166
    },
    {
      "epoch": 0.7731481481481481,
      "grad_norm": 0.10498046875,
      "learning_rate": 2.268518518518519e-06,
      "loss": 1.3174,
      "step": 167
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 0.11279296875,
      "learning_rate": 2.222222222222222e-06,
      "loss": 1.3016,
      "step": 168
    },
    {
      "epoch": 0.7824074074074074,
      "grad_norm": 0.10693359375,
      "learning_rate": 2.175925925925926e-06,
      "loss": 1.3194,
      "step": 169
    },
    {
      "epoch": 0.7870370370370371,
      "grad_norm": 0.1435546875,
      "learning_rate": 2.1296296296296298e-06,
      "loss": 1.3513,
      "step": 170
    },
    {
      "epoch": 0.7916666666666666,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 1.2981,
      "step": 171
    },
    {
      "epoch": 0.7962962962962963,
      "grad_norm": 0.1064453125,
      "learning_rate": 2.037037037037037e-06,
      "loss": 1.321,
      "step": 172
    },
    {
      "epoch": 0.8009259259259259,
      "grad_norm": 0.1318359375,
      "learning_rate": 1.9907407407407407e-06,
      "loss": 1.2758,
      "step": 173
    },
    {
      "epoch": 0.8055555555555556,
      "grad_norm": 0.12890625,
      "learning_rate": 1.944444444444445e-06,
      "loss": 1.2757,
      "step": 174
    },
    {
      "epoch": 0.8101851851851852,
      "grad_norm": 0.130859375,
      "learning_rate": 1.8981481481481484e-06,
      "loss": 1.2969,
      "step": 175
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 0.1220703125,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 1.3381,
      "step": 176
    },
    {
      "epoch": 0.8194444444444444,
      "grad_norm": 0.11767578125,
      "learning_rate": 1.8055555555555557e-06,
      "loss": 1.2732,
      "step": 177
    },
    {
      "epoch": 0.8240740740740741,
      "grad_norm": 0.1376953125,
      "learning_rate": 1.7592592592592594e-06,
      "loss": 1.2802,
      "step": 178
    },
    {
      "epoch": 0.8287037037037037,
      "grad_norm": 0.1396484375,
      "learning_rate": 1.7129629629629632e-06,
      "loss": 1.2786,
      "step": 179
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.115234375,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.3174,
      "step": 180
    },
    {
      "epoch": 0.8379629629629629,
      "grad_norm": 0.1044921875,
      "learning_rate": 1.6203703703703705e-06,
      "loss": 1.2806,
      "step": 181
    },
    {
      "epoch": 0.8425925925925926,
      "grad_norm": 0.1630859375,
      "learning_rate": 1.5740740740740742e-06,
      "loss": 1.3792,
      "step": 182
    },
    {
      "epoch": 0.8472222222222222,
      "grad_norm": 0.1103515625,
      "learning_rate": 1.527777777777778e-06,
      "loss": 1.3167,
      "step": 183
    },
    {
      "epoch": 0.8518518518518519,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.4814814814814815e-06,
      "loss": 1.2976,
      "step": 184
    },
    {
      "epoch": 0.8564814814814815,
      "grad_norm": 0.1044921875,
      "learning_rate": 1.4351851851851853e-06,
      "loss": 1.3156,
      "step": 185
    },
    {
      "epoch": 0.8611111111111112,
      "grad_norm": 0.11474609375,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 1.2805,
      "step": 186
    },
    {
      "epoch": 0.8657407407407407,
      "grad_norm": 0.111328125,
      "learning_rate": 1.3425925925925928e-06,
      "loss": 1.3003,
      "step": 187
    },
    {
      "epoch": 0.8703703703703703,
      "grad_norm": 0.12158203125,
      "learning_rate": 1.2962962962962962e-06,
      "loss": 1.3028,
      "step": 188
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.12109375,
      "learning_rate": 1.25e-06,
      "loss": 1.2803,
      "step": 189
    },
    {
      "epoch": 0.8796296296296297,
      "grad_norm": 0.11474609375,
      "learning_rate": 1.2037037037037037e-06,
      "loss": 1.2685,
      "step": 190
    },
    {
      "epoch": 0.8842592592592593,
      "grad_norm": 0.11474609375,
      "learning_rate": 1.1574074074074076e-06,
      "loss": 1.2779,
      "step": 191
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.1083984375,
      "learning_rate": 1.111111111111111e-06,
      "loss": 1.3063,
      "step": 192
    },
    {
      "epoch": 0.8935185185185185,
      "grad_norm": 0.130859375,
      "learning_rate": 1.0648148148148149e-06,
      "loss": 1.2826,
      "step": 193
    },
    {
      "epoch": 0.8981481481481481,
      "grad_norm": 0.1328125,
      "learning_rate": 1.0185185185185185e-06,
      "loss": 1.3563,
      "step": 194
    },
    {
      "epoch": 0.9027777777777778,
      "grad_norm": 0.12158203125,
      "learning_rate": 9.722222222222224e-07,
      "loss": 1.2576,
      "step": 195
    },
    {
      "epoch": 0.9074074074074074,
      "grad_norm": 0.1162109375,
      "learning_rate": 9.259259259259259e-07,
      "loss": 1.2909,
      "step": 196
    },
    {
      "epoch": 0.9120370370370371,
      "grad_norm": 0.1171875,
      "learning_rate": 8.796296296296297e-07,
      "loss": 1.3144,
      "step": 197
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.10791015625,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.2798,
      "step": 198
    },
    {
      "epoch": 0.9212962962962963,
      "grad_norm": 0.126953125,
      "learning_rate": 7.870370370370371e-07,
      "loss": 1.2795,
      "step": 199
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 0.15234375,
      "learning_rate": 7.407407407407407e-07,
      "loss": 1.3655,
      "step": 200
    },
    {
      "epoch": 0.9305555555555556,
      "grad_norm": 0.10791015625,
      "learning_rate": 6.944444444444446e-07,
      "loss": 1.2915,
      "step": 201
    },
    {
      "epoch": 0.9351851851851852,
      "grad_norm": 0.12255859375,
      "learning_rate": 6.481481481481481e-07,
      "loss": 1.3282,
      "step": 202
    },
    {
      "epoch": 0.9398148148148148,
      "grad_norm": 0.1337890625,
      "learning_rate": 6.018518518518519e-07,
      "loss": 1.2615,
      "step": 203
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 0.146484375,
      "learning_rate": 5.555555555555555e-07,
      "loss": 1.301,
      "step": 204
    },
    {
      "epoch": 0.9490740740740741,
      "grad_norm": 0.1220703125,
      "learning_rate": 5.092592592592593e-07,
      "loss": 1.3038,
      "step": 205
    },
    {
      "epoch": 0.9537037037037037,
      "grad_norm": 0.103515625,
      "learning_rate": 4.6296296296296297e-07,
      "loss": 1.28,
      "step": 206
    },
    {
      "epoch": 0.9583333333333334,
      "grad_norm": 0.1298828125,
      "learning_rate": 4.1666666666666667e-07,
      "loss": 1.3063,
      "step": 207
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 0.11328125,
      "learning_rate": 3.7037037037037036e-07,
      "loss": 1.2821,
      "step": 208
    },
    {
      "epoch": 0.9675925925925926,
      "grad_norm": 0.11962890625,
      "learning_rate": 3.2407407407407406e-07,
      "loss": 1.3411,
      "step": 209
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 0.107421875,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 1.3051,
      "step": 210
    },
    {
      "epoch": 0.9768518518518519,
      "grad_norm": 0.1474609375,
      "learning_rate": 2.3148148148148148e-07,
      "loss": 1.3365,
      "step": 211
    },
    {
      "epoch": 0.9814814814814815,
      "grad_norm": 0.10693359375,
      "learning_rate": 1.8518518518518518e-07,
      "loss": 1.2888,
      "step": 212
    },
    {
      "epoch": 0.9861111111111112,
      "grad_norm": 0.12060546875,
      "learning_rate": 1.3888888888888888e-07,
      "loss": 1.3176,
      "step": 213
    },
    {
      "epoch": 0.9907407407407407,
      "grad_norm": 0.1455078125,
      "learning_rate": 9.259259259259259e-08,
      "loss": 1.3473,
      "step": 214
    },
    {
      "epoch": 0.9953703703703703,
      "grad_norm": 0.12255859375,
      "learning_rate": 4.6296296296296295e-08,
      "loss": 1.2989,
      "step": 215
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.1259765625,
      "learning_rate": 0.0,
      "loss": 1.2764,
      "step": 216
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 216,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.552438580445184e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}