{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9683377308707124, |
|
"eval_steps": 500, |
|
"global_step": 4500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.989006156552331e-05, |
|
"loss": 1.8193, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.978012313104662e-05, |
|
"loss": 1.7656, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.967018469656992e-05, |
|
"loss": 1.7573, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.956024626209323e-05, |
|
"loss": 1.6758, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9450307827616535e-05, |
|
"loss": 1.5776, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.934036939313985e-05, |
|
"loss": 1.615, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9230430958663146e-05, |
|
"loss": 1.5378, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.912049252418646e-05, |
|
"loss": 1.6188, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.901055408970976e-05, |
|
"loss": 1.5489, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.8900615655233075e-05, |
|
"loss": 1.5724, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.879067722075638e-05, |
|
"loss": 1.5262, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8680738786279686e-05, |
|
"loss": 1.5224, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.857080035180299e-05, |
|
"loss": 1.5176, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.84608619173263e-05, |
|
"loss": 1.5263, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.835092348284961e-05, |
|
"loss": 1.5832, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.824098504837291e-05, |
|
"loss": 1.5403, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.813104661389622e-05, |
|
"loss": 1.5689, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.802110817941953e-05, |
|
"loss": 1.5389, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7911169744942836e-05, |
|
"loss": 1.5593, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.780123131046614e-05, |
|
"loss": 1.52, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7691292875989446e-05, |
|
"loss": 1.6009, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.758135444151275e-05, |
|
"loss": 1.5612, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7471416007036063e-05, |
|
"loss": 1.5532, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.736147757255937e-05, |
|
"loss": 1.5636, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7251539138082674e-05, |
|
"loss": 1.56, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.714160070360598e-05, |
|
"loss": 1.5406, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.703166226912929e-05, |
|
"loss": 1.4851, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.6921723834652596e-05, |
|
"loss": 1.5333, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.681178540017591e-05, |
|
"loss": 1.5684, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.670184696569921e-05, |
|
"loss": 1.5636, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.659190853122252e-05, |
|
"loss": 1.5434, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.6481970096745824e-05, |
|
"loss": 1.5002, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6372031662269136e-05, |
|
"loss": 1.5413, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6262093227792435e-05, |
|
"loss": 1.5275, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.615215479331575e-05, |
|
"loss": 1.508, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.604221635883905e-05, |
|
"loss": 1.5033, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.593227792436236e-05, |
|
"loss": 1.5416, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.582233948988567e-05, |
|
"loss": 1.5092, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5712401055408974e-05, |
|
"loss": 1.5605, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.560246262093228e-05, |
|
"loss": 1.5624, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.5492524186455585e-05, |
|
"loss": 1.5394, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.53825857519789e-05, |
|
"loss": 1.476, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.52726473175022e-05, |
|
"loss": 1.5367, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.516270888302551e-05, |
|
"loss": 1.5041, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.505277044854881e-05, |
|
"loss": 1.5342, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.4942832014072125e-05, |
|
"loss": 1.5068, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.483289357959543e-05, |
|
"loss": 1.4912, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.4722955145118735e-05, |
|
"loss": 1.4893, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.461301671064204e-05, |
|
"loss": 1.5338, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.450307827616535e-05, |
|
"loss": 1.5258, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.439313984168866e-05, |
|
"loss": 1.5373, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.428320140721196e-05, |
|
"loss": 1.4972, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.417326297273527e-05, |
|
"loss": 1.4868, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.406332453825858e-05, |
|
"loss": 1.5167, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.3953386103781885e-05, |
|
"loss": 1.5242, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.384344766930519e-05, |
|
"loss": 1.5309, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.3733509234828496e-05, |
|
"loss": 1.5091, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.36235708003518e-05, |
|
"loss": 1.5041, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.351363236587511e-05, |
|
"loss": 1.4934, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.340369393139842e-05, |
|
"loss": 1.5113, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.329375549692173e-05, |
|
"loss": 1.4899, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.318381706244503e-05, |
|
"loss": 1.5019, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.307387862796834e-05, |
|
"loss": 1.5019, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.2963940193491646e-05, |
|
"loss": 1.5331, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.285400175901496e-05, |
|
"loss": 1.5444, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.2744063324538256e-05, |
|
"loss": 1.527, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.263412489006157e-05, |
|
"loss": 1.5329, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.2524186455584874e-05, |
|
"loss": 1.5572, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.2414248021108186e-05, |
|
"loss": 1.53, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.230430958663149e-05, |
|
"loss": 1.4891, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2194371152154796e-05, |
|
"loss": 1.5004, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.20844327176781e-05, |
|
"loss": 1.5134, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.1974494283201407e-05, |
|
"loss": 1.5081, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.186455584872472e-05, |
|
"loss": 1.5006, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.1754617414248024e-05, |
|
"loss": 1.515, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.164467897977133e-05, |
|
"loss": 1.4862, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1534740545294634e-05, |
|
"loss": 1.5196, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1424802110817946e-05, |
|
"loss": 1.4908, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.131486367634125e-05, |
|
"loss": 1.5443, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.120492524186456e-05, |
|
"loss": 1.5457, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.109498680738786e-05, |
|
"loss": 1.5616, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.0985048372911174e-05, |
|
"loss": 1.5537, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.087510993843448e-05, |
|
"loss": 1.5206, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.0765171503957784e-05, |
|
"loss": 1.4923, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.065523306948109e-05, |
|
"loss": 1.4721, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.05452946350044e-05, |
|
"loss": 1.5357, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.043535620052771e-05, |
|
"loss": 1.5163, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.032541776605102e-05, |
|
"loss": 1.5406, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.021547933157432e-05, |
|
"loss": 1.5151, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.010554089709763e-05, |
|
"loss": 1.5482, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9995602462620935e-05, |
|
"loss": 1.487, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.988566402814424e-05, |
|
"loss": 1.5559, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9775725593667545e-05, |
|
"loss": 1.5085, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.966578715919085e-05, |
|
"loss": 1.5091, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.955584872471416e-05, |
|
"loss": 1.512, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.944591029023747e-05, |
|
"loss": 1.4824, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.933597185576078e-05, |
|
"loss": 1.4935, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.922603342128408e-05, |
|
"loss": 1.5034, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.911609498680739e-05, |
|
"loss": 1.5449, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9006156552330695e-05, |
|
"loss": 1.481, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.889621811785401e-05, |
|
"loss": 1.554, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8786279683377306e-05, |
|
"loss": 1.5155, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.867634124890062e-05, |
|
"loss": 1.5023, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.856640281442392e-05, |
|
"loss": 1.5311, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8456464379947235e-05, |
|
"loss": 1.554, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.834652594547054e-05, |
|
"loss": 1.5175, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8236587510993846e-05, |
|
"loss": 1.499, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.812664907651715e-05, |
|
"loss": 1.514, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.801671064204046e-05, |
|
"loss": 1.5116, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.790677220756377e-05, |
|
"loss": 1.5545, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.779683377308707e-05, |
|
"loss": 1.5126, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.768689533861038e-05, |
|
"loss": 1.5129, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7576956904133684e-05, |
|
"loss": 1.5148, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7467018469656996e-05, |
|
"loss": 1.5715, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.73570800351803e-05, |
|
"loss": 1.5227, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7247141600703606e-05, |
|
"loss": 1.4697, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.713720316622691e-05, |
|
"loss": 1.5557, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7027264731750223e-05, |
|
"loss": 1.5204, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.691732629727353e-05, |
|
"loss": 1.5246, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.680738786279684e-05, |
|
"loss": 1.526, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.669744942832014e-05, |
|
"loss": 1.5052, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.658751099384345e-05, |
|
"loss": 1.5383, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6477572559366756e-05, |
|
"loss": 1.5561, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.636763412489007e-05, |
|
"loss": 1.5293, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.625769569041337e-05, |
|
"loss": 1.5146, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.614775725593668e-05, |
|
"loss": 1.5626, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.6037818821459984e-05, |
|
"loss": 1.5603, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.592788038698329e-05, |
|
"loss": 1.538, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.58179419525066e-05, |
|
"loss": 1.4977, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.5708003518029907e-05, |
|
"loss": 1.4758, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.559806508355321e-05, |
|
"loss": 1.4657, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.548812664907652e-05, |
|
"loss": 1.5382, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.537818821459983e-05, |
|
"loss": 1.4825, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.526824978012313e-05, |
|
"loss": 1.5182, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.515831134564644e-05, |
|
"loss": 1.5143, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.5048372911169745e-05, |
|
"loss": 1.5374, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.493843447669306e-05, |
|
"loss": 1.5335, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.482849604221636e-05, |
|
"loss": 1.532, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.471855760773967e-05, |
|
"loss": 1.5056, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.460861917326297e-05, |
|
"loss": 1.5248, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4498680738786285e-05, |
|
"loss": 1.5129, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.438874230430959e-05, |
|
"loss": 1.5223, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4278803869832895e-05, |
|
"loss": 1.5236, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.41688654353562e-05, |
|
"loss": 1.4966, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.405892700087951e-05, |
|
"loss": 1.5395, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.394898856640282e-05, |
|
"loss": 1.5559, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.383905013192612e-05, |
|
"loss": 1.4817, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.372911169744943e-05, |
|
"loss": 1.4662, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.361917326297273e-05, |
|
"loss": 1.5073, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3509234828496045e-05, |
|
"loss": 1.5354, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.339929639401935e-05, |
|
"loss": 1.5353, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3289357959542656e-05, |
|
"loss": 1.4755, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.317941952506596e-05, |
|
"loss": 1.5223, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.306948109058927e-05, |
|
"loss": 1.4958, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.295954265611258e-05, |
|
"loss": 1.5234, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.284960422163589e-05, |
|
"loss": 1.5169, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.273966578715919e-05, |
|
"loss": 1.5107, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.26297273526825e-05, |
|
"loss": 1.5183, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2519788918205806e-05, |
|
"loss": 1.4787, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.240985048372912e-05, |
|
"loss": 1.5123, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2299912049252416e-05, |
|
"loss": 1.528, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.218997361477573e-05, |
|
"loss": 1.5125, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.2080035180299034e-05, |
|
"loss": 1.5148, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.197009674582234e-05, |
|
"loss": 1.5091, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.186015831134565e-05, |
|
"loss": 1.4923, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.1750219876868956e-05, |
|
"loss": 1.5544, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.164028144239226e-05, |
|
"loss": 1.5111, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.1530343007915567e-05, |
|
"loss": 1.5814, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.142040457343888e-05, |
|
"loss": 1.4826, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.131046613896218e-05, |
|
"loss": 1.4994, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.120052770448549e-05, |
|
"loss": 1.4444, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.1090589270008794e-05, |
|
"loss": 1.5175, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.0980650835532106e-05, |
|
"loss": 1.4922, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.087071240105541e-05, |
|
"loss": 1.5155, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.076077396657872e-05, |
|
"loss": 1.5037, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.065083553210202e-05, |
|
"loss": 1.5448, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0540897097625334e-05, |
|
"loss": 1.4882, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.043095866314864e-05, |
|
"loss": 1.514, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.032102022867194e-05, |
|
"loss": 1.4848, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.021108179419525e-05, |
|
"loss": 1.4672, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.010114335971856e-05, |
|
"loss": 1.5191, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9991204925241867e-05, |
|
"loss": 1.4836, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9881266490765176e-05, |
|
"loss": 1.5088, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9771328056288477e-05, |
|
"loss": 1.5183, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9661389621811786e-05, |
|
"loss": 1.5508, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9551451187335095e-05, |
|
"loss": 1.5068, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9441512752858403e-05, |
|
"loss": 1.512, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9331574318381705e-05, |
|
"loss": 1.523, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9221635883905014e-05, |
|
"loss": 1.4935, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9111697449428322e-05, |
|
"loss": 1.4949, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.900175901495163e-05, |
|
"loss": 1.5423, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8891820580474936e-05, |
|
"loss": 1.5473, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.878188214599824e-05, |
|
"loss": 1.4828, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8671943711521547e-05, |
|
"loss": 1.5154, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.8562005277044855e-05, |
|
"loss": 1.4707, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.8452066842568164e-05, |
|
"loss": 1.5086, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8342128408091473e-05, |
|
"loss": 1.5008, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8232189973614774e-05, |
|
"loss": 1.5568, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8122251539138083e-05, |
|
"loss": 1.464, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.801231310466139e-05, |
|
"loss": 1.5346, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.79023746701847e-05, |
|
"loss": 1.5286, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7792436235708002e-05, |
|
"loss": 1.5393, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.768249780123131e-05, |
|
"loss": 1.5015, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.757255936675462e-05, |
|
"loss": 1.4415, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7462620932277928e-05, |
|
"loss": 1.5488, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7352682497801237e-05, |
|
"loss": 1.5048, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.724274406332454e-05, |
|
"loss": 1.5446, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7132805628847847e-05, |
|
"loss": 1.4672, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.7022867194371156e-05, |
|
"loss": 1.4549, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.691292875989446e-05, |
|
"loss": 1.5202, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6802990325417766e-05, |
|
"loss": 1.5026, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6693051890941075e-05, |
|
"loss": 1.532, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.658311345646438e-05, |
|
"loss": 1.4932, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.647317502198769e-05, |
|
"loss": 1.4823, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6363236587510997e-05, |
|
"loss": 1.4976, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.62532981530343e-05, |
|
"loss": 1.5265, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6143359718557608e-05, |
|
"loss": 1.5474, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6033421284080916e-05, |
|
"loss": 1.5145, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.5923482849604225e-05, |
|
"loss": 1.4762, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5813544415127527e-05, |
|
"loss": 1.5242, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5703605980650835e-05, |
|
"loss": 1.5089, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5593667546174144e-05, |
|
"loss": 1.507, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5483729111697453e-05, |
|
"loss": 1.5178, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.537379067722076e-05, |
|
"loss": 1.5254, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5263852242744063e-05, |
|
"loss": 1.5167, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5153913808267372e-05, |
|
"loss": 1.533, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.504397537379068e-05, |
|
"loss": 1.5485, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4934036939313986e-05, |
|
"loss": 1.5244, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.482409850483729e-05, |
|
"loss": 1.4981, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.47141600703606e-05, |
|
"loss": 1.4904, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4604221635883905e-05, |
|
"loss": 1.5157, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4494283201407213e-05, |
|
"loss": 1.5497, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.438434476693052e-05, |
|
"loss": 1.5007, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4274406332453827e-05, |
|
"loss": 1.5031, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4164467897977133e-05, |
|
"loss": 1.5114, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.405452946350044e-05, |
|
"loss": 1.5132, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3944591029023746e-05, |
|
"loss": 1.5287, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3834652594547055e-05, |
|
"loss": 1.5206, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.372471416007036e-05, |
|
"loss": 1.5242, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.361477572559367e-05, |
|
"loss": 1.5426, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3504837291116977e-05, |
|
"loss": 1.53, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3394898856640283e-05, |
|
"loss": 1.4904, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.328496042216359e-05, |
|
"loss": 1.4645, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3175021987686897e-05, |
|
"loss": 1.5093, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.3065083553210205e-05, |
|
"loss": 1.4894, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.295514511873351e-05, |
|
"loss": 1.4864, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.284520668425682e-05, |
|
"loss": 1.4851, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2735268249780124e-05, |
|
"loss": 1.4936, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.262532981530343e-05, |
|
"loss": 1.5312, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2515391380826738e-05, |
|
"loss": 1.5394, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2405452946350043e-05, |
|
"loss": 1.5216, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2295514511873352e-05, |
|
"loss": 1.4723, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2185576077396657e-05, |
|
"loss": 1.5438, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2075637642919966e-05, |
|
"loss": 1.5432, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.196569920844327e-05, |
|
"loss": 1.478, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.185576077396658e-05, |
|
"loss": 1.5049, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.174582233948989e-05, |
|
"loss": 1.5364, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1635883905013194e-05, |
|
"loss": 1.4933, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1525945470536502e-05, |
|
"loss": 1.4991, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1416007036059807e-05, |
|
"loss": 1.5161, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1306068601583116e-05, |
|
"loss": 1.5207, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.119613016710642e-05, |
|
"loss": 1.5015, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.108619173262973e-05, |
|
"loss": 1.5262, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0976253298153035e-05, |
|
"loss": 1.4978, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0866314863676344e-05, |
|
"loss": 1.5328, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.075637642919965e-05, |
|
"loss": 1.5053, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0646437994722954e-05, |
|
"loss": 1.4936, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0536499560246263e-05, |
|
"loss": 1.5393, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0426561125769568e-05, |
|
"loss": 1.5089, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0316622691292877e-05, |
|
"loss": 1.5144, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0206684256816182e-05, |
|
"loss": 1.5419, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.009674582233949e-05, |
|
"loss": 1.498, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.9986807387862796e-05, |
|
"loss": 1.4677, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9876868953386104e-05, |
|
"loss": 1.5416, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9766930518909413e-05, |
|
"loss": 1.5034, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.965699208443272e-05, |
|
"loss": 1.5101, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9547053649956027e-05, |
|
"loss": 1.5327, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9437115215479332e-05, |
|
"loss": 1.4765, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.932717678100264e-05, |
|
"loss": 1.5124, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9217238346525946e-05, |
|
"loss": 1.4932, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9107299912049255e-05, |
|
"loss": 1.4953, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.899736147757256e-05, |
|
"loss": 1.4753, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.888742304309587e-05, |
|
"loss": 1.5279, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8777484608619174e-05, |
|
"loss": 1.4789, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.866754617414248e-05, |
|
"loss": 1.5415, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8557607739665788e-05, |
|
"loss": 1.5265, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8447669305189093e-05, |
|
"loss": 1.55, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.83377308707124e-05, |
|
"loss": 1.5046, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8227792436235707e-05, |
|
"loss": 1.4824, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8117854001759015e-05, |
|
"loss": 1.5157, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8007915567282324e-05, |
|
"loss": 1.5236, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.789797713280563e-05, |
|
"loss": 1.5184, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7788038698328938e-05, |
|
"loss": 1.5329, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7678100263852243e-05, |
|
"loss": 1.5378, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.756816182937555e-05, |
|
"loss": 1.5325, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7458223394898857e-05, |
|
"loss": 1.5116, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7348284960422166e-05, |
|
"loss": 1.4901, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.723834652594547e-05, |
|
"loss": 1.5305, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.712840809146878e-05, |
|
"loss": 1.4912, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7018469656992088e-05, |
|
"loss": 1.4994, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6908531222515393e-05, |
|
"loss": 1.5383, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.67985927880387e-05, |
|
"loss": 1.5031, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6688654353562007e-05, |
|
"loss": 1.5203, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6578715919085312e-05, |
|
"loss": 1.4729, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6468777484608618e-05, |
|
"loss": 1.4721, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6358839050131926e-05, |
|
"loss": 1.5185, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.624890061565523e-05, |
|
"loss": 1.5086, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.613896218117854e-05, |
|
"loss": 1.477, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.602902374670185e-05, |
|
"loss": 1.4775, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5919085312225154e-05, |
|
"loss": 1.4952, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5809146877748463e-05, |
|
"loss": 1.5316, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5699208443271768e-05, |
|
"loss": 1.5095, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5589270008795076e-05, |
|
"loss": 1.4931, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.547933157431838e-05, |
|
"loss": 1.4824, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.536939313984169e-05, |
|
"loss": 1.5219, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5259454705364995e-05, |
|
"loss": 1.4946, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5149516270888302e-05, |
|
"loss": 1.4936, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5039577836411611e-05, |
|
"loss": 1.5459, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4929639401934916e-05, |
|
"loss": 1.4993, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4819700967458225e-05, |
|
"loss": 1.4834, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.470976253298153e-05, |
|
"loss": 1.5001, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4599824098504839e-05, |
|
"loss": 1.4882, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4489885664028144e-05, |
|
"loss": 1.5259, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4379947229551453e-05, |
|
"loss": 1.4359, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.427000879507476e-05, |
|
"loss": 1.465, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4160070360598065e-05, |
|
"loss": 1.533, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4050131926121373e-05, |
|
"loss": 1.5291, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3940193491644679e-05, |
|
"loss": 1.4929, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3830255057167987e-05, |
|
"loss": 1.4779, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3720316622691292e-05, |
|
"loss": 1.4619, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3610378188214601e-05, |
|
"loss": 1.4924, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3500439753737906e-05, |
|
"loss": 1.4683, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3390501319261215e-05, |
|
"loss": 1.4618, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3280562884784522e-05, |
|
"loss": 1.4998, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3170624450307827e-05, |
|
"loss": 1.5046, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3060686015831136e-05, |
|
"loss": 1.5552, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2950747581354441e-05, |
|
"loss": 1.4746, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.284080914687775e-05, |
|
"loss": 1.5345, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2730870712401055e-05, |
|
"loss": 1.4956, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2620932277924363e-05, |
|
"loss": 1.5049, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2510993843447669e-05, |
|
"loss": 1.5076, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2401055408970977e-05, |
|
"loss": 1.4969, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2291116974494284e-05, |
|
"loss": 1.519, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2181178540017591e-05, |
|
"loss": 1.5183, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2071240105540896e-05, |
|
"loss": 1.5067, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1961301671064203e-05, |
|
"loss": 1.4998, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1851363236587512e-05, |
|
"loss": 1.5668, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1741424802110819e-05, |
|
"loss": 1.4695, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1631486367634126e-05, |
|
"loss": 1.4968, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1521547933157433e-05, |
|
"loss": 1.5051, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.141160949868074e-05, |
|
"loss": 1.4821, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1301671064204047e-05, |
|
"loss": 1.524, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1191732629727354e-05, |
|
"loss": 1.5443, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1081794195250659e-05, |
|
"loss": 1.5092, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0971855760773967e-05, |
|
"loss": 1.5014, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0861917326297274e-05, |
|
"loss": 1.4707, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0751978891820581e-05, |
|
"loss": 1.5156, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0642040457343888e-05, |
|
"loss": 1.5403, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0532102022867195e-05, |
|
"loss": 1.5286, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0422163588390502e-05, |
|
"loss": 1.5357, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0312225153913809e-05, |
|
"loss": 1.5484, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0202286719437116e-05, |
|
"loss": 1.5294, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0092348284960421e-05, |
|
"loss": 1.499, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.98240985048373e-06, |
|
"loss": 1.5148, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.872471416007037e-06, |
|
"loss": 1.4733, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.762532981530344e-06, |
|
"loss": 1.5284, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.65259454705365e-06, |
|
"loss": 1.4751, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.542656112576958e-06, |
|
"loss": 1.5068, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.432717678100264e-06, |
|
"loss": 1.4949, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.322779243623571e-06, |
|
"loss": 1.525, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.212840809146878e-06, |
|
"loss": 1.4881, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.102902374670185e-06, |
|
"loss": 1.5442, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.992963940193492e-06, |
|
"loss": 1.5155, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.883025505716799e-06, |
|
"loss": 1.5664, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.773087071240106e-06, |
|
"loss": 1.5439, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.663148636763413e-06, |
|
"loss": 1.5152, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.55321020228672e-06, |
|
"loss": 1.4832, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.443271767810027e-06, |
|
"loss": 1.5083, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 1.4807, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.22339489885664e-06, |
|
"loss": 1.4985, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.113456464379948e-06, |
|
"loss": 1.5169, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.003518029903255e-06, |
|
"loss": 1.4993, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.893579595426561e-06, |
|
"loss": 1.4818, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.783641160949868e-06, |
|
"loss": 1.55, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.673702726473175e-06, |
|
"loss": 1.4664, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.563764291996482e-06, |
|
"loss": 1.491, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.453825857519789e-06, |
|
"loss": 1.464, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.343887423043096e-06, |
|
"loss": 1.459, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.233948988566404e-06, |
|
"loss": 1.5321, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.124010554089711e-06, |
|
"loss": 1.5527, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.014072119613018e-06, |
|
"loss": 1.501, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.904133685136324e-06, |
|
"loss": 1.465, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.794195250659631e-06, |
|
"loss": 1.4879, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.684256816182938e-06, |
|
"loss": 1.4688, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.574318381706245e-06, |
|
"loss": 1.5239, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.4643799472295515e-06, |
|
"loss": 1.4883, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.3544415127528585e-06, |
|
"loss": 1.5056, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.244503078276165e-06, |
|
"loss": 1.4918, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.134564643799472e-06, |
|
"loss": 1.5037, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.02462620932278e-06, |
|
"loss": 1.4658, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.914687774846087e-06, |
|
"loss": 1.5168, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.804749340369393e-06, |
|
"loss": 1.5247, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.6948109058927e-06, |
|
"loss": 1.5044, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.584872471416008e-06, |
|
"loss": 1.4632, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.474934036939315e-06, |
|
"loss": 1.5316, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.364995602462621e-06, |
|
"loss": 1.5039, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.255057167985928e-06, |
|
"loss": 1.4866, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.145118733509235e-06, |
|
"loss": 1.5002, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.0351802990325425e-06, |
|
"loss": 1.5306, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.925241864555849e-06, |
|
"loss": 1.4817, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.8153034300791555e-06, |
|
"loss": 1.5395, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.705364995602462e-06, |
|
"loss": 1.5056, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.59542656112577e-06, |
|
"loss": 1.5113, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.485488126649077e-06, |
|
"loss": 1.4882, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.375549692172383e-06, |
|
"loss": 1.5077, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.26561125769569e-06, |
|
"loss": 1.527, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.155672823218998e-06, |
|
"loss": 1.4684, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.045734388742305e-06, |
|
"loss": 1.5311, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.935795954265612e-06, |
|
"loss": 1.5618, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.825857519788918e-06, |
|
"loss": 1.4906, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.715919085312225e-06, |
|
"loss": 1.5283, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6059806508355325e-06, |
|
"loss": 1.496, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.496042216358839e-06, |
|
"loss": 1.4827, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.386103781882146e-06, |
|
"loss": 1.4666, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.276165347405453e-06, |
|
"loss": 1.471, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1662269129287603e-06, |
|
"loss": 1.4538, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.056288478452067e-06, |
|
"loss": 1.5422, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9463500439753737e-06, |
|
"loss": 1.4878, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.836411609498681e-06, |
|
"loss": 1.4723, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7264731750219876e-06, |
|
"loss": 1.5051, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.616534740545295e-06, |
|
"loss": 1.5215, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.506596306068602e-06, |
|
"loss": 1.5018, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3966578715919088e-06, |
|
"loss": 1.4871, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.2867194371152157e-06, |
|
"loss": 1.528, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1767810026385226e-06, |
|
"loss": 1.5061, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0668425681618296e-06, |
|
"loss": 1.5363, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.956904133685136e-06, |
|
"loss": 1.5655, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8469656992084434e-06, |
|
"loss": 1.5354, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7370272647317501e-06, |
|
"loss": 1.5223, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6270888302550573e-06, |
|
"loss": 1.537, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5171503957783642e-06, |
|
"loss": 1.5111, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.4072119613016711e-06, |
|
"loss": 1.551, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.297273526824978e-06, |
|
"loss": 1.5003, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.187335092348285e-06, |
|
"loss": 1.4851, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.077396657871592e-06, |
|
"loss": 1.5354, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.674582233948989e-07, |
|
"loss": 1.4533, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.575197889182058e-07, |
|
"loss": 1.51, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.475813544415127e-07, |
|
"loss": 1.5274, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.376429199648197e-07, |
|
"loss": 1.5004, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.277044854881267e-07, |
|
"loss": 1.5363, |
|
"step": 4500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4548, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 2.843795686246318e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |