{
  "best_metric": 1.671525478363037,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.45454545454545453,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0022727272727272726,
      "grad_norm": 1.0854241847991943,
      "learning_rate": 5e-06,
      "loss": 1.9975,
      "step": 1
    },
    {
      "epoch": 0.0022727272727272726,
      "eval_loss": 2.511274576187134,
      "eval_runtime": 61.5899,
      "eval_samples_per_second": 12.031,
      "eval_steps_per_second": 6.024,
      "step": 1
    },
    {
      "epoch": 0.004545454545454545,
      "grad_norm": 1.1716954708099365,
      "learning_rate": 1e-05,
      "loss": 1.987,
      "step": 2
    },
    {
      "epoch": 0.006818181818181818,
      "grad_norm": 1.2702757120132446,
      "learning_rate": 1.5e-05,
      "loss": 2.1314,
      "step": 3
    },
    {
      "epoch": 0.00909090909090909,
      "grad_norm": 1.2734986543655396,
      "learning_rate": 2e-05,
      "loss": 2.0119,
      "step": 4
    },
    {
      "epoch": 0.011363636363636364,
      "grad_norm": 1.246037244796753,
      "learning_rate": 2.5e-05,
      "loss": 2.0429,
      "step": 5
    },
    {
      "epoch": 0.013636363636363636,
      "grad_norm": 1.274608850479126,
      "learning_rate": 3e-05,
      "loss": 2.1966,
      "step": 6
    },
    {
      "epoch": 0.015909090909090907,
      "grad_norm": 1.0633928775787354,
      "learning_rate": 3.5e-05,
      "loss": 2.0118,
      "step": 7
    },
    {
      "epoch": 0.01818181818181818,
      "grad_norm": 0.9818317294120789,
      "learning_rate": 4e-05,
      "loss": 1.9608,
      "step": 8
    },
    {
      "epoch": 0.020454545454545454,
      "grad_norm": 0.9981714487075806,
      "learning_rate": 4.5e-05,
      "loss": 1.7583,
      "step": 9
    },
    {
      "epoch": 0.022727272727272728,
      "grad_norm": 1.0854991674423218,
      "learning_rate": 5e-05,
      "loss": 1.9337,
      "step": 10
    },
    {
      "epoch": 0.025,
      "grad_norm": 1.260656714439392,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.93,
      "step": 11
    },
    {
      "epoch": 0.02727272727272727,
      "grad_norm": 1.2182083129882812,
      "learning_rate": 6e-05,
      "loss": 1.8759,
      "step": 12
    },
    {
      "epoch": 0.029545454545454545,
      "grad_norm": 1.1839447021484375,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.8657,
      "step": 13
    },
    {
      "epoch": 0.031818181818181815,
      "grad_norm": 1.1049240827560425,
      "learning_rate": 7e-05,
      "loss": 1.9298,
      "step": 14
    },
    {
      "epoch": 0.03409090909090909,
      "grad_norm": 1.0906251668930054,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.8479,
      "step": 15
    },
    {
      "epoch": 0.03636363636363636,
      "grad_norm": 1.049364447593689,
      "learning_rate": 8e-05,
      "loss": 1.7015,
      "step": 16
    },
    {
      "epoch": 0.038636363636363635,
      "grad_norm": 1.1540734767913818,
      "learning_rate": 8.5e-05,
      "loss": 1.8595,
      "step": 17
    },
    {
      "epoch": 0.04090909090909091,
      "grad_norm": 1.2078640460968018,
      "learning_rate": 9e-05,
      "loss": 1.9029,
      "step": 18
    },
    {
      "epoch": 0.04318181818181818,
      "grad_norm": 1.207094669342041,
      "learning_rate": 9.5e-05,
      "loss": 1.77,
      "step": 19
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 1.3741079568862915,
      "learning_rate": 0.0001,
      "loss": 1.7512,
      "step": 20
    },
    {
      "epoch": 0.04772727272727273,
      "grad_norm": 1.4326366186141968,
      "learning_rate": 9.999238475781957e-05,
      "loss": 1.8151,
      "step": 21
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.2714883089065552,
      "learning_rate": 9.99695413509548e-05,
      "loss": 1.6148,
      "step": 22
    },
    {
      "epoch": 0.05227272727272727,
      "grad_norm": 1.300572156906128,
      "learning_rate": 9.99314767377287e-05,
      "loss": 1.6359,
      "step": 23
    },
    {
      "epoch": 0.05454545454545454,
      "grad_norm": 1.375064492225647,
      "learning_rate": 9.987820251299122e-05,
      "loss": 1.6037,
      "step": 24
    },
    {
      "epoch": 0.056818181818181816,
      "grad_norm": 1.659896731376648,
      "learning_rate": 9.980973490458728e-05,
      "loss": 1.74,
      "step": 25
    },
    {
      "epoch": 0.05909090909090909,
      "grad_norm": 1.5481903553009033,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.5901,
      "step": 26
    },
    {
      "epoch": 0.06136363636363636,
      "grad_norm": 1.524217963218689,
      "learning_rate": 9.962730758206611e-05,
      "loss": 1.6961,
      "step": 27
    },
    {
      "epoch": 0.06363636363636363,
      "grad_norm": 1.3575574159622192,
      "learning_rate": 9.951340343707852e-05,
      "loss": 1.6211,
      "step": 28
    },
    {
      "epoch": 0.0659090909090909,
      "grad_norm": 1.5180761814117432,
      "learning_rate": 9.938441702975689e-05,
      "loss": 1.4102,
      "step": 29
    },
    {
      "epoch": 0.06818181818181818,
      "grad_norm": 1.4529622793197632,
      "learning_rate": 9.924038765061042e-05,
      "loss": 1.6236,
      "step": 30
    },
    {
      "epoch": 0.07045454545454545,
      "grad_norm": 1.6273088455200195,
      "learning_rate": 9.908135917238321e-05,
      "loss": 1.7423,
      "step": 31
    },
    {
      "epoch": 0.07272727272727272,
      "grad_norm": 1.903216004371643,
      "learning_rate": 9.890738003669029e-05,
      "loss": 1.5936,
      "step": 32
    },
    {
      "epoch": 0.075,
      "grad_norm": 2.2435097694396973,
      "learning_rate": 9.871850323926177e-05,
      "loss": 1.5407,
      "step": 33
    },
    {
      "epoch": 0.07727272727272727,
      "grad_norm": 1.9744415283203125,
      "learning_rate": 9.851478631379982e-05,
      "loss": 1.4966,
      "step": 34
    },
    {
      "epoch": 0.07954545454545454,
      "grad_norm": 1.7648143768310547,
      "learning_rate": 9.829629131445342e-05,
      "loss": 1.5073,
      "step": 35
    },
    {
      "epoch": 0.08181818181818182,
      "grad_norm": 2.0035488605499268,
      "learning_rate": 9.806308479691595e-05,
      "loss": 1.4437,
      "step": 36
    },
    {
      "epoch": 0.08409090909090909,
      "grad_norm": 1.9713212251663208,
      "learning_rate": 9.781523779815179e-05,
      "loss": 1.4549,
      "step": 37
    },
    {
      "epoch": 0.08636363636363636,
      "grad_norm": 2.2346913814544678,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.5638,
      "step": 38
    },
    {
      "epoch": 0.08863636363636364,
      "grad_norm": 3.030761480331421,
      "learning_rate": 9.727592877996585e-05,
      "loss": 1.5912,
      "step": 39
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 2.513296127319336,
      "learning_rate": 9.698463103929542e-05,
      "loss": 1.528,
      "step": 40
    },
    {
      "epoch": 0.09318181818181819,
      "grad_norm": 2.73734712600708,
      "learning_rate": 9.667902132486009e-05,
      "loss": 1.4652,
      "step": 41
    },
    {
      "epoch": 0.09545454545454546,
      "grad_norm": 2.2211833000183105,
      "learning_rate": 9.635919272833938e-05,
      "loss": 1.4597,
      "step": 42
    },
    {
      "epoch": 0.09772727272727273,
      "grad_norm": 2.345437526702881,
      "learning_rate": 9.602524267262203e-05,
      "loss": 1.0216,
      "step": 43
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.364999294281006,
      "learning_rate": 9.567727288213005e-05,
      "loss": 1.5967,
      "step": 44
    },
    {
      "epoch": 0.10227272727272728,
      "grad_norm": 3.069117307662964,
      "learning_rate": 9.53153893518325e-05,
      "loss": 1.3515,
      "step": 45
    },
    {
      "epoch": 0.10454545454545454,
      "grad_norm": 3.324639081954956,
      "learning_rate": 9.493970231495835e-05,
      "loss": 1.8474,
      "step": 46
    },
    {
      "epoch": 0.10681818181818181,
      "grad_norm": 3.492915153503418,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.5723,
      "step": 47
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 3.7940917015075684,
      "learning_rate": 9.414737964294636e-05,
      "loss": 1.7661,
      "step": 48
    },
    {
      "epoch": 0.11136363636363636,
      "grad_norm": 5.169501304626465,
      "learning_rate": 9.373098535696979e-05,
      "loss": 1.7139,
      "step": 49
    },
    {
      "epoch": 0.11363636363636363,
      "grad_norm": 7.132964134216309,
      "learning_rate": 9.330127018922194e-05,
      "loss": 2.6124,
      "step": 50
    },
    {
      "epoch": 0.11363636363636363,
      "eval_loss": 2.2143633365631104,
      "eval_runtime": 62.1849,
      "eval_samples_per_second": 11.916,
      "eval_steps_per_second": 5.966,
      "step": 50
    },
    {
      "epoch": 0.1159090909090909,
      "grad_norm": 5.338608264923096,
      "learning_rate": 9.285836503510562e-05,
      "loss": 2.4139,
      "step": 51
    },
    {
      "epoch": 0.11818181818181818,
      "grad_norm": 3.362200975418091,
      "learning_rate": 9.24024048078213e-05,
      "loss": 2.2589,
      "step": 52
    },
    {
      "epoch": 0.12045454545454545,
      "grad_norm": 1.6737889051437378,
      "learning_rate": 9.193352839727121e-05,
      "loss": 2.0941,
      "step": 53
    },
    {
      "epoch": 0.12272727272727273,
      "grad_norm": 0.9776598811149597,
      "learning_rate": 9.145187862775209e-05,
      "loss": 1.9651,
      "step": 54
    },
    {
      "epoch": 0.125,
      "grad_norm": 0.7610154151916504,
      "learning_rate": 9.09576022144496e-05,
      "loss": 1.9439,
      "step": 55
    },
    {
      "epoch": 0.12727272727272726,
      "grad_norm": 0.8197718858718872,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.8068,
      "step": 56
    },
    {
      "epoch": 0.12954545454545455,
      "grad_norm": 0.8665831089019775,
      "learning_rate": 8.993177550236464e-05,
      "loss": 1.7662,
      "step": 57
    },
    {
      "epoch": 0.1318181818181818,
      "grad_norm": 0.8547587990760803,
      "learning_rate": 8.940053768033609e-05,
      "loss": 1.8937,
      "step": 58
    },
    {
      "epoch": 0.1340909090909091,
      "grad_norm": 0.8161806464195251,
      "learning_rate": 8.885729807284856e-05,
      "loss": 1.8253,
      "step": 59
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 1.3534706830978394,
      "learning_rate": 8.83022221559489e-05,
      "loss": 1.8252,
      "step": 60
    },
    {
      "epoch": 0.13863636363636364,
      "grad_norm": 0.8150987029075623,
      "learning_rate": 8.773547901113862e-05,
      "loss": 1.722,
      "step": 61
    },
    {
      "epoch": 0.1409090909090909,
      "grad_norm": 0.8128631114959717,
      "learning_rate": 8.715724127386972e-05,
      "loss": 1.6843,
      "step": 62
    },
    {
      "epoch": 0.1431818181818182,
      "grad_norm": 0.8791077136993408,
      "learning_rate": 8.656768508095853e-05,
      "loss": 1.7533,
      "step": 63
    },
    {
      "epoch": 0.14545454545454545,
      "grad_norm": 0.9752345085144043,
      "learning_rate": 8.596699001693255e-05,
      "loss": 1.7942,
      "step": 64
    },
    {
      "epoch": 0.14772727272727273,
      "grad_norm": 0.9608557820320129,
      "learning_rate": 8.535533905932738e-05,
      "loss": 1.6936,
      "step": 65
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.8867492079734802,
      "learning_rate": 8.473291852294987e-05,
      "loss": 1.8443,
      "step": 66
    },
    {
      "epoch": 0.15227272727272728,
      "grad_norm": 1.0232892036437988,
      "learning_rate": 8.409991800312493e-05,
      "loss": 1.6526,
      "step": 67
    },
    {
      "epoch": 0.15454545454545454,
      "grad_norm": 0.9459812641143799,
      "learning_rate": 8.345653031794292e-05,
      "loss": 1.6367,
      "step": 68
    },
    {
      "epoch": 0.15681818181818183,
      "grad_norm": 1.0353021621704102,
      "learning_rate": 8.280295144952536e-05,
      "loss": 1.5919,
      "step": 69
    },
    {
      "epoch": 0.1590909090909091,
      "grad_norm": 1.075607419013977,
      "learning_rate": 8.213938048432697e-05,
      "loss": 1.6931,
      "step": 70
    },
    {
      "epoch": 0.16136363636363638,
      "grad_norm": 1.1494629383087158,
      "learning_rate": 8.146601955249188e-05,
      "loss": 1.6779,
      "step": 71
    },
    {
      "epoch": 0.16363636363636364,
      "grad_norm": 1.1475697755813599,
      "learning_rate": 8.07830737662829e-05,
      "loss": 1.5637,
      "step": 72
    },
    {
      "epoch": 0.16590909090909092,
      "grad_norm": 1.121464490890503,
      "learning_rate": 8.009075115760243e-05,
      "loss": 1.663,
      "step": 73
    },
    {
      "epoch": 0.16818181818181818,
      "grad_norm": 1.1755046844482422,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.6188,
      "step": 74
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 1.2241384983062744,
      "learning_rate": 7.86788218175523e-05,
      "loss": 1.6052,
      "step": 75
    },
    {
      "epoch": 0.17272727272727273,
      "grad_norm": 1.230518102645874,
      "learning_rate": 7.795964517353735e-05,
      "loss": 1.4706,
      "step": 76
    },
    {
      "epoch": 0.175,
      "grad_norm": 1.419175148010254,
      "learning_rate": 7.723195175075136e-05,
      "loss": 1.7987,
      "step": 77
    },
    {
      "epoch": 0.17727272727272728,
      "grad_norm": 1.1758114099502563,
      "learning_rate": 7.649596321166024e-05,
      "loss": 1.4117,
      "step": 78
    },
    {
      "epoch": 0.17954545454545454,
      "grad_norm": 1.3510282039642334,
      "learning_rate": 7.575190374550272e-05,
      "loss": 1.6036,
      "step": 79
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 1.3500906229019165,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.5561,
      "step": 80
    },
    {
      "epoch": 0.18409090909090908,
      "grad_norm": 1.4716064929962158,
      "learning_rate": 7.424048101231686e-05,
      "loss": 1.4799,
      "step": 81
    },
    {
      "epoch": 0.18636363636363637,
      "grad_norm": 1.6628302335739136,
      "learning_rate": 7.347357813929454e-05,
      "loss": 1.6705,
      "step": 82
    },
    {
      "epoch": 0.18863636363636363,
      "grad_norm": 1.582809329032898,
      "learning_rate": 7.269952498697734e-05,
      "loss": 1.5939,
      "step": 83
    },
    {
      "epoch": 0.19090909090909092,
      "grad_norm": 1.385305643081665,
      "learning_rate": 7.191855733945387e-05,
      "loss": 1.3643,
      "step": 84
    },
    {
      "epoch": 0.19318181818181818,
      "grad_norm": 1.6903640031814575,
      "learning_rate": 7.113091308703498e-05,
      "loss": 1.422,
      "step": 85
    },
    {
      "epoch": 0.19545454545454546,
      "grad_norm": 1.8601150512695312,
      "learning_rate": 7.033683215379002e-05,
      "loss": 1.4809,
      "step": 86
    },
    {
      "epoch": 0.19772727272727272,
      "grad_norm": 1.809529423713684,
      "learning_rate": 6.953655642446368e-05,
      "loss": 1.3402,
      "step": 87
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.8485039472579956,
      "learning_rate": 6.873032967079561e-05,
      "loss": 1.5892,
      "step": 88
    },
    {
      "epoch": 0.20227272727272727,
      "grad_norm": 2.308549404144287,
      "learning_rate": 6.7918397477265e-05,
      "loss": 1.8241,
      "step": 89
    },
    {
      "epoch": 0.20454545454545456,
      "grad_norm": 2.0033457279205322,
      "learning_rate": 6.710100716628344e-05,
      "loss": 1.194,
      "step": 90
    },
    {
      "epoch": 0.20681818181818182,
      "grad_norm": 1.977484107017517,
      "learning_rate": 6.627840772285784e-05,
      "loss": 1.5185,
      "step": 91
    },
    {
      "epoch": 0.20909090909090908,
      "grad_norm": 1.7701504230499268,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.3986,
      "step": 92
    },
    {
      "epoch": 0.21136363636363636,
      "grad_norm": 2.2279322147369385,
      "learning_rate": 6.461858523613684e-05,
      "loss": 1.5069,
      "step": 93
    },
    {
      "epoch": 0.21363636363636362,
      "grad_norm": 2.3066811561584473,
      "learning_rate": 6.378186779084995e-05,
      "loss": 1.5997,
      "step": 94
    },
    {
      "epoch": 0.2159090909090909,
      "grad_norm": 2.211686611175537,
      "learning_rate": 6.294095225512603e-05,
      "loss": 1.597,
      "step": 95
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 2.6118404865264893,
      "learning_rate": 6.209609477998338e-05,
      "loss": 1.8248,
      "step": 96
    },
    {
      "epoch": 0.22045454545454546,
      "grad_norm": 4.249022483825684,
      "learning_rate": 6.124755271719325e-05,
      "loss": 1.778,
      "step": 97
    },
    {
      "epoch": 0.22272727272727272,
      "grad_norm": 3.167658567428589,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 1.5116,
      "step": 98
    },
    {
      "epoch": 0.225,
      "grad_norm": 8.20261287689209,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 1.8495,
      "step": 99
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 9.853837013244629,
      "learning_rate": 5.868240888334653e-05,
      "loss": 2.7099,
      "step": 100
    },
    {
      "epoch": 0.22727272727272727,
      "eval_loss": 1.855621337890625,
      "eval_runtime": 62.2567,
      "eval_samples_per_second": 11.902,
      "eval_steps_per_second": 5.959,
      "step": 100
    },
    {
      "epoch": 0.22954545454545455,
      "grad_norm": 1.7310711145401,
      "learning_rate": 5.782172325201155e-05,
      "loss": 2.1732,
      "step": 101
    },
    {
      "epoch": 0.2318181818181818,
      "grad_norm": 1.5579116344451904,
      "learning_rate": 5.695865504800327e-05,
      "loss": 2.2006,
      "step": 102
    },
    {
      "epoch": 0.2340909090909091,
      "grad_norm": 1.238558053970337,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 2.013,
      "step": 103
    },
    {
      "epoch": 0.23636363636363636,
      "grad_norm": 0.9029104113578796,
      "learning_rate": 5.522642316338268e-05,
      "loss": 1.8844,
      "step": 104
    },
    {
      "epoch": 0.23863636363636365,
      "grad_norm": 0.743322491645813,
      "learning_rate": 5.435778713738292e-05,
      "loss": 1.7487,
      "step": 105
    },
    {
      "epoch": 0.2409090909090909,
      "grad_norm": 0.6849915981292725,
      "learning_rate": 5.348782368720626e-05,
      "loss": 1.8623,
      "step": 106
    },
    {
      "epoch": 0.2431818181818182,
      "grad_norm": 0.7389811873435974,
      "learning_rate": 5.26167978121472e-05,
      "loss": 1.9221,
      "step": 107
    },
    {
      "epoch": 0.24545454545454545,
      "grad_norm": 0.7297128438949585,
      "learning_rate": 5.174497483512506e-05,
      "loss": 1.7917,
      "step": 108
    },
    {
      "epoch": 0.24772727272727274,
      "grad_norm": 0.7800744771957397,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 1.8211,
      "step": 109
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.8231241703033447,
      "learning_rate": 5e-05,
      "loss": 1.7451,
      "step": 110
    },
    {
      "epoch": 0.25227272727272726,
      "grad_norm": 0.7917044758796692,
      "learning_rate": 4.912737967813583e-05,
      "loss": 1.764,
      "step": 111
    },
    {
      "epoch": 0.2545454545454545,
      "grad_norm": 0.8158065676689148,
      "learning_rate": 4.825502516487497e-05,
      "loss": 1.7322,
      "step": 112
    },
    {
      "epoch": 0.25681818181818183,
      "grad_norm": 0.8097729086875916,
      "learning_rate": 4.738320218785281e-05,
      "loss": 1.5361,
      "step": 113
    },
    {
      "epoch": 0.2590909090909091,
      "grad_norm": 0.8858523964881897,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 1.7935,
      "step": 114
    },
    {
      "epoch": 0.26136363636363635,
      "grad_norm": 0.9715299606323242,
      "learning_rate": 4.564221286261709e-05,
      "loss": 1.7427,
      "step": 115
    },
    {
      "epoch": 0.2636363636363636,
      "grad_norm": 0.8697161674499512,
      "learning_rate": 4.477357683661734e-05,
      "loss": 1.6359,
      "step": 116
    },
    {
      "epoch": 0.26590909090909093,
      "grad_norm": 0.9404212236404419,
      "learning_rate": 4.390653282974264e-05,
      "loss": 1.6893,
      "step": 117
    },
    {
      "epoch": 0.2681818181818182,
      "grad_norm": 0.982750654220581,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 1.6622,
      "step": 118
    },
    {
      "epoch": 0.27045454545454545,
      "grad_norm": 1.0052326917648315,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 1.594,
      "step": 119
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 1.0120433568954468,
      "learning_rate": 4.131759111665349e-05,
      "loss": 1.4671,
      "step": 120
    },
    {
      "epoch": 0.275,
      "grad_norm": 0.979282557964325,
      "learning_rate": 4.045955023117276e-05,
      "loss": 1.4655,
      "step": 121
    },
    {
      "epoch": 0.2772727272727273,
      "grad_norm": 1.0582424402236938,
      "learning_rate": 3.960441545911204e-05,
      "loss": 1.6335,
      "step": 122
    },
    {
      "epoch": 0.27954545454545454,
      "grad_norm": 1.0690196752548218,
      "learning_rate": 3.875244728280676e-05,
      "loss": 1.5545,
      "step": 123
    },
    {
      "epoch": 0.2818181818181818,
      "grad_norm": 1.042829155921936,
      "learning_rate": 3.790390522001662e-05,
      "loss": 1.5805,
      "step": 124
    },
    {
      "epoch": 0.2840909090909091,
      "grad_norm": 1.1763111352920532,
      "learning_rate": 3.705904774487396e-05,
      "loss": 1.5978,
      "step": 125
    },
    {
      "epoch": 0.2863636363636364,
      "grad_norm": 1.1423262357711792,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 1.6638,
      "step": 126
    },
    {
      "epoch": 0.28863636363636364,
      "grad_norm": 1.2990177869796753,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 1.5838,
      "step": 127
    },
    {
      "epoch": 0.2909090909090909,
      "grad_norm": 1.2960045337677002,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.6534,
      "step": 128
    },
    {
      "epoch": 0.29318181818181815,
      "grad_norm": 1.3563361167907715,
      "learning_rate": 3.372159227714218e-05,
      "loss": 1.6595,
      "step": 129
    },
    {
      "epoch": 0.29545454545454547,
      "grad_norm": 1.2741658687591553,
      "learning_rate": 3.289899283371657e-05,
      "loss": 1.5418,
      "step": 130
    },
    {
      "epoch": 0.29772727272727273,
      "grad_norm": 1.2695192098617554,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 1.5655,
      "step": 131
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.5309696197509766,
      "learning_rate": 3.12696703292044e-05,
      "loss": 1.7486,
      "step": 132
    },
    {
      "epoch": 0.30227272727272725,
      "grad_norm": 1.5820587873458862,
      "learning_rate": 3.046344357553632e-05,
      "loss": 1.7254,
      "step": 133
    },
    {
      "epoch": 0.30454545454545456,
      "grad_norm": 1.3667179346084595,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 1.3629,
      "step": 134
    },
    {
      "epoch": 0.3068181818181818,
      "grad_norm": 1.5829827785491943,
      "learning_rate": 2.886908691296504e-05,
      "loss": 1.4121,
      "step": 135
    },
    {
      "epoch": 0.3090909090909091,
      "grad_norm": 1.5984939336776733,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 1.4233,
      "step": 136
    },
    {
      "epoch": 0.31136363636363634,
      "grad_norm": 1.61519455909729,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 1.504,
      "step": 137
    },
    {
      "epoch": 0.31363636363636366,
      "grad_norm": 1.8187119960784912,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 1.3552,
      "step": 138
    },
    {
      "epoch": 0.3159090909090909,
      "grad_norm": 1.7586318254470825,
      "learning_rate": 2.575951898768315e-05,
      "loss": 1.5518,
      "step": 139
    },
    {
      "epoch": 0.3181818181818182,
      "grad_norm": 1.8496627807617188,
      "learning_rate": 2.500000000000001e-05,
      "loss": 1.1604,
      "step": 140
    },
    {
      "epoch": 0.32045454545454544,
      "grad_norm": 2.0876195430755615,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 1.3557,
      "step": 141
    },
    {
      "epoch": 0.32272727272727275,
      "grad_norm": 1.922152042388916,
      "learning_rate": 2.350403678833976e-05,
      "loss": 1.6045,
      "step": 142
    },
    {
      "epoch": 0.325,
      "grad_norm": 2.586014747619629,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 1.5614,
      "step": 143
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 1.9770749807357788,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 1.4865,
      "step": 144
    },
    {
      "epoch": 0.32954545454545453,
      "grad_norm": 2.507377862930298,
      "learning_rate": 2.132117818244771e-05,
      "loss": 1.5248,
      "step": 145
    },
    {
      "epoch": 0.33181818181818185,
      "grad_norm": 2.8446969985961914,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.6563,
      "step": 146
    },
    {
      "epoch": 0.3340909090909091,
      "grad_norm": 2.9634368419647217,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 1.8713,
      "step": 147
    },
    {
      "epoch": 0.33636363636363636,
      "grad_norm": 4.226101875305176,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 2.1388,
      "step": 148
    },
    {
      "epoch": 0.3386363636363636,
      "grad_norm": 5.961648941040039,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 2.2712,
      "step": 149
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 5.697531223297119,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 2.4296,
      "step": 150
    },
    {
      "epoch": 0.3409090909090909,
      "eval_loss": 1.7187246084213257,
      "eval_runtime": 62.331,
      "eval_samples_per_second": 11.888,
      "eval_steps_per_second": 5.952,
      "step": 150
    },
    {
      "epoch": 0.3431818181818182,
      "grad_norm": 0.8612778782844543,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 1.807,
      "step": 151
    },
    {
      "epoch": 0.34545454545454546,
      "grad_norm": 1.0314704179763794,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 2.0075,
      "step": 152
    },
    {
      "epoch": 0.3477272727272727,
      "grad_norm": 0.9576540589332581,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 1.8654,
      "step": 153
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.9733042120933533,
      "learning_rate": 1.526708147705013e-05,
      "loss": 1.6772,
      "step": 154
    },
    {
      "epoch": 0.3522727272727273,
      "grad_norm": 0.9843775033950806,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 1.8438,
      "step": 155
    },
    {
      "epoch": 0.35454545454545455,
      "grad_norm": 0.9702544212341309,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 1.8467,
      "step": 156
    },
    {
      "epoch": 0.3568181818181818,
      "grad_norm": 0.9344901442527771,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 1.7625,
      "step": 157
    },
    {
      "epoch": 0.35909090909090907,
      "grad_norm": 0.8675273060798645,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 1.6983,
      "step": 158
    },
    {
      "epoch": 0.3613636363636364,
      "grad_norm": 0.8406621217727661,
      "learning_rate": 1.22645209888614e-05,
      "loss": 1.7905,
      "step": 159
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.8631937503814697,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 1.7407,
      "step": 160
    },
    {
      "epoch": 0.3659090909090909,
      "grad_norm": 0.8949780464172363,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 1.7007,
      "step": 161
    },
    {
      "epoch": 0.36818181818181817,
      "grad_norm": 0.8787016272544861,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 1.7203,
      "step": 162
    },
    {
      "epoch": 0.3704545454545455,
      "grad_norm": 0.9685916900634766,
      "learning_rate": 1.006822449763537e-05,
      "loss": 1.7624,
      "step": 163
    },
    {
      "epoch": 0.37272727272727274,
      "grad_norm": 0.8896955847740173,
      "learning_rate": 9.549150281252633e-06,
      "loss": 1.7665,
      "step": 164
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.8174881935119629,
      "learning_rate": 9.042397785550405e-06,
      "loss": 1.6385,
      "step": 165
    },
    {
      "epoch": 0.37727272727272726,
      "grad_norm": 0.844082772731781,
      "learning_rate": 8.548121372247918e-06,
      "loss": 1.585,
      "step": 166
    },
    {
      "epoch": 0.3795454545454545,
      "grad_norm": 0.9889433979988098,
      "learning_rate": 8.066471602728803e-06,
      "loss": 1.5663,
      "step": 167
    },
    {
      "epoch": 0.38181818181818183,
      "grad_norm": 1.087799310684204,
      "learning_rate": 7.597595192178702e-06,
      "loss": 1.4998,
      "step": 168
    },
    {
      "epoch": 0.3840909090909091,
      "grad_norm": 0.9920996427536011,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 1.6414,
      "step": 169
    },
    {
      "epoch": 0.38636363636363635,
      "grad_norm": 0.9823297262191772,
      "learning_rate": 6.698729810778065e-06,
      "loss": 1.688,
      "step": 170
    },
    {
      "epoch": 0.3886363636363636,
      "grad_norm": 1.0681322813034058,
      "learning_rate": 6.269014643030213e-06,
      "loss": 1.6283,
      "step": 171
    },
    {
      "epoch": 0.39090909090909093,
      "grad_norm": 0.996666431427002,
      "learning_rate": 5.852620357053651e-06,
      "loss": 1.4533,
      "step": 172
    },
    {
      "epoch": 0.3931818181818182,
      "grad_norm": 1.0531445741653442,
      "learning_rate": 5.449673790581611e-06,
      "loss": 1.6793,
      "step": 173
    },
    {
      "epoch": 0.39545454545454545,
      "grad_norm": 1.1200153827667236,
      "learning_rate": 5.060297685041659e-06,
      "loss": 1.5885,
      "step": 174
    },
    {
      "epoch": 0.3977272727272727,
      "grad_norm": 1.1026841402053833,
      "learning_rate": 4.684610648167503e-06,
      "loss": 1.4877,
      "step": 175
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.2321683168411255,
      "learning_rate": 4.322727117869951e-06,
      "loss": 1.7277,
      "step": 176
    },
    {
      "epoch": 0.4022727272727273,
      "grad_norm": 1.1422961950302124,
      "learning_rate": 3.974757327377981e-06,
      "loss": 1.5716,
      "step": 177
    },
    {
      "epoch": 0.40454545454545454,
      "grad_norm": 1.1938626766204834,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 1.4974,
      "step": 178
    },
    {
      "epoch": 0.4068181818181818,
      "grad_norm": 1.0967926979064941,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 1.4771,
      "step": 179
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 1.366172194480896,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 1.7274,
      "step": 180
    },
    {
      "epoch": 0.4113636363636364,
      "grad_norm": 1.2525289058685303,
      "learning_rate": 2.724071220034158e-06,
      "loss": 1.4662,
      "step": 181
    },
    {
      "epoch": 0.41363636363636364,
      "grad_norm": 1.4062180519104004,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 1.6317,
      "step": 182
    },
    {
      "epoch": 0.4159090909090909,
      "grad_norm": 1.5819109678268433,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 1.7772,
      "step": 183
    },
    {
      "epoch": 0.41818181818181815,
      "grad_norm": 1.6406415700912476,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 1.5917,
      "step": 184
    },
    {
      "epoch": 0.42045454545454547,
      "grad_norm": 1.506263256072998,
      "learning_rate": 1.70370868554659e-06,
      "loss": 1.5047,
      "step": 185
    },
    {
      "epoch": 0.42272727272727273,
      "grad_norm": 1.6531805992126465,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 1.5347,
      "step": 186
    },
    {
      "epoch": 0.425,
      "grad_norm": 1.6337155103683472,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 1.3556,
      "step": 187
    },
    {
      "epoch": 0.42727272727272725,
      "grad_norm": 1.7176258563995361,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 1.5636,
      "step": 188
    },
    {
      "epoch": 0.42954545454545456,
      "grad_norm": 1.8075064420700073,
      "learning_rate": 9.186408276168013e-07,
      "loss": 1.4563,
      "step": 189
    },
    {
      "epoch": 0.4318181818181818,
      "grad_norm": 1.7502872943878174,
      "learning_rate": 7.596123493895991e-07,
      "loss": 1.2788,
      "step": 190
    },
    {
      "epoch": 0.4340909090909091,
      "grad_norm": 1.9488650560379028,
      "learning_rate": 6.15582970243117e-07,
      "loss": 1.3266,
      "step": 191
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 2.198270320892334,
      "learning_rate": 4.865965629214819e-07,
      "loss": 1.4447,
      "step": 192
    },
    {
      "epoch": 0.43863636363636366,
      "grad_norm": 2.048422336578369,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 1.5203,
      "step": 193
    },
    {
      "epoch": 0.4409090909090909,
      "grad_norm": 2.4291017055511475,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 1.6495,
      "step": 194
    },
    {
      "epoch": 0.4431818181818182,
      "grad_norm": 2.916109085083008,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 1.7544,
      "step": 195
    },
    {
      "epoch": 0.44545454545454544,
      "grad_norm": 3.0741372108459473,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 1.8354,
      "step": 196
    },
    {
      "epoch": 0.44772727272727275,
      "grad_norm": 3.8066229820251465,
      "learning_rate": 6.852326227130834e-08,
      "loss": 2.0545,
      "step": 197
    },
    {
      "epoch": 0.45,
      "grad_norm": 4.246571063995361,
      "learning_rate": 3.04586490452119e-08,
      "loss": 2.0616,
      "step": 198
    },
    {
      "epoch": 0.45227272727272727,
      "grad_norm": 6.185179710388184,
      "learning_rate": 7.615242180436522e-09,
      "loss": 1.8495,
      "step": 199
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 7.912553310394287,
      "learning_rate": 0.0,
      "loss": 2.5258,
      "step": 200
    },
    {
      "epoch": 0.45454545454545453,
      "eval_loss": 1.671525478363037,
      "eval_runtime": 62.4768,
      "eval_samples_per_second": 11.86,
      "eval_steps_per_second": 5.938,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.0434261451197645e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}