|
{ |
|
"best_metric": 0.17896194756031036, |
|
"best_model_checkpoint": "output/output__lora/checkpoint-100", |
|
"epoch": 2.821869488536155, |
|
"eval_steps": 100, |
|
"global_step": 400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.007054673721340388, |
|
"grad_norm": 1.7538496255874634, |
|
"learning_rate": 0.0, |
|
"loss": 0.4857, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.014109347442680775, |
|
"grad_norm": 1.2099508047103882, |
|
"learning_rate": 8.859191006777897e-06, |
|
"loss": 0.3211, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.021164021164021163, |
|
"grad_norm": 1.2099508047103882, |
|
"learning_rate": 8.859191006777897e-06, |
|
"loss": 0.4225, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02821869488536155, |
|
"grad_norm": 1.1170622110366821, |
|
"learning_rate": 1.4041485532469073e-05, |
|
"loss": 0.3698, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03527336860670194, |
|
"grad_norm": 1.1217817068099976, |
|
"learning_rate": 1.7718382013555794e-05, |
|
"loss": 0.347, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.042328042328042326, |
|
"grad_norm": 0.87273770570755, |
|
"learning_rate": 2.0570404496611053e-05, |
|
"loss": 0.3743, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04938271604938271, |
|
"grad_norm": 0.7485833168029785, |
|
"learning_rate": 2.2900676539246968e-05, |
|
"loss": 0.2656, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0564373897707231, |
|
"grad_norm": 1.150911808013916, |
|
"learning_rate": 2.4870893478326387e-05, |
|
"loss": 0.3674, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.06349206349206349, |
|
"grad_norm": 0.8416776061058044, |
|
"learning_rate": 2.6577573020333684e-05, |
|
"loss": 0.3613, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.07054673721340388, |
|
"grad_norm": 0.8826892971992493, |
|
"learning_rate": 2.8082971064938146e-05, |
|
"loss": 0.3495, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07760141093474426, |
|
"grad_norm": 0.847586989402771, |
|
"learning_rate": 2.9429595503388953e-05, |
|
"loss": 0.2358, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.08465608465608465, |
|
"grad_norm": 0.7876963019371033, |
|
"learning_rate": 3.064776548439465e-05, |
|
"loss": 0.3538, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.09171075837742504, |
|
"grad_norm": 0.718278169631958, |
|
"learning_rate": 3.1759867546024865e-05, |
|
"loss": 0.3632, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.09876543209876543, |
|
"grad_norm": 1.0214649438858032, |
|
"learning_rate": 3.2782902272079295e-05, |
|
"loss": 0.26, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.10582010582010581, |
|
"grad_norm": 0.7344455122947693, |
|
"learning_rate": 3.373008448510428e-05, |
|
"loss": 0.3286, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1128747795414462, |
|
"grad_norm": 0.9921013116836548, |
|
"learning_rate": 3.4611890029080124e-05, |
|
"loss": 0.3157, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.11992945326278659, |
|
"grad_norm": 2.3107821941375732, |
|
"learning_rate": 3.543676402711159e-05, |
|
"loss": 0.2328, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.12698412698412698, |
|
"grad_norm": 0.6335638761520386, |
|
"learning_rate": 3.621161404374383e-05, |
|
"loss": 0.2289, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.13403880070546736, |
|
"grad_norm": 0.848183810710907, |
|
"learning_rate": 3.694216207171603e-05, |
|
"loss": 0.3419, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.14109347442680775, |
|
"grad_norm": 0.8602741956710815, |
|
"learning_rate": 3.76332012245438e-05, |
|
"loss": 0.3045, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.7947338819503784, |
|
"learning_rate": 3.8288786510166846e-05, |
|
"loss": 0.3092, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.15520282186948853, |
|
"grad_norm": 1.7006886005401611, |
|
"learning_rate": 3.8912379010795455e-05, |
|
"loss": 0.191, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.16225749559082892, |
|
"grad_norm": 0.7695497274398804, |
|
"learning_rate": 3.9506956491172545e-05, |
|
"loss": 0.2258, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.1693121693121693, |
|
"grad_norm": 0.675501823425293, |
|
"learning_rate": 4.007509939970292e-05, |
|
"loss": 0.2382, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.1763668430335097, |
|
"grad_norm": 0.9618274569511414, |
|
"learning_rate": 4.061905855280276e-05, |
|
"loss": 0.2598, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.18342151675485008, |
|
"grad_norm": 1.6994143724441528, |
|
"learning_rate": 4.1140808993222106e-05, |
|
"loss": 0.2156, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.19047619047619047, |
|
"grad_norm": 0.773503303527832, |
|
"learning_rate": 4.164209327885719e-05, |
|
"loss": 0.3194, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.19753086419753085, |
|
"grad_norm": 0.6324657201766968, |
|
"learning_rate": 4.2124456597407214e-05, |
|
"loss": 0.2947, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.20458553791887124, |
|
"grad_norm": 0.8014081120491028, |
|
"learning_rate": 4.258927549188218e-05, |
|
"loss": 0.256, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.21164021164021163, |
|
"grad_norm": 0.9835159182548523, |
|
"learning_rate": 4.303778154313212e-05, |
|
"loss": 0.1988, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.21869488536155202, |
|
"grad_norm": 1.0733263492584229, |
|
"learning_rate": 4.347108103585803e-05, |
|
"loss": 0.286, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.2257495590828924, |
|
"grad_norm": 0.8567582964897156, |
|
"learning_rate": 4.389017139879164e-05, |
|
"loss": 0.2152, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.2328042328042328, |
|
"grad_norm": 0.8153343796730042, |
|
"learning_rate": 4.429595503388948e-05, |
|
"loss": 0.301, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.23985890652557318, |
|
"grad_norm": 1.381162405014038, |
|
"learning_rate": 4.468925101686371e-05, |
|
"loss": 0.3343, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.24691358024691357, |
|
"grad_norm": 0.9026140570640564, |
|
"learning_rate": 4.507080505052173e-05, |
|
"loss": 0.2369, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.25396825396825395, |
|
"grad_norm": 1.1759003400802612, |
|
"learning_rate": 4.544129797493744e-05, |
|
"loss": 0.3082, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.26102292768959434, |
|
"grad_norm": 1.5342798233032227, |
|
"learning_rate": 4.5801353078493936e-05, |
|
"loss": 0.3168, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.26807760141093473, |
|
"grad_norm": 0.7265400886535645, |
|
"learning_rate": 4.615154240700883e-05, |
|
"loss": 0.2484, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.2751322751322751, |
|
"grad_norm": 0.7057551741600037, |
|
"learning_rate": 4.6492392231321696e-05, |
|
"loss": 0.2654, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.2821869488536155, |
|
"grad_norm": 0.7926758527755737, |
|
"learning_rate": 4.682438780454837e-05, |
|
"loss": 0.1032, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.2892416225749559, |
|
"grad_norm": 0.9488016963005066, |
|
"learning_rate": 4.714797751694474e-05, |
|
"loss": 0.2454, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 1.9464285373687744, |
|
"learning_rate": 4.7463576537657414e-05, |
|
"loss": 0.2924, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.30335097001763667, |
|
"grad_norm": 1.1096171140670776, |
|
"learning_rate": 4.777157001757336e-05, |
|
"loss": 0.3058, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.31040564373897706, |
|
"grad_norm": 0.8923623561859131, |
|
"learning_rate": 4.8072315915252694e-05, |
|
"loss": 0.2073, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.31746031746031744, |
|
"grad_norm": 1.0031824111938477, |
|
"learning_rate": 4.8366147497950435e-05, |
|
"loss": 0.2059, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.32451499118165783, |
|
"grad_norm": 1.1026407480239868, |
|
"learning_rate": 4.8653375561549195e-05, |
|
"loss": 0.1936, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.3315696649029982, |
|
"grad_norm": 1.0787580013275146, |
|
"learning_rate": 4.8934290406480814e-05, |
|
"loss": 0.1917, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.3386243386243386, |
|
"grad_norm": 1.1401859521865845, |
|
"learning_rate": 4.920916360113129e-05, |
|
"loss": 0.1943, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.345679012345679, |
|
"grad_norm": 1.4272260665893555, |
|
"learning_rate": 4.947824955958066e-05, |
|
"loss": 0.2286, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.3527336860670194, |
|
"grad_norm": 0.9560362696647644, |
|
"learning_rate": 4.9741786956652774e-05, |
|
"loss": 0.1451, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.35978835978835977, |
|
"grad_norm": 0.8079693913459778, |
|
"learning_rate": 5e-05, |
|
"loss": 0.168, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.36684303350970016, |
|
"grad_norm": 1.3394136428833008, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1198, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.37389770723104054, |
|
"grad_norm": 2.1363890171051025, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2001, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.38095238095238093, |
|
"grad_norm": 0.903056263923645, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2231, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.3880070546737213, |
|
"grad_norm": 0.9227181077003479, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1655, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.3950617283950617, |
|
"grad_norm": 0.8971679210662842, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2074, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.4021164021164021, |
|
"grad_norm": 0.8905817270278931, |
|
"learning_rate": 5e-05, |
|
"loss": 0.177, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.4091710758377425, |
|
"grad_norm": 1.6770292520523071, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2223, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.41622574955908287, |
|
"grad_norm": 1.6770292520523071, |
|
"learning_rate": 5e-05, |
|
"loss": 0.104, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.42328042328042326, |
|
"grad_norm": 0.9908204674720764, |
|
"learning_rate": 5e-05, |
|
"loss": 0.28, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.43033509700176364, |
|
"grad_norm": 0.8167163133621216, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2298, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.43738977072310403, |
|
"grad_norm": 1.0286040306091309, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1949, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.9994080066680908, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1785, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.4514991181657848, |
|
"grad_norm": 1.0961331129074097, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1841, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.4585537918871252, |
|
"grad_norm": 1.0142136812210083, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1438, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.4656084656084656, |
|
"grad_norm": 0.9026923775672913, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1882, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.47266313932980597, |
|
"grad_norm": 0.9396039843559265, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1735, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.47971781305114636, |
|
"grad_norm": 0.9887558817863464, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1382, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.48677248677248675, |
|
"grad_norm": 0.7315873503684998, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1833, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.49382716049382713, |
|
"grad_norm": 0.7315873503684998, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1534, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.5008818342151675, |
|
"grad_norm": 1.2682855129241943, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1475, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.5079365079365079, |
|
"grad_norm": 2.3604001998901367, |
|
"learning_rate": 5e-05, |
|
"loss": 0.142, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.5149911816578483, |
|
"grad_norm": 1.310669183731079, |
|
"learning_rate": 5e-05, |
|
"loss": 0.144, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.5220458553791887, |
|
"grad_norm": 0.8857008218765259, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1303, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.5291005291005291, |
|
"grad_norm": 0.8952376246452332, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1491, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.5361552028218695, |
|
"grad_norm": 1.175799012184143, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1242, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.5432098765432098, |
|
"grad_norm": 0.9906390905380249, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1407, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.5502645502645502, |
|
"grad_norm": 1.0439103841781616, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0782, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.5573192239858906, |
|
"grad_norm": 1.7823419570922852, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1723, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.564373897707231, |
|
"grad_norm": 1.1472703218460083, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2095, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.9809481501579285, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1764, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.5784832451499118, |
|
"grad_norm": 0.82795250415802, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1617, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.5855379188712522, |
|
"grad_norm": 0.7833464741706848, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1152, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.691254198551178, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0862, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.599647266313933, |
|
"grad_norm": 0.635303258895874, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1132, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.6067019400352733, |
|
"grad_norm": 0.8403255939483643, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1724, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.6137566137566137, |
|
"grad_norm": 1.0148879289627075, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1044, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.6208112874779541, |
|
"grad_norm": 1.0047919750213623, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1456, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.6278659611992945, |
|
"grad_norm": 0.6750177145004272, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1013, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.6349206349206349, |
|
"grad_norm": 0.9232619404792786, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1651, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.6419753086419753, |
|
"grad_norm": 0.9468359351158142, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1783, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.6490299823633157, |
|
"grad_norm": 0.7553344964981079, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1174, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.656084656084656, |
|
"grad_norm": 1.0219858884811401, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0887, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.6631393298059964, |
|
"grad_norm": 0.6816285848617554, |
|
"learning_rate": 5e-05, |
|
"loss": 0.106, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.6701940035273368, |
|
"grad_norm": 0.928390622138977, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1175, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.6772486772486772, |
|
"grad_norm": 1.2344391345977783, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1767, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.6843033509700176, |
|
"grad_norm": 0.7818422913551331, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1372, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.691358024691358, |
|
"grad_norm": 0.9105628132820129, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0941, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.6984126984126984, |
|
"grad_norm": 0.8559777140617371, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1225, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.7054673721340388, |
|
"grad_norm": 0.847297191619873, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1465, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.7054673721340388, |
|
"eval_loss": 0.17896194756031036, |
|
"eval_runtime": 360.2551, |
|
"eval_samples_per_second": 6.296, |
|
"eval_steps_per_second": 0.394, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.7125220458553791, |
|
"grad_norm": 0.8381081819534302, |
|
"learning_rate": 5e-05, |
|
"loss": 0.186, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.7195767195767195, |
|
"grad_norm": 1.418454885482788, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0831, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.7266313932980599, |
|
"grad_norm": 1.4008698463439941, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1661, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.7336860670194003, |
|
"grad_norm": 0.8513796329498291, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1871, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 1.5074268579483032, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1343, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.7477954144620811, |
|
"grad_norm": 1.1504905223846436, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1611, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.7548500881834215, |
|
"grad_norm": 0.7931183576583862, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0819, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.7619047619047619, |
|
"grad_norm": 2.454035758972168, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1533, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.7689594356261023, |
|
"grad_norm": 0.905976414680481, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1099, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.7760141093474426, |
|
"grad_norm": 1.0910918712615967, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0784, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.783068783068783, |
|
"grad_norm": 0.8586850166320801, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1083, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.7901234567901234, |
|
"grad_norm": 0.8878437876701355, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0986, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.7971781305114638, |
|
"grad_norm": 0.7834882736206055, |
|
"learning_rate": 5e-05, |
|
"loss": 0.135, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.8042328042328042, |
|
"grad_norm": 0.7140138745307922, |
|
"learning_rate": 5e-05, |
|
"loss": 0.105, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.8112874779541446, |
|
"grad_norm": 0.5810890197753906, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0669, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.818342151675485, |
|
"grad_norm": 0.957624077796936, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1112, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.8253968253968254, |
|
"grad_norm": 0.8660424947738647, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1219, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.8324514991181657, |
|
"grad_norm": 0.7031818628311157, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0599, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.8395061728395061, |
|
"grad_norm": 1.0137779712677002, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1029, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.8465608465608465, |
|
"grad_norm": 0.696945309638977, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0824, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.8536155202821869, |
|
"grad_norm": 1.1428477764129639, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1621, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.8606701940035273, |
|
"grad_norm": 1.124836802482605, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1221, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.8677248677248677, |
|
"grad_norm": 0.7269309163093567, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0561, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.8747795414462081, |
|
"grad_norm": 0.5245713591575623, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0474, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.8818342151675485, |
|
"grad_norm": 1.181915521621704, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1044, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.696465015411377, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1542, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.8959435626102292, |
|
"grad_norm": 0.6903875470161438, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0605, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.9029982363315696, |
|
"grad_norm": 0.8914228081703186, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1034, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.91005291005291, |
|
"grad_norm": 0.8926368355751038, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1088, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.9171075837742504, |
|
"grad_norm": 0.9177432656288147, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1255, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.9241622574955908, |
|
"grad_norm": 0.7664863467216492, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1019, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.9312169312169312, |
|
"grad_norm": 0.7729786038398743, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0747, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.9382716049382716, |
|
"grad_norm": 0.5827771425247192, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1354, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.9453262786596119, |
|
"grad_norm": 0.9261442422866821, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0624, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.7620460391044617, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0884, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.9594356261022927, |
|
"grad_norm": 0.8573839068412781, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0758, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.9664902998236331, |
|
"grad_norm": 0.8569554090499878, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1048, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.9735449735449735, |
|
"grad_norm": 0.9836267232894897, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1243, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.9805996472663139, |
|
"grad_norm": 0.6673980355262756, |
|
"learning_rate": 5e-05, |
|
"loss": 0.061, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.9876543209876543, |
|
"grad_norm": 0.8903977870941162, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1116, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.9947089947089947, |
|
"grad_norm": 0.9424787163734436, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0865, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.001763668430335, |
|
"grad_norm": 0.6135322451591492, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0733, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.0088183421516754, |
|
"grad_norm": 0.7416030764579773, |
|
"learning_rate": 5e-05, |
|
"loss": 0.092, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.0158730158730158, |
|
"grad_norm": 0.7281748056411743, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0896, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.0229276895943562, |
|
"grad_norm": 1.047066330909729, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1042, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.0299823633156966, |
|
"grad_norm": 3.0005276203155518, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0774, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.037037037037037, |
|
"grad_norm": 0.7662864923477173, |
|
"learning_rate": 5e-05, |
|
"loss": 0.119, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.0440917107583774, |
|
"grad_norm": 0.9837973117828369, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0668, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.0511463844797178, |
|
"grad_norm": 0.8258010149002075, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0473, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.0582010582010581, |
|
"grad_norm": 0.928363025188446, |
|
"learning_rate": 5e-05, |
|
"loss": 0.09, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.0652557319223985, |
|
"grad_norm": 1.009312391281128, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1133, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.072310405643739, |
|
"grad_norm": 0.7772536873817444, |
|
"learning_rate": 5e-05, |
|
"loss": 0.069, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.0793650793650793, |
|
"grad_norm": 1.2250009775161743, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0981, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.0864197530864197, |
|
"grad_norm": 0.5997045040130615, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0598, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.09347442680776, |
|
"grad_norm": 0.9267628192901611, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0384, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.1005291005291005, |
|
"grad_norm": 0.6563625335693359, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0766, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.1075837742504409, |
|
"grad_norm": 0.8543305993080139, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0896, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.1146384479717812, |
|
"grad_norm": 0.5792420506477356, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1053, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.1216931216931216, |
|
"grad_norm": 0.7286255955696106, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0998, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.128747795414462, |
|
"grad_norm": 0.4976038932800293, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0782, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.1358024691358024, |
|
"grad_norm": 0.6580901741981506, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0652, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.1428571428571428, |
|
"grad_norm": 0.6756439805030823, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1008, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.1499118165784832, |
|
"grad_norm": 0.895070493221283, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0771, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.1569664902998236, |
|
"grad_norm": 1.056799292564392, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0516, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.164021164021164, |
|
"grad_norm": 0.8106465339660645, |
|
"learning_rate": 5e-05, |
|
"loss": 0.135, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.1710758377425043, |
|
"grad_norm": 0.5313682556152344, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0929, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.1781305114638447, |
|
"grad_norm": 0.8005192279815674, |
|
"learning_rate": 5e-05, |
|
"loss": 0.064, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.1851851851851851, |
|
"grad_norm": 0.7275320291519165, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1083, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.1922398589065255, |
|
"grad_norm": 1.9933772087097168, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1345, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.199294532627866, |
|
"grad_norm": 0.7224838733673096, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0492, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.2063492063492063, |
|
"grad_norm": 0.6393369436264038, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0726, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.2134038800705467, |
|
"grad_norm": 0.5461933612823486, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0283, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.220458553791887, |
|
"grad_norm": 1.0142794847488403, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0768, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.2275132275132274, |
|
"grad_norm": 0.626775860786438, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0971, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.2345679012345678, |
|
"grad_norm": 1.3706245422363281, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0643, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.2416225749559082, |
|
"grad_norm": 0.6315011978149414, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0431, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.2486772486772486, |
|
"grad_norm": 0.6979820132255554, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0911, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.255731922398589, |
|
"grad_norm": 0.9407426118850708, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0583, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.2627865961199294, |
|
"grad_norm": 1.1693543195724487, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0636, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.2698412698412698, |
|
"grad_norm": 0.6498689651489258, |
|
"learning_rate": 5e-05, |
|
"loss": 0.023, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.2768959435626102, |
|
"grad_norm": 0.6040365695953369, |
|
"learning_rate": 5e-05, |
|
"loss": 0.06, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.2839506172839505, |
|
"grad_norm": 0.5838135480880737, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0842, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.291005291005291, |
|
"grad_norm": 0.4301566779613495, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0684, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.2980599647266313, |
|
"grad_norm": 0.6036396026611328, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0938, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.3051146384479717, |
|
"grad_norm": 0.9883386492729187, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0462, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.312169312169312, |
|
"grad_norm": 0.5813114643096924, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0656, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.3192239858906525, |
|
"grad_norm": 0.8730061054229736, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0433, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.3262786596119929, |
|
"grad_norm": 0.8635377287864685, |
|
"learning_rate": 5e-05, |
|
"loss": 0.082, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.7513411045074463, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0932, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.3403880070546736, |
|
"grad_norm": 0.7456687092781067, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0723, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.347442680776014, |
|
"grad_norm": 0.572740912437439, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0721, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.3544973544973544, |
|
"grad_norm": 1.2454944849014282, |
|
"learning_rate": 5e-05, |
|
"loss": 0.054, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.3615520282186948, |
|
"grad_norm": 0.5826276540756226, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0367, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.3686067019400352, |
|
"grad_norm": 0.6496626138687134, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0648, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.3756613756613756, |
|
"grad_norm": 0.8498976826667786, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0942, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.382716049382716, |
|
"grad_norm": 1.1423848867416382, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0545, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.3897707231040564, |
|
"grad_norm": 2.1429636478424072, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0588, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.3968253968253967, |
|
"grad_norm": 0.6466217041015625, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0313, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.4038800705467371, |
|
"grad_norm": 0.49399393796920776, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0527, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.4109347442680775, |
|
"grad_norm": 0.5343263745307922, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0339, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.4109347442680775, |
|
"eval_loss": 0.18905282020568848, |
|
"eval_runtime": 358.5389, |
|
"eval_samples_per_second": 6.326, |
|
"eval_steps_per_second": 0.396, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.417989417989418, |
|
"grad_norm": 1.0444480180740356, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0789, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.4250440917107583, |
|
"grad_norm": 1.5087664127349854, |
|
"learning_rate": 5e-05, |
|
"loss": 0.054, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.4320987654320987, |
|
"grad_norm": 0.8255050778388977, |
|
"learning_rate": 5e-05, |
|
"loss": 0.054, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.439153439153439, |
|
"grad_norm": 0.5734973549842834, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0413, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.4462081128747795, |
|
"grad_norm": 0.5203781127929688, |
|
"learning_rate": 5e-05, |
|
"loss": 0.042, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.4532627865961198, |
|
"grad_norm": 0.6086573600769043, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0415, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.4603174603174602, |
|
"grad_norm": 0.5426012873649597, |
|
"learning_rate": 5e-05, |
|
"loss": 0.036, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.4673721340388006, |
|
"grad_norm": 0.8343996405601501, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0829, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.474426807760141, |
|
"grad_norm": 0.9328364133834839, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0699, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.4814814814814814, |
|
"grad_norm": 0.64570152759552, |
|
"learning_rate": 5e-05, |
|
"loss": 0.067, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.4885361552028218, |
|
"grad_norm": 0.6451817750930786, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0367, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.4955908289241622, |
|
"grad_norm": 0.6584360003471375, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0932, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.5026455026455028, |
|
"grad_norm": 0.7680897116661072, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0394, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.509700176366843, |
|
"grad_norm": 0.6519668698310852, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0699, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.5167548500881836, |
|
"grad_norm": 0.7947211861610413, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0323, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.5238095238095237, |
|
"grad_norm": 0.7202886343002319, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0642, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.5308641975308643, |
|
"grad_norm": 0.507116973400116, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0502, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.5379188712522045, |
|
"grad_norm": 0.5617340207099915, |
|
"learning_rate": 5e-05, |
|
"loss": 0.024, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.544973544973545, |
|
"grad_norm": 0.7389073371887207, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0531, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.5520282186948853, |
|
"grad_norm": 0.6520772576332092, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0652, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.5590828924162259, |
|
"grad_norm": 0.6626583933830261, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0256, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.566137566137566, |
|
"grad_norm": 0.6184734106063843, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0299, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.5731922398589067, |
|
"grad_norm": 0.5411644577980042, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0466, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.5802469135802468, |
|
"grad_norm": 0.6262487769126892, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0916, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.5873015873015874, |
|
"grad_norm": 0.6367466449737549, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0462, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.5943562610229276, |
|
"grad_norm": 0.3993736505508423, |
|
"learning_rate": 5e-05, |
|
"loss": 0.053, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.6014109347442682, |
|
"grad_norm": 0.46192389726638794, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0709, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.6084656084656084, |
|
"grad_norm": 1.1713682413101196, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0442, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.615520282186949, |
|
"grad_norm": 0.6246752142906189, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0588, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.6225749559082892, |
|
"grad_norm": 0.569995641708374, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0625, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.6296296296296298, |
|
"grad_norm": 0.7993080019950867, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0448, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.63668430335097, |
|
"grad_norm": 0.9412052631378174, |
|
"learning_rate": 5e-05, |
|
"loss": 0.106, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.6437389770723105, |
|
"grad_norm": 0.5617280006408691, |
|
"learning_rate": 5e-05, |
|
"loss": 0.048, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.6507936507936507, |
|
"grad_norm": 0.5206360220909119, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0271, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.6578483245149913, |
|
"grad_norm": 0.5513091087341309, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0352, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.6649029982363315, |
|
"grad_norm": 0.5904120206832886, |
|
"learning_rate": 5e-05, |
|
"loss": 0.069, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.671957671957672, |
|
"grad_norm": 0.8241035342216492, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0654, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.6790123456790123, |
|
"grad_norm": 0.9514018297195435, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0726, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.6860670194003529, |
|
"grad_norm": 0.5005506277084351, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0655, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.693121693121693, |
|
"grad_norm": 0.44962841272354126, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0403, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.7001763668430336, |
|
"grad_norm": 0.46677684783935547, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0436, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.7072310405643738, |
|
"grad_norm": 0.7335297465324402, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0496, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.7142857142857144, |
|
"grad_norm": 0.6942448019981384, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1039, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.7213403880070546, |
|
"grad_norm": 0.4999411106109619, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0543, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.7283950617283952, |
|
"grad_norm": 0.5241154432296753, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0478, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.7354497354497354, |
|
"grad_norm": 0.6574307084083557, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0579, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.742504409171076, |
|
"grad_norm": 0.8321516513824463, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0663, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.7495590828924161, |
|
"grad_norm": 0.6835036873817444, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0441, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.7566137566137567, |
|
"grad_norm": 0.6555665135383606, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0311, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.763668430335097, |
|
"grad_norm": 0.4744579493999481, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0302, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.7707231040564375, |
|
"grad_norm": 1.0260396003723145, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0503, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 0.6683841347694397, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0355, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.7848324514991183, |
|
"grad_norm": 0.5520302057266235, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0343, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.7918871252204585, |
|
"grad_norm": 0.7275727391242981, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0379, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.798941798941799, |
|
"grad_norm": 0.7184352278709412, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0541, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.8059964726631392, |
|
"grad_norm": 0.6140182614326477, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0536, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.8130511463844798, |
|
"grad_norm": 0.48473063111305237, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0401, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.82010582010582, |
|
"grad_norm": 0.5717530250549316, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0376, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.8271604938271606, |
|
"grad_norm": 0.6555631756782532, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0429, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.8342151675485008, |
|
"grad_norm": 0.539983868598938, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0418, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.8412698412698414, |
|
"grad_norm": 0.7640647292137146, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0648, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.8483245149911816, |
|
"grad_norm": 0.7768380641937256, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0248, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.8553791887125222, |
|
"grad_norm": 0.42515769600868225, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0652, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.8624338624338623, |
|
"grad_norm": 0.6031877994537354, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0399, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.869488536155203, |
|
"grad_norm": 0.5836982131004333, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0547, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.876543209876543, |
|
"grad_norm": 0.5359038710594177, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0582, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.8835978835978837, |
|
"grad_norm": 0.5191663503646851, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0347, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.8906525573192239, |
|
"grad_norm": 0.6412689089775085, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0419, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.8977072310405645, |
|
"grad_norm": 0.76778244972229, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0464, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.9047619047619047, |
|
"grad_norm": 0.4410521388053894, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0342, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.9118165784832453, |
|
"grad_norm": 0.8977980017662048, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0793, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.9188712522045854, |
|
"grad_norm": 0.9770653247833252, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0519, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.925925925925926, |
|
"grad_norm": 0.9411201477050781, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0443, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.9329805996472662, |
|
"grad_norm": 0.504085123538971, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0112, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.9400352733686068, |
|
"grad_norm": 0.5680234432220459, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0367, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.947089947089947, |
|
"grad_norm": 0.5446969270706177, |
|
"learning_rate": 5e-05, |
|
"loss": 0.046, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.9541446208112876, |
|
"grad_norm": 0.4226911664009094, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0254, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.9611992945326278, |
|
"grad_norm": 0.4745901823043823, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0296, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.9682539682539684, |
|
"grad_norm": 0.4772842526435852, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0235, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.9753086419753085, |
|
"grad_norm": 0.561303436756134, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0289, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.9823633156966491, |
|
"grad_norm": 3.243555784225464, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0625, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.9894179894179893, |
|
"grad_norm": 0.5137839317321777, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0852, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.99647266313933, |
|
"grad_norm": 0.4999793767929077, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0398, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.00352733686067, |
|
"grad_norm": 0.4685242772102356, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0404, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.0105820105820107, |
|
"grad_norm": 0.4656790494918823, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0315, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.017636684303351, |
|
"grad_norm": 0.5658271312713623, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0298, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.0246913580246915, |
|
"grad_norm": 0.731564998626709, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0577, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.0317460317460316, |
|
"grad_norm": 0.45937952399253845, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0393, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.0388007054673722, |
|
"grad_norm": 0.559946596622467, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0602, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.0458553791887124, |
|
"grad_norm": 0.5641235709190369, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0501, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.052910052910053, |
|
"grad_norm": 0.47334426641464233, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0501, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.059964726631393, |
|
"grad_norm": 0.6770443320274353, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0853, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.067019400352734, |
|
"grad_norm": 0.4847649037837982, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0553, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.074074074074074, |
|
"grad_norm": 0.49225914478302, |
|
"learning_rate": 5e-05, |
|
"loss": 0.046, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.0811287477954146, |
|
"grad_norm": 0.7099577188491821, |
|
"learning_rate": 5e-05, |
|
"loss": 0.047, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.0881834215167547, |
|
"grad_norm": 0.5774824023246765, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0329, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.0952380952380953, |
|
"grad_norm": 0.4122166931629181, |
|
"learning_rate": 5e-05, |
|
"loss": 0.032, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.1022927689594355, |
|
"grad_norm": 0.6180548071861267, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0396, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.109347442680776, |
|
"grad_norm": 0.4783317446708679, |
|
"learning_rate": 5e-05, |
|
"loss": 0.028, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.1164021164021163, |
|
"grad_norm": 0.5351418256759644, |
|
"learning_rate": 5e-05, |
|
"loss": 0.033, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.1164021164021163, |
|
"eval_loss": 0.19941070675849915, |
|
"eval_runtime": 362.8638, |
|
"eval_samples_per_second": 6.25, |
|
"eval_steps_per_second": 0.391, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.123456790123457, |
|
"grad_norm": 0.3710566759109497, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0291, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.130511463844797, |
|
"grad_norm": 0.5422140955924988, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0364, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.1375661375661377, |
|
"grad_norm": 0.6321272253990173, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0462, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.144620811287478, |
|
"grad_norm": 0.5658572912216187, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0265, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.1516754850088184, |
|
"grad_norm": 0.5490357279777527, |
|
"learning_rate": 5e-05, |
|
"loss": 0.028, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.1587301587301586, |
|
"grad_norm": 0.42299216985702515, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0301, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.165784832451499, |
|
"grad_norm": 0.5153865814208984, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0192, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.1728395061728394, |
|
"grad_norm": 0.5954696536064148, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0362, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.17989417989418, |
|
"grad_norm": 0.7111172080039978, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0496, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.18694885361552, |
|
"grad_norm": 0.7426902055740356, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0625, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.1940035273368608, |
|
"grad_norm": 0.5204232335090637, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0252, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.201058201058201, |
|
"grad_norm": 0.7376022338867188, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0286, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.2081128747795415, |
|
"grad_norm": 0.46142226457595825, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0484, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.2151675485008817, |
|
"grad_norm": 0.3670506477355957, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0254, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 0.7535457611083984, |
|
"learning_rate": 5e-05, |
|
"loss": 0.101, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.2292768959435625, |
|
"grad_norm": 0.5934197306632996, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0496, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.236331569664903, |
|
"grad_norm": 0.30532220005989075, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0212, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.2433862433862433, |
|
"grad_norm": 0.4631759822368622, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0361, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.250440917107584, |
|
"grad_norm": 0.3577938675880432, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0416, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.257495590828924, |
|
"grad_norm": 0.4464920461177826, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0408, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.2645502645502646, |
|
"grad_norm": 0.3824048638343811, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0248, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.271604938271605, |
|
"grad_norm": 0.6711484789848328, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0487, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.2786596119929454, |
|
"grad_norm": 0.6138588786125183, |
|
"learning_rate": 5e-05, |
|
"loss": 0.043, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.2857142857142856, |
|
"grad_norm": 0.4771498739719391, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0258, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.292768959435626, |
|
"grad_norm": 0.5928422808647156, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0295, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.2998236331569664, |
|
"grad_norm": 0.579921305179596, |
|
"learning_rate": 5e-05, |
|
"loss": 0.068, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.306878306878307, |
|
"grad_norm": 0.6070377230644226, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0542, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.313932980599647, |
|
"grad_norm": 0.6430080533027649, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0492, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.3209876543209877, |
|
"grad_norm": 0.4864685535430908, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0432, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.328042328042328, |
|
"grad_norm": 0.7958788275718689, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0455, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.3350970017636685, |
|
"grad_norm": 0.49791809916496277, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0132, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.3421516754850087, |
|
"grad_norm": 0.7294586896896362, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0571, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.3492063492063493, |
|
"grad_norm": 0.42604896426200867, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0288, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.3562610229276895, |
|
"grad_norm": 0.41210633516311646, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0316, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.36331569664903, |
|
"grad_norm": 0.5827341675758362, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0202, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.3703703703703702, |
|
"grad_norm": 0.4686066210269928, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0431, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.377425044091711, |
|
"grad_norm": 0.4502599835395813, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0234, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.384479717813051, |
|
"grad_norm": 0.6698682904243469, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0523, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.3915343915343916, |
|
"grad_norm": 0.5106789469718933, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0666, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.398589065255732, |
|
"grad_norm": 0.4766543507575989, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0293, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.4056437389770724, |
|
"grad_norm": 0.9548158049583435, |
|
"learning_rate": 5e-05, |
|
"loss": 0.029, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.4126984126984126, |
|
"grad_norm": 0.4722541570663452, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0585, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.419753086419753, |
|
"grad_norm": 0.42772984504699707, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0684, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.4268077601410933, |
|
"grad_norm": 0.5742454528808594, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0491, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.433862433862434, |
|
"grad_norm": 1.2364797592163086, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0348, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.440917107583774, |
|
"grad_norm": 3.099977731704712, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0264, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.4479717813051147, |
|
"grad_norm": 0.6101909279823303, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0939, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.455026455026455, |
|
"grad_norm": 1.5888972282409668, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0187, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.4620811287477955, |
|
"grad_norm": 0.5128622651100159, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0267, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.4691358024691357, |
|
"grad_norm": 0.4196084141731262, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0312, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.4761904761904763, |
|
"grad_norm": 0.5221646428108215, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0234, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.4832451499118164, |
|
"grad_norm": 0.769625186920166, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0181, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.490299823633157, |
|
"grad_norm": 0.5391407608985901, |
|
"learning_rate": 5e-05, |
|
"loss": 0.039, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.497354497354497, |
|
"grad_norm": 0.488760381937027, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0292, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.504409171075838, |
|
"grad_norm": 0.40043187141418457, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0197, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.511463844797178, |
|
"grad_norm": 0.4821717441082001, |
|
"learning_rate": 5e-05, |
|
"loss": 0.042, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.5185185185185186, |
|
"grad_norm": 0.3457970917224884, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0276, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.5255731922398588, |
|
"grad_norm": 0.28284406661987305, |
|
"learning_rate": 5e-05, |
|
"loss": 0.022, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.5326278659611994, |
|
"grad_norm": 0.5945581793785095, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0735, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.5396825396825395, |
|
"grad_norm": 0.4184766709804535, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0257, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.54673721340388, |
|
"grad_norm": 0.620948851108551, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0181, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.5537918871252203, |
|
"grad_norm": 0.4545421898365021, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0146, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.560846560846561, |
|
"grad_norm": 0.7520444989204407, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0262, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.567901234567901, |
|
"grad_norm": 0.46159473061561584, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0254, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.5749559082892417, |
|
"grad_norm": 0.899054229259491, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0166, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.582010582010582, |
|
"grad_norm": 0.501522958278656, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0426, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.5890652557319225, |
|
"grad_norm": 0.5105156898498535, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0275, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.5961199294532626, |
|
"grad_norm": 0.5256771445274353, |
|
"learning_rate": 5e-05, |
|
"loss": 0.054, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.6031746031746033, |
|
"grad_norm": 0.49999576807022095, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0279, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.6102292768959434, |
|
"grad_norm": 0.5966548919677734, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0425, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.617283950617284, |
|
"grad_norm": 0.45533132553100586, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0218, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.624338624338624, |
|
"grad_norm": 0.6391978859901428, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0365, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.631393298059965, |
|
"grad_norm": 0.8711655735969543, |
|
"learning_rate": 5e-05, |
|
"loss": 0.039, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.638447971781305, |
|
"grad_norm": 0.6811898350715637, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0241, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.6455026455026456, |
|
"grad_norm": 0.7612636089324951, |
|
"learning_rate": 5e-05, |
|
"loss": 0.02, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.6525573192239857, |
|
"grad_norm": 0.8182013630867004, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0703, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.6596119929453264, |
|
"grad_norm": 0.39782482385635376, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0245, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.4738399386405945, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0302, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.673721340388007, |
|
"grad_norm": 0.3865676820278168, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0369, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.6807760141093473, |
|
"grad_norm": 0.4017925262451172, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0132, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.687830687830688, |
|
"grad_norm": 0.5341134667396545, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0461, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.694885361552028, |
|
"grad_norm": 0.7711620330810547, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0287, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.7019400352733687, |
|
"grad_norm": 0.4064803719520569, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0202, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.708994708994709, |
|
"grad_norm": 0.24531042575836182, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0211, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.7160493827160495, |
|
"grad_norm": 0.6126789450645447, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0215, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.7231040564373896, |
|
"grad_norm": 0.4999621510505676, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0218, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.7301587301587302, |
|
"grad_norm": 0.5171307921409607, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0243, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.7372134038800704, |
|
"grad_norm": 0.95902419090271, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0367, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.744268077601411, |
|
"grad_norm": 0.6669855117797852, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0188, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.751322751322751, |
|
"grad_norm": 0.49141064286231995, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0174, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.758377425044092, |
|
"grad_norm": 0.36193522810935974, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0074, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.765432098765432, |
|
"grad_norm": 0.6513599753379822, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0491, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.7724867724867726, |
|
"grad_norm": 0.5697189569473267, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0225, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.7795414462081127, |
|
"grad_norm": 0.752934992313385, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0561, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.7865961199294533, |
|
"grad_norm": 0.8643583655357361, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0199, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.7936507936507935, |
|
"grad_norm": 0.5964650511741638, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0356, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.800705467372134, |
|
"grad_norm": 0.3856894075870514, |
|
"learning_rate": 5e-05, |
|
"loss": 0.028, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.8077601410934743, |
|
"grad_norm": 0.6613327860832214, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0278, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.814814814814815, |
|
"grad_norm": 0.40905261039733887, |
|
"learning_rate": 5e-05, |
|
"loss": 0.022, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.821869488536155, |
|
"grad_norm": 0.3591744005680084, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0333, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.821869488536155, |
|
"eval_loss": 0.19655056297779083, |
|
"eval_runtime": 358.8507, |
|
"eval_samples_per_second": 6.32, |
|
"eval_steps_per_second": 0.396, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.821869488536155, |
|
"step": 400, |
|
"total_flos": 2.7548487274232545e+18, |
|
"train_loss": 0.09715834772447124, |
|
"train_runtime": 13184.0406, |
|
"train_samples_per_second": 4.854, |
|
"train_steps_per_second": 0.076 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 8, |
|
"save_steps": 100, |
|
"total_flos": 2.7548487274232545e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |