{ |
|
"best_metric": 0.7442646622657776, |
|
"best_model_checkpoint": "./mistral-lamia-7/checkpoint-309", |
|
"epoch": 1.3333333333333333, |
|
"eval_steps": 103, |
|
"global_step": 412, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1e-05, |
|
"loss": 2.0583, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 1.443671464920044, |
|
"eval_runtime": 855.8822, |
|
"eval_samples_per_second": 0.609, |
|
"eval_steps_per_second": 0.153, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2e-05, |
|
"loss": 2.1547, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3e-05, |
|
"loss": 1.7915, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4e-05, |
|
"loss": 1.5082, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5e-05, |
|
"loss": 1.8973, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6e-05, |
|
"loss": 1.5764, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7e-05, |
|
"loss": 1.7784, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8e-05, |
|
"loss": 1.1673, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9e-05, |
|
"loss": 1.3696, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.0001, |
|
"loss": 1.4372, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.999997399012391e-05, |
|
"loss": 1.3773, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.999989596052265e-05, |
|
"loss": 2.1384, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.999976591127746e-05, |
|
"loss": 1.2998, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.999958384252362e-05, |
|
"loss": 1.3715, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.999934975445053e-05, |
|
"loss": 1.5568, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.999906364730176e-05, |
|
"loss": 1.5464, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.999872552137497e-05, |
|
"loss": 1.488, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.999833537702192e-05, |
|
"loss": 1.258, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.999789321464857e-05, |
|
"loss": 1.165, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.999739903471488e-05, |
|
"loss": 1.1888, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.999685283773504e-05, |
|
"loss": 1.4435, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.999625462427728e-05, |
|
"loss": 1.5896, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.999560439496399e-05, |
|
"loss": 1.663, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.999490215047167e-05, |
|
"loss": 1.6058, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.999414789153094e-05, |
|
"loss": 1.0705, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.999334161892648e-05, |
|
"loss": 1.4235, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.99924833334972e-05, |
|
"loss": 1.3092, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.999157303613601e-05, |
|
"loss": 1.2291, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.999061072778999e-05, |
|
"loss": 1.2964, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.998959640946033e-05, |
|
"loss": 1.197, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.99885300822023e-05, |
|
"loss": 1.5062, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.998741174712533e-05, |
|
"loss": 1.2466, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998624140539292e-05, |
|
"loss": 1.1498, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998501905822266e-05, |
|
"loss": 1.2019, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998374470688632e-05, |
|
"loss": 0.9986, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.99824183527097e-05, |
|
"loss": 1.2226, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.998103999707273e-05, |
|
"loss": 1.0795, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.997960964140947e-05, |
|
"loss": 1.2918, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.997812728720802e-05, |
|
"loss": 1.2244, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.997659293601065e-05, |
|
"loss": 1.126, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.997500658941366e-05, |
|
"loss": 1.4344, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.997336824906748e-05, |
|
"loss": 1.0974, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.997167791667667e-05, |
|
"loss": 1.4621, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.99699355939998e-05, |
|
"loss": 1.2877, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.99681412828496e-05, |
|
"loss": 1.2676, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.996629498509283e-05, |
|
"loss": 1.339, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.99643967026504e-05, |
|
"loss": 1.1842, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.996244643749727e-05, |
|
"loss": 1.4184, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.996044419166248e-05, |
|
"loss": 1.2832, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.995838996722914e-05, |
|
"loss": 1.2316, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.99562837663345e-05, |
|
"loss": 1.1826, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.995412559116979e-05, |
|
"loss": 1.3186, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.995191544398039e-05, |
|
"loss": 1.3744, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.994965332706573e-05, |
|
"loss": 1.1803, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.99473392427793e-05, |
|
"loss": 1.1569, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.994497319352867e-05, |
|
"loss": 1.2095, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.994255518177544e-05, |
|
"loss": 1.3214, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.994008521003533e-05, |
|
"loss": 1.3726, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.993756328087805e-05, |
|
"loss": 1.148, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.993498939692743e-05, |
|
"loss": 1.065, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.993236356086133e-05, |
|
"loss": 1.2071, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.992968577541163e-05, |
|
"loss": 1.1237, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.992695604336431e-05, |
|
"loss": 1.2605, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.992417436755936e-05, |
|
"loss": 1.2115, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.992134075089084e-05, |
|
"loss": 1.2713, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.991845519630678e-05, |
|
"loss": 1.1697, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.991551770680936e-05, |
|
"loss": 1.162, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.991252828545468e-05, |
|
"loss": 1.1412, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.990948693535294e-05, |
|
"loss": 1.388, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.990639365966836e-05, |
|
"loss": 1.1837, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.990324846161913e-05, |
|
"loss": 1.2843, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.990005134447753e-05, |
|
"loss": 1.0951, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.989680231156981e-05, |
|
"loss": 1.2402, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.989350136627627e-05, |
|
"loss": 1.3355, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.989014851203117e-05, |
|
"loss": 1.2216, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.98867437523228e-05, |
|
"loss": 1.2621, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.98832870906935e-05, |
|
"loss": 1.1265, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.98797785307395e-05, |
|
"loss": 1.1277, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.987621807611114e-05, |
|
"loss": 1.3614, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.987260573051269e-05, |
|
"loss": 1.2972, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.986894149770238e-05, |
|
"loss": 1.0557, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.98652253814925e-05, |
|
"loss": 1.2714, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.986145738574927e-05, |
|
"loss": 1.2355, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.985763751439288e-05, |
|
"loss": 1.0934, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.985376577139753e-05, |
|
"loss": 1.0878, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.984984216079132e-05, |
|
"loss": 1.4082, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.98458666866564e-05, |
|
"loss": 1.1796, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.984183935312881e-05, |
|
"loss": 1.3199, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.983776016439859e-05, |
|
"loss": 1.2988, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.983362912470966e-05, |
|
"loss": 1.3196, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.982944623836e-05, |
|
"loss": 1.2859, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.98252115097014e-05, |
|
"loss": 1.3135, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.982092494313969e-05, |
|
"loss": 1.0826, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.981658654313457e-05, |
|
"loss": 1.0133, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.981219631419972e-05, |
|
"loss": 1.089, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.980775426090268e-05, |
|
"loss": 1.0537, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.980326038786496e-05, |
|
"loss": 1.2502, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.979871469976196e-05, |
|
"loss": 1.1967, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.979411720132299e-05, |
|
"loss": 1.3497, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.978946789733126e-05, |
|
"loss": 1.1884, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.978476679262387e-05, |
|
"loss": 1.2027, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.978001389209186e-05, |
|
"loss": 1.3288, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.977520920068011e-05, |
|
"loss": 1.1651, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"eval_loss": 0.7769330739974976, |
|
"eval_runtime": 859.2735, |
|
"eval_samples_per_second": 0.606, |
|
"eval_steps_per_second": 0.152, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.977035272338738e-05, |
|
"loss": 1.164, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.976544446526634e-05, |
|
"loss": 1.0954, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.976048443142354e-05, |
|
"loss": 1.0738, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.975547262701932e-05, |
|
"loss": 1.226, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.975040905726798e-05, |
|
"loss": 1.2027, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.974529372743761e-05, |
|
"loss": 1.2244, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.974012664285021e-05, |
|
"loss": 1.3685, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.973490780888154e-05, |
|
"loss": 1.1611, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.972963723096128e-05, |
|
"loss": 1.152, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.97243149145729e-05, |
|
"loss": 0.9295, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.971894086525372e-05, |
|
"loss": 0.994, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.971351508859488e-05, |
|
"loss": 1.2434, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.97080375902413e-05, |
|
"loss": 1.1832, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.970250837589177e-05, |
|
"loss": 1.1384, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.969692745129885e-05, |
|
"loss": 1.1646, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.969129482226894e-05, |
|
"loss": 1.2104, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.968561049466214e-05, |
|
"loss": 1.0034, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.967987447439243e-05, |
|
"loss": 1.1279, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.967408676742751e-05, |
|
"loss": 1.003, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.966824737978893e-05, |
|
"loss": 1.0833, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.966235631755192e-05, |
|
"loss": 1.1952, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.965641358684553e-05, |
|
"loss": 1.1463, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.965041919385252e-05, |
|
"loss": 1.2657, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.964437314480947e-05, |
|
"loss": 1.2557, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.963827544600662e-05, |
|
"loss": 1.2098, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.963212610378803e-05, |
|
"loss": 1.1649, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.962592512455138e-05, |
|
"loss": 1.1723, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.961967251474822e-05, |
|
"loss": 1.0861, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.961336828088365e-05, |
|
"loss": 1.1231, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.960701242951663e-05, |
|
"loss": 1.1572, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.960060496725974e-05, |
|
"loss": 1.236, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.959414590077925e-05, |
|
"loss": 1.0556, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.958763523679514e-05, |
|
"loss": 1.0868, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.95810729820811e-05, |
|
"loss": 1.1307, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.957445914346444e-05, |
|
"loss": 1.1461, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.95677937278262e-05, |
|
"loss": 1.1393, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.9561076742101e-05, |
|
"loss": 1.2003, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.95543081932772e-05, |
|
"loss": 1.1272, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.954748808839674e-05, |
|
"loss": 1.0357, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.954061643455523e-05, |
|
"loss": 1.2283, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.953369323890191e-05, |
|
"loss": 1.1221, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.952671850863963e-05, |
|
"loss": 1.2206, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.951969225102486e-05, |
|
"loss": 1.4204, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.95126144733677e-05, |
|
"loss": 1.012, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.950548518303183e-05, |
|
"loss": 1.2038, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.949830438743453e-05, |
|
"loss": 1.2403, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.949107209404665e-05, |
|
"loss": 0.9957, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.948378831039264e-05, |
|
"loss": 1.1783, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.947645304405051e-05, |
|
"loss": 1.1181, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.946906630265184e-05, |
|
"loss": 0.9741, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.946162809388176e-05, |
|
"loss": 1.1431, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.945413842547894e-05, |
|
"loss": 1.1987, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.94465973052356e-05, |
|
"loss": 1.134, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.943900474099748e-05, |
|
"loss": 1.2935, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.943136074066385e-05, |
|
"loss": 0.9978, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.942366531218748e-05, |
|
"loss": 1.1968, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.941591846357468e-05, |
|
"loss": 1.1044, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.94081202028852e-05, |
|
"loss": 1.1268, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.940027053823234e-05, |
|
"loss": 1.2869, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.939236947778281e-05, |
|
"loss": 1.2323, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.938441702975689e-05, |
|
"loss": 1.1835, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.937641320242822e-05, |
|
"loss": 1.0609, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.9368358004124e-05, |
|
"loss": 1.2646, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.936025144322476e-05, |
|
"loss": 1.1829, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.935209352816455e-05, |
|
"loss": 1.1813, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.934388426743082e-05, |
|
"loss": 1.1192, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.933562366956445e-05, |
|
"loss": 1.264, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.932731174315972e-05, |
|
"loss": 1.1099, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.931894849686432e-05, |
|
"loss": 1.0458, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.931053393937932e-05, |
|
"loss": 1.1754, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.93020680794592e-05, |
|
"loss": 1.0062, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.92935509259118e-05, |
|
"loss": 1.1492, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.92849824875983e-05, |
|
"loss": 1.0825, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.927636277343329e-05, |
|
"loss": 1.1788, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.926769179238466e-05, |
|
"loss": 1.0398, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.925896955347364e-05, |
|
"loss": 1.3753, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.925019606577486e-05, |
|
"loss": 1.1793, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.924137133841616e-05, |
|
"loss": 1.1719, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.923249538057875e-05, |
|
"loss": 1.0094, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.922356820149715e-05, |
|
"loss": 1.2844, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.921458981045914e-05, |
|
"loss": 1.1394, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.92055602168058e-05, |
|
"loss": 1.161, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.919647942993148e-05, |
|
"loss": 1.2338, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.918734745928378e-05, |
|
"loss": 1.1616, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.917816431436355e-05, |
|
"loss": 1.1774, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.91689300047249e-05, |
|
"loss": 1.1482, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.915964453997515e-05, |
|
"loss": 1.1453, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.915030792977487e-05, |
|
"loss": 1.2261, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.914092018383778e-05, |
|
"loss": 0.9531, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.91314813119309e-05, |
|
"loss": 1.1557, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.912199132387434e-05, |
|
"loss": 1.2099, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.911245022954147e-05, |
|
"loss": 1.0969, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.910285803885877e-05, |
|
"loss": 1.3077, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.909321476180594e-05, |
|
"loss": 1.2176, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.908352040841576e-05, |
|
"loss": 1.1101, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.90737749887742e-05, |
|
"loss": 1.3515, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.906397851302036e-05, |
|
"loss": 0.9948, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.905413099134644e-05, |
|
"loss": 1.1265, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.904423243399774e-05, |
|
"loss": 1.4007, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.903428285127268e-05, |
|
"loss": 1.0674, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.902428225352277e-05, |
|
"loss": 1.0193, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.901423065115254e-05, |
|
"loss": 1.0533, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.900412805461967e-05, |
|
"loss": 1.1377, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"eval_loss": 0.7517589926719666, |
|
"eval_runtime": 859.1271, |
|
"eval_samples_per_second": 0.606, |
|
"eval_steps_per_second": 0.152, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.899397447443484e-05, |
|
"loss": 1.2305, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.898376992116179e-05, |
|
"loss": 1.2319, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.897351440541727e-05, |
|
"loss": 1.0622, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.896320793787106e-05, |
|
"loss": 1.1298, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.895285052924599e-05, |
|
"loss": 1.1869, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.894244219031783e-05, |
|
"loss": 1.3071, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.893198293191538e-05, |
|
"loss": 1.1135, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.892147276492039e-05, |
|
"loss": 1.1577, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.89109117002676e-05, |
|
"loss": 1.1323, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.890029974894466e-05, |
|
"loss": 1.1308, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.888963692199223e-05, |
|
"loss": 1.0908, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.887892323050384e-05, |
|
"loss": 1.2721, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.886815868562596e-05, |
|
"loss": 1.1598, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.885734329855798e-05, |
|
"loss": 0.9964, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.884647708055217e-05, |
|
"loss": 1.1307, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.883556004291368e-05, |
|
"loss": 1.3948, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.882459219700056e-05, |
|
"loss": 0.9427, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.881357355422368e-05, |
|
"loss": 1.2337, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.880250412604681e-05, |
|
"loss": 1.0835, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.87913839239865e-05, |
|
"loss": 1.263, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.878021295961217e-05, |
|
"loss": 1.0411, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.876899124454604e-05, |
|
"loss": 1.2549, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.87577187904631e-05, |
|
"loss": 1.1883, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.874639560909117e-05, |
|
"loss": 1.1391, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.873502171221087e-05, |
|
"loss": 1.1801, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.872359711165549e-05, |
|
"loss": 1.3583, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.871212181931115e-05, |
|
"loss": 1.0431, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.870059584711668e-05, |
|
"loss": 1.0404, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.868901920706366e-05, |
|
"loss": 1.1149, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.867739191119636e-05, |
|
"loss": 1.1922, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.866571397161175e-05, |
|
"loss": 1.0328, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.865398540045952e-05, |
|
"loss": 1.1225, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.864220620994201e-05, |
|
"loss": 1.2339, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.863037641231422e-05, |
|
"loss": 1.1169, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.861849601988383e-05, |
|
"loss": 1.3356, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.860656504501113e-05, |
|
"loss": 1.2194, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.859458350010905e-05, |
|
"loss": 1.1395, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.858255139764315e-05, |
|
"loss": 1.0451, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.857046875013154e-05, |
|
"loss": 1.1524, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.855833557014495e-05, |
|
"loss": 1.2449, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.85461518703067e-05, |
|
"loss": 1.151, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.853391766329263e-05, |
|
"loss": 1.1867, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.852163296183117e-05, |
|
"loss": 1.1155, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.850929777870324e-05, |
|
"loss": 1.0962, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.84969121267423e-05, |
|
"loss": 1.16, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.848447601883435e-05, |
|
"loss": 1.1288, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.847198946791784e-05, |
|
"loss": 1.3088, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.84594524869837e-05, |
|
"loss": 1.2906, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.844686508907537e-05, |
|
"loss": 1.2188, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.843422728728871e-05, |
|
"loss": 1.1946, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.842153909477201e-05, |
|
"loss": 1.0312, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.840880052472601e-05, |
|
"loss": 1.0968, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.839601159040386e-05, |
|
"loss": 1.2417, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.838317230511112e-05, |
|
"loss": 1.1532, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.837028268220567e-05, |
|
"loss": 1.3025, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.835734273509786e-05, |
|
"loss": 1.0594, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.834435247725033e-05, |
|
"loss": 1.3581, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.833131192217806e-05, |
|
"loss": 1.1592, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.831822108344841e-05, |
|
"loss": 1.043, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.830507997468101e-05, |
|
"loss": 1.1142, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.829188860954781e-05, |
|
"loss": 1.184, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.827864700177302e-05, |
|
"loss": 1.3455, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.826535516513317e-05, |
|
"loss": 1.0595, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.8252013113457e-05, |
|
"loss": 1.2036, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.823862086062553e-05, |
|
"loss": 1.2629, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.822517842057198e-05, |
|
"loss": 1.1612, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.821168580728181e-05, |
|
"loss": 1.1814, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.819814303479267e-05, |
|
"loss": 1.0398, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.818455011719439e-05, |
|
"loss": 1.123, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.817090706862895e-05, |
|
"loss": 1.2314, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.815721390329055e-05, |
|
"loss": 1.1784, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.814347063542546e-05, |
|
"loss": 0.9881, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.812967727933213e-05, |
|
"loss": 1.2049, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.811583384936107e-05, |
|
"loss": 1.0459, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.810194035991495e-05, |
|
"loss": 1.0323, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.808799682544846e-05, |
|
"loss": 1.237, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.807400326046843e-05, |
|
"loss": 1.1142, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.805995967953363e-05, |
|
"loss": 1.1063, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.804586609725499e-05, |
|
"loss": 1.0957, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.803172252829536e-05, |
|
"loss": 0.8887, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.801752898736966e-05, |
|
"loss": 1.0086, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.800328548924478e-05, |
|
"loss": 1.1333, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.798899204873957e-05, |
|
"loss": 1.0308, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.797464868072488e-05, |
|
"loss": 1.0024, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.796025540012346e-05, |
|
"loss": 1.1911, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.794581222191001e-05, |
|
"loss": 1.0609, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.793131916111114e-05, |
|
"loss": 1.2506, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.791677623280537e-05, |
|
"loss": 1.0964, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.790218345212308e-05, |
|
"loss": 1.037, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.788754083424652e-05, |
|
"loss": 1.2541, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.787284839440982e-05, |
|
"loss": 1.2164, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.785810614789891e-05, |
|
"loss": 0.9282, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.784331411005155e-05, |
|
"loss": 1.0881, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.782847229625729e-05, |
|
"loss": 1.0904, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.78135807219575e-05, |
|
"loss": 1.1065, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.779863940264529e-05, |
|
"loss": 1.1201, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.778364835386553e-05, |
|
"loss": 1.2199, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.776860759121484e-05, |
|
"loss": 1.2194, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.775351713034156e-05, |
|
"loss": 1.1701, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.773837698694571e-05, |
|
"loss": 1.339, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.772318717677904e-05, |
|
"loss": 1.0602, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.770794771564495e-05, |
|
"loss": 1.0773, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.76926586193985e-05, |
|
"loss": 1.166, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.7442646622657776, |
|
"eval_runtime": 859.0542, |
|
"eval_samples_per_second": 0.606, |
|
"eval_steps_per_second": 0.152, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.767731990394638e-05, |
|
"loss": 0.8696, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.766193158524692e-05, |
|
"loss": 0.8288, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.764649367931007e-05, |
|
"loss": 0.899, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.763100620219731e-05, |
|
"loss": 0.8109, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.761546917002178e-05, |
|
"loss": 0.882, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.759988259894808e-05, |
|
"loss": 0.7985, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.758424650519244e-05, |
|
"loss": 0.8101, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.756856090502258e-05, |
|
"loss": 1.0696, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.9892, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.753704125076848e-05, |
|
"loss": 0.8788, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.752120722947718e-05, |
|
"loss": 0.8815, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.750532376735736e-05, |
|
"loss": 0.8671, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.748939088093413e-05, |
|
"loss": 0.8458, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.747340858678402e-05, |
|
"loss": 1.0109, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.745737690153487e-05, |
|
"loss": 0.7822, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.744129584186598e-05, |
|
"loss": 0.8424, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.742516542450803e-05, |
|
"loss": 0.9926, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.740898566624302e-05, |
|
"loss": 0.8599, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.739275658390427e-05, |
|
"loss": 0.9044, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.737647819437645e-05, |
|
"loss": 1.0284, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.73601505145955e-05, |
|
"loss": 0.8623, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.734377356154869e-05, |
|
"loss": 0.8071, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.73273473522745e-05, |
|
"loss": 1.0447, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.731087190386268e-05, |
|
"loss": 0.9164, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.72943472334542e-05, |
|
"loss": 1.0258, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.727777335824124e-05, |
|
"loss": 0.8969, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.726115029546721e-05, |
|
"loss": 0.942, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.724447806242662e-05, |
|
"loss": 1.1972, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.72277566764652e-05, |
|
"loss": 0.8396, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.72109861549798e-05, |
|
"loss": 1.0402, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.719416651541839e-05, |
|
"loss": 0.9729, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.717729777528001e-05, |
|
"loss": 0.9919, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.716037995211484e-05, |
|
"loss": 1.2068, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.714341306352411e-05, |
|
"loss": 0.9471, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.712639712716007e-05, |
|
"loss": 1.0325, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.7109332160726e-05, |
|
"loss": 0.9684, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.709221818197624e-05, |
|
"loss": 0.8641, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.707505520871607e-05, |
|
"loss": 0.9543, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.705784325880176e-05, |
|
"loss": 1.1185, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.704058235014054e-05, |
|
"loss": 0.7917, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.702327250069059e-05, |
|
"loss": 0.8544, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.700591372846095e-05, |
|
"loss": 0.8404, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.698850605151165e-05, |
|
"loss": 1.023, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.697104948795352e-05, |
|
"loss": 0.9241, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.69535440559483e-05, |
|
"loss": 0.9049, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.693598977370854e-05, |
|
"loss": 0.998, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.691838665949764e-05, |
|
"loss": 1.1147, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.690073473162978e-05, |
|
"loss": 1.0152, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.688303400846995e-05, |
|
"loss": 0.8904, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.686528450843389e-05, |
|
"loss": 0.9847, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.68474862499881e-05, |
|
"loss": 1.1606, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.682963925164977e-05, |
|
"loss": 0.86, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.681174353198687e-05, |
|
"loss": 0.7986, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.679379910961799e-05, |
|
"loss": 0.9406, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.677580600321242e-05, |
|
"loss": 1.0272, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.675776423149013e-05, |
|
"loss": 0.8774, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.673967381322163e-05, |
|
"loss": 1.037, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.672153476722816e-05, |
|
"loss": 0.8539, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.670334711238145e-05, |
|
"loss": 0.8378, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.668511086760389e-05, |
|
"loss": 0.8108, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.666682605186835e-05, |
|
"loss": 0.7509, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.664849268419825e-05, |
|
"loss": 1.1062, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.663011078366757e-05, |
|
"loss": 0.8277, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.661168036940071e-05, |
|
"loss": 0.8602, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.659320146057262e-05, |
|
"loss": 1.0327, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.657467407640864e-05, |
|
"loss": 0.9697, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.655609823618457e-05, |
|
"loss": 1.0604, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.653747395922663e-05, |
|
"loss": 0.8769, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.651880126491144e-05, |
|
"loss": 0.8384, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.650008017266594e-05, |
|
"loss": 0.9498, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.648131070196749e-05, |
|
"loss": 0.987, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.646249287234374e-05, |
|
"loss": 0.9841, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.644362670337268e-05, |
|
"loss": 1.034, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.642471221468257e-05, |
|
"loss": 0.9491, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.640574942595196e-05, |
|
"loss": 0.9272, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.638673835690961e-05, |
|
"loss": 1.0313, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.636767902733458e-05, |
|
"loss": 0.8084, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.634857145705612e-05, |
|
"loss": 1.0232, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.632941566595357e-05, |
|
"loss": 0.7735, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.631021167395659e-05, |
|
"loss": 0.801, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.62909595010449e-05, |
|
"loss": 0.792, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.627165916724837e-05, |
|
"loss": 1.2264, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.625231069264696e-05, |
|
"loss": 0.9676, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.623291409737074e-05, |
|
"loss": 1.0697, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.62134694015998e-05, |
|
"loss": 0.8656, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.619397662556435e-05, |
|
"loss": 0.952, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.617443578954455e-05, |
|
"loss": 0.9509, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.615484691387058e-05, |
|
"loss": 0.9439, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.613521001892264e-05, |
|
"loss": 0.8009, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.611552512513085e-05, |
|
"loss": 0.7669, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.609579225297525e-05, |
|
"loss": 1.0578, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.607601142298584e-05, |
|
"loss": 0.7875, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.60561826557425e-05, |
|
"loss": 0.8935, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.603630597187499e-05, |
|
"loss": 1.0119, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.601638139206291e-05, |
|
"loss": 0.8773, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.599640893703568e-05, |
|
"loss": 1.0947, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.597638862757255e-05, |
|
"loss": 0.9826, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.595632048450254e-05, |
|
"loss": 0.9346, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.593620452870446e-05, |
|
"loss": 1.1038, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.591604078110685e-05, |
|
"loss": 0.8444, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.589582926268798e-05, |
|
"loss": 0.8715, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.587556999447579e-05, |
|
"loss": 0.931, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.585526299754794e-05, |
|
"loss": 0.9218, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"eval_loss": 0.7483064532279968, |
|
"eval_runtime": 858.4676, |
|
"eval_samples_per_second": 0.607, |
|
"eval_steps_per_second": 0.153, |
|
"step": 412 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3090, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 103, |
|
"total_flos": 9.494314946301788e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |