|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 310,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2e-05,
      "loss": 11.2153,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 4e-05,
      "loss": 11.2101,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 6e-05,
      "loss": 11.1907,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 8e-05,
      "loss": 11.0346,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001,
      "loss": 10.6171,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00012,
      "loss": 10.0148,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00014,
      "loss": 9.4405,
      "step": 7
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00016,
      "loss": 9.7438,
      "step": 8
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00018,
      "loss": 8.707,
      "step": 9
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 8.8771,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019999451693655123,
      "loss": 8.7858,
      "step": 11
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019997806834748456,
      "loss": 8.511,
      "step": 12
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019995065603657316,
      "loss": 8.2111,
      "step": 13
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019991228300988585,
      "loss": 8.0204,
      "step": 14
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001998629534754574,
      "loss": 7.9454,
      "step": 15
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019980267284282717,
      "loss": 7.8055,
      "step": 16
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019973144772244582,
      "loss": 7.6032,
      "step": 17
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019964928592495045,
      "loss": 7.7403,
      "step": 18
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019955619646030802,
      "loss": 7.7085,
      "step": 19
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019945218953682734,
      "loss": 7.5279,
      "step": 20
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019933727656003963,
      "loss": 7.2119,
      "step": 21
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0001992114701314478,
      "loss": 6.9685,
      "step": 22
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019907478404714436,
      "loss": 6.8322,
      "step": 23
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019892723329629887,
      "loss": 6.7232,
      "step": 24
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019876883405951377,
      "loss": 6.7616,
      "step": 25
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001985996037070505,
      "loss": 6.5955,
      "step": 26
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001984195607969242,
      "loss": 6.4523,
      "step": 27
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001982287250728689,
      "loss": 6.3178,
      "step": 28
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019802711746217218,
      "loss": 6.371,
      "step": 29
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019781476007338058,
      "loss": 6.4004,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019759167619387476,
      "loss": 6.2502,
      "step": 31
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019735789028731604,
      "loss": 6.1807,
      "step": 32
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019711342799096361,
      "loss": 6.1244,
      "step": 33
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001968583161128631,
      "loss": 6.0723,
      "step": 34
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019659258262890683,
      "loss": 5.8887,
      "step": 35
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019631625667976583,
      "loss": 5.8987,
      "step": 36
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001960293685676943,
      "loss": 5.5855,
      "step": 37
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019573194975320673,
      "loss": 5.7129,
      "step": 38
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001954240328516277,
      "loss": 5.8263,
      "step": 39
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019510565162951537,
      "loss": 5.7092,
      "step": 40
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001947768410009586,
      "loss": 5.6344,
      "step": 41
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019443763702374812,
      "loss": 5.5662,
      "step": 42
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019408807689542257,
      "loss": 5.5991,
      "step": 43
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019372819894918915,
      "loss": 5.3797,
      "step": 44
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019335804264972018,
      "loss": 5.4445,
      "step": 45
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019297764858882514,
      "loss": 5.4585,
      "step": 46
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001925870584809995,
      "loss": 5.2898,
      "step": 47
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019218631515885006,
      "loss": 5.196,
      "step": 48
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019177546256839812,
      "loss": 5.2029,
      "step": 49
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001913545457642601,
      "loss": 5.3546,
      "step": 50
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019092361090470688,
      "loss": 5.2278,
      "step": 51
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019048270524660196,
      "loss": 5.2661,
      "step": 52
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019003187714021938,
      "loss": 5.0926,
      "step": 53
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001895711760239413,
      "loss": 5.2142,
      "step": 54
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001891006524188368,
      "loss": 5.1078,
      "step": 55
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018862035792312147,
      "loss": 5.1481,
      "step": 56
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0001881303452064992,
      "loss": 5.1971,
      "step": 57
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018763066800438636,
      "loss": 5.1135,
      "step": 58
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018712138111201895,
      "loss": 5.1675,
      "step": 59
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018660254037844388,
      "loss": 4.9891,
      "step": 60
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001860742027003944,
      "loss": 4.9451,
      "step": 61
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018553642601605068,
      "loss": 4.8872,
      "step": 62
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018498926929868642,
      "loss": 5.0473,
      "step": 63
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018443279255020152,
      "loss": 4.9591,
      "step": 64
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018386705679454242,
      "loss": 4.8218,
      "step": 65
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018329212407100994,
      "loss": 4.8672,
      "step": 66
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018270805742745617,
      "loss": 4.9517,
      "step": 67
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018211492091337042,
      "loss": 4.7033,
      "step": 68
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018151277957285543,
      "loss": 4.6816,
      "step": 69
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018090169943749476,
      "loss": 4.6706,
      "step": 70
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018028174751911146,
      "loss": 4.8965,
      "step": 71
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00017965299180241963,
      "loss": 4.5277,
      "step": 72
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017901550123756906,
      "loss": 4.709,
      "step": 73
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.000178369345732584,
      "loss": 4.5827,
      "step": 74
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001777145961456971,
      "loss": 4.526,
      "step": 75
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017705132427757895,
      "loss": 4.4775,
      "step": 76
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017637960286346425,
      "loss": 4.389,
      "step": 77
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017569950556517566,
      "loss": 4.5912,
      "step": 78
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017501110696304596,
      "loss": 4.3966,
      "step": 79
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017431448254773944,
      "loss": 4.3832,
      "step": 80
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017360970871197346,
      "loss": 4.3049,
      "step": 81
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017289686274214118,
      "loss": 4.5175,
      "step": 82
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017217602280983623,
      "loss": 4.4391,
      "step": 83
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017144726796328034,
      "loss": 4.359,
      "step": 84
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017071067811865476,
      "loss": 4.4578,
      "step": 85
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00016996633405133655,
      "loss": 4.2509,
      "step": 86
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0001692143173870407,
      "loss": 4.213,
      "step": 87
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00016845471059286887,
      "loss": 4.3808,
      "step": 88
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00016768759696826608,
      "loss": 4.2431,
      "step": 89
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00016691306063588583,
      "loss": 4.1752,
      "step": 90
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00016613118653236518,
      "loss": 4.1288,
      "step": 91
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00016534206039901057,
      "loss": 4.2743,
      "step": 92
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00016454576877239507,
      "loss": 4.1326,
      "step": 93
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.000163742398974869,
      "loss": 4.2462,
      "step": 94
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00016293203910498376,
      "loss": 4.1801,
      "step": 95
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00016211477802783103,
      "loss": 4.2519,
      "step": 96
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00016129070536529766,
      "loss": 4.1227,
      "step": 97
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001604599114862375,
      "loss": 4.096,
      "step": 98
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001596224874965616,
      "loss": 4.1539,
      "step": 99
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00015877852522924732,
      "loss": 4.1399,
      "step": 100
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0001579281172342679,
      "loss": 4.0179,
      "step": 101
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0001570713567684432,
      "loss": 4.2023,
      "step": 102
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00015620833778521307,
      "loss": 4.017,
      "step": 103
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00015533915492433443,
      "loss": 4.0196,
      "step": 104
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00015446390350150273,
      "loss": 4.0342,
      "step": 105
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00015358267949789966,
      "loss": 4.2113,
      "step": 106
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015269557954966778,
      "loss": 3.9744,
      "step": 107
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015180270093731303,
      "loss": 3.9296,
      "step": 108
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015090414157503714,
      "loss": 4.1932,
      "step": 109
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015000000000000001,
      "loss": 3.9307,
      "step": 110
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00014909037536151409,
      "loss": 4.1181,
      "step": 111
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00014817536741017152,
      "loss": 4.0439,
      "step": 112
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00014725507648690543,
      "loss": 4.0253,
      "step": 113
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00014632960351198618,
      "loss": 4.0668,
      "step": 114
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00014539904997395468,
      "loss": 3.9828,
      "step": 115
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00014446351791849276,
      "loss": 4.0319,
      "step": 116
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00014352310993723277,
      "loss": 3.9696,
      "step": 117
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00014257792915650728,
      "loss": 3.9411,
      "step": 118
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00014162807922604012,
      "loss": 4.0313,
      "step": 119
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00014067366430758004,
      "loss": 3.9115,
      "step": 120
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00013971478906347806,
      "loss": 3.941,
      "step": 121
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0001387515586452103,
      "loss": 3.975,
      "step": 122
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00013778407868184672,
      "loss": 3.7062,
      "step": 123
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00013681245526846783,
      "loss": 3.8805,
      "step": 124
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00013583679495453,
      "loss": 3.7612,
      "step": 125
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00013485720473218154,
      "loss": 3.7326,
      "step": 126
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00013387379202452917,
      "loss": 3.8042,
      "step": 127
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00013288666467385833,
      "loss": 3.9848,
      "step": 128
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00013189593092980702,
      "loss": 4.011,
      "step": 129
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00013090169943749476,
      "loss": 3.8322,
      "step": 130
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00012990407922560868,
      "loss": 3.7704,
      "step": 131
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00012890317969444716,
      "loss": 3.8909,
      "step": 132
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00012789911060392294,
      "loss": 3.8207,
      "step": 133
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00012689198206152657,
      "loss": 3.8095,
      "step": 134
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00012588190451025207,
      "loss": 3.8504,
      "step": 135
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001248689887164855,
      "loss": 3.6548,
      "step": 136
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001238533457578581,
      "loss": 3.7723,
      "step": 137
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00012283508701106557,
      "loss": 3.6582,
      "step": 138
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00012181432413965428,
      "loss": 3.6674,
      "step": 139
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00012079116908177593,
      "loss": 3.9033,
      "step": 140
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00011976573403791262,
      "loss": 3.7011,
      "step": 141
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00011873813145857249,
      "loss": 3.8179,
      "step": 142
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00011770847403195834,
      "loss": 3.6867,
      "step": 143
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00011667687467161024,
      "loss": 3.6831,
      "step": 144
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001156434465040231,
      "loss": 3.8595,
      "step": 145
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00011460830285624118,
      "loss": 3.721,
      "step": 146
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00011357155724343045,
      "loss": 3.712,
      "step": 147
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00011253332335643043,
      "loss": 3.6603,
      "step": 148
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00011149371504928668,
      "loss": 3.6563,
      "step": 149
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00011045284632676536,
      "loss": 3.5381,
      "step": 150
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00010941083133185146,
      "loss": 3.7151,
      "step": 151
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00010836778433323158,
      "loss": 3.7003,
      "step": 152
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00010732381971276318,
      "loss": 3.8455,
      "step": 153
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00010627905195293135,
      "loss": 3.5937,
      "step": 154
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.0001052335956242944,
      "loss": 3.5345,
      "step": 155
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00010418756537291996,
      "loss": 3.527,
      "step": 156
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00010314107590781284,
      "loss": 3.5617,
      "step": 157
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001020942419883357,
      "loss": 3.5834,
      "step": 158
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00010104717841162458,
      "loss": 3.6394,
      "step": 159
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001,
      "loss": 3.4265,
      "step": 160
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.895282158837545e-05,
      "loss": 3.4816,
      "step": 161
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.790575801166432e-05,
      "loss": 3.5481,
      "step": 162
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.685892409218717e-05,
      "loss": 3.6996,
      "step": 163
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.581243462708006e-05,
      "loss": 3.6275,
      "step": 164
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.476640437570562e-05,
      "loss": 3.5595,
      "step": 165
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.372094804706867e-05,
      "loss": 3.4173,
      "step": 166
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.267618028723686e-05,
      "loss": 3.6742,
      "step": 167
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.163221566676847e-05,
      "loss": 3.5923,
      "step": 168
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.058916866814858e-05,
      "loss": 3.5414,
      "step": 169
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.954715367323468e-05,
      "loss": 3.7635,
      "step": 170
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.850628495071336e-05,
      "loss": 3.3945,
      "step": 171
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.746667664356956e-05,
      "loss": 3.5575,
      "step": 172
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.642844275656957e-05,
      "loss": 3.4334,
      "step": 173
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.539169714375885e-05,
      "loss": 3.5686,
      "step": 174
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.435655349597689e-05,
      "loss": 3.7207,
      "step": 175
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.332312532838978e-05,
      "loss": 3.4545,
      "step": 176
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.229152596804168e-05,
      "loss": 3.4879,
      "step": 177
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.126186854142752e-05,
      "loss": 3.6279,
      "step": 178
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.023426596208739e-05,
      "loss": 3.4412,
      "step": 179
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.920883091822408e-05,
      "loss": 3.5066,
      "step": 180
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.818567586034577e-05,
      "loss": 3.3587,
      "step": 181
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.716491298893442e-05,
      "loss": 3.5664,
      "step": 182
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.614665424214193e-05,
      "loss": 3.4065,
      "step": 183
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.513101128351454e-05,
      "loss": 3.6623,
      "step": 184
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.411809548974792e-05,
      "loss": 3.3463,
      "step": 185
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.310801793847344e-05,
      "loss": 3.5487,
      "step": 186
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.210088939607708e-05,
      "loss": 3.3901,
      "step": 187
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.109682030555283e-05,
      "loss": 3.2565,
      "step": 188
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.009592077439134e-05,
      "loss": 3.4692,
      "step": 189
    },
    {
      "epoch": 0.61,
      "learning_rate": 6.909830056250527e-05,
      "loss": 3.4258,
      "step": 190
    },
    {
      "epoch": 0.62,
      "learning_rate": 6.8104069070193e-05,
      "loss": 3.4803,
      "step": 191
    },
    {
      "epoch": 0.62,
      "learning_rate": 6.711333532614168e-05,
      "loss": 3.5306,
      "step": 192
    },
    {
      "epoch": 0.62,
      "learning_rate": 6.612620797547087e-05,
      "loss": 3.4718,
      "step": 193
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.51427952678185e-05,
      "loss": 3.4513,
      "step": 194
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.416320504546997e-05,
      "loss": 3.6049,
      "step": 195
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.318754473153221e-05,
      "loss": 3.4402,
      "step": 196
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.22159213181533e-05,
      "loss": 3.4833,
      "step": 197
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.12484413547897e-05,
      "loss": 3.493,
      "step": 198
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.0285210936521955e-05,
      "loss": 3.3789,
      "step": 199
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.9326335692419995e-05,
      "loss": 3.386,
      "step": 200
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.83719207739599e-05,
      "loss": 3.5344,
      "step": 201
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.7422070843492734e-05,
      "loss": 3.4203,
      "step": 202
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.647689006276726e-05,
      "loss": 3.5331,
      "step": 203
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.553648208150728e-05,
      "loss": 3.5052,
      "step": 204
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.4600950026045326e-05,
      "loss": 3.631,
      "step": 205
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.3670396488013854e-05,
      "loss": 3.4552,
      "step": 206
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.274492351309461e-05,
      "loss": 3.4078,
      "step": 207
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.182463258982846e-05,
      "loss": 3.4163,
      "step": 208
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.090962463848592e-05,
      "loss": 3.4208,
      "step": 209
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.000000000000002e-05,
      "loss": 3.5513,
      "step": 210
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.909585842496287e-05,
      "loss": 3.4736,
      "step": 211
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.8197299062686995e-05,
      "loss": 3.4162,
      "step": 212
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.7304420450332244e-05,
      "loss": 3.4161,
      "step": 213
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.6417320502100316e-05,
      "loss": 3.5424,
      "step": 214
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.5536096498497295e-05,
      "loss": 3.4375,
      "step": 215
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.46608450756656e-05,
      "loss": 3.4181,
      "step": 216
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.379166221478697e-05,
      "loss": 3.4234,
      "step": 217
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.2928643231556844e-05,
      "loss": 3.2769,
      "step": 218
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.207188276573214e-05,
      "loss": 3.5804,
      "step": 219
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.12214747707527e-05,
      "loss": 3.4969,
      "step": 220
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.037751250343841e-05,
      "loss": 3.4878,
      "step": 221
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.954008851376252e-05,
      "loss": 3.4461,
      "step": 222
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8709294634702376e-05,
      "loss": 3.4997,
      "step": 223
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.788522197216897e-05,
      "loss": 3.2672,
      "step": 224
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7067960895016275e-05,
      "loss": 3.4284,
      "step": 225
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.6257601025131026e-05,
      "loss": 3.3721,
      "step": 226
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.545423122760493e-05,
      "loss": 3.3755,
      "step": 227
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.465793960098945e-05,
      "loss": 3.412,
      "step": 228
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.386881346763483e-05,
      "loss": 3.3902,
      "step": 229
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.308693936411421e-05,
      "loss": 3.3975,
      "step": 230
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.231240303173394e-05,
      "loss": 3.3772,
      "step": 231
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.154528940713113e-05,
      "loss": 3.1924,
      "step": 232
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.078568261295933e-05,
      "loss": 3.439,
      "step": 233
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.0033665948663448e-05,
      "loss": 3.2018,
      "step": 234
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.9289321881345254e-05,
      "loss": 3.4706,
      "step": 235
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.8552732036719687e-05,
      "loss": 3.4761,
      "step": 236
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.7823977190163786e-05,
      "loss": 3.4259,
      "step": 237
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.7103137257858868e-05,
      "loss": 3.4053,
      "step": 238
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.639029128802657e-05,
      "loss": 3.2636,
      "step": 239
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.5685517452260567e-05,
      "loss": 3.3724,
      "step": 240
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.4988893036954043e-05,
      "loss": 3.3544,
      "step": 241
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.4300494434824373e-05,
      "loss": 3.2656,
      "step": 242
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.362039713653581e-05,
      "loss": 3.4023,
      "step": 243
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.2948675722421086e-05,
      "loss": 3.2969,
      "step": 244
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.2285403854302912e-05,
      "loss": 3.4511,
      "step": 245
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.163065426741603e-05,
      "loss": 3.4428,
      "step": 246
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.098449876243096e-05,
      "loss": 3.3647,
      "step": 247
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.0347008197580374e-05,
      "loss": 3.2604,
      "step": 248
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.9718252480888566e-05,
      "loss": 3.3492,
      "step": 249
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 3.3132,
      "step": 250
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.848722042714457e-05,
      "loss": 3.1101,
      "step": 251
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.78850790866296e-05,
      "loss": 3.4295,
      "step": 252
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7291942572543807e-05,
      "loss": 3.27,
      "step": 253
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6707875928990058e-05,
      "loss": 3.307,
      "step": 254
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6132943205457606e-05,
      "loss": 3.2597,
      "step": 255
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.5567207449798515e-05,
      "loss": 3.2053,
      "step": 256
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.5010730701313625e-05,
      "loss": 3.2863,
      "step": 257
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.4463573983949341e-05,
      "loss": 3.1945,
      "step": 258
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.3925797299605647e-05,
      "loss": 3.3066,
      "step": 259
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.339745962155613e-05,
      "loss": 3.3855,
      "step": 260
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.2878618887981064e-05,
      "loss": 3.1933,
      "step": 261
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.2369331995613665e-05,
      "loss": 3.4084,
      "step": 262
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.1869654793500784e-05,
      "loss": 3.315,
      "step": 263
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.1379642076878527e-05,
      "loss": 3.3994,
      "step": 264
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.0899347581163221e-05,
      "loss": 3.2624,
      "step": 265
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.042882397605871e-05,
      "loss": 3.2287,
      "step": 266
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.968122859780648e-06,
      "loss": 3.245,
      "step": 267
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.517294753398064e-06,
      "loss": 3.2106,
      "step": 268
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.076389095293148e-06,
      "loss": 3.2575,
      "step": 269
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.645454235739903e-06,
      "loss": 3.3462,
      "step": 270
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.224537431601886e-06,
      "loss": 3.5159,
      "step": 271
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.81368484114996e-06,
      "loss": 3.3177,
      "step": 272
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.412941519000527e-06,
      "loss": 3.1598,
      "step": 273
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.022351411174866e-06,
      "loss": 3.4421,
      "step": 274
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.6419573502798374e-06,
      "loss": 3.2242,
      "step": 275
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.2718010508108545e-06,
      "loss": 3.3379,
      "step": 276
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.911923104577455e-06,
      "loss": 3.3409,
      "step": 277
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.562362976251901e-06,
      "loss": 3.2563,
      "step": 278
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.223158999041444e-06,
      "loss": 3.322,
      "step": 279
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.8943483704846475e-06,
      "loss": 3.365,
      "step": 280
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.575967148372317e-06,
      "loss": 3.3066,
      "step": 281
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.268050246793276e-06,
      "loss": 3.2917,
      "step": 282
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.970631432305694e-06,
      "loss": 3.2967,
      "step": 283
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.68374332023419e-06,
      "loss": 3.2636,
      "step": 284
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.40741737109318e-06,
      "loss": 3.2059,
      "step": 285
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.1416838871368924e-06,
      "loss": 3.2664,
      "step": 286
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8865720090364034e-06,
      "loss": 3.4219,
      "step": 287
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.6421097126839712e-06,
      "loss": 3.3064,
      "step": 288
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.4083238061252567e-06,
      "loss": 3.1889,
      "step": 289
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.1852399266194314e-06,
      "loss": 3.4085,
      "step": 290
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.9728825378278246e-06,
      "loss": 3.3206,
      "step": 291
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.771274927131139e-06,
      "loss": 3.4628,
      "step": 292
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.580439203075812e-06,
      "loss": 3.1341,
      "step": 293
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.400396292949513e-06,
      "loss": 3.3387,
      "step": 294
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.231165940486234e-06,
      "loss": 3.2056,
      "step": 295
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.0727667037011668e-06,
      "loss": 3.1854,
      "step": 296
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.252159528556403e-07,
      "loss": 3.2785,
      "step": 297
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.885298685522235e-07,
      "loss": 3.4136,
      "step": 298
    },
    {
      "epoch": 0.96,
      "learning_rate": 6.627234399603555e-07,
      "loss": 3.2466,
      "step": 299
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.478104631726711e-07,
      "loss": 3.5445,
      "step": 300
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.438035396920004e-07,
      "loss": 3.4556,
      "step": 301
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.50714075049563e-07,
      "loss": 3.0958,
      "step": 302
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.685522775541904e-07,
      "loss": 3.3386,
      "step": 303
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.973271571728441e-07,
      "loss": 3.3139,
      "step": 304
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.3704652454261668e-07,
      "loss": 3.2791,
      "step": 305
    },
    {
      "epoch": 0.99,
      "learning_rate": 8.771699011416168e-08,
      "loss": 3.2188,
      "step": 306
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.934396342684e-08,
      "loss": 3.3562,
      "step": 307
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.193165251545004e-08,
      "loss": 3.3555,
      "step": 308
    },
    {
      "epoch": 1.0,
      "learning_rate": 5.483063448785686e-09,
      "loss": 3.609,
      "step": 309
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 3.4165,
      "step": 310
    },
    {
      "epoch": 1.0,
      "step": 310,
      "total_flos": 53958111068160.0,
      "train_loss": 4.296783600314971,
      "train_runtime": 4111.9826,
      "train_samples_per_second": 7.229,
      "train_steps_per_second": 0.075
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 310,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50000,
  "total_flos": 53958111068160.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
|
|