{
  "best_metric": 0.9140611886978149,
  "best_model_checkpoint": "ckpt/origin/pedes_environment_v2/checkpoint-91",
  "epoch": 4.90566037735849,
  "eval_steps": 7,
  "global_step": 130,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.04, "learning_rate": 5e-06, "loss": 1.1312, "step": 1 },
    { "epoch": 0.08, "learning_rate": 1e-05, "loss": 1.0024, "step": 2 },
    { "epoch": 0.11, "learning_rate": 9.998494093481022e-06, "loss": 1.1562, "step": 3 },
    { "epoch": 0.15, "learning_rate": 9.993977281025862e-06, "loss": 1.1379, "step": 4 },
    { "epoch": 0.19, "learning_rate": 9.986452283393452e-06, "loss": 1.0642, "step": 5 },
    { "epoch": 0.23, "learning_rate": 9.975923633360985e-06, "loss": 1.1291, "step": 6 },
    { "epoch": 0.26, "learning_rate": 9.962397672993552e-06, "loss": 1.0684, "step": 7 },
    { "epoch": 0.26, "eval_loss": 1.0384804010391235, "eval_runtime": 36.0334, "eval_samples_per_second": 1.915, "eval_steps_per_second": 1.915, "step": 7 },
    { "epoch": 0.3, "learning_rate": 9.945882549823906e-06, "loss": 0.9229, "step": 8 },
    { "epoch": 0.34, "learning_rate": 9.926388211944707e-06, "loss": 1.0811, "step": 9 },
    { "epoch": 0.38, "learning_rate": 9.903926402016153e-06, "loss": 0.9438, "step": 10 },
    { "epoch": 0.42, "learning_rate": 9.878510650192644e-06, "loss": 1.0043, "step": 11 },
    { "epoch": 0.45, "learning_rate": 9.850156265972722e-06, "loss": 0.9032, "step": 12 },
    { "epoch": 0.49, "learning_rate": 9.8188803289772e-06, "loss": 1.0192, "step": 13 },
    { "epoch": 0.53, "learning_rate": 9.784701678661045e-06, "loss": 1.0098, "step": 14 },
    { "epoch": 0.53, "eval_loss": 0.9786658883094788, "eval_runtime": 35.9159, "eval_samples_per_second": 1.921, "eval_steps_per_second": 1.921, "step": 14 },
    { "epoch": 0.57, "learning_rate": 9.747640902965185e-06, "loss": 0.9059, "step": 15 },
    { "epoch": 0.6, "learning_rate": 9.707720325915105e-06, "loss": 0.9866, "step": 16 },
    { "epoch": 0.64, "learning_rate": 9.664963994173695e-06, "loss": 0.9917, "step": 17 },
    { "epoch": 0.68, "learning_rate": 9.619397662556434e-06, "loss": 0.9275, "step": 18 },
    { "epoch": 0.72, "learning_rate": 9.571048778517655e-06, "loss": 0.9052, "step": 19 },
    { "epoch": 0.75, "learning_rate": 9.519946465617217e-06, "loss": 0.8874, "step": 20 },
    { "epoch": 0.79, "learning_rate": 9.466121505977577e-06, "loss": 0.978, "step": 21 },
    { "epoch": 0.79, "eval_loss": 0.9512829780578613, "eval_runtime": 35.9114, "eval_samples_per_second": 1.921, "eval_steps_per_second": 1.921, "step": 21 },
    { "epoch": 0.83, "learning_rate": 9.409606321741776e-06, "loss": 0.9062, "step": 22 },
    { "epoch": 0.87, "learning_rate": 9.350434955543557e-06, "loss": 1.0455, "step": 23 },
    { "epoch": 0.91, "learning_rate": 9.288643050001362e-06, "loss": 0.8784, "step": 24 },
    { "epoch": 0.94, "learning_rate": 9.224267826248536e-06, "loss": 0.924, "step": 25 },
    { "epoch": 0.98, "learning_rate": 9.157348061512728e-06, "loss": 0.9133, "step": 26 },
    { "epoch": 1.02, "learning_rate": 9.08792406575792e-06, "loss": 0.8332, "step": 27 },
    { "epoch": 1.06, "learning_rate": 9.016037657403225e-06, "loss": 0.9772, "step": 28 },
    { "epoch": 1.06, "eval_loss": 0.9353252649307251, "eval_runtime": 36.0706, "eval_samples_per_second": 1.913, "eval_steps_per_second": 1.913, "step": 28 },
    { "epoch": 1.09, "learning_rate": 8.941732138133032e-06, "loss": 0.8295, "step": 29 },
    { "epoch": 1.13, "learning_rate": 8.865052266813686e-06, "loss": 0.9754, "step": 30 },
    { "epoch": 1.17, "learning_rate": 8.786044232532423e-06, "loss": 0.9305, "step": 31 },
    { "epoch": 1.21, "learning_rate": 8.704755626774796e-06, "loss": 0.8728, "step": 32 },
    { "epoch": 1.25, "learning_rate": 8.621235414757337e-06, "loss": 0.9236, "step": 33 },
    { "epoch": 1.28, "learning_rate": 8.535533905932739e-06, "loss": 0.9223, "step": 34 },
    { "epoch": 1.32, "learning_rate": 8.447702723685335e-06, "loss": 0.9838, "step": 35 },
    { "epoch": 1.32, "eval_loss": 0.9286850690841675, "eval_runtime": 35.9219, "eval_samples_per_second": 1.921, "eval_steps_per_second": 1.921, "step": 35 },
    { "epoch": 1.36, "learning_rate": 8.357794774235094e-06, "loss": 0.8317, "step": 36 },
    { "epoch": 1.4, "learning_rate": 8.265864214768883e-06, "loss": 0.7946, "step": 37 },
    { "epoch": 1.43, "learning_rate": 8.171966420818227e-06, "loss": 0.9288, "step": 38 },
    { "epoch": 1.47, "learning_rate": 8.076157952903134e-06, "loss": 0.8589, "step": 39 },
    { "epoch": 1.51, "learning_rate": 7.978496522462167e-06, "loss": 0.8974, "step": 40 },
    { "epoch": 1.55, "learning_rate": 7.879040957089229e-06, "loss": 0.9479, "step": 41 },
    { "epoch": 1.58, "learning_rate": 7.777851165098012e-06, "loss": 0.8649, "step": 42 },
    { "epoch": 1.58, "eval_loss": 0.9226968288421631, "eval_runtime": 36.0309, "eval_samples_per_second": 1.915, "eval_steps_per_second": 1.915, "step": 42 },
    { "epoch": 1.62, "learning_rate": 7.674988099435487e-06, "loss": 0.9145, "step": 43 },
    { "epoch": 1.66, "learning_rate": 7.570513720966108e-06, "loss": 0.9756, "step": 44 },
    { "epoch": 1.7, "learning_rate": 7.464490961148921e-06, "loss": 0.8617, "step": 45 },
    { "epoch": 1.74, "learning_rate": 7.3569836841299905e-06, "loss": 0.8243, "step": 46 },
    { "epoch": 1.77, "learning_rate": 7.248056648273034e-06, "loss": 0.8681, "step": 47 },
    { "epoch": 1.81, "learning_rate": 7.137775467151411e-06, "loss": 0.801, "step": 48 },
    { "epoch": 1.85, "learning_rate": 7.026206570024949e-06, "loss": 0.8676, "step": 49 },
    { "epoch": 1.85, "eval_loss": 0.9197350144386292, "eval_runtime": 35.9615, "eval_samples_per_second": 1.919, "eval_steps_per_second": 1.919, "step": 49 },
    { "epoch": 1.89, "learning_rate": 6.913417161825449e-06, "loss": 0.8457, "step": 50 },
    { "epoch": 1.92, "learning_rate": 6.799475182674942e-06, "loss": 0.8894, "step": 51 },
    { "epoch": 1.96, "learning_rate": 6.684449266961101e-06, "loss": 0.8995, "step": 52 },
    { "epoch": 2.0, "learning_rate": 6.568408701994459e-06, "loss": 0.9544, "step": 53 },
    { "epoch": 2.04, "learning_rate": 6.451423386272312e-06, "loss": 0.8938, "step": 54 },
    { "epoch": 2.08, "learning_rate": 6.333563787374493e-06, "loss": 0.8898, "step": 55 },
    { "epoch": 2.11, "learning_rate": 6.21490089951632e-06, "loss": 0.8327, "step": 56 },
    { "epoch": 2.11, "eval_loss": 0.9180477261543274, "eval_runtime": 35.9929, "eval_samples_per_second": 1.917, "eval_steps_per_second": 1.917, "step": 56 },
    { "epoch": 2.15, "learning_rate": 6.095506200784349e-06, "loss": 0.8413, "step": 57 },
    { "epoch": 2.19, "learning_rate": 5.975451610080643e-06, "loss": 0.9434, "step": 58 },
    { "epoch": 2.23, "learning_rate": 5.8548094438015065e-06, "loss": 0.9523, "step": 59 },
    { "epoch": 2.26, "learning_rate": 5.733652372276809e-06, "loss": 0.8919, "step": 60 },
    { "epoch": 2.3, "learning_rate": 5.612053375996082e-06, "loss": 0.85, "step": 61 },
    { "epoch": 2.34, "learning_rate": 5.490085701647805e-06, "loss": 0.8806, "step": 62 },
    { "epoch": 2.38, "learning_rate": 5.367822817998338e-06, "loss": 0.8033, "step": 63 },
    { "epoch": 2.38, "eval_loss": 0.9181221127510071, "eval_runtime": 36.0174, "eval_samples_per_second": 1.916, "eval_steps_per_second": 1.916, "step": 63 },
    { "epoch": 2.42, "learning_rate": 5.245338371637091e-06, "loss": 0.9186, "step": 64 },
    { "epoch": 2.45, "learning_rate": 5.122706142614562e-06, "loss": 0.8516, "step": 65 },
    { "epoch": 2.49, "learning_rate": 5e-06, "loss": 0.8434, "step": 66 },
    { "epoch": 2.53, "learning_rate": 4.87729385738544e-06, "loss": 0.8888, "step": 67 },
    { "epoch": 2.57, "learning_rate": 4.75466162836291e-06, "loss": 0.8871, "step": 68 },
    { "epoch": 2.6, "learning_rate": 4.6321771820016635e-06, "loss": 0.8806, "step": 69 },
    { "epoch": 2.64, "learning_rate": 4.509914298352197e-06, "loss": 0.8538, "step": 70 },
    { "epoch": 2.64, "eval_loss": 0.9172544479370117, "eval_runtime": 35.8611, "eval_samples_per_second": 1.924, "eval_steps_per_second": 1.924, "step": 70 },
    { "epoch": 2.68, "learning_rate": 4.38794662400392e-06, "loss": 0.8353, "step": 71 },
    { "epoch": 2.72, "learning_rate": 4.266347627723192e-06, "loss": 0.8561, "step": 72 },
    { "epoch": 2.75, "learning_rate": 4.145190556198494e-06, "loss": 0.9676, "step": 73 },
    { "epoch": 2.79, "learning_rate": 4.02454838991936e-06, "loss": 0.8629, "step": 74 },
    { "epoch": 2.83, "learning_rate": 3.904493799215652e-06, "loss": 0.7983, "step": 75 },
    { "epoch": 2.87, "learning_rate": 3.7850991004836813e-06, "loss": 0.9275, "step": 76 },
    { "epoch": 2.91, "learning_rate": 3.6664362126255087e-06, "loss": 0.9481, "step": 77 },
    { "epoch": 2.91, "eval_loss": 0.9153242707252502, "eval_runtime": 35.946, "eval_samples_per_second": 1.92, "eval_steps_per_second": 1.92, "step": 77 },
    { "epoch": 2.94, "learning_rate": 3.5485766137276894e-06, "loss": 0.8156, "step": 78 },
    { "epoch": 2.98, "learning_rate": 3.4315912980055433e-06, "loss": 0.7228, "step": 79 },
    { "epoch": 3.02, "learning_rate": 3.3155507330389004e-06, "loss": 0.9645, "step": 80 },
    { "epoch": 3.06, "learning_rate": 3.2005248173250593e-06, "loss": 0.7754, "step": 81 },
    { "epoch": 3.09, "learning_rate": 3.0865828381745515e-06, "loss": 0.8191, "step": 82 },
    { "epoch": 3.13, "learning_rate": 2.9737934299750514e-06, "loss": 0.8938, "step": 83 },
    { "epoch": 3.17, "learning_rate": 2.862224532848591e-06, "loss": 0.8995, "step": 84 },
    { "epoch": 3.17, "eval_loss": 0.9135288596153259, "eval_runtime": 35.9776, "eval_samples_per_second": 1.918, "eval_steps_per_second": 1.918, "step": 84 },
    { "epoch": 3.21, "learning_rate": 2.7519433517269665e-06, "loss": 0.8728, "step": 85 },
    { "epoch": 3.25, "learning_rate": 2.6430163158700116e-06, "loss": 0.9105, "step": 86 },
    { "epoch": 3.28, "learning_rate": 2.5355090388510806e-06, "loss": 0.7815, "step": 87 },
    { "epoch": 3.32, "learning_rate": 2.429486279033892e-06, "loss": 0.8562, "step": 88 },
    { "epoch": 3.36, "learning_rate": 2.325011900564515e-06, "loss": 0.8367, "step": 89 },
    { "epoch": 3.4, "learning_rate": 2.2221488349019903e-06, "loss": 0.8954, "step": 90 },
    { "epoch": 3.43, "learning_rate": 2.1209590429107734e-06, "loss": 0.8555, "step": 91 },
    { "epoch": 3.43, "eval_loss": 0.9140611886978149, "eval_runtime": 35.9673, "eval_samples_per_second": 1.918, "eval_steps_per_second": 1.918, "step": 91 },
    { "epoch": 3.47, "learning_rate": 2.0215034775378336e-06, "loss": 0.8164, "step": 92 },
    { "epoch": 3.51, "learning_rate": 1.9238420470968665e-06, "loss": 0.8467, "step": 93 },
    { "epoch": 3.55, "learning_rate": 1.8280335791817733e-06, "loss": 0.8462, "step": 94 },
    { "epoch": 3.58, "learning_rate": 1.7341357852311175e-06, "loss": 0.8948, "step": 95 },
    { "epoch": 3.62, "learning_rate": 1.642205225764908e-06, "loss": 0.836, "step": 96 },
    { "epoch": 3.66, "learning_rate": 1.5522972763146653e-06, "loss": 0.8551, "step": 97 },
    { "epoch": 3.7, "learning_rate": 1.4644660940672628e-06, "loss": 0.884, "step": 98 },
    { "epoch": 3.7, "eval_loss": 0.9140763282775879, "eval_runtime": 35.972, "eval_samples_per_second": 1.918, "eval_steps_per_second": 1.918, "step": 98 },
    { "epoch": 3.74, "learning_rate": 1.3787645852426663e-06, "loss": 0.8857, "step": 99 },
    { "epoch": 3.77, "learning_rate": 1.2952443732252058e-06, "loss": 0.7897, "step": 100 },
    { "epoch": 3.81, "learning_rate": 1.2139557674675773e-06, "loss": 0.8527, "step": 101 },
    { "epoch": 3.85, "learning_rate": 1.134947733186315e-06, "loss": 0.9348, "step": 102 },
    { "epoch": 3.89, "learning_rate": 1.058267861866969e-06, "loss": 0.9066, "step": 103 },
    { "epoch": 3.92, "learning_rate": 9.83962342596776e-07, "loss": 0.8794, "step": 104 },
    { "epoch": 3.96, "learning_rate": 9.120759342420821e-07, "loss": 0.878, "step": 105 },
    { "epoch": 3.96, "eval_loss": 0.9139989614486694, "eval_runtime": 36.0023, "eval_samples_per_second": 1.917, "eval_steps_per_second": 1.917, "step": 105 },
    { "epoch": 4.0, "learning_rate": 8.426519384872733e-07, "loss": 0.8052, "step": 106 },
    { "epoch": 4.04, "learning_rate": 7.757321737514645e-07, "loss": 0.8845, "step": 107 },
    { "epoch": 4.08, "learning_rate": 7.113569499986401e-07, "loss": 0.8134, "step": 108 },
    { "epoch": 4.11, "learning_rate": 6.495650444564433e-07, "loss": 0.8164, "step": 109 },
    { "epoch": 4.15, "learning_rate": 5.903936782582253e-07, "loss": 0.7922, "step": 110 },
    { "epoch": 4.19, "learning_rate": 5.338784940224239e-07, "loss": 0.7529, "step": 111 },
    { "epoch": 4.23, "learning_rate": 4.800535343827834e-07, "loss": 0.8288, "step": 112 },
    { "epoch": 4.23, "eval_loss": 0.9135251641273499, "eval_runtime": 35.8899, "eval_samples_per_second": 1.923, "eval_steps_per_second": 1.923, "step": 112 },
    { "epoch": 4.26, "learning_rate": 4.289512214823466e-07, "loss": 0.8892, "step": 113 },
    { "epoch": 4.3, "learning_rate": 3.8060233744356634e-07, "loss": 0.8427, "step": 114 },
    { "epoch": 4.34, "learning_rate": 3.350360058263058e-07, "loss": 0.9283, "step": 115 },
    { "epoch": 4.38, "learning_rate": 2.9227967408489653e-07, "loss": 0.8674, "step": 116 },
    { "epoch": 4.42, "learning_rate": 2.523590970348166e-07, "loss": 0.924, "step": 117 },
    { "epoch": 4.45, "learning_rate": 2.152983213389559e-07, "loss": 0.8693, "step": 118 },
    { "epoch": 4.49, "learning_rate": 1.8111967102280082e-07, "loss": 0.8456, "step": 119 },
    { "epoch": 4.49, "eval_loss": 0.9141875505447388, "eval_runtime": 35.8271, "eval_samples_per_second": 1.926, "eval_steps_per_second": 1.926, "step": 119 },
    { "epoch": 4.53, "learning_rate": 1.4984373402728014e-07, "loss": 0.8763, "step": 120 },
    { "epoch": 4.57, "learning_rate": 1.2148934980735772e-07, "loss": 0.8042, "step": 121 },
    { "epoch": 4.6, "learning_rate": 9.607359798384785e-08, "loss": 0.8456, "step": 122 },
    { "epoch": 4.64, "learning_rate": 7.36117880552939e-08, "loss": 0.8505, "step": 123 },
    { "epoch": 4.68, "learning_rate": 5.411745017609493e-08, "loss": 0.8404, "step": 124 },
    { "epoch": 4.72, "learning_rate": 3.7602327006450166e-08, "loss": 0.8483, "step": 125 },
    { "epoch": 4.75, "learning_rate": 2.4076366639015914e-08, "loss": 0.9049, "step": 126 },
    { "epoch": 4.75, "eval_loss": 0.9144126176834106, "eval_runtime": 35.948, "eval_samples_per_second": 1.919, "eval_steps_per_second": 1.919, "step": 126 },
    { "epoch": 4.79, "learning_rate": 1.3547716606548967e-08, "loss": 0.8482, "step": 127 },
    { "epoch": 4.83, "learning_rate": 6.022718974137976e-09, "loss": 0.9232, "step": 128 },
    { "epoch": 4.87, "learning_rate": 1.5059065189787502e-09, "loss": 0.8653, "step": 129 },
    { "epoch": 4.91, "learning_rate": 0.0, "loss": 0.9167, "step": 130 },
    { "epoch": 4.91, "step": 130, "total_flos": 3.500008779892654e+17, "train_loss": 0.8952723305958968, "train_runtime": 7687.2741, "train_samples_per_second": 0.825, "train_steps_per_second": 0.017 }
  ],
  "logging_steps": 1.0,
  "max_steps": 130,
  "num_train_epochs": 5,
  "save_steps": 13,
  "total_flos": 3.500008779892654e+17,
  "trial_name": null,
  "trial_params": null
}