|
{
  "best_metric": 0.01938004605472088,
  "best_model_checkpoint": "runs/deepseek_lora_/home/mac/air2/runs/deepseek_lora_20240422-141601/checkpoint-30000_20240423-210253/checkpoint-2000",
  "epoch": 1.5664160401002505,
  "eval_steps": 500,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 1.4201024770736694,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.4508,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.6313552856445312,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.4463,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.2772644758224487,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.4462,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.3495179414749146,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.463,
      "step": 40
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.3023079633712769,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.4931,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.2231166362762451,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.4886,
      "step": 60
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.5659693479537964,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.4963,
      "step": 70
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.8523041009902954,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.3668,
      "step": 80
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.399263620376587,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.296,
      "step": 90
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.6715140342712402,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.2411,
      "step": 100
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.1111788749694824,
      "learning_rate": 4.4e-06,
      "loss": 1.0886,
      "step": 110
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.9370793104171753,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.0653,
      "step": 120
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.3835300207138062,
      "learning_rate": 5.2e-06,
      "loss": 0.8878,
      "step": 130
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.646286129951477,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.7921,
      "step": 140
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.2522785663604736,
      "learning_rate": 6e-06,
      "loss": 0.7373,
      "step": 150
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.9030014872550964,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.5454,
      "step": 160
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.8196408152580261,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.4356,
      "step": 170
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.4333338141441345,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.3329,
      "step": 180
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.37946629524230957,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.2524,
      "step": 190
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.2624123096466064,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.1601,
      "step": 200
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.7884108424186707,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.1937,
      "step": 210
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.46105730533599854,
      "learning_rate": 8.8e-06,
      "loss": 0.0807,
      "step": 220
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.6495872735977173,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.1566,
      "step": 230
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.9866824150085449,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.2515,
      "step": 240
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.31797927618026733,
      "learning_rate": 1e-05,
      "loss": 0.0861,
      "step": 250
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.07518763095140457,
      "learning_rate": 1.04e-05,
      "loss": 0.0498,
      "step": 260
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.23027415573596954,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.0473,
      "step": 270
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.22770297527313232,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.0469,
      "step": 280
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.22862806916236877,
      "learning_rate": 1.16e-05,
      "loss": 0.0974,
      "step": 290
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.3330214023590088,
      "learning_rate": 1.2e-05,
      "loss": 0.0597,
      "step": 300
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.966628909111023,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.0315,
      "step": 310
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.022704431787133217,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.042,
      "step": 320
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.216924786567688,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.0353,
      "step": 330
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.6081690192222595,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.0247,
      "step": 340
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.012569287791848183,
      "learning_rate": 1.4e-05,
      "loss": 0.0251,
      "step": 350
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.010333358310163021,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.0236,
      "step": 360
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.37758347392082214,
      "learning_rate": 1.48e-05,
      "loss": 0.0268,
      "step": 370
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.015816116705536842,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.0107,
      "step": 380
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.025576846674084663,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.0213,
      "step": 390
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.31375065445899963,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.0177,
      "step": 400
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.008338396437466145,
      "learning_rate": 1.64e-05,
      "loss": 0.023,
      "step": 410
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.24885450303554535,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.013,
      "step": 420
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.0110774552449584,
      "learning_rate": 1.72e-05,
      "loss": 0.0397,
      "step": 430
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.006729575805366039,
      "learning_rate": 1.76e-05,
      "loss": 0.0155,
      "step": 440
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.01409532967954874,
      "learning_rate": 1.8e-05,
      "loss": 0.0198,
      "step": 450
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.3871189057826996,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.0212,
      "step": 460
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.006107095163315535,
      "learning_rate": 1.88e-05,
      "loss": 0.07,
      "step": 470
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.010062485933303833,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.0157,
      "step": 480
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.1705617755651474,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.0241,
      "step": 490
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.5236091613769531,
      "learning_rate": 2e-05,
      "loss": 0.0208,
      "step": 500
    },
    {
      "epoch": 0.16,
      "eval_loss": 0.04813479632139206,
      "eval_runtime": 105.8284,
      "eval_samples_per_second": 9.449,
      "eval_steps_per_second": 9.449,
      "step": 500
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.007713190745562315,
      "learning_rate": 1.9955555555555557e-05,
      "loss": 0.007,
      "step": 510
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.028844956308603287,
      "learning_rate": 1.9911111111111112e-05,
      "loss": 0.018,
      "step": 520
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.004528562072664499,
      "learning_rate": 1.9866666666666667e-05,
      "loss": 0.0519,
      "step": 530
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.5867981910705566,
      "learning_rate": 1.9822222222222226e-05,
      "loss": 0.0193,
      "step": 540
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.08260989189147949,
      "learning_rate": 1.977777777777778e-05,
      "loss": 0.0359,
      "step": 550
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.005006667226552963,
      "learning_rate": 1.9733333333333336e-05,
      "loss": 0.0187,
      "step": 560
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.017591852694749832,
      "learning_rate": 1.968888888888889e-05,
      "loss": 0.0143,
      "step": 570
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.8018626570701599,
      "learning_rate": 1.9644444444444447e-05,
      "loss": 0.0161,
      "step": 580
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.005714430473744869,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 0.0159,
      "step": 590
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.006446035113185644,
      "learning_rate": 1.9555555555555557e-05,
      "loss": 0.0249,
      "step": 600
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.00910472497344017,
      "learning_rate": 1.9511111111111113e-05,
      "loss": 0.009,
      "step": 610
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.0680340826511383,
      "learning_rate": 1.9466666666666668e-05,
      "loss": 0.01,
      "step": 620
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.009966106154024601,
      "learning_rate": 1.9422222222222223e-05,
      "loss": 0.0189,
      "step": 630
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.004922552965581417,
      "learning_rate": 1.9377777777777778e-05,
      "loss": 0.0189,
      "step": 640
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.0045297518372535706,
      "learning_rate": 1.9333333333333333e-05,
      "loss": 0.0636,
      "step": 650
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.17961235344409943,
      "learning_rate": 1.928888888888889e-05,
      "loss": 0.0077,
      "step": 660
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.010727491229772568,
      "learning_rate": 1.9244444444444444e-05,
      "loss": 0.0338,
      "step": 670
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.14529484510421753,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 0.0162,
      "step": 680
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.011789410375058651,
      "learning_rate": 1.9155555555555558e-05,
      "loss": 0.0371,
      "step": 690
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.03757067024707794,
      "learning_rate": 1.9111111111111113e-05,
      "loss": 0.0082,
      "step": 700
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.11487198621034622,
      "learning_rate": 1.9066666666666668e-05,
      "loss": 0.0039,
      "step": 710
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.005442380905151367,
      "learning_rate": 1.9022222222222223e-05,
      "loss": 0.0108,
      "step": 720
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.005606868304312229,
      "learning_rate": 1.897777777777778e-05,
      "loss": 0.0016,
      "step": 730
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.00637522479519248,
      "learning_rate": 1.8933333333333334e-05,
      "loss": 0.0268,
      "step": 740
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.1989319920539856,
      "learning_rate": 1.888888888888889e-05,
      "loss": 0.0227,
      "step": 750
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.13159094750881195,
      "learning_rate": 1.8844444444444444e-05,
      "loss": 0.0313,
      "step": 760
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.0459349155426025,
      "learning_rate": 1.88e-05,
      "loss": 0.0201,
      "step": 770
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1915451288223267,
      "learning_rate": 1.8755555555555558e-05,
      "loss": 0.0245,
      "step": 780
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.013696914538741112,
      "learning_rate": 1.8711111111111113e-05,
      "loss": 0.0018,
      "step": 790
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1634104251861572,
      "learning_rate": 1.866666666666667e-05,
      "loss": 0.026,
      "step": 800
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.007169550750404596,
      "learning_rate": 1.8622222222222224e-05,
      "loss": 0.0071,
      "step": 810
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.004371534567326307,
      "learning_rate": 1.857777777777778e-05,
      "loss": 0.0153,
      "step": 820
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.020073844119906425,
      "learning_rate": 1.8533333333333334e-05,
      "loss": 0.0096,
      "step": 830
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.008414236828684807,
      "learning_rate": 1.848888888888889e-05,
      "loss": 0.007,
      "step": 840
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.003889534855261445,
      "learning_rate": 1.8444444444444448e-05,
      "loss": 0.002,
      "step": 850
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.002301928121596575,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 0.0025,
      "step": 860
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.002069896785542369,
      "learning_rate": 1.835555555555556e-05,
      "loss": 0.0147,
      "step": 870
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.0038117747753858566,
      "learning_rate": 1.8311111111111114e-05,
      "loss": 0.0414,
      "step": 880
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.007180278189480305,
      "learning_rate": 1.826666666666667e-05,
      "loss": 0.0007,
      "step": 890
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.722541332244873,
      "learning_rate": 1.8222222222222224e-05,
      "loss": 0.0173,
      "step": 900
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.021056558936834335,
      "learning_rate": 1.817777777777778e-05,
      "loss": 0.0111,
      "step": 910
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.009726395830512047,
      "learning_rate": 1.8133333333333335e-05,
      "loss": 0.0045,
      "step": 920
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.00808757171034813,
      "learning_rate": 1.808888888888889e-05,
      "loss": 0.0021,
      "step": 930
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.3323584794998169,
      "learning_rate": 1.8044444444444445e-05,
      "loss": 0.0052,
      "step": 940
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.45399293303489685,
      "learning_rate": 1.8e-05,
      "loss": 0.0095,
      "step": 950
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.43379417061805725,
      "learning_rate": 1.7955555555555556e-05,
      "loss": 0.0401,
      "step": 960
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.7372444868087769,
      "learning_rate": 1.791111111111111e-05,
      "loss": 0.0184,
      "step": 970
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.013180572539567947,
      "learning_rate": 1.7866666666666666e-05,
      "loss": 0.0116,
      "step": 980
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.008782428689301014,
      "learning_rate": 1.782222222222222e-05,
      "loss": 0.0223,
      "step": 990
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.011336304247379303,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 0.0026,
      "step": 1000
    },
    {
      "epoch": 0.31,
      "eval_loss": 0.02973618172109127,
      "eval_runtime": 105.7708,
      "eval_samples_per_second": 9.454,
      "eval_steps_per_second": 9.454,
      "step": 1000
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.0016499263001605868,
      "learning_rate": 1.7733333333333335e-05,
      "loss": 0.0055,
      "step": 1010
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.005521496757864952,
      "learning_rate": 1.768888888888889e-05,
      "loss": 0.0103,
      "step": 1020
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.011227923445403576,
      "learning_rate": 1.7644444444444446e-05,
      "loss": 0.0271,
      "step": 1030
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1072327122092247,
      "learning_rate": 1.76e-05,
      "loss": 0.0061,
      "step": 1040
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1942812204360962,
      "learning_rate": 1.7555555555555556e-05,
      "loss": 0.0068,
      "step": 1050
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.0058549391105771065,
      "learning_rate": 1.751111111111111e-05,
      "loss": 0.0056,
      "step": 1060
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.003338587237522006,
      "learning_rate": 1.7466666666666667e-05,
      "loss": 0.004,
      "step": 1070
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.0738811194896698,
      "learning_rate": 1.7422222222222222e-05,
      "loss": 0.0446,
      "step": 1080
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.03248901665210724,
      "learning_rate": 1.737777777777778e-05,
      "loss": 0.0159,
      "step": 1090
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.005992035381495953,
      "learning_rate": 1.7333333333333336e-05,
      "loss": 0.018,
      "step": 1100
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12098544836044312,
      "learning_rate": 1.728888888888889e-05,
      "loss": 0.0293,
      "step": 1110
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.007133137434720993,
      "learning_rate": 1.7244444444444446e-05,
      "loss": 0.008,
      "step": 1120
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.17562967538833618,
      "learning_rate": 1.72e-05,
      "loss": 0.0133,
      "step": 1130
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.6992379426956177,
      "learning_rate": 1.7155555555555557e-05,
      "loss": 0.0144,
      "step": 1140
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.004463555756956339,
      "learning_rate": 1.7111111111111112e-05,
      "loss": 0.0506,
      "step": 1150
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.0051247915253043175,
      "learning_rate": 1.706666666666667e-05,
      "loss": 0.0084,
      "step": 1160
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.007146921940147877,
      "learning_rate": 1.7022222222222226e-05,
      "loss": 0.0048,
      "step": 1170
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.0036419702228158712,
      "learning_rate": 1.697777777777778e-05,
      "loss": 0.0039,
      "step": 1180
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.043528199195861816,
      "learning_rate": 1.6933333333333336e-05,
      "loss": 0.0126,
      "step": 1190
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.004960165359079838,
      "learning_rate": 1.688888888888889e-05,
      "loss": 0.0371,
      "step": 1200
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.4058115780353546,
      "learning_rate": 1.6844444444444447e-05,
      "loss": 0.0022,
      "step": 1210
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.19414691627025604,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 0.001,
      "step": 1220
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.005380582995712757,
      "learning_rate": 1.6755555555555557e-05,
      "loss": 0.014,
      "step": 1230
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.004692175891250372,
      "learning_rate": 1.6711111111111112e-05,
      "loss": 0.0046,
      "step": 1240
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.002111697569489479,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.0015,
      "step": 1250
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.004453285597264767,
      "learning_rate": 1.6622222222222223e-05,
      "loss": 0.018,
      "step": 1260
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.5204005241394043,
      "learning_rate": 1.6577777777777778e-05,
      "loss": 0.0041,
      "step": 1270
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.002364647574722767,
      "learning_rate": 1.6533333333333333e-05,
      "loss": 0.0026,
      "step": 1280
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.012862365692853928,
      "learning_rate": 1.648888888888889e-05,
      "loss": 0.0188,
      "step": 1290
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.007581356912851334,
      "learning_rate": 1.6444444444444444e-05,
      "loss": 0.0073,
      "step": 1300
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.21286563575267792,
      "learning_rate": 1.64e-05,
      "loss": 0.0054,
      "step": 1310
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.003324658377096057,
      "learning_rate": 1.6355555555555557e-05,
      "loss": 0.0116,
      "step": 1320
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.37529128789901733,
      "learning_rate": 1.6311111111111113e-05,
      "loss": 0.0055,
      "step": 1330
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.10134778171777725,
      "learning_rate": 1.6266666666666668e-05,
      "loss": 0.0123,
      "step": 1340
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.002546050352975726,
      "learning_rate": 1.6222222222222223e-05,
      "loss": 0.0004,
      "step": 1350
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.11726546287536621,
      "learning_rate": 1.617777777777778e-05,
      "loss": 0.0725,
      "step": 1360
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.0032152272760868073,
      "learning_rate": 1.6133333333333334e-05,
      "loss": 0.0065,
      "step": 1370
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.14680413901805878,
      "learning_rate": 1.608888888888889e-05,
      "loss": 0.0137,
      "step": 1380
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.004721660166978836,
      "learning_rate": 1.6044444444444444e-05,
      "loss": 0.0065,
      "step": 1390
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.003223953302949667,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.006,
      "step": 1400
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.0024323295801877975,
      "learning_rate": 1.5955555555555558e-05,
      "loss": 0.0005,
      "step": 1410
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.08590155094861984,
      "learning_rate": 1.5911111111111113e-05,
      "loss": 0.04,
      "step": 1420
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.12949270009994507,
      "learning_rate": 1.586666666666667e-05,
      "loss": 0.0012,
      "step": 1430
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.17392052710056305,
      "learning_rate": 1.5822222222222224e-05,
      "loss": 0.0171,
      "step": 1440
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.3131588399410248,
      "learning_rate": 1.577777777777778e-05,
      "loss": 0.015,
      "step": 1450
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.04804832488298416,
      "learning_rate": 1.5733333333333334e-05,
      "loss": 0.0027,
      "step": 1460
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.0027218549512326717,
      "learning_rate": 1.5688888888888893e-05,
      "loss": 0.0023,
      "step": 1470
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.5712968111038208,
      "learning_rate": 1.5644444444444448e-05,
      "loss": 0.0161,
      "step": 1480
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.005681733135133982,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 0.0078,
      "step": 1490
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.29326629638671875,
      "learning_rate": 1.555555555555556e-05,
      "loss": 0.0069,
      "step": 1500
    },
    {
      "epoch": 0.47,
      "eval_loss": 0.02430753782391548,
      "eval_runtime": 105.8642,
      "eval_samples_per_second": 9.446,
      "eval_steps_per_second": 9.446,
      "step": 1500
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.007336648181080818,
      "learning_rate": 1.5511111111111114e-05,
      "loss": 0.0126,
      "step": 1510
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.13389843702316284,
      "learning_rate": 1.546666666666667e-05,
      "loss": 0.0041,
      "step": 1520
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.006098288577049971,
      "learning_rate": 1.5422222222222224e-05,
      "loss": 0.0256,
      "step": 1530
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.0023613509256392717,
      "learning_rate": 1.537777777777778e-05,
      "loss": 0.0226,
      "step": 1540
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.011803489178419113,
      "learning_rate": 1.5333333333333334e-05,
      "loss": 0.0223,
      "step": 1550
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.08194795995950699,
      "learning_rate": 1.528888888888889e-05,
      "loss": 0.0024,
      "step": 1560
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.0029789546970278025,
      "learning_rate": 1.5244444444444447e-05,
      "loss": 0.0026,
      "step": 1570
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.001265739556401968,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 0.0268,
      "step": 1580
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.012406809255480766,
      "learning_rate": 1.5155555555555557e-05,
      "loss": 0.0067,
      "step": 1590
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.6606098413467407,
      "learning_rate": 1.5111111111111112e-05,
      "loss": 0.0189,
      "step": 1600
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.014978533610701561,
      "learning_rate": 1.5066666666666668e-05,
      "loss": 0.0176,
      "step": 1610
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.08984213322401047,
      "learning_rate": 1.5022222222222223e-05,
      "loss": 0.008,
      "step": 1620
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.0033486252650618553,
      "learning_rate": 1.497777777777778e-05,
      "loss": 0.0112,
      "step": 1630
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.002099987817928195,
      "learning_rate": 1.4933333333333335e-05,
      "loss": 0.0258,
      "step": 1640
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.3023240268230438,
      "learning_rate": 1.488888888888889e-05,
      "loss": 0.0056,
      "step": 1650
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.8300777077674866,
      "learning_rate": 1.4844444444444445e-05,
      "loss": 0.0258,
      "step": 1660
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.0023328212555497885,
      "learning_rate": 1.48e-05,
      "loss": 0.0021,
      "step": 1670
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.3725988566875458,
      "learning_rate": 1.4755555555555556e-05,
      "loss": 0.0086,
      "step": 1680
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.14515481889247894,
      "learning_rate": 1.4711111111111111e-05,
      "loss": 0.0054,
      "step": 1690
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.0050906166434288025,
      "learning_rate": 1.4666666666666666e-05,
      "loss": 0.0097,
      "step": 1700
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.6300042271614075,
      "learning_rate": 1.4622222222222225e-05,
      "loss": 0.0121,
      "step": 1710
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.010772451758384705,
      "learning_rate": 1.457777777777778e-05,
      "loss": 0.0162,
      "step": 1720
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.05193562060594559,
      "learning_rate": 1.4533333333333335e-05,
      "loss": 0.014,
      "step": 1730
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.3744001090526581,
      "learning_rate": 1.448888888888889e-05,
      "loss": 0.0055,
      "step": 1740
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.07020632922649384,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 0.0025,
      "step": 1750
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.002155319321900606,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 0.0002,
      "step": 1760
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.0031439345329999924,
      "learning_rate": 1.4355555555555556e-05,
      "loss": 0.0102,
      "step": 1770
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.33559954166412354,
      "learning_rate": 1.4311111111111111e-05,
      "loss": 0.0106,
      "step": 1780
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.09780018031597137,
      "learning_rate": 1.4266666666666668e-05,
      "loss": 0.0006,
      "step": 1790
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.21637512743473053,
      "learning_rate": 1.4222222222222224e-05,
      "loss": 0.0081,
      "step": 1800
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.19896027445793152,
      "learning_rate": 1.4177777777777779e-05,
      "loss": 0.002,
      "step": 1810
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.46836480498313904,
      "learning_rate": 1.4133333333333334e-05,
      "loss": 0.01,
      "step": 1820
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.0012917339336127043,
      "learning_rate": 1.408888888888889e-05,
      "loss": 0.0007,
      "step": 1830
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.0009248697315342724,
      "learning_rate": 1.4044444444444445e-05,
      "loss": 0.0065,
      "step": 1840
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.0015130062820389867,
      "learning_rate": 1.4e-05,
      "loss": 0.0138,
      "step": 1850
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.015873389318585396,
      "learning_rate": 1.3955555555555558e-05,
      "loss": 0.0104,
      "step": 1860
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.0025395406410098076,
      "learning_rate": 1.3911111111111114e-05,
      "loss": 0.0091,
      "step": 1870
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.0019395744893699884,
      "learning_rate": 1.3866666666666669e-05,
      "loss": 0.0154,
      "step": 1880
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.001661272719502449,
      "learning_rate": 1.3822222222222224e-05,
      "loss": 0.0069,
      "step": 1890
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.48684021830558777,
      "learning_rate": 1.377777777777778e-05,
      "loss": 0.0047,
      "step": 1900
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8865891695022583,
      "learning_rate": 1.3733333333333335e-05,
      "loss": 0.0181,
      "step": 1910
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.0010564266704022884,
      "learning_rate": 1.368888888888889e-05,
      "loss": 0.0047,
      "step": 1920
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.10301809757947922,
      "learning_rate": 1.3644444444444445e-05,
      "loss": 0.0056,
      "step": 1930
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.22648899257183075,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 0.0013,
      "step": 1940
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.0013131586601957679,
      "learning_rate": 1.3555555555555557e-05,
      "loss": 0.0048,
      "step": 1950
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.0011880842503160238,
      "learning_rate": 1.3511111111111112e-05,
      "loss": 0.0101,
      "step": 1960
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.0013722589937970042,
      "learning_rate": 1.3466666666666668e-05,
      "loss": 0.0028,
      "step": 1970
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.001899394323118031,
      "learning_rate": 1.3422222222222223e-05,
      "loss": 0.0418,
      "step": 1980
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.0032492605969309807,
      "learning_rate": 1.3377777777777778e-05,
      "loss": 0.0013,
      "step": 1990
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.0010039942571893334,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.0044,
      "step": 2000
    },
    {
      "epoch": 0.63,
      "eval_loss": 0.01938004605472088,
      "eval_runtime": 106.0395,
      "eval_samples_per_second": 9.43,
      "eval_steps_per_second": 9.43,
      "step": 2000
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.001601343508809805,
      "learning_rate": 1.3288888888888889e-05,
      "loss": 0.0111,
      "step": 2010
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.006534218788146973,
      "learning_rate": 1.3244444444444447e-05,
      "loss": 0.0041,
      "step": 2020
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.11454477906227112,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.0148,
      "step": 2030
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.0012094610137864947,
      "learning_rate": 1.3155555555555558e-05,
      "loss": 0.0001,
      "step": 2040
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.017126798629760742,
      "learning_rate": 1.3111111111111113e-05,
      "loss": 0.0035,
      "step": 2050
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.0010203116107732058,
      "learning_rate": 1.3066666666666668e-05,
      "loss": 0.0185,
      "step": 2060
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.07288641482591629,
      "learning_rate": 1.3022222222222223e-05,
      "loss": 0.0091,
      "step": 2070
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.0013487342512235045,
      "learning_rate": 1.2977777777777779e-05,
      "loss": 0.0156,
      "step": 2080
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.28272658586502075,
      "learning_rate": 1.2933333333333334e-05,
      "loss": 0.0146,
      "step": 2090
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.4402059018611908,
      "learning_rate": 1.288888888888889e-05,
      "loss": 0.0157,
      "step": 2100
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.00559567753225565,
      "learning_rate": 1.2844444444444446e-05,
      "loss": 0.0093,
      "step": 2110
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.02444939874112606,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.0065,
      "step": 2120
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.0012545145582407713,
      "learning_rate": 1.2755555555555556e-05,
      "loss": 0.0057,
      "step": 2130
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.001707903342321515,
      "learning_rate": 1.2711111111111112e-05,
      "loss": 0.0162,
      "step": 2140
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.0023830528371036053,
      "learning_rate": 1.2666666666666667e-05,
      "loss": 0.0276,
      "step": 2150
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.0030489948112517595,
      "learning_rate": 1.2622222222222222e-05,
      "loss": 0.014,
      "step": 2160
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.0017784954980015755,
      "learning_rate": 1.257777777777778e-05,
      "loss": 0.0206,
      "step": 2170
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.06302628666162491,
      "learning_rate": 1.2533333333333336e-05,
      "loss": 0.0023,
      "step": 2180
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.0011160870781168342,
      "learning_rate": 1.2488888888888891e-05,
      "loss": 0.0204,
      "step": 2190
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.0013328393688425422,
      "learning_rate": 1.2444444444444446e-05,
      "loss": 0.0071,
      "step": 2200
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.0011977130779996514,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.011,
      "step": 2210
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.28556379675865173,
      "learning_rate": 1.2355555555555557e-05,
      "loss": 0.0032,
      "step": 2220
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.001730318646878004,
      "learning_rate": 1.2311111111111112e-05,
      "loss": 0.0085,
      "step": 2230
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.7156691551208496,
      "learning_rate": 1.2266666666666667e-05,
      "loss": 0.0243,
      "step": 2240
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.09367219358682632,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 0.0108,
      "step": 2250
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.0033970875665545464,
      "learning_rate": 1.217777777777778e-05,
      "loss": 0.0013,
      "step": 2260
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.13453590869903564,
      "learning_rate": 1.2133333333333335e-05,
      "loss": 0.0171,
      "step": 2270
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.0015542698092758656,
      "learning_rate": 1.208888888888889e-05,
      "loss": 0.0082,
      "step": 2280
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.0018359808018431067,
      "learning_rate": 1.2044444444444445e-05,
      "loss": 0.0035,
      "step": 2290
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.002221127972006798,
      "learning_rate": 1.2e-05,
      "loss": 0.015,
      "step": 2300
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.0010089320130646229,
      "learning_rate": 1.1955555555555556e-05,
      "loss": 0.012,
      "step": 2310
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.0028728065080940723,
      "learning_rate": 1.191111111111111e-05,
      "loss": 0.0118,
      "step": 2320
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.001342312665656209,
      "learning_rate": 1.186666666666667e-05,
      "loss": 0.0153,
      "step": 2330
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.2403033971786499,
      "learning_rate": 1.1822222222222225e-05,
      "loss": 0.0077,
      "step": 2340
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.04793378338217735,
      "learning_rate": 1.177777777777778e-05,
      "loss": 0.0129,
      "step": 2350
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.0022574837785214186,
      "learning_rate": 1.1733333333333335e-05,
      "loss": 0.0073,
      "step": 2360
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.26575639843940735,
      "learning_rate": 1.168888888888889e-05,
      "loss": 0.0089,
      "step": 2370
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.0020264522172510624,
      "learning_rate": 1.1644444444444446e-05,
      "loss": 0.0045,
      "step": 2380
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.0012554072309285402,
      "learning_rate": 1.16e-05,
      "loss": 0.0031,
      "step": 2390
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.0011454842751845717,
      "learning_rate": 1.1555555555555556e-05,
      "loss": 0.0023,
      "step": 2400
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.20630207657814026,
      "learning_rate": 1.1511111111111113e-05,
      "loss": 0.0023,
      "step": 2410
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.0007169468444772065,
      "learning_rate": 1.1466666666666668e-05,
      "loss": 0.0121,
      "step": 2420
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.0023526553995907307,
      "learning_rate": 1.1422222222222223e-05,
      "loss": 0.0052,
      "step": 2430
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.39723196625709534,
      "learning_rate": 1.1377777777777779e-05,
      "loss": 0.0211,
      "step": 2440
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.4188237190246582,
      "learning_rate": 1.1333333333333334e-05,
      "loss": 0.0107,
      "step": 2450
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.8699719905853271,
      "learning_rate": 1.1288888888888889e-05,
      "loss": 0.0065,
      "step": 2460
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.030738549306988716,
      "learning_rate": 1.1244444444444444e-05,
      "loss": 0.0136,
      "step": 2470
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.000931159476749599,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.0227,
      "step": 2480
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.0705338716506958,
      "learning_rate": 1.1155555555555556e-05,
      "loss": 0.0027,
      "step": 2490
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.0007929333951324224,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 0.0133,
      "step": 2500
    },
    {
      "epoch": 0.78,
      "eval_loss": 0.020793654024600983,
      "eval_runtime": 105.636,
      "eval_samples_per_second": 9.466,
      "eval_steps_per_second": 9.466,
      "step": 2500
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.0016222142148762941,
      "learning_rate": 1.1066666666666669e-05,
      "loss": 0.0435,
      "step": 2510
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.002197889843955636,
      "learning_rate": 1.1022222222222224e-05,
      "loss": 0.002,
      "step": 2520
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.33299392461776733,
      "learning_rate": 1.0977777777777779e-05,
      "loss": 0.0326,
      "step": 2530
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.0010998391080647707,
      "learning_rate": 1.0933333333333334e-05,
      "loss": 0.0001,
      "step": 2540
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.0011729354737326503,
      "learning_rate": 1.088888888888889e-05,
      "loss": 0.001,
      "step": 2550
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.000858838262502104,
      "learning_rate": 1.0844444444444446e-05,
      "loss": 0.0037,
      "step": 2560
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.38744401931762695,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.0065,
      "step": 2570
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2493005245923996,
      "learning_rate": 1.0755555555555557e-05,
      "loss": 0.0043,
      "step": 2580
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.20194557309150696,
      "learning_rate": 1.0711111111111112e-05,
      "loss": 0.0115,
      "step": 2590
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.0024039980489760637,
      "learning_rate": 1.0666666666666667e-05,
      "loss": 0.0084,
      "step": 2600
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.0008128687040880322,
      "learning_rate": 1.0622222222222223e-05,
      "loss": 0.0061,
      "step": 2610
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.0018899840069934726,
      "learning_rate": 1.0577777777777778e-05,
      "loss": 0.007,
      "step": 2620
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.01610325276851654,
      "learning_rate": 1.0533333333333333e-05,
      "loss": 0.007,
      "step": 2630
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.0006350704934448004,
      "learning_rate": 1.048888888888889e-05,
      "loss": 0.0066,
      "step": 2640
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.09355136752128601,
      "learning_rate": 1.0444444444444445e-05,
      "loss": 0.0255,
      "step": 2650
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.3234637975692749,
      "learning_rate": 1.04e-05,
      "loss": 0.0076,
      "step": 2660
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.05527418851852417,
      "learning_rate": 1.0355555555555557e-05,
      "loss": 0.0012,
      "step": 2670
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.0009735400672070682,
      "learning_rate": 1.0311111111111113e-05,
      "loss": 0.0045,
      "step": 2680
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.0008769531850703061,
      "learning_rate": 1.0266666666666668e-05,
      "loss": 0.0172,
      "step": 2690
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.006668600253760815,
      "learning_rate": 1.0222222222222223e-05,
      "loss": 0.0012,
      "step": 2700
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.07360995560884476,
      "learning_rate": 1.0177777777777778e-05,
      "loss": 0.0145,
      "step": 2710
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.08141136914491653,
      "learning_rate": 1.0133333333333335e-05,
      "loss": 0.0039,
      "step": 2720
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.027105676010251045,
      "learning_rate": 1.008888888888889e-05,
      "loss": 0.0035,
      "step": 2730
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.0010070588905364275,
      "learning_rate": 1.0044444444444446e-05,
      "loss": 0.0011,
      "step": 2740
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.007085401564836502,
      "learning_rate": 1e-05,
      "loss": 0.0096,
      "step": 2750
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.0018968915101140738,
      "learning_rate": 9.955555555555556e-06,
      "loss": 0.0057,
      "step": 2760
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.0006910761003382504,
      "learning_rate": 9.911111111111113e-06,
      "loss": 0.007,
      "step": 2770
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.0008984303567558527,
      "learning_rate": 9.866666666666668e-06,
      "loss": 0.02,
      "step": 2780
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.0017594619421288371,
      "learning_rate": 9.822222222222223e-06,
      "loss": 0.0037,
      "step": 2790
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.0012592363636940718,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.0058,
      "step": 2800
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.0005507588502950966,
      "learning_rate": 9.733333333333334e-06,
      "loss": 0.0152,
      "step": 2810
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.09182553738355637,
      "learning_rate": 9.688888888888889e-06,
      "loss": 0.02,
      "step": 2820
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.08707510679960251,
      "learning_rate": 9.644444444444444e-06,
      "loss": 0.0076,
      "step": 2830
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.20231454074382782,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.0104,
      "step": 2840
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.011587344110012054,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.0034,
      "step": 2850
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.001095921266824007,
      "learning_rate": 9.511111111111112e-06,
      "loss": 0.0096,
      "step": 2860
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.0018781009130179882,
      "learning_rate": 9.466666666666667e-06,
      "loss": 0.0121,
      "step": 2870
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.0017853602766990662,
      "learning_rate": 9.422222222222222e-06,
      "loss": 0.0033,
      "step": 2880
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.0022252665366977453,
      "learning_rate": 9.377777777777779e-06,
      "loss": 0.0172,
      "step": 2890
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.02110256254673004,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.0084,
      "step": 2900
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.0018381946720182896,
      "learning_rate": 9.28888888888889e-06,
      "loss": 0.0155,
      "step": 2910
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.0018717021448537707,
      "learning_rate": 9.244444444444445e-06,
      "loss": 0.0037,
      "step": 2920
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.003086686599999666,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.0039,
      "step": 2930
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.0013741503935307264,
      "learning_rate": 9.155555555555557e-06,
      "loss": 0.005,
      "step": 2940
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.0013554570032283664,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.0013,
      "step": 2950
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.0009741095709614456,
      "learning_rate": 9.066666666666667e-06,
      "loss": 0.0053,
      "step": 2960
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.3708273768424988,
      "learning_rate": 9.022222222222223e-06,
      "loss": 0.0085,
      "step": 2970
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.0005603479221463203,
      "learning_rate": 8.977777777777778e-06,
      "loss": 0.0008,
      "step": 2980
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.0005208374932408333,
      "learning_rate": 8.933333333333333e-06,
      "loss": 0.0037,
      "step": 2990
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.0006605735397897661,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.0079,
      "step": 3000
    },
    {
      "epoch": 0.94,
      "eval_loss": 0.024190960451960564,
      "eval_runtime": 106.0996,
      "eval_samples_per_second": 9.425,
      "eval_steps_per_second": 9.425,
      "step": 3000
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.6728457808494568,
      "learning_rate": 8.844444444444445e-06,
      "loss": 0.0112,
      "step": 3010
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.0006901975139044225,
      "learning_rate": 8.8e-06,
      "loss": 0.0048,
      "step": 3020
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.0037365430034697056,
      "learning_rate": 8.755555555555556e-06,
      "loss": 0.0015,
      "step": 3030
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.16512754559516907,
      "learning_rate": 8.711111111111111e-06,
      "loss": 0.0159,
      "step": 3040
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.0016021672636270523,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.006,
      "step": 3050
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.002898729406297207,
      "learning_rate": 8.622222222222223e-06,
      "loss": 0.0099,
      "step": 3060
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2749406397342682,
      "learning_rate": 8.577777777777778e-06,
      "loss": 0.0058,
      "step": 3070
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.0005188034847378731,
      "learning_rate": 8.533333333333335e-06,
      "loss": 0.0052,
      "step": 3080
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.2788861095905304,
      "learning_rate": 8.48888888888889e-06,
      "loss": 0.0022,
      "step": 3090
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.00042978368583135307,
      "learning_rate": 8.444444444444446e-06,
      "loss": 0.0157,
      "step": 3100
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.0011739007895812392,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.026,
      "step": 3110
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.008115693926811218,
      "learning_rate": 8.355555555555556e-06,
      "loss": 0.0035,
      "step": 3120
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.001202278072014451,
      "learning_rate": 8.311111111111111e-06,
      "loss": 0.0202,
      "step": 3130
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.000537244020961225,
      "learning_rate": 8.266666666666667e-06,
      "loss": 0.0122,
      "step": 3140
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.21055370569229126,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.0017,
      "step": 3150
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.0015918203862383962,
      "learning_rate": 8.177777777777779e-06,
      "loss": 0.0069,
      "step": 3160
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.0006304767448455095,
      "learning_rate": 8.133333333333334e-06,
      "loss": 0.0018,
      "step": 3170
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.00048176213749684393,
      "learning_rate": 8.08888888888889e-06,
      "loss": 0.0085,
      "step": 3180
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.0005605188198387623,
      "learning_rate": 8.044444444444444e-06,
      "loss": 0.0041,
      "step": 3190
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.407652884721756,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.005,
      "step": 3200
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.043511051684617996,
      "learning_rate": 7.955555555555557e-06,
      "loss": 0.0016,
      "step": 3210
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.07123250514268875,
      "learning_rate": 7.911111111111112e-06,
      "loss": 0.0187,
      "step": 3220
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.0005245661013759673,
      "learning_rate": 7.866666666666667e-06,
      "loss": 0.0039,
      "step": 3230
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.07147853076457977,
      "learning_rate": 7.822222222222224e-06,
      "loss": 0.0014,
      "step": 3240
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.0022907687816768885,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.0029,
      "step": 3250
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.0005335509194992483,
      "learning_rate": 7.733333333333334e-06,
      "loss": 0.0022,
      "step": 3260
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.5879516005516052,
      "learning_rate": 7.68888888888889e-06,
      "loss": 0.0114,
      "step": 3270
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.0005007328581996262,
      "learning_rate": 7.644444444444445e-06,
      "loss": 0.0008,
      "step": 3280
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.000808278564363718,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.0003,
      "step": 3290
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.0009639998315833509,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.0043,
      "step": 3300
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.21140816807746887,
      "learning_rate": 7.511111111111111e-06,
      "loss": 0.0063,
      "step": 3310
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.07628853619098663,
      "learning_rate": 7.4666666666666675e-06,
      "loss": 0.0055,
      "step": 3320
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.3622307777404785,
      "learning_rate": 7.422222222222223e-06,
      "loss": 0.0544,
      "step": 3330
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.11922428011894226,
      "learning_rate": 7.377777777777778e-06,
      "loss": 0.0042,
      "step": 3340
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.005126808770000935,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.0034,
      "step": 3350
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.0009374683722853661,
      "learning_rate": 7.28888888888889e-06,
      "loss": 0.0055,
      "step": 3360
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.0010315098334103823,
      "learning_rate": 7.244444444444445e-06,
      "loss": 0.0068,
      "step": 3370
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.0023387514520436525,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.0165,
      "step": 3380
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.5151880979537964,
      "learning_rate": 7.155555555555556e-06,
      "loss": 0.0019,
      "step": 3390
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.0006280681700445712,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.0016,
      "step": 3400
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.05279850959777832,
      "learning_rate": 7.066666666666667e-06,
      "loss": 0.0027,
      "step": 3410
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.0003787504101637751,
      "learning_rate": 7.022222222222222e-06,
      "loss": 0.0026,
      "step": 3420
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.0009172603604383767,
      "learning_rate": 6.977777777777779e-06,
      "loss": 0.0055,
      "step": 3430
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.0016954115126281977,
      "learning_rate": 6.9333333333333344e-06,
      "loss": 0.0077,
      "step": 3440
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.0003665974363684654,
      "learning_rate": 6.88888888888889e-06,
      "loss": 0.0014,
      "step": 3450
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.0005018340889364481,
      "learning_rate": 6.844444444444445e-06,
      "loss": 0.0079,
      "step": 3460
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.00042706538806669414,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.0018,
      "step": 3470
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.10292913764715195,
      "learning_rate": 6.755555555555556e-06,
      "loss": 0.0066,
      "step": 3480
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.4878827631473541,
      "learning_rate": 6.711111111111111e-06,
      "loss": 0.0117,
      "step": 3490
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.0002568361524026841,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.006,
      "step": 3500
    },
    {
      "epoch": 1.1,
      "eval_loss": 0.02246909774839878,
      "eval_runtime": 105.9805,
      "eval_samples_per_second": 9.436,
      "eval_steps_per_second": 9.436,
      "step": 3500
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.0013885730877518654,
      "learning_rate": 6.6222222222222236e-06,
      "loss": 0.0089,
      "step": 3510
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.00039995237602852285,
      "learning_rate": 6.577777777777779e-06,
      "loss": 0.002,
      "step": 3520
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.1092824637889862,
      "learning_rate": 6.533333333333334e-06,
      "loss": 0.0199,
      "step": 3530
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.0009667383274063468,
      "learning_rate": 6.488888888888889e-06,
      "loss": 0.003,
      "step": 3540
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.0005986980977468193,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.0011,
      "step": 3550
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.0008668617811053991,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0072,
      "step": 3560
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.00075616902904585,
      "learning_rate": 6.355555555555556e-06,
      "loss": 0.0044,
      "step": 3570
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.00048376142513006926,
      "learning_rate": 6.311111111111111e-06,
      "loss": 0.0023,
      "step": 3580
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.0010572660248726606,
      "learning_rate": 6.266666666666668e-06,
      "loss": 0.0009,
      "step": 3590
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.0005315226735547185,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.0006,
      "step": 3600
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.0009846779284998775,
      "learning_rate": 6.177777777777778e-06,
      "loss": 0.0022,
      "step": 3610
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.0004815957508981228,
      "learning_rate": 6.133333333333334e-06,
      "loss": 0.0027,
      "step": 3620
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.0925995334982872,
      "learning_rate": 6.08888888888889e-06,
      "loss": 0.0013,
      "step": 3630
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.0006595251616090536,
      "learning_rate": 6.044444444444445e-06,
      "loss": 0.0039,
      "step": 3640
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.0003916506830137223,
      "learning_rate": 6e-06,
      "loss": 0.0024,
      "step": 3650
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.2614765465259552,
      "learning_rate": 5.955555555555555e-06,
      "loss": 0.0069,
      "step": 3660
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.07495523989200592,
      "learning_rate": 5.911111111111112e-06,
      "loss": 0.0075,
      "step": 3670
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.00033707538386806846,
      "learning_rate": 5.8666666666666675e-06,
      "loss": 0.0036,
      "step": 3680
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.47413933277130127,
      "learning_rate": 5.822222222222223e-06,
      "loss": 0.0205,
      "step": 3690
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.3025084435939789,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.0041,
      "step": 3700
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.0008836881606839597,
      "learning_rate": 5.733333333333334e-06,
      "loss": 0.0073,
      "step": 3710
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.0095978993922472,
      "learning_rate": 5.688888888888889e-06,
      "loss": 0.0195,
      "step": 3720
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.0003117706801276654,
      "learning_rate": 5.6444444444444445e-06,
      "loss": 0.0029,
      "step": 3730
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.00034295892692171037,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.0039,
      "step": 3740
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.013034949079155922,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.0031,
      "step": 3750
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.0005166982300579548,
      "learning_rate": 5.511111111111112e-06,
      "loss": 0.0024,
      "step": 3760
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.0005639269948005676,
      "learning_rate": 5.466666666666667e-06,
      "loss": 0.0162,
      "step": 3770
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.0016196731012314558,
      "learning_rate": 5.422222222222223e-06,
      "loss": 0.0016,
      "step": 3780
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.04477536678314209,
      "learning_rate": 5.3777777777777784e-06,
      "loss": 0.0128,
      "step": 3790
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.00038292576209641993,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.0027,
      "step": 3800
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.000574207108002156,
      "learning_rate": 5.288888888888889e-06,
      "loss": 0.0011,
      "step": 3810
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.00024517590645700693,
      "learning_rate": 5.244444444444445e-06,
      "loss": 0.0054,
      "step": 3820
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.022648360580205917,
      "learning_rate": 5.2e-06,
      "loss": 0.0016,
      "step": 3830
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.0010837001027539372,
      "learning_rate": 5.155555555555556e-06,
      "loss": 0.0044,
      "step": 3840
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.00025768097839318216,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.0014,
      "step": 3850
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.34753185510635376,
      "learning_rate": 5.0666666666666676e-06,
      "loss": 0.0053,
      "step": 3860
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.000370951893273741,
      "learning_rate": 5.022222222222223e-06,
      "loss": 0.0054,
      "step": 3870
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.00029738256125710905,
      "learning_rate": 4.977777777777778e-06,
      "loss": 0.0017,
      "step": 3880
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.15227927267551422,
      "learning_rate": 4.933333333333334e-06,
      "loss": 0.0019,
      "step": 3890
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.1128775030374527,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.0052,
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.00027570247766561806, |
|
"learning_rate": 4.8444444444444446e-06, |
|
"loss": 0.0005, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.005237150471657515, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.0129, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.6701110005378723, |
|
"learning_rate": 4.755555555555556e-06, |
|
"loss": 0.0044, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.00061286665732041, |
|
"learning_rate": 4.711111111111111e-06, |
|
"loss": 0.0001, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.0007666970486752689, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.0049, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.0005051186308264732, |
|
"learning_rate": 4.622222222222222e-06, |
|
"loss": 0.0044, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.0005429310840554535, |
|
"learning_rate": 4.5777777777777785e-06, |
|
"loss": 0.0128, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.00036512824590317905, |
|
"learning_rate": 4.533333333333334e-06, |
|
"loss": 0.0057, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.025857826694846153, |
|
"learning_rate": 4.488888888888889e-06, |
|
"loss": 0.0003, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.01597726158797741, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.0028, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 0.02211879752576351, |
|
"eval_runtime": 105.9232, |
|
"eval_samples_per_second": 9.441, |
|
"eval_steps_per_second": 9.441, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.12713465094566345, |
|
"learning_rate": 4.4e-06, |
|
"loss": 0.0046, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.0022403071634471416, |
|
"learning_rate": 4.3555555555555555e-06, |
|
"loss": 0.0039, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.3125491738319397, |
|
"learning_rate": 4.3111111111111115e-06, |
|
"loss": 0.0147, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.0007740085129626095, |
|
"learning_rate": 4.266666666666668e-06, |
|
"loss": 0.0031, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.07295132428407669, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.0172, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.0005750702694058418, |
|
"learning_rate": 4.177777777777778e-06, |
|
"loss": 0.0021, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.008953329175710678, |
|
"learning_rate": 4.133333333333333e-06, |
|
"loss": 0.0019, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.00033226358937099576, |
|
"learning_rate": 4.088888888888889e-06, |
|
"loss": 0.0046, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.0006687557324767113, |
|
"learning_rate": 4.044444444444445e-06, |
|
"loss": 0.004, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.009847434237599373, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.0045, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.00036680695484392345, |
|
"learning_rate": 3.955555555555556e-06, |
|
"loss": 0.0036, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.2396153211593628, |
|
"learning_rate": 3.911111111111112e-06, |
|
"loss": 0.0092, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.40363165736198425, |
|
"learning_rate": 3.866666666666667e-06, |
|
"loss": 0.0051, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.058287497609853745, |
|
"learning_rate": 3.8222222222222224e-06, |
|
"loss": 0.0034, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.0005693346611224115, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 0.0089, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.1548708975315094, |
|
"learning_rate": 3.7333333333333337e-06, |
|
"loss": 0.0076, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.1720203459262848, |
|
"learning_rate": 3.688888888888889e-06, |
|
"loss": 0.0072, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.00042649006354622543, |
|
"learning_rate": 3.644444444444445e-06, |
|
"loss": 0.0049, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.015147966332733631, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 0.0018, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.000541534333024174, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 0.0007, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.0003232716699130833, |
|
"learning_rate": 3.511111111111111e-06, |
|
"loss": 0.0003, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.00029635836835950613, |
|
"learning_rate": 3.4666666666666672e-06, |
|
"loss": 0.0059, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.000614231510553509, |
|
"learning_rate": 3.4222222222222224e-06, |
|
"loss": 0.0089, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.00032767339143902063, |
|
"learning_rate": 3.377777777777778e-06, |
|
"loss": 0.0039, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.00045120881986804307, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.003, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.0003937460423912853, |
|
"learning_rate": 3.2888888888888894e-06, |
|
"loss": 0.0034, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.0006157084717415273, |
|
"learning_rate": 3.2444444444444446e-06, |
|
"loss": 0.0025, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.0005464738933369517, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 0.0539, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.00032850648858584464, |
|
"learning_rate": 3.1555555555555555e-06, |
|
"loss": 0.0031, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.0006490522064268589, |
|
"learning_rate": 3.1111111111111116e-06, |
|
"loss": 0.0019, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.0008282049675472081, |
|
"learning_rate": 3.066666666666667e-06, |
|
"loss": 0.0019, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.0007463882211595774, |
|
"learning_rate": 3.0222222222222225e-06, |
|
"loss": 0.0173, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.10642620921134949, |
|
"learning_rate": 2.9777777777777777e-06, |
|
"loss": 0.0155, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.00021003976871725172, |
|
"learning_rate": 2.9333333333333338e-06, |
|
"loss": 0.0083, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.003369180951267481, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 0.0047, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.31446290016174316, |
|
"learning_rate": 2.8444444444444446e-06, |
|
"loss": 0.0158, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.4788835644721985, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 0.0058, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.0008424910483881831, |
|
"learning_rate": 2.755555555555556e-06, |
|
"loss": 0.0035, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.0002819730725605041, |
|
"learning_rate": 2.7111111111111116e-06, |
|
"loss": 0.0061, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.14431558549404144, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 0.0021, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.00966221559792757, |
|
"learning_rate": 2.6222222222222225e-06, |
|
"loss": 0.0102, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.394972026348114, |
|
"learning_rate": 2.577777777777778e-06, |
|
"loss": 0.0035, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.0002667000226210803, |
|
"learning_rate": 2.5333333333333338e-06, |
|
"loss": 0.004, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.6008601784706116, |
|
"learning_rate": 2.488888888888889e-06, |
|
"loss": 0.0102, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.3898264765739441, |
|
"learning_rate": 2.4444444444444447e-06, |
|
"loss": 0.0069, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.0005270984838716686, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 0.0046, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.5697627067565918, |
|
"learning_rate": 2.3555555555555555e-06, |
|
"loss": 0.0109, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.18540015816688538, |
|
"learning_rate": 2.311111111111111e-06, |
|
"loss": 0.0088, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.031030097976326942, |
|
"learning_rate": 2.266666666666667e-06, |
|
"loss": 0.0035, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.0005811112932860851, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 0.0019, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"eval_loss": 0.017604324966669083, |
|
"eval_runtime": 105.8228, |
|
"eval_samples_per_second": 9.45, |
|
"eval_steps_per_second": 9.45, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.00042306468822062016, |
|
"learning_rate": 2.1777777777777777e-06, |
|
"loss": 0.0037, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.27694016695022583, |
|
"learning_rate": 2.133333333333334e-06, |
|
"loss": 0.0028, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.0005545311141759157, |
|
"learning_rate": 2.088888888888889e-06, |
|
"loss": 0.0078, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.000449089624453336, |
|
"learning_rate": 2.0444444444444447e-06, |
|
"loss": 0.0009, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.00020077303634025156, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.0026, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.00046969045069999993, |
|
"learning_rate": 1.955555555555556e-06, |
|
"loss": 0.0025, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.00019804837938863784, |
|
"learning_rate": 1.9111111111111112e-06, |
|
"loss": 0.001, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.0003414790553506464, |
|
"learning_rate": 1.8666666666666669e-06, |
|
"loss": 0.0012, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.0005488083697855473, |
|
"learning_rate": 1.8222222222222225e-06, |
|
"loss": 0.0139, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.0003426743787713349, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 0.0132, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.0003723677364178002, |
|
"learning_rate": 1.7333333333333336e-06, |
|
"loss": 0.0047, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.00019281951244920492, |
|
"learning_rate": 1.688888888888889e-06, |
|
"loss": 0.0037, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.00028308393666520715, |
|
"learning_rate": 1.6444444444444447e-06, |
|
"loss": 0.0175, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.0002835668856278062, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 0.0012, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.10114676505327225, |
|
"learning_rate": 1.5555555555555558e-06, |
|
"loss": 0.0067, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.10014703124761581, |
|
"learning_rate": 1.5111111111111112e-06, |
|
"loss": 0.0141, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.0002140298020094633, |
|
"learning_rate": 1.4666666666666669e-06, |
|
"loss": 0.004, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.00019562234228942543, |
|
"learning_rate": 1.4222222222222223e-06, |
|
"loss": 0.0019, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.0138946771621704, |
|
"learning_rate": 1.377777777777778e-06, |
|
"loss": 0.0105, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.06462077796459198, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 0.0009, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.000309393391944468, |
|
"learning_rate": 1.288888888888889e-06, |
|
"loss": 0.0025, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.27942171692848206, |
|
"learning_rate": 1.2444444444444445e-06, |
|
"loss": 0.0037, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.21164338290691376, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 0.0162, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.3361373841762543, |
|
"learning_rate": 1.1555555555555556e-06, |
|
"loss": 0.0077, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.0007883062935434282, |
|
"learning_rate": 1.111111111111111e-06, |
|
"loss": 0.0136, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.0004852807614952326, |
|
"learning_rate": 1.066666666666667e-06, |
|
"loss": 0.0004, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.37207266688346863, |
|
"learning_rate": 1.0222222222222223e-06, |
|
"loss": 0.0204, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.0012206656392663717, |
|
"learning_rate": 9.77777777777778e-07, |
|
"loss": 0.0265, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.000247218762524426, |
|
"learning_rate": 9.333333333333334e-07, |
|
"loss": 0.0021, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.19631250202655792, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 0.014, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.00041647368925623596, |
|
"learning_rate": 8.444444444444445e-07, |
|
"loss": 0.0027, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.0005213326658122241, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 0.0127, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.10857554525136948, |
|
"learning_rate": 7.555555555555556e-07, |
|
"loss": 0.0109, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.14191143214702606, |
|
"learning_rate": 7.111111111111112e-07, |
|
"loss": 0.0039, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.036222055554389954, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 0.0143, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.0002471502812113613, |
|
"learning_rate": 6.222222222222223e-07, |
|
"loss": 0.0029, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.0002270705153932795, |
|
"learning_rate": 5.777777777777778e-07, |
|
"loss": 0.0167, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.00021865036978852004, |
|
"learning_rate": 5.333333333333335e-07, |
|
"loss": 0.0023, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.002427297178655863, |
|
"learning_rate": 4.88888888888889e-07, |
|
"loss": 0.0012, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.00024957634741440415, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 0.0099, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.5699187517166138, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 0.0316, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.000845414528157562, |
|
"learning_rate": 3.555555555555556e-07, |
|
"loss": 0.0032, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.00035873273736797273, |
|
"learning_rate": 3.111111111111111e-07, |
|
"loss": 0.0053, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.00040514240390621126, |
|
"learning_rate": 2.666666666666667e-07, |
|
"loss": 0.0015, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.0003163764195051044, |
|
"learning_rate": 2.2222222222222224e-07, |
|
"loss": 0.0031, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.0004442491626832634, |
|
"learning_rate": 1.777777777777778e-07, |
|
"loss": 0.0119, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.0004997169016860425, |
|
"learning_rate": 1.3333333333333336e-07, |
|
"loss": 0.0173, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.00029198831180110574, |
|
"learning_rate": 8.88888888888889e-08, |
|
"loss": 0.0056, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.0015217772452160716, |
|
"learning_rate": 4.444444444444445e-08, |
|
"loss": 0.0066, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.0002698723692446947, |
|
"learning_rate": 0.0, |
|
"loss": 0.0115, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"eval_loss": 0.022028431296348572, |
|
"eval_runtime": 105.7251, |
|
"eval_samples_per_second": 9.458, |
|
"eval_steps_per_second": 9.458, |
|
"step": 5000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 1000, |
|
"total_flos": 8.05066044384215e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|