{
  "best_metric": 0.7773267737033928,
  "best_model_checkpoint": "models/combined_dist_10.0-esm2_t6_8M_UR50D-finetuned-receptor_pred_cluspro_propedia_pepnn/checkpoint-282331",
  "epoch": 9.999893741742849,
  "eval_steps": 500,
  "global_step": 352910,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "grad_norm": 2.614975690841675, "learning_rate": 1.997166416366779e-05, "loss": 7.4545, "step": 500},
    {"epoch": 0.03, "grad_norm": 3.9494409561157227, "learning_rate": 1.9943328327335584e-05, "loss": 6.7082, "step": 1000},
    {"epoch": 0.04, "grad_norm": 3.9211537837982178, "learning_rate": 1.9914992491003373e-05, "loss": 6.2699, "step": 1500},
    {"epoch": 0.06, "grad_norm": 4.27163553237915, "learning_rate": 1.9886656654671162e-05, "loss": 6.0185, "step": 2000},
    {"epoch": 0.07, "grad_norm": 4.343883991241455, "learning_rate": 1.9858320818338955e-05, "loss": 5.8031, "step": 2500},
    {"epoch": 0.09, "grad_norm": 4.585379123687744, "learning_rate": 1.9829984982006744e-05, "loss": 5.6227, "step": 3000},
    {"epoch": 0.1, "grad_norm": 4.197877883911133, "learning_rate": 1.9801649145674537e-05, "loss": 5.4341, "step": 3500},
    {"epoch": 0.11, "grad_norm": 5.071269512176514, "learning_rate": 1.977331330934233e-05, "loss": 5.282, "step": 4000},
    {"epoch": 0.13, "grad_norm": 4.63652229309082, "learning_rate": 1.974497747301012e-05, "loss": 5.0468, "step": 4500},
    {"epoch": 0.14, "grad_norm": 4.155987739562988, "learning_rate": 1.9716641636677908e-05, "loss": 4.8222, "step": 5000},
    {"epoch": 0.16, "grad_norm": 4.847218036651611, "learning_rate": 1.96883058003457e-05, "loss": 4.7802, "step": 5500},
    {"epoch": 0.17, "grad_norm": 4.746351718902588, "learning_rate": 1.965996996401349e-05, "loss": 4.5951, "step": 6000},
    {"epoch": 0.18, "grad_norm": 4.8343939781188965, "learning_rate": 1.9631634127681282e-05, "loss": 4.4475, "step": 6500},
    {"epoch": 0.2, "grad_norm": 5.076068878173828, "learning_rate": 1.960329829134907e-05, "loss": 4.266, "step": 7000},
    {"epoch": 0.21, "grad_norm": 4.7831315994262695, "learning_rate": 1.957496245501686e-05, "loss": 4.1708, "step": 7500},
    {"epoch": 0.23, "grad_norm": 4.758873462677002, "learning_rate": 1.9546626618684653e-05, "loss": 4.0085, "step": 8000},
    {"epoch": 0.24, "grad_norm": 4.676722049713135, "learning_rate": 1.9518290782352443e-05, "loss": 3.7978, "step": 8500},
    {"epoch": 0.26, "grad_norm": 6.958544731140137, "learning_rate": 1.9489954946020232e-05, "loss": 3.7165, "step": 9000},
    {"epoch": 0.27, "grad_norm": 6.333941459655762, "learning_rate": 1.9461619109688024e-05, "loss": 3.629, "step": 9500},
    {"epoch": 0.28, "grad_norm": 6.761513710021973, "learning_rate": 1.9433283273355814e-05, "loss": 3.4656, "step": 10000},
    {"epoch": 0.3, "grad_norm": 5.946700096130371, "learning_rate": 1.9404947437023603e-05, "loss": 3.3146, "step": 10500},
    {"epoch": 0.31, "grad_norm": 6.658429145812988, "learning_rate": 1.9376611600691396e-05, "loss": 3.2532, "step": 11000},
    {"epoch": 0.33, "grad_norm": 5.190903663635254, "learning_rate": 1.9348275764359185e-05, "loss": 3.1859, "step": 11500},
    {"epoch": 0.34, "grad_norm": 4.14591646194458, "learning_rate": 1.9319939928026977e-05, "loss": 2.9643, "step": 12000},
    {"epoch": 0.35, "grad_norm": 4.31158447265625, "learning_rate": 1.929160409169477e-05, "loss": 2.9481, "step": 12500},
    {"epoch": 0.37, "grad_norm": 4.342416763305664, "learning_rate": 1.926326825536256e-05, "loss": 2.8739, "step": 13000},
    {"epoch": 0.38, "grad_norm": 4.94139289855957, "learning_rate": 1.923493241903035e-05, "loss": 2.7749, "step": 13500},
    {"epoch": 0.4, "grad_norm": 3.6980950832366943, "learning_rate": 1.920659658269814e-05, "loss": 2.5832, "step": 14000},
    {"epoch": 0.41, "grad_norm": 4.225959777832031, "learning_rate": 1.917826074636593e-05, "loss": 2.6088, "step": 14500},
    {"epoch": 0.43, "grad_norm": 4.793159484863281, "learning_rate": 1.9149924910033723e-05, "loss": 2.5069, "step": 15000},
    {"epoch": 0.44, "grad_norm": 4.690148830413818, "learning_rate": 1.9121589073701512e-05, "loss": 2.4667, "step": 15500},
    {"epoch": 0.45, "grad_norm": 4.850300312042236, "learning_rate": 1.90932532373693e-05, "loss": 2.3974, "step": 16000},
    {"epoch": 0.47, "grad_norm": 3.971298933029175, "learning_rate": 1.9064917401037094e-05, "loss": 2.3625, "step": 16500},
    {"epoch": 0.48, "grad_norm": 2.971684217453003, "learning_rate": 1.9036581564704883e-05, "loss": 2.3342, "step": 17000},
    {"epoch": 0.5, "grad_norm": 4.61134147644043, "learning_rate": 1.9008245728372673e-05, "loss": 2.2263, "step": 17500},
    {"epoch": 0.51, "grad_norm": 4.9258928298950195, "learning_rate": 1.8979909892040465e-05, "loss": 2.2668, "step": 18000},
    {"epoch": 0.52, "grad_norm": 3.6153666973114014, "learning_rate": 1.8951574055708255e-05, "loss": 2.1847, "step": 18500},
    {"epoch": 0.54, "grad_norm": 3.767505645751953, "learning_rate": 1.8923238219376044e-05, "loss": 2.1653, "step": 19000},
    {"epoch": 0.55, "grad_norm": 4.658140659332275, "learning_rate": 1.8894902383043836e-05, "loss": 2.1216, "step": 19500},
    {"epoch": 0.57, "grad_norm": 5.809523582458496, "learning_rate": 1.886656654671163e-05, "loss": 2.1138, "step": 20000},
    {"epoch": 0.58, "grad_norm": 4.878647327423096, "learning_rate": 1.883823071037942e-05, "loss": 2.0043, "step": 20500},
    {"epoch": 0.6, "grad_norm": 4.507823467254639, "learning_rate": 1.880989487404721e-05, "loss": 1.9965, "step": 21000},
    {"epoch": 0.61, "grad_norm": 4.389089107513428, "learning_rate": 1.8781559037715e-05, "loss": 1.9863, "step": 21500},
    {"epoch": 0.62, "grad_norm": 5.876653671264648, "learning_rate": 1.875322320138279e-05, "loss": 1.9836, "step": 22000},
    {"epoch": 0.64, "grad_norm": 5.195200443267822, "learning_rate": 1.8724887365050582e-05, "loss": 1.9457, "step": 22500},
    {"epoch": 0.65, "grad_norm": 5.636641502380371, "learning_rate": 1.869655152871837e-05, "loss": 1.9271, "step": 23000},
    {"epoch": 0.67, "grad_norm": 3.557649850845337, "learning_rate": 1.8668215692386164e-05, "loss": 1.8922, "step": 23500},
    {"epoch": 0.68, "grad_norm": 5.440142631530762, "learning_rate": 1.8639879856053953e-05, "loss": 1.8782, "step": 24000},
    {"epoch": 0.69, "grad_norm": 3.779470920562744, "learning_rate": 1.8611544019721743e-05, "loss": 1.8265, "step": 24500},
    {"epoch": 0.71, "grad_norm": 3.1893739700317383, "learning_rate": 1.8583208183389535e-05, "loss": 1.8315, "step": 25000},
    {"epoch": 0.72, "grad_norm": 2.712510585784912, "learning_rate": 1.8554872347057324e-05, "loss": 1.8203, "step": 25500},
    {"epoch": 0.74, "grad_norm": 5.149935722351074, "learning_rate": 1.8526536510725114e-05, "loss": 1.8559, "step": 26000},
    {"epoch": 0.75, "grad_norm": 6.496318817138672, "learning_rate": 1.8498200674392906e-05, "loss": 1.7193, "step": 26500},
    {"epoch": 0.77, "grad_norm": 9.580072402954102, "learning_rate": 1.8469864838060696e-05, "loss": 1.7393, "step": 27000},
    {"epoch": 0.78, "grad_norm": 5.071699619293213, "learning_rate": 1.8441529001728488e-05, "loss": 1.7212, "step": 27500},
    {"epoch": 0.79, "grad_norm": 6.9725470542907715, "learning_rate": 1.841319316539628e-05, "loss": 1.7477, "step": 28000},
    {"epoch": 0.81, "grad_norm": 3.311365842819214, "learning_rate": 1.838485732906407e-05, "loss": 1.6702, "step": 28500},
    {"epoch": 0.82, "grad_norm": 3.8417956829071045, "learning_rate": 1.835652149273186e-05, "loss": 1.6546, "step": 29000},
    {"epoch": 0.84, "grad_norm": 3.032675266265869, "learning_rate": 1.8328185656399652e-05, "loss": 1.6497, "step": 29500},
    {"epoch": 0.85, "grad_norm": 7.1385064125061035, "learning_rate": 1.829984982006744e-05, "loss": 1.6649, "step": 30000},
    {"epoch": 0.86, "grad_norm": 5.662952423095703, "learning_rate": 1.827151398373523e-05, "loss": 1.6297, "step": 30500},
    {"epoch": 0.88, "grad_norm": 6.526814937591553, "learning_rate": 1.8243178147403023e-05, "loss": 1.5955, "step": 31000},
    {"epoch": 0.89, "grad_norm": 7.363564491271973, "learning_rate": 1.8214842311070812e-05, "loss": 1.6026, "step": 31500},
    {"epoch": 0.91, "grad_norm": 2.3690953254699707, "learning_rate": 1.8186506474738605e-05, "loss": 1.5352, "step": 32000},
    {"epoch": 0.92, "grad_norm": 7.161606311798096, "learning_rate": 1.8158170638406394e-05, "loss": 1.6161, "step": 32500},
    {"epoch": 0.94, "grad_norm": 5.376026153564453, "learning_rate": 1.8129834802074183e-05, "loss": 1.6121, "step": 33000},
    {"epoch": 0.95, "grad_norm": 3.4227709770202637, "learning_rate": 1.8101498965741976e-05, "loss": 1.4918, "step": 33500},
    {"epoch": 0.96, "grad_norm": 6.050368785858154, "learning_rate": 1.8073163129409765e-05, "loss": 1.4924, "step": 34000},
    {"epoch": 0.98, "grad_norm": 8.424703598022461, "learning_rate": 1.8044827293077555e-05, "loss": 1.5637, "step": 34500},
    {"epoch": 0.99, "grad_norm": 4.532356262207031, "learning_rate": 1.8016491456745347e-05, "loss": 1.4816, "step": 35000},
    {"epoch": 1.0, "eval_accuracy": 0.6981436813974987, "eval_loss": 1.491289734840393, "eval_runtime": 2747.372, "eval_samples_per_second": 34.255, "eval_steps_per_second": 34.255, "step": 35291},
    {"epoch": 1.01, "grad_norm": 3.4316861629486084, "learning_rate": 1.7988155620413136e-05, "loss": 1.5285, "step": 35500},
    {"epoch": 1.02, "grad_norm": 7.24040412902832, "learning_rate": 1.795981978408093e-05, "loss": 1.4974, "step": 36000},
    {"epoch": 1.03, "grad_norm": 3.9627156257629395, "learning_rate": 1.7931483947748722e-05, "loss": 1.5143, "step": 36500},
    {"epoch": 1.05, "grad_norm": 5.244486331939697, "learning_rate": 1.790314811141651e-05, "loss": 1.4756, "step": 37000},
    {"epoch": 1.06, "grad_norm": 6.913219928741455, "learning_rate": 1.78748122750843e-05, "loss": 1.4885, "step": 37500},
    {"epoch": 1.08, "grad_norm": 7.830247402191162, "learning_rate": 1.7846476438752093e-05, "loss": 1.4262, "step": 38000},
    {"epoch": 1.09, "grad_norm": 4.569477081298828, "learning_rate": 1.7818140602419882e-05, "loss": 1.4444, "step": 38500},
    {"epoch": 1.11, "grad_norm": 7.476200103759766, "learning_rate": 1.778980476608767e-05, "loss": 1.4168, "step": 39000},
    {"epoch": 1.12, "grad_norm": 5.149771213531494, "learning_rate": 1.7761468929755464e-05, "loss": 1.44, "step": 39500},
    {"epoch": 1.13, "grad_norm": 6.547873020172119, "learning_rate": 1.7733133093423253e-05, "loss": 1.3884, "step": 40000},
    {"epoch": 1.15, "grad_norm": 5.382103443145752, "learning_rate": 1.7704797257091046e-05, "loss": 1.3833, "step": 40500},
    {"epoch": 1.16, "grad_norm": 5.518494606018066, "learning_rate": 1.7676461420758835e-05, "loss": 1.4183, "step": 41000},
    {"epoch": 1.18, "grad_norm": 6.328868389129639, "learning_rate": 1.7648125584426624e-05, "loss": 1.4387, "step": 41500},
    {"epoch": 1.19, "grad_norm": 3.008655548095703, "learning_rate": 1.7619789748094417e-05, "loss": 1.3613, "step": 42000},
    {"epoch": 1.2, "grad_norm": 6.35288667678833, "learning_rate": 1.7591453911762206e-05, "loss": 1.3565, "step": 42500},
    {"epoch": 1.22, "grad_norm": 6.100168704986572, "learning_rate": 1.7563118075429995e-05, "loss": 1.3506, "step": 43000},
    {"epoch": 1.23, "grad_norm": 3.2157440185546875, "learning_rate": 1.7534782239097788e-05, "loss": 1.3395, "step": 43500},
    {"epoch": 1.25, "grad_norm": 2.379615306854248, "learning_rate": 1.750644640276558e-05, "loss": 1.3396, "step": 44000},
    {"epoch": 1.26, "grad_norm": 2.793736457824707, "learning_rate": 1.747811056643337e-05, "loss": 1.3319, "step": 44500},
    {"epoch": 1.28, "grad_norm": 6.41762113571167, "learning_rate": 1.7449774730101163e-05, "loss": 1.301, "step": 45000},
    {"epoch": 1.29, "grad_norm": 6.420482635498047, "learning_rate": 1.7421438893768952e-05, "loss": 1.3574, "step": 45500},
    {"epoch": 1.3, "grad_norm": 2.2033274173736572, "learning_rate": 1.739310305743674e-05, "loss": 1.316, "step": 46000},
    {"epoch": 1.32, "grad_norm": 5.3276801109313965, "learning_rate": 1.7364767221104534e-05, "loss": 1.3369, "step": 46500},
    {"epoch": 1.33, "grad_norm": 4.429421901702881, "learning_rate": 1.7336431384772323e-05, "loss": 1.3232, "step": 47000},
    {"epoch": 1.35, "grad_norm": 7.140500068664551, "learning_rate": 1.7308095548440112e-05, "loss": 1.3108, "step": 47500},
    {"epoch": 1.36, "grad_norm": 4.972752571105957, "learning_rate": 1.7279759712107905e-05, "loss": 1.2425, "step": 48000},
    {"epoch": 1.37, "grad_norm": 4.659143447875977, "learning_rate": 1.7251423875775694e-05, "loss": 1.2828, "step": 48500},
    {"epoch": 1.39, "grad_norm": 3.439525842666626, "learning_rate": 1.7223088039443487e-05, "loss": 1.2559, "step": 49000},
    {"epoch": 1.4, "grad_norm": 5.8029656410217285, "learning_rate": 1.7194752203111276e-05, "loss": 1.2982, "step": 49500},
    {"epoch": 1.42, "grad_norm": 4.9915771484375, "learning_rate": 1.7166416366779065e-05, "loss": 1.2627, "step": 50000},
    {"epoch": 1.43, "grad_norm": 5.220378875732422, "learning_rate": 1.7138080530446858e-05, "loss": 1.32, "step": 50500},
    {"epoch": 1.45, "grad_norm": 6.145462989807129, "learning_rate": 1.7109744694114647e-05, "loss": 1.2215, "step": 51000},
    {"epoch": 1.46, "grad_norm": 3.8394155502319336, "learning_rate": 1.7081408857782436e-05, "loss": 1.2521, "step": 51500},
    {"epoch": 1.47, "grad_norm": 4.981057643890381, "learning_rate": 1.705307302145023e-05, "loss": 1.2893, "step": 52000},
    {"epoch": 1.49, "grad_norm": 1.4461212158203125, "learning_rate": 1.702473718511802e-05, "loss": 1.3046, "step": 52500},
    {"epoch": 1.5, "grad_norm": 0.8721215724945068, "learning_rate": 1.699640134878581e-05, "loss": 1.2423, "step": 53000},
    {"epoch": 1.52, "grad_norm": 5.1794843673706055, "learning_rate": 1.6968065512453604e-05, "loss": 1.2604, "step": 53500},
    {"epoch": 1.53, "grad_norm": 4.042840480804443, "learning_rate": 1.6939729676121393e-05, "loss": 1.2357, "step": 54000},
    {"epoch": 1.54, "grad_norm": 5.862013816833496, "learning_rate": 1.6911393839789182e-05, "loss": 1.2256, "step": 54500},
    {"epoch": 1.56, "grad_norm": 3.3453152179718018, "learning_rate": 1.6883058003456975e-05, "loss": 1.1843, "step": 55000},
    {"epoch": 1.57, "grad_norm": 8.414984703063965, "learning_rate": 1.6854722167124764e-05, "loss": 1.2016, "step": 55500},
    {"epoch": 1.59, "grad_norm": 2.847531795501709, "learning_rate": 1.6826386330792553e-05, "loss": 1.1997, "step": 56000},
    {"epoch": 1.6, "grad_norm": 3.7932987213134766, "learning_rate": 1.6798050494460346e-05, "loss": 1.2276, "step": 56500},
    {"epoch": 1.62, "grad_norm": 4.8044562339782715, "learning_rate": 1.6769714658128135e-05, "loss": 1.1733, "step": 57000},
    {"epoch": 1.63, "grad_norm": 5.216464996337891, "learning_rate": 1.6741378821795928e-05, "loss": 1.1884, "step": 57500},
    {"epoch": 1.64, "grad_norm": 4.36829137802124, "learning_rate": 1.6713042985463717e-05, "loss": 1.1851, "step": 58000},
    {"epoch": 1.66, "grad_norm": 4.850276947021484, "learning_rate": 1.6684707149131506e-05, "loss": 1.1902, "step": 58500},
    {"epoch": 1.67, "grad_norm": 5.7681989669799805, "learning_rate": 1.66563713127993e-05, "loss": 1.1787, "step": 59000},
    {"epoch": 1.69, "grad_norm": 2.202500581741333, "learning_rate": 1.662803547646709e-05, "loss": 1.1648, "step": 59500},
    {"epoch": 1.7, "grad_norm": 3.1508541107177734, "learning_rate": 1.659969964013488e-05, "loss": 1.1421, "step": 60000},
    {"epoch": 1.71, "grad_norm": 7.434388637542725, "learning_rate": 1.657136380380267e-05, "loss": 1.174, "step": 60500},
    {"epoch": 1.73, "grad_norm": 2.1375887393951416, "learning_rate": 1.6543027967470463e-05, "loss": 1.1941, "step": 61000},
    {"epoch": 1.74, "grad_norm": 5.083194732666016, "learning_rate": 1.6514692131138252e-05, "loss": 1.1457, "step": 61500},
    {"epoch": 1.76, "grad_norm": 5.154632568359375, "learning_rate": 1.6486356294806044e-05, "loss": 1.2178, "step": 62000},
    {"epoch": 1.77, "grad_norm": 3.088094711303711, "learning_rate": 1.6458020458473834e-05, "loss": 1.1661, "step": 62500},
    {"epoch": 1.79, "grad_norm": 5.301969528198242, "learning_rate": 1.6429684622141623e-05, "loss": 1.119, "step": 63000},
    {"epoch": 1.8, "grad_norm": 4.429632186889648, "learning_rate": 1.6401348785809416e-05, "loss": 1.1533, "step": 63500},
    {"epoch": 1.81, "grad_norm": 4.935004234313965, "learning_rate": 1.6373012949477205e-05, "loss": 1.1492, "step": 64000},
    {"epoch": 1.83, "grad_norm": 4.975069999694824, "learning_rate": 1.6344677113144994e-05, "loss": 1.1214, "step": 64500},
    {"epoch": 1.84, "grad_norm": 2.3240180015563965, "learning_rate": 1.6316341276812787e-05, "loss": 1.2013, "step": 65000},
    {"epoch": 1.86, "grad_norm": 3.9896786212921143, "learning_rate": 1.6288005440480576e-05, "loss": 1.1176, "step": 65500},
    {"epoch": 1.87, "grad_norm": 5.830460071563721, "learning_rate": 1.625966960414837e-05, "loss": 1.117, "step": 66000},
    {"epoch": 1.88, "grad_norm": 5.111226558685303, "learning_rate": 1.6231333767816158e-05, "loss": 1.1321, "step": 66500},
    {"epoch": 1.9, "grad_norm": 7.154306411743164, "learning_rate": 1.6202997931483947e-05, "loss": 1.1416, "step": 67000},
    {"epoch": 1.91, "grad_norm": 2.1327767372131348, "learning_rate": 1.617466209515174e-05, "loss": 1.1103, "step": 67500},
    {"epoch": 1.93, "grad_norm": 4.014294624328613, "learning_rate": 1.6146326258819532e-05, "loss": 1.0615, "step": 68000},
    {"epoch": 1.94, "grad_norm": 0.03513578325510025, "learning_rate": 1.611799042248732e-05, "loss": 1.1205, "step": 68500},
    {"epoch": 1.96, "grad_norm": 2.805485725402832, "learning_rate": 1.608965458615511e-05, "loss": 1.136, "step": 69000},
    {"epoch": 1.97, "grad_norm": 3.611574172973633, "learning_rate": 1.6061318749822904e-05, "loss": 1.1159, "step": 69500},
    {"epoch": 1.98, "grad_norm": 5.143590450286865, "learning_rate": 1.6032982913490693e-05, "loss": 1.111, "step": 70000},
    {"epoch": 2.0, "grad_norm": 3.690364360809326, "learning_rate": 1.6004647077158485e-05, "loss": 1.1491, "step": 70500},
    {"epoch": 2.0, "eval_accuracy": 0.7476809299656788, "eval_loss": 1.085469126701355, "eval_runtime": 2745.5383, "eval_samples_per_second": 34.278, "eval_steps_per_second": 34.278, "step": 70582},
    {"epoch": 2.01, "grad_norm": 4.023772239685059, "learning_rate": 1.5976311240826275e-05, "loss": 1.055, "step": 71000},
    {"epoch": 2.03, "grad_norm": 3.253685474395752, "learning_rate": 1.5947975404494064e-05, "loss": 1.0495, "step": 71500},
    {"epoch": 2.04, "grad_norm": 4.396245956420898, "learning_rate": 1.5919639568161857e-05, "loss": 1.0747, "step": 72000},
    {"epoch": 2.05, "grad_norm": 8.438695907592773, "learning_rate": 1.5891303731829646e-05, "loss": 1.0484, "step": 72500},
    {"epoch": 2.07, "grad_norm": 4.8430914878845215, "learning_rate": 1.5862967895497435e-05, "loss": 1.1275, "step": 73000},
    {"epoch": 2.08, "grad_norm": 3.183419704437256, "learning_rate": 1.5834632059165228e-05, "loss": 1.0809, "step": 73500},
    {"epoch": 2.1, "grad_norm": 2.4836373329162598, "learning_rate": 1.5806296222833017e-05, "loss": 1.1175, "step": 74000},
    {"epoch": 2.11, "grad_norm": 5.866794109344482, "learning_rate": 1.577796038650081e-05, "loss": 1.0826, "step": 74500},
    {"epoch": 2.13, "grad_norm": 7.779801368713379, "learning_rate": 1.57496245501686e-05, "loss": 1.0051, "step": 75000},
    {"epoch": 2.14, "grad_norm": 7.138259410858154, "learning_rate": 1.572128871383639e-05, "loss": 1.0388, "step": 75500},
    {"epoch": 2.15, "grad_norm": 5.617405414581299, "learning_rate": 1.569295287750418e-05, "loss": 1.0332, "step": 76000},
    {"epoch": 2.17, "grad_norm": 3.4417684078216553, "learning_rate": 1.5664617041171973e-05, "loss": 1.1104, "step": 76500},
    {"epoch": 2.18, "grad_norm": 3.4162983894348145, "learning_rate": 1.5636281204839763e-05, "loss": 1.0992, "step": 77000},
    {"epoch": 2.2, "grad_norm": 8.070796966552734, "learning_rate": 1.5607945368507552e-05, "loss": 1.0486, "step": 77500},
    {"epoch": 2.21, "grad_norm": 4.65944242477417, "learning_rate": 1.5579609532175344e-05, "loss": 1.0532, "step": 78000},
    {"epoch": 2.22, "grad_norm": 7.3272385597229, "learning_rate": 1.5551273695843134e-05, "loss": 1.0081, "step": 78500},
    {"epoch": 2.24, "grad_norm": 2.9811854362487793, "learning_rate": 1.5522937859510926e-05, "loss": 1.0228, "step": 79000},
    {"epoch": 2.25, "grad_norm": 2.284489154815674, "learning_rate": 1.5494602023178716e-05, "loss": 1.0208, "step": 79500},
    {"epoch": 2.27, "grad_norm": 6.056519985198975, "learning_rate": 1.5466266186846505e-05, "loss": 1.032, "step": 80000},
    {"epoch": 2.28, "grad_norm": 0.048289719969034195, "learning_rate": 1.5437930350514297e-05, "loss": 1.0771, "step": 80500},
    {"epoch": 2.3, "grad_norm": 4.455281734466553, "learning_rate": 1.5409594514182087e-05, "loss": 1.0366, "step": 81000},
    {"epoch": 2.31, "grad_norm": 5.394782066345215, "learning_rate": 1.5381258677849876e-05, "loss": 1.0811, "step": 81500},
    {"epoch": 2.32, "grad_norm": 4.059425354003906, "learning_rate": 1.535292284151767e-05, "loss": 1.0072, "step": 82000},
    {"epoch": 2.34, "grad_norm": 5.580400466918945, "learning_rate": 1.5324587005185458e-05, "loss": 1.053, "step": 82500},
    {"epoch": 2.35, "grad_norm": 2.141561508178711, "learning_rate": 1.529625116885325e-05, "loss": 1.0425, "step": 83000},
    {"epoch": 2.37, "grad_norm": 2.186253547668457, "learning_rate": 1.5267915332521043e-05, "loss": 1.0205, "step": 83500},
    {"epoch": 2.38, "grad_norm": 3.237588405609131, "learning_rate": 1.5239579496188832e-05, "loss": 0.9956, "step": 84000},
    {"epoch": 2.39, "grad_norm": 4.83936071395874, "learning_rate": 1.5211243659856623e-05, "loss": 0.9885, "step": 84500},
    {"epoch": 2.41, "grad_norm": 5.182650089263916, "learning_rate": 1.5182907823524413e-05, "loss": 1.0748, "step": 85000},
    {"epoch": 2.42, "grad_norm": 1.2307519912719727, "learning_rate": 1.5154571987192203e-05, "loss": 0.9792, "step": 85500},
    {"epoch": 2.44, "grad_norm": 4.785423278808594, "learning_rate": 1.5126236150859994e-05, "loss": 1.0311, "step": 86000},
    {"epoch": 2.45, "grad_norm": 6.569101333618164, "learning_rate": 1.5097900314527785e-05, "loss": 0.9967, "step": 86500},
    {"epoch": 2.47, "grad_norm": 2.734511613845825, "learning_rate": 1.5069564478195575e-05, "loss": 1.0641, "step": 87000},
    {"epoch": 2.48, "grad_norm": 3.6517486572265625, "learning_rate": 1.5041228641863366e-05, "loss": 0.9817, "step": 87500},
    {"epoch": 2.49, "grad_norm": 2.1969916820526123, "learning_rate": 1.5012892805531156e-05, "loss": 0.9867, "step": 88000},
    {"epoch": 2.51, "grad_norm": 3.1304757595062256, "learning_rate": 1.4984556969198947e-05, "loss": 1.0322, "step": 88500},
    {"epoch": 2.52, "grad_norm": 3.666090965270996, "learning_rate": 1.4956221132866737e-05, "loss": 1.0294, "step": 89000},
    {"epoch": 2.54, "grad_norm": 6.77361536026001, "learning_rate": 1.4927885296534528e-05, "loss": 0.9871, "step": 89500},
    {"epoch": 2.55, "grad_norm": 5.582375526428223, "learning_rate": 1.4899549460202319e-05, "loss": 0.9809, "step": 90000},
    {"epoch": 2.56, "grad_norm": 5.1192827224731445, "learning_rate": 1.487121362387011e-05, "loss": 0.9964, "step": 90500},
    {"epoch": 2.58, "grad_norm": 3.1294760704040527, "learning_rate": 1.4842877787537899e-05, "loss": 1.0106, "step": 91000},
    {"epoch": 2.59, "grad_norm": 2.579064130783081, "learning_rate": 1.4814541951205691e-05, "loss": 0.9774, "step": 91500},
    {"epoch": 2.61, "grad_norm": 5.767348766326904, "learning_rate": 1.4786206114873482e-05, "loss": 0.9883, "step": 92000},
    {"epoch": 2.62, "grad_norm": 3.4669153690338135, "learning_rate": 1.4757870278541273e-05, "loss": 1.0007, "step": 92500},
    {"epoch": 2.64, "grad_norm": 3.2495174407958984, "learning_rate": 1.4729534442209064e-05, "loss": 1.0275, "step": 93000},
    {"epoch": 2.65, "grad_norm": 4.541181564331055, "learning_rate": 1.4701198605876853e-05, "loss": 1.0074, "step": 93500},
    {"epoch": 2.66, "grad_norm": 4.682742118835449, "learning_rate": 1.4672862769544644e-05, "loss": 1.0366, "step": 94000},
    {"epoch": 2.68, "grad_norm": 3.1074297428131104, "learning_rate": 1.4644526933212435e-05, "loss": 1.0267, "step": 94500},
    {"epoch": 2.69, "grad_norm": 2.548527240753174, "learning_rate": 1.4616191096880226e-05, "loss": 0.9915, "step": 95000},
    {"epoch": 2.71, "grad_norm": 2.168649673461914, "learning_rate": 1.4587855260548016e-05, "loss": 1.0054, "step": 95500},
    {"epoch": 2.72, "grad_norm": 5.411528587341309, "learning_rate": 1.4559519424215806e-05, "loss": 1.0045, "step": 96000},
    {"epoch": 2.73, "grad_norm": 3.786655902862549, "learning_rate": 1.4531183587883597e-05, "loss": 0.9932, "step": 96500},
    {"epoch": 2.75, "grad_norm": 3.0948727130889893, "learning_rate": 1.4502847751551388e-05, "loss": 0.9707, "step": 97000},
    {"epoch": 2.76, "grad_norm": 2.989771842956543, "learning_rate": 1.4474511915219178e-05, "loss": 0.9774, "step": 97500},
    {"epoch": 2.78, "grad_norm": 3.2359373569488525, "learning_rate": 1.4446176078886969e-05, "loss": 0.9306, "step": 98000},
    {"epoch": 2.79, "grad_norm": 8.07835865020752, "learning_rate": 1.441784024255476e-05, "loss": 0.9524, "step": 98500},
    {"epoch": 2.81, "grad_norm": 3.9400320053100586, "learning_rate": 1.438950440622255e-05, "loss": 0.9501, "step": 99000},
    {"epoch": 2.82, "grad_norm": 4.814745903015137, "learning_rate": 1.4361168569890343e-05, "loss": 0.9887, "step": 99500},
    {"epoch": 2.83, "grad_norm": 2.6948797702789307, "learning_rate": 1.4332832733558132e-05, "loss": 0.9708, "step": 100000},
    {"epoch": 2.85, "grad_norm": 2.974473476409912, "learning_rate": 1.4304496897225923e-05, "loss": 0.9709, "step": 100500},
    {"epoch": 2.86, "grad_norm": 7.195821762084961, "learning_rate": 1.4276161060893714e-05, "loss": 0.8956, "step": 101000},
    {"epoch": 2.88, "grad_norm": 4.091536045074463, "learning_rate": 1.4247825224561505e-05, "loss": 0.9442, "step": 101500},
    {"epoch": 2.89, "grad_norm": 2.1364340782165527, "learning_rate": 1.4219489388229294e-05, "loss": 1.0104, "step": 102000},
    {"epoch": 2.9, "grad_norm": 2.952352523803711, "learning_rate": 1.4191153551897085e-05, "loss": 0.918, "step": 102500},
    {"epoch": 2.92, "grad_norm": 2.175086736679077, "learning_rate": 1.4162817715564876e-05, "loss": 0.9514, "step": 103000},
    {"epoch": 2.93, "grad_norm": 3.7156364917755127, "learning_rate": 1.4134481879232667e-05, "loss": 0.9795, "step": 103500},
    {"epoch": 2.95, "grad_norm": 3.0967416763305664, "learning_rate": 1.4106146042900456e-05, "loss": 0.9774, "step": 104000},
    {"epoch": 2.96, "grad_norm": 3.4768569469451904, "learning_rate": 1.4077810206568247e-05, "loss": 0.9217, "step": 104500},
    {"epoch": 2.98, "grad_norm": 8.810585975646973, "learning_rate": 1.4049474370236038e-05, "loss": 0.9517, "step": 105000},
    {"epoch": 2.99, "grad_norm": 4.477392673492432, "learning_rate": 1.402113853390383e-05, "loss": 0.9538, "step": 105500},
    {"epoch": 3.0, "eval_accuracy": 0.7630351393567171, "eval_loss": 0.9388064742088318, "eval_runtime": 2748.5992, "eval_samples_per_second": 34.24, "eval_steps_per_second": 34.24, "step": 105874},
    {"epoch": 3.0, "grad_norm": 3.1107003688812256, "learning_rate": 1.3992802697571619e-05, "loss": 0.9243, "step": 106000},
    {"epoch": 3.02, "grad_norm": 4.796999931335449, "learning_rate": 1.396446686123941e-05, "loss": 0.9163, "step": 106500},
    {"epoch": 3.03, "grad_norm": 3.348123073577881, "learning_rate": 1.3936131024907202e-05, "loss": 0.9476, "step": 107000},
    {"epoch": 3.05, "grad_norm": 3.8326098918914795, "learning_rate": 1.3907795188574993e-05, "loss": 0.9742, "step": 107500},
    {"epoch": 3.06, "grad_norm": 3.05477237701416, "learning_rate": 1.3879459352242784e-05, "loss": 0.9613, "step": 108000},
    {"epoch": 3.07, "grad_norm": 2.1962602138519287, "learning_rate": 1.3851123515910573e-05, "loss": 0.9059, "step": 108500},
    {"epoch": 3.09, "grad_norm": 4.850438117980957, "learning_rate": 1.3822787679578364e-05, "loss": 0.9147, "step": 109000},
    {"epoch": 3.1, "grad_norm": 4.167685508728027, "learning_rate": 1.3794451843246155e-05, "loss": 0.8815, "step": 109500},
    {"epoch": 3.12, "grad_norm": 3.5904150009155273, "learning_rate": 1.3766116006913946e-05, "loss": 0.9336, "step": 110000},
    {"epoch": 3.13, "grad_norm": 3.102593421936035, "learning_rate": 1.3737780170581735e-05, "loss": 0.9543, "step": 110500},
    {"epoch": 3.15, "grad_norm": 3.3601255416870117, "learning_rate": 1.3709444334249526e-05, "loss": 0.8912, "step": 111000},
    {"epoch": 3.16, "grad_norm": 3.960665702819824, "learning_rate": 1.3681108497917317e-05, "loss": 0.9416, "step": 111500},
    {"epoch": 3.17, "grad_norm": 2.2088279724121094, "learning_rate": 1.3652772661585108e-05, "loss": 0.9303, "step": 112000},
    {"epoch": 3.19, "grad_norm": 3.3079452514648438, "learning_rate": 1.3624436825252897e-05, "loss": 0.9343, "step": 112500},
    {"epoch": 3.2, "grad_norm": 3.0884592533111572, "learning_rate": 1.3596100988920688e-05, "loss": 0.8972, "step": 113000},
    {"epoch": 3.22, "grad_norm": 3.246960401535034, "learning_rate": 1.356776515258848e-05, "loss": 0.9042, "step": 113500},
    {"epoch": 3.23, "grad_norm": 0.40205252170562744, "learning_rate": 1.353942931625627e-05, "loss": 0.9483, "step": 114000},
    {"epoch": 3.24, "grad_norm": 3.8221993446350098, "learning_rate": 1.351109347992406e-05, "loss": 0.9081, "step": 114500},
    {"epoch": 3.26, "grad_norm": 2.204099655151367, "learning_rate": 1.3482757643591852e-05, "loss": 0.9174, "step": 115000},
    {"epoch": 3.27, "grad_norm": 5.331653118133545, "learning_rate": 1.3454421807259643e-05, "loss": 0.9708, "step": 115500},
    {"epoch": 3.29, "grad_norm": 0.37105703353881836, "learning_rate": 1.3426085970927434e-05, "loss": 0.9209, "step": 116000},
    {"epoch": 3.3, "grad_norm": 4.132419109344482, "learning_rate": 1.3397750134595225e-05, "loss": 0.9223, "step": 116500},
    {"epoch": 3.32, "grad_norm": 3.2373697757720947, "learning_rate": 1.3369414298263014e-05, "loss": 0.9449, "step": 117000},
    {"epoch": 3.33, "grad_norm": 2.200536012649536, "learning_rate": 1.3341078461930805e-05, "loss": 0.924, "step": 117500},
    {"epoch": 3.34, "grad_norm": 4.251534938812256, "learning_rate": 1.3312742625598596e-05, "loss": 0.9245, "step": 118000},
    {"epoch": 3.36, "grad_norm": 4.964018821716309, "learning_rate": 1.3284406789266387e-05, "loss": 0.8797, "step": 118500},
    {"epoch": 3.37, "grad_norm": 8.006040573120117, "learning_rate": 1.3256070952934176e-05, "loss": 0.9154, "step": 119000},
    {"epoch": 3.39, "grad_norm": 3.682969331741333, "learning_rate": 1.3227735116601967e-05, "loss": 0.9182, "step": 119500},
    {"epoch": 3.4, "grad_norm": 5.011803150177002, "learning_rate": 1.3199399280269758e-05, "loss": 0.9166, "step": 120000},
    {"epoch": 3.41, "grad_norm": 3.1646368503570557, "learning_rate": 1.3171063443937549e-05, "loss": 0.9276, "step": 120500},
    {"epoch": 3.43, "grad_norm": 3.1225647926330566, "learning_rate": 1.3142727607605338e-05, "loss": 0.9428, "step": 121000},
    {"epoch": 3.44, "grad_norm": 3.139483690261841, "learning_rate": 1.311439177127313e-05, "loss": 0.9391, "step": 121500},
    {"epoch": 3.46, "grad_norm": 4.643157482147217, "learning_rate": 1.308605593494092e-05, "loss": 0.9136, "step": 122000},
    {"epoch": 3.47, "grad_norm": 4.578587532043457, "learning_rate": 1.3057720098608711e-05, "loss": 0.9092, "step": 122500},
    {"epoch": 3.49, "grad_norm": 2.2071352005004883, "learning_rate": 1.3029384262276504e-05, "loss": 0.915, "step": 123000},
    {"epoch": 3.5, "grad_norm": 2.4921517372131348, "learning_rate": 1.3001048425944293e-05, "loss": 0.8868, "step": 123500},
    {"epoch": 3.51, "grad_norm": 4.749142646789551, "learning_rate": 1.2972712589612084e-05, "loss": 0.9282, "step": 124000},
    {"epoch": 3.53, "grad_norm": 2.181434154510498, "learning_rate": 1.2944376753279875e-05, "loss": 0.9086, "step": 124500},
    {"epoch": 3.54, "grad_norm": 4.025723457336426, "learning_rate": 1.2916040916947666e-05, "loss": 0.8491, "step": 125000},
    {"epoch": 3.56, "grad_norm": 3.551264524459839, "learning_rate": 1.2887705080615455e-05, "loss": 0.9228, "step": 125500},
    {"epoch": 3.57, "grad_norm": 2.884403705596924, "learning_rate": 1.2859369244283246e-05, "loss": 0.9004, "step": 126000},
    {"epoch": 3.58, "grad_norm": 2.999499559402466, "learning_rate": 1.2831033407951037e-05, "loss": 0.8974, "step": 126500},
    {"epoch": 3.6, "grad_norm": 0.017392095178365707, "learning_rate": 1.2802697571618828e-05, "loss": 0.9185, "step": 127000},
    {"epoch": 3.61, "grad_norm": 3.908944845199585, "learning_rate": 1.2774361735286617e-05, "loss": 0.9096, "step": 127500},
    {"epoch": 3.63, "grad_norm": 4.624220848083496, "learning_rate": 1.2746025898954408e-05, "loss": 0.9109, "step": 128000},
    {"epoch": 3.64, "grad_norm": 3.6561858654022217, "learning_rate": 1.2717690062622199e-05, "loss": 0.9003, "step": 128500},
    {"epoch": 3.66, "grad_norm": 0.10583093762397766, "learning_rate": 1.268935422628999e-05, "loss": 0.9076, "step": 129000},
    {"epoch": 3.67, "grad_norm": 3.653130292892456, "learning_rate": 1.266101838995778e-05, "loss": 0.9232, "step": 129500},
    {"epoch": 3.68, "grad_norm": 3.191444158554077, "learning_rate": 1.263268255362557e-05, "loss": 0.9331, "step": 130000},
    {"epoch": 3.7, "grad_norm": 5.10831880569458, "learning_rate": 1.2604346717293361e-05, "loss": 0.9038, "step": 130500},
    {"epoch": 3.71, "grad_norm": 3.668541193008423, "learning_rate": 1.2576010880961154e-05, "loss": 0.8835, "step": 131000},
    {"epoch": 3.73, "grad_norm": 4.350325584411621, "learning_rate": 1.2547675044628945e-05, "loss": 0.8649, "step": 131500},
    {"epoch": 3.74, "grad_norm": 5.0062079429626465, "learning_rate": 1.2519339208296734e-05, "loss": 0.9066, "step": 132000},
    {"epoch": 3.75, "grad_norm": 3.533888816833496, "learning_rate": 1.2491003371964525e-05, "loss": 0.893, "step": 132500},
    {"epoch": 3.77, "grad_norm": 2.9693057537078857, "learning_rate": 1.2462667535632316e-05, "loss": 0.8672, "step": 133000},
    {"epoch": 3.78, "grad_norm": 3.295539140701294, "learning_rate": 1.2434331699300107e-05, "loss": 0.8736, "step": 133500},
    {"epoch": 3.8, "grad_norm": 2.2124862670898438, "learning_rate": 1.2405995862967896e-05, "loss": 0.8584, "step": 134000},
    {"epoch": 3.81, "grad_norm": 3.2220728397369385, "learning_rate": 1.2377660026635687e-05, "loss": 0.8644, "step": 134500},
    {"epoch": 3.83, "grad_norm": 4.965516567230225, "learning_rate": 1.2349324190303478e-05, "loss": 0.8814, "step": 135000},
    {"epoch": 3.84, "grad_norm": 4.129695415496826, "learning_rate": 1.2320988353971269e-05, "loss": 0.86, "step": 135500},
    {"epoch": 3.85, "grad_norm": 2.1921725273132324, "learning_rate": 1.2292652517639058e-05, "loss": 0.8896, "step": 136000},
    {"epoch": 3.87, "grad_norm": 0.049632515758275986, "learning_rate": 1.2264316681306849e-05, "loss": 0.8845, "step": 136500},
    {"epoch": 3.88, "grad_norm": 3.807011604309082, "learning_rate": 1.223598084497464e-05, "loss": 0.9152, "step": 137000},
    {"epoch": 3.9, "grad_norm": 5.094349384307861, "learning_rate": 1.2207645008642431e-05, "loss": 0.8885, "step": 137500},
    {"epoch": 3.91, "grad_norm": 6.3345537185668945, "learning_rate": 1.217930917231022e-05, "loss": 0.8714, "step": 138000},
    {"epoch": 3.92, "grad_norm": 4.590448379516602, "learning_rate": 1.2150973335978011e-05, "loss": 0.857, "step": 138500},
    {"epoch": 3.94, "grad_norm": 3.5549352169036865, "learning_rate": 1.2122637499645804e-05, "loss": 0.9091, "step": 139000},
    {"epoch": 3.95, "grad_norm": 3.162019968032837, "learning_rate": 1.2094301663313595e-05, "loss": 0.8527, "step": 139500},
    {"epoch": 3.97, "grad_norm": 7.230122089385986, "learning_rate": 1.2065965826981386e-05, "loss": 0.8646, "step": 140000},
    {"epoch": 3.98, "grad_norm": 0.9568886756896973, "learning_rate": 1.2037629990649175e-05, "loss": 0.8996, "step": 140500},
    {"epoch": 4.0, "grad_norm": 6.046046257019043, "learning_rate": 1.2009294154316966e-05, "loss": 0.902, "step": 141000},
    {"epoch": 4.0, "eval_accuracy": 0.770122514902615, "eval_loss": 0.8713550567626953, "eval_runtime": 2750.3895, "eval_samples_per_second": 34.217, "eval_steps_per_second": 34.217, "step": 141165},
    {"epoch": 4.01, "grad_norm": 4.080121040344238, "learning_rate": 1.1980958317984757e-05, "loss": 0.8701, "step": 141500},
    {"epoch": 4.02, "grad_norm": 3.5020222663879395, "learning_rate": 1.1952622481652548e-05, "loss": 0.9201, "step": 142000},
    {"epoch": 4.04, "grad_norm": 5.760317802429199, "learning_rate": 1.1924286645320337e-05, "loss": 0.8844, "step": 142500},
    {"epoch": 4.05, "grad_norm": 3.131807565689087, "learning_rate": 1.1895950808988128e-05, "loss": 0.8403, "step": 143000},
    {"epoch": 4.07, "grad_norm": 2.6976137161254883, "learning_rate": 1.1867614972655919e-05, "loss": 0.837, "step": 143500},
    {"epoch": 4.08, "grad_norm": 4.363113880157471, "learning_rate": 1.183927913632371e-05, "loss": 0.8528, "step": 144000},
    {"epoch": 4.09, "grad_norm": 3.6650633811950684, "learning_rate": 1.1810943299991499e-05, "loss": 0.866, "step": 144500},
    {"epoch": 4.11, "grad_norm": 2.1963698863983154, "learning_rate": 1.178260746365929e-05, "loss": 0.8799, "step": 145000},
    {"epoch": 4.12, "grad_norm": 6.11692476272583, "learning_rate": 1.1754271627327081e-05, "loss": 0.862, "step": 145500},
    {"epoch": 4.14, "grad_norm": 3.8074452877044678, "learning_rate": 1.1725935790994872e-05, "loss": 0.8481, "step": 146000},
    {"epoch": 4.15, "grad_norm": 3.0709991455078125, "learning_rate": 1.1697599954662661e-05, "loss": 0.8673, "step": 146500},
    {"epoch": 4.17, "grad_norm": 4.780150413513184, "learning_rate": 1.1669264118330454e-05, "loss": 0.8413, "step": 147000},
    {"epoch": 4.18, "grad_norm": 3.2353837490081787, "learning_rate": 1.1640928281998245e-05, "loss": 0.8593, "step": 147500},
    {"epoch": 4.19, "grad_norm": 2.6519854068756104, "learning_rate": 1.1612592445666036e-05, "loss": 0.8726, "step": 148000},
    {"epoch": 4.21, "grad_norm": 6.630087852478027, "learning_rate": 1.1584256609333827e-05, "loss": 0.8422, "step": 148500},
    {"epoch": 4.22, "grad_norm": 4.064183712005615, "learning_rate": 1.1555920773001616e-05, "loss": 0.8463, "step": 149000},
    {"epoch": 4.24, "grad_norm": 2.2233190536499023, "learning_rate": 1.1527584936669407e-05, "loss": 0.8529, "step": 149500},
    {"epoch": 4.25, "grad_norm": 0.32811805605888367, "learning_rate": 1.1499249100337198e-05, "loss": 0.8887, "step": 150000},
    {"epoch": 4.26, "grad_norm": 3.161423683166504, "learning_rate": 1.1470913264004989e-05, "loss": 0.8435, "step": 150500},
    {"epoch": 4.28, "grad_norm": 0.14471185207366943, "learning_rate": 1.1442577427672778e-05, "loss": 0.8758, "step": 151000},
    {"epoch": 4.29, "grad_norm": 2.2590670585632324, "learning_rate": 1.1414241591340569e-05, "loss": 0.8808, "step": 151500},
    {"epoch": 4.31, "grad_norm": 8.431534767150879, "learning_rate": 1.138590575500836e-05, "loss": 0.8463, "step": 152000},
    {"epoch": 4.32, "grad_norm": 3.280068874359131, "learning_rate": 1.135756991867615e-05, "loss": 0.8644, "step": 152500},
    {"epoch": 4.34, "grad_norm": 3.7241225242614746, "learning_rate": 1.132923408234394e-05, "loss": 0.8531, "step": 153000},
    {"epoch": 4.35, "grad_norm": 3.3345284461975098, "learning_rate": 1.130089824601173e-05, "loss": 0.8671, "step": 153500},
    {"epoch": 4.36, "grad_norm": 0.11300093680620193, "learning_rate": 1.1272562409679522e-05, "loss": 0.8489, "step": 154000},
    {"epoch": 4.38, "grad_norm": 3.3431150913238525, "learning_rate": 1.1244226573347313e-05, "loss": 0.9055, "step": 154500},
    {"epoch": 4.39, "grad_norm": 2.2208619117736816, "learning_rate": 1.1215890737015105e-05, "loss": 0.8338, "step": 155000},
    {"epoch": 4.41, "grad_norm": 3.340219020843506, "learning_rate": 1.1187554900682895e-05, "loss": 0.8709, "step": 155500},
    {"epoch": 4.42, "grad_norm": 0.9095796942710876, "learning_rate": 1.1159219064350686e-05, "loss": 0.8795, "step": 156000},
    {"epoch": 4.43, "grad_norm": 6.427603721618652, "learning_rate": 1.1130883228018476e-05, "loss": 0.8372, "step": 156500},
    {"epoch": 4.45, "grad_norm": 3.5089049339294434, "learning_rate": 1.1102547391686267e-05, "loss": 0.9082, "step": 157000},
    {"epoch": 4.46, "grad_norm": 0.014750667847692966, "learning_rate": 1.1074211555354057e-05, "loss": 0.8351, "step": 157500},
    {"epoch": 4.48, "grad_norm": 5.520388126373291, "learning_rate": 1.1045875719021848e-05, "loss": 0.8342, "step": 158000},
    {"epoch": 4.49, "grad_norm": 4.782327651977539, "learning_rate": 1.1017539882689639e-05, "loss": 0.8209, "step": 158500},
    {"epoch": 4.51, "grad_norm": 2.2101173400878906, "learning_rate": 1.098920404635743e-05, "loss": 0.7976, "step": 159000},
    {"epoch": 4.52, "grad_norm": 5.672389030456543, "learning_rate": 1.0960868210025219e-05, "loss": 0.8703, "step": 159500},
    {"epoch": 4.53, "grad_norm": 3.9403274059295654, "learning_rate": 1.093253237369301e-05, "loss": 0.8647, "step": 160000},
    {"epoch": 4.55, "grad_norm": 4.439927577972412, "learning_rate": 1.09041965373608e-05, "loss": 0.8725, "step": 160500},
    {"epoch": 4.56, "grad_norm": 3.5873897075653076, "learning_rate": 1.0875860701028592e-05, "loss": 0.8628, "step": 161000},
    {"epoch": 4.58, "grad_norm": 2.261143922805786, "learning_rate": 1.084752486469638e-05, "loss": 0.8657, "step": 161500},
    {"epoch": 4.59, "grad_norm": 0.026868639513850212, "learning_rate": 1.0819189028364172e-05, "loss": 0.8614, "step": 162000},
    {"epoch": 4.6, "grad_norm": 3.7286293506622314, "learning_rate": 1.0790853192031964e-05, "loss": 0.8624, "step": 162500},
    {"epoch": 4.62, "grad_norm": 2.272576093673706, "learning_rate": 1.0762517355699755e-05, "loss": 0.8414, "step": 163000},
    {"epoch": 4.63, "grad_norm": 3.6233222484588623, "learning_rate": 1.0734181519367546e-05, "loss": 0.8429, "step": 163500},
    {"epoch": 4.65, "grad_norm": 2.8637378215789795, "learning_rate": 1.0705845683035336e-05, "loss": 0.8615, "step": 164000},
    {"epoch": 4.66, "grad_norm": 3.5243945121765137, "learning_rate": 1.0677509846703126e-05, "loss": 0.8324, "step": 164500},
    {"epoch": 4.68, "grad_norm": 3.835604667663574, "learning_rate": 1.0649174010370917e-05, "loss": 0.8001, "step": 165000},
    {"epoch": 4.69, "grad_norm": 4.268076419830322, "learning_rate": 1.0620838174038708e-05, "loss": 0.8573, "step": 165500},
    {"epoch": 4.7, "grad_norm": 5.4616312980651855, "learning_rate": 1.0592502337706498e-05, "loss": 0.8347, "step": 166000},
    {"epoch": 4.72, "grad_norm": 4.885875701904297, "learning_rate": 1.0564166501374289e-05, "loss": 0.8819, "step": 166500},
    {"epoch": 4.73, "grad_norm": 3.8010919094085693, "learning_rate": 1.053583066504208e-05, "loss": 0.8585, "step": 167000},
    {"epoch": 4.75, "grad_norm": 2.4292032718658447, "learning_rate": 1.050749482870987e-05, "loss": 0.8869, "step": 167500},
    {"epoch": 4.76, "grad_norm": 3.1070659160614014, "learning_rate": 1.047915899237766e-05, "loss": 0.8033, "step": 168000},
    {"epoch": 4.77, "grad_norm": 3.7243080139160156, "learning_rate": 1.045082315604545e-05, "loss": 0.8518, "step": 168500},
    {"epoch": 4.79, "grad_norm": 3.1299855709075928, "learning_rate": 1.0422487319713242e-05, "loss": 0.8439, "step": 169000},
    {"epoch": 4.8, "grad_norm": 0.01023823581635952, "learning_rate": 1.0394151483381032e-05, "loss": 0.8251, "step": 169500},
    {"epoch": 4.82, "grad_norm": 5.466165542602539, "learning_rate": 1.0365815647048822e-05, "loss": 0.8065, "step": 170000},
    {"epoch": 4.83, "grad_norm": 3.4839234352111816, "learning_rate": 1.0337479810716614e-05, "loss": 0.8351, "step": 170500},
    {"epoch": 4.85, "grad_norm": 2.2356865406036377, "learning_rate": 1.0309143974384405e-05, "loss": 0.8775, "step": 171000},
    {"epoch": 4.86, "grad_norm": 4.999177932739258, "learning_rate": 1.0280808138052196e-05, "loss": 0.8641, "step": 171500},
    {"epoch": 4.87, "grad_norm": 3.0965590476989746, "learning_rate": 1.0252472301719987e-05, "loss": 0.8145, "step": 172000},
    {"epoch": 4.89, "grad_norm": 3.6084883213043213, "learning_rate": 1.0224136465387776e-05, "loss": 0.8015, "step": 172500},
    {"epoch": 4.9, "grad_norm": 0.03004506602883339, "learning_rate": 1.0195800629055567e-05, "loss": 0.8532, "step": 173000},
    {"epoch": 4.92, "grad_norm": 7.562097072601318, "learning_rate": 1.0167464792723358e-05, "loss": 0.8199, "step": 173500},
    {"epoch": 4.93, "grad_norm": 5.141594886779785, "learning_rate": 1.013912895639115e-05, "loss": 0.8355, "step": 174000},
    {"epoch": 4.94, "grad_norm": 3.5604329109191895, "learning_rate": 1.0110793120058939e-05, "loss": 0.8121, "step": 174500},
    {"epoch": 4.96, "grad_norm": 2.2066256999969482, "learning_rate": 1.008245728372673e-05, "loss": 0.8497, "step": 175000},
    {"epoch": 4.97, "grad_norm": 4.226958274841309, "learning_rate": 1.005412144739452e-05, "loss": 0.7996, "step": 175500},
    {"epoch": 4.99, "grad_norm": 3.455479383468628, "learning_rate": 1.0025785611062311e-05, "loss": 0.8377, "step": 176000},
    {"epoch": 5.0, "eval_accuracy": 0.7734058717896951, "eval_loss": 0.8303263783454895, "eval_runtime": 2743.1767, "eval_samples_per_second": 34.307, "eval_steps_per_second": 34.307, "step": 176456},
    {"epoch": 5.0, "grad_norm": 0.007345318328589201, "learning_rate": 9.997449774730102e-06, "loss": 0.8228, "step": 176500},
    {"epoch": 5.02, "grad_norm": 3.3040854930877686, "learning_rate": 9.969113938397893e-06, "loss": 0.8059, "step": 177000},
    {"epoch": 5.03, "grad_norm": 0.11892475187778473, "learning_rate": 9.940778102065684e-06, "loss": 0.8316, "step": 177500},
    {"epoch": 5.04, "grad_norm": 3.803138017654419, "learning_rate": 9.912442265733473e-06, "loss": 0.8172, "step": 178000},
    {"epoch": 5.06, "grad_norm": 10.063855171203613, "learning_rate": 9.884106429401264e-06, "loss": 0.7939, "step": 178500},
    {"epoch": 5.07, "grad_norm": 2.2473671436309814, "learning_rate": 9.855770593069055e-06, "loss": 0.8246, "step": 179000},
    {"epoch": 5.09, "grad_norm": 3.1692700386047363, "learning_rate": 9.827434756736846e-06, "loss": 0.816, "step": 179500},
    {"epoch": 5.1, "grad_norm": 2.5137248039245605, "learning_rate": 9.799098920404635e-06, "loss": 0.7863, "step": 180000},
    {"epoch": 5.11, "grad_norm": 2.4693000316619873, "learning_rate": 9.770763084072428e-06, "loss": 0.8297, "step": 180500},
    {"epoch": 5.13, "grad_norm": 3.0473134517669678, "learning_rate": 9.742427247740217e-06, "loss": 0.8124, "step": 181000},
    {"epoch": 5.14, "grad_norm": 3.729397773742676, "learning_rate": 9.714091411408008e-06, "loss": 0.8146, "step": 181500},
    {"epoch": 5.16, "grad_norm": 1.7608509063720703, "learning_rate": 9.6857555750758e-06, "loss": 0.8478, "step": 182000},
    {"epoch": 5.17, "grad_norm": 2.2976291179656982, "learning_rate": 9.65741973874359e-06, "loss": 0.8438, "step": 182500},
    {"epoch": 5.19, "grad_norm": 3.66800856590271, "learning_rate": 9.62908390241138e-06, "loss": 0.8117, "step": 183000},
    {"epoch": 5.2, "grad_norm": 2.1089248657226562, "learning_rate": 9.60074806607917e-06, "loss": 0.7711, "step": 183500},
    {"epoch": 5.21, "grad_norm": 2.214057207107544, "learning_rate": 9.572412229746961e-06, "loss": 0.8131, "step": 184000},
    {"epoch": 5.23, "grad_norm": 2.953176736831665, "learning_rate": 9.544076393414752e-06, "loss": 0.8358, "step": 184500},
    {"epoch": 5.24, "grad_norm": 4.549135684967041, "learning_rate": 9.515740557082543e-06, "loss": 0.7986, "step": 185000},
    {"epoch": 5.26, "grad_norm": 5.39935827255249, "learning_rate": 9.487404720750334e-06, "loss": 0.8279, "step": 185500},
    {"epoch": 5.27, "grad_norm": 2.80328631401062, "learning_rate": 9.459068884418125e-06, "loss": 0.8442, "step": 186000},
    {"epoch": 5.28, "grad_norm": 3.071382522583008, "learning_rate": 9.430733048085914e-06, "loss": 0.8557, "step": 186500},
    {"epoch": 5.3, "grad_norm": 3.065718412399292, "learning_rate": 9.402397211753705e-06, "loss": 0.8119, "step": 187000},
    {"epoch": 5.31, "grad_norm": 3.371381998062134, "learning_rate": 9.374061375421496e-06, "loss": 0.7994, "step": 187500},
    {"epoch": 5.33, "grad_norm": 4.33438777923584, "learning_rate": 9.345725539089287e-06, "loss": 0.804, "step": 188000},
    {"epoch": 5.34, "grad_norm": 5.77909517288208, "learning_rate": 9.317389702757078e-06, "loss": 0.8357, "step": 188500},
    {"epoch": 5.36, "grad_norm": 2.6496472358703613, "learning_rate": 9.289053866424869e-06, "loss": 0.8209, "step": 189000},
    {"epoch": 5.37, "grad_norm": 4.3701348304748535, "learning_rate": 9.260718030092658e-06, "loss": 0.8317, "step": 189500},
    {"epoch": 5.38, "grad_norm": 3.137705087661743, "learning_rate": 9.23238219376045e-06, "loss": 0.7959, "step": 190000},
    {"epoch": 5.4, "grad_norm": 2.3616700172424316, "learning_rate": 9.20404635742824e-06, "loss": 0.7894, "step": 190500},
    {"epoch": 5.41, "grad_norm": 2.2935869693756104, "learning_rate": 9.175710521096031e-06, "loss": 0.8328, "step": 191000},
    {"epoch": 5.43, "grad_norm": 4.851805686950684, "learning_rate": 9.14737468476382e-06, "loss": 0.8681, "step": 191500},
    {"epoch": 5.44, "grad_norm": 9.607928276062012, "learning_rate": 9.119038848431611e-06, "loss": 0.7672, "step": 192000},
    {"epoch": 5.45, "grad_norm": 3.8751378059387207, "learning_rate": 9.090703012099404e-06, "loss": 0.8453, "step": 192500},
    {"epoch": 5.47, "grad_norm": 4.63936710357666, "learning_rate": 9.062367175767193e-06, "loss": 0.8093, "step": 193000},
    {"epoch": 5.48, "grad_norm": 3.8076059818267822, "learning_rate": 9.034031339434984e-06, "loss": 0.7815, "step": 193500},
    {"epoch": 5.5, "grad_norm": 3.0797765254974365, "learning_rate": 9.005695503102775e-06, "loss": 0.805, "step": 194000},
    {"epoch": 5.51, "grad_norm": 4.025183200836182, "learning_rate": 8.977359666770566e-06, "loss": 0.8292, "step": 194500},
    {"epoch": 5.53, "grad_norm": 3.398597002029419,
|
"learning_rate": 8.949023830438355e-06, |
|
"loss": 0.8072, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"grad_norm": 3.7402262687683105, |
|
"learning_rate": 8.920687994106146e-06, |
|
"loss": 0.8492, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"grad_norm": 4.7148919105529785, |
|
"learning_rate": 8.892352157773937e-06, |
|
"loss": 0.8189, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"grad_norm": 2.135432004928589, |
|
"learning_rate": 8.864016321441728e-06, |
|
"loss": 0.8146, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"grad_norm": 3.6518137454986572, |
|
"learning_rate": 8.835680485109519e-06, |
|
"loss": 0.8169, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 0.9229252338409424, |
|
"learning_rate": 8.80734464877731e-06, |
|
"loss": 0.8226, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"grad_norm": 0.1361069679260254, |
|
"learning_rate": 8.7790088124451e-06, |
|
"loss": 0.8053, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"grad_norm": 0.059051357209682465, |
|
"learning_rate": 8.75067297611289e-06, |
|
"loss": 0.8288, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 4.741939544677734, |
|
"learning_rate": 8.722337139780681e-06, |
|
"loss": 0.8408, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"grad_norm": 3.188978433609009, |
|
"learning_rate": 8.694001303448472e-06, |
|
"loss": 0.8116, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 3.483867883682251, |
|
"learning_rate": 8.665665467116261e-06, |
|
"loss": 0.8168, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 3.1518492698669434, |
|
"learning_rate": 8.637329630784054e-06, |
|
"loss": 0.832, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"grad_norm": 5.365024089813232, |
|
"learning_rate": 8.608993794451845e-06, |
|
"loss": 0.8259, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"grad_norm": 3.5916318893432617, |
|
"learning_rate": 8.580657958119634e-06, |
|
"loss": 0.8245, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"grad_norm": 3.5970184803009033, |
|
"learning_rate": 8.552322121787425e-06, |
|
"loss": 0.8316, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 4.369210243225098, |
|
"learning_rate": 8.523986285455216e-06, |
|
"loss": 0.8032, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"grad_norm": 2.211902618408203, |
|
"learning_rate": 8.495650449123007e-06, |
|
"loss": 0.8216, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"grad_norm": 0.11311174184083939, |
|
"learning_rate": 8.467314612790796e-06, |
|
"loss": 0.8386, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 2.7481818199157715, |
|
"learning_rate": 8.438978776458587e-06, |
|
"loss": 0.8298, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"grad_norm": 2.241297960281372, |
|
"learning_rate": 8.41064294012638e-06, |
|
"loss": 0.7622, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 3.433582067489624, |
|
"learning_rate": 8.382307103794169e-06, |
|
"loss": 0.7888, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"grad_norm": 3.3529977798461914, |
|
"learning_rate": 8.35397126746196e-06, |
|
"loss": 0.8105, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"grad_norm": 2.6061885356903076, |
|
"learning_rate": 8.325635431129751e-06, |
|
"loss": 0.8184, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 2.5410923957824707, |
|
"learning_rate": 8.297299594797542e-06, |
|
"loss": 0.8144, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"grad_norm": 4.426595211029053, |
|
"learning_rate": 8.268963758465331e-06, |
|
"loss": 0.8693, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 4.819573879241943, |
|
"learning_rate": 8.240627922133122e-06, |
|
"loss": 0.8314, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 2.823551893234253, |
|
"learning_rate": 8.212292085800913e-06, |
|
"loss": 0.789, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"grad_norm": 3.179856777191162, |
|
"learning_rate": 8.183956249468704e-06, |
|
"loss": 0.8096, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 3.1199803352355957, |
|
"learning_rate": 8.155620413136495e-06, |
|
"loss": 0.8197, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"grad_norm": 0.029315292835235596, |
|
"learning_rate": 8.127284576804286e-06, |
|
"loss": 0.7781, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"grad_norm": 4.753857135772705, |
|
"learning_rate": 8.098948740472075e-06, |
|
"loss": 0.7788, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"grad_norm": 2.8551526069641113, |
|
"learning_rate": 8.070612904139866e-06, |
|
"loss": 0.8241, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"grad_norm": 2.6687989234924316, |
|
"learning_rate": 8.042277067807657e-06, |
|
"loss": 0.805, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"grad_norm": 3.9277312755584717, |
|
"learning_rate": 8.013941231475448e-06, |
|
"loss": 0.8204, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.7752760038677732, |
|
"eval_loss": 0.8054434061050415, |
|
"eval_runtime": 2742.5432, |
|
"eval_samples_per_second": 34.315, |
|
"eval_steps_per_second": 34.315, |
|
"step": 211748 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"grad_norm": 2.4980616569519043, |
|
"learning_rate": 7.985605395143239e-06, |
|
"loss": 0.8555, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"grad_norm": 1.9361926317214966, |
|
"learning_rate": 7.95726955881103e-06, |
|
"loss": 0.7816, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"grad_norm": 2.223845958709717, |
|
"learning_rate": 7.92893372247882e-06, |
|
"loss": 0.7757, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"grad_norm": 2.364459991455078, |
|
"learning_rate": 7.90059788614661e-06, |
|
"loss": 0.8488, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"grad_norm": 4.380248069763184, |
|
"learning_rate": 7.872262049814401e-06, |
|
"loss": 0.7997, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"grad_norm": 4.055036544799805, |
|
"learning_rate": 7.843926213482192e-06, |
|
"loss": 0.7863, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"grad_norm": 0.13833500444889069, |
|
"learning_rate": 7.815590377149983e-06, |
|
"loss": 0.8032, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"grad_norm": 3.1694560050964355, |
|
"learning_rate": 7.787254540817772e-06, |
|
"loss": 0.793, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"grad_norm": 3.2722058296203613, |
|
"learning_rate": 7.758918704485565e-06, |
|
"loss": 0.8196, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"grad_norm": 3.0875654220581055, |
|
"learning_rate": 7.730582868153354e-06, |
|
"loss": 0.7951, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"grad_norm": 1.9850670099258423, |
|
"learning_rate": 7.702247031821145e-06, |
|
"loss": 0.804, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"grad_norm": 3.0951926708221436, |
|
"learning_rate": 7.673911195488936e-06, |
|
"loss": 0.8405, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"grad_norm": 3.2018356323242188, |
|
"learning_rate": 7.645575359156727e-06, |
|
"loss": 0.8358, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"grad_norm": 2.13598895072937, |
|
"learning_rate": 7.617239522824517e-06, |
|
"loss": 0.7789, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"grad_norm": 3.089243173599243, |
|
"learning_rate": 7.588903686492307e-06, |
|
"loss": 0.7905, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"grad_norm": 5.413113594055176, |
|
"learning_rate": 7.560567850160098e-06, |
|
"loss": 0.834, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"grad_norm": 1.8769762516021729, |
|
"learning_rate": 7.53223201382789e-06, |
|
"loss": 0.7967, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"grad_norm": 0.30497410893440247, |
|
"learning_rate": 7.50389617749568e-06, |
|
"loss": 0.8116, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"grad_norm": 3.9341723918914795, |
|
"learning_rate": 7.475560341163471e-06, |
|
"loss": 0.8227, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"grad_norm": 4.053406238555908, |
|
"learning_rate": 7.447224504831261e-06, |
|
"loss": 0.7984, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"grad_norm": 4.562232494354248, |
|
"learning_rate": 7.418888668499052e-06, |
|
"loss": 0.7988, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"grad_norm": 4.173889636993408, |
|
"learning_rate": 7.390552832166842e-06, |
|
"loss": 0.8079, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"grad_norm": 2.6685173511505127, |
|
"learning_rate": 7.362216995834633e-06, |
|
"loss": 0.7419, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"grad_norm": 3.2578206062316895, |
|
"learning_rate": 7.333881159502423e-06, |
|
"loss": 0.783, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"grad_norm": 3.737518548965454, |
|
"learning_rate": 7.305545323170215e-06, |
|
"loss": 0.7971, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"grad_norm": 3.875011920928955, |
|
"learning_rate": 7.277209486838005e-06, |
|
"loss": 0.7866, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"grad_norm": 2.1486947536468506, |
|
"learning_rate": 7.248873650505796e-06, |
|
"loss": 0.7771, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"grad_norm": 3.4426918029785156, |
|
"learning_rate": 7.220537814173586e-06, |
|
"loss": 0.7717, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 3.809903621673584, |
|
"learning_rate": 7.192201977841377e-06, |
|
"loss": 0.8186, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"grad_norm": 0.17552083730697632, |
|
"learning_rate": 7.163866141509167e-06, |
|
"loss": 0.7982, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"grad_norm": 3.331289768218994, |
|
"learning_rate": 7.135530305176958e-06, |
|
"loss": 0.7963, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"grad_norm": 3.937941074371338, |
|
"learning_rate": 7.107194468844748e-06, |
|
"loss": 0.8209, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"grad_norm": 12.834056854248047, |
|
"learning_rate": 7.07885863251254e-06, |
|
"loss": 0.8353, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"grad_norm": 3.121537923812866, |
|
"learning_rate": 7.0505227961803305e-06, |
|
"loss": 0.7912, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"grad_norm": 3.6076176166534424, |
|
"learning_rate": 7.022186959848121e-06, |
|
"loss": 0.7881, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"grad_norm": 2.1992852687835693, |
|
"learning_rate": 6.993851123515912e-06, |
|
"loss": 0.7787, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"grad_norm": 2.540170192718506, |
|
"learning_rate": 6.965515287183702e-06, |
|
"loss": 0.7942, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"grad_norm": 4.526458740234375, |
|
"learning_rate": 6.937179450851493e-06, |
|
"loss": 0.8014, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"grad_norm": 3.644343376159668, |
|
"learning_rate": 6.908843614519283e-06, |
|
"loss": 0.8333, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"grad_norm": 3.894827127456665, |
|
"learning_rate": 6.880507778187074e-06, |
|
"loss": 0.801, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"grad_norm": 3.7568604946136475, |
|
"learning_rate": 6.852171941854865e-06, |
|
"loss": 0.7923, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"grad_norm": 4.713413238525391, |
|
"learning_rate": 6.8238361055226555e-06, |
|
"loss": 0.777, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"grad_norm": 2.4898388385772705, |
|
"learning_rate": 6.795500269190446e-06, |
|
"loss": 0.8041, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"grad_norm": 2.9944307804107666, |
|
"learning_rate": 6.7671644328582366e-06, |
|
"loss": 0.8332, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"grad_norm": 4.278744697570801, |
|
"learning_rate": 6.738828596526027e-06, |
|
"loss": 0.7571, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"grad_norm": 2.043044328689575, |
|
"learning_rate": 6.710492760193818e-06, |
|
"loss": 0.7547, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"grad_norm": 3.0850257873535156, |
|
"learning_rate": 6.682156923861608e-06, |
|
"loss": 0.7262, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"grad_norm": 3.963240385055542, |
|
"learning_rate": 6.653821087529399e-06, |
|
"loss": 0.7709, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"grad_norm": 2.9758195877075195, |
|
"learning_rate": 6.6254852511971904e-06, |
|
"loss": 0.8393, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"grad_norm": 3.1119496822357178, |
|
"learning_rate": 6.5971494148649805e-06, |
|
"loss": 0.8186, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 0.15055912733078003, |
|
"learning_rate": 6.5688135785327715e-06, |
|
"loss": 0.7837, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"grad_norm": 4.089719295501709, |
|
"learning_rate": 6.5404777422005616e-06, |
|
"loss": 0.8045, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"grad_norm": 4.55492639541626, |
|
"learning_rate": 6.5121419058683525e-06, |
|
"loss": 0.7813, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"grad_norm": 4.595581531524658, |
|
"learning_rate": 6.483806069536143e-06, |
|
"loss": 0.8184, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"grad_norm": 1.8500324487686157, |
|
"learning_rate": 6.4554702332039335e-06, |
|
"loss": 0.8151, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"grad_norm": 4.76149320602417, |
|
"learning_rate": 6.427134396871724e-06, |
|
"loss": 0.7687, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"grad_norm": 4.253459930419922, |
|
"learning_rate": 6.398798560539515e-06, |
|
"loss": 0.7684, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"grad_norm": 4.767841815948486, |
|
"learning_rate": 6.3704627242073055e-06, |
|
"loss": 0.777, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"grad_norm": 13.834861755371094, |
|
"learning_rate": 6.3421268878750965e-06, |
|
"loss": 0.8122, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"grad_norm": 2.1560964584350586, |
|
"learning_rate": 6.3137910515428866e-06, |
|
"loss": 0.8281, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"grad_norm": 3.589592218399048, |
|
"learning_rate": 6.2854552152106775e-06, |
|
"loss": 0.7957, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"grad_norm": 2.0722033977508545, |
|
"learning_rate": 6.257119378878468e-06, |
|
"loss": 0.7793, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"grad_norm": 3.5615785121917725, |
|
"learning_rate": 6.2287835425462585e-06, |
|
"loss": 0.7737, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"grad_norm": 3.982910394668579, |
|
"learning_rate": 6.200447706214049e-06, |
|
"loss": 0.749, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"grad_norm": 4.356442451477051, |
|
"learning_rate": 6.17211186988184e-06, |
|
"loss": 0.7996, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"grad_norm": 0.04942583665251732, |
|
"learning_rate": 6.143776033549631e-06, |
|
"loss": 0.7455, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"grad_norm": 1.8689194917678833, |
|
"learning_rate": 6.1154401972174214e-06, |
|
"loss": 0.7809, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 2.919508934020996, |
|
"learning_rate": 6.087104360885212e-06, |
|
"loss": 0.7589, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"grad_norm": 3.5863306522369385, |
|
"learning_rate": 6.0587685245530025e-06, |
|
"loss": 0.7854, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"grad_norm": 1.6574618816375732, |
|
"learning_rate": 6.030432688220793e-06, |
|
"loss": 0.7658, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"grad_norm": 4.045894145965576, |
|
"learning_rate": 6.0020968518885835e-06, |
|
"loss": 0.7482, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.7764873394183465, |
|
"eval_loss": 0.7901994585990906, |
|
"eval_runtime": 2745.9709, |
|
"eval_samples_per_second": 34.272, |
|
"eval_steps_per_second": 34.272, |
|
"step": 247039 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"grad_norm": 3.3407113552093506, |
|
"learning_rate": 5.9737610155563745e-06, |
|
"loss": 0.7677, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"grad_norm": 4.4228692054748535, |
|
"learning_rate": 5.945425179224165e-06, |
|
"loss": 0.7874, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 3.2873871326446533, |
|
"learning_rate": 5.917089342891956e-06, |
|
"loss": 0.7588, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"grad_norm": 2.2169015407562256, |
|
"learning_rate": 5.8887535065597464e-06, |
|
"loss": 0.7579, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"grad_norm": 1.9542386531829834, |
|
"learning_rate": 5.860417670227537e-06, |
|
"loss": 0.7618, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"grad_norm": 3.451892852783203, |
|
"learning_rate": 5.8320818338953275e-06, |
|
"loss": 0.7734, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"grad_norm": 4.9330034255981445, |
|
"learning_rate": 5.803745997563118e-06, |
|
"loss": 0.8035, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"grad_norm": 3.184312105178833, |
|
"learning_rate": 5.7754101612309085e-06, |
|
"loss": 0.7663, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"grad_norm": 3.387808084487915, |
|
"learning_rate": 5.7470743248986994e-06, |
|
"loss": 0.77, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"grad_norm": 2.910405158996582, |
|
"learning_rate": 5.718738488566491e-06, |
|
"loss": 0.8006, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"grad_norm": 3.2209646701812744, |
|
"learning_rate": 5.690402652234281e-06, |
|
"loss": 0.7645, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"grad_norm": 2.874213218688965, |
|
"learning_rate": 5.662066815902072e-06, |
|
"loss": 0.7371, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"grad_norm": 4.348395824432373, |
|
"learning_rate": 5.633730979569862e-06, |
|
"loss": 0.7177, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 2.4898056983947754, |
|
"learning_rate": 5.605395143237653e-06, |
|
"loss": 0.7727, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"grad_norm": 4.091867923736572, |
|
"learning_rate": 5.577059306905443e-06, |
|
"loss": 0.8085, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"grad_norm": 4.121910095214844, |
|
"learning_rate": 5.548723470573234e-06, |
|
"loss": 0.7768, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"grad_norm": 2.1994853019714355, |
|
"learning_rate": 5.5203876342410244e-06, |
|
"loss": 0.7847, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"grad_norm": 3.1094110012054443, |
|
"learning_rate": 5.492051797908816e-06, |
|
"loss": 0.7496, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"grad_norm": 4.486825942993164, |
|
"learning_rate": 5.463715961576606e-06, |
|
"loss": 0.7434, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 4.349071502685547, |
|
"learning_rate": 5.435380125244397e-06, |
|
"loss": 0.7563, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"grad_norm": 2.1635329723358154, |
|
"learning_rate": 5.407044288912187e-06, |
|
"loss": 0.8022, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"grad_norm": 3.0839200019836426, |
|
"learning_rate": 5.378708452579978e-06, |
|
"loss": 0.7507, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"grad_norm": 3.0056662559509277, |
|
"learning_rate": 5.350372616247768e-06, |
|
"loss": 0.7807, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"grad_norm": 3.784560441970825, |
|
"learning_rate": 5.322036779915559e-06, |
|
"loss": 0.7763, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"grad_norm": 4.388574123382568, |
|
"learning_rate": 5.293700943583351e-06, |
|
"loss": 0.8446, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 3.1023905277252197, |
|
"learning_rate": 5.265365107251141e-06, |
|
"loss": 0.7713, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"grad_norm": 4.7277045249938965, |
|
"learning_rate": 5.237029270918932e-06, |
|
"loss": 0.7975, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"grad_norm": 5.080424785614014, |
|
"learning_rate": 5.208693434586722e-06, |
|
"loss": 0.7836, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"grad_norm": 3.0420889854431152, |
|
"learning_rate": 5.180357598254513e-06, |
|
"loss": 0.8012, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"grad_norm": 2.417198657989502, |
|
"learning_rate": 5.152021761922303e-06, |
|
"loss": 0.8363, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 3.7752904891967773, |
|
"learning_rate": 5.123685925590094e-06, |
|
"loss": 0.7595, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"grad_norm": 2.991297721862793, |
|
"learning_rate": 5.095350089257884e-06, |
|
"loss": 0.7564, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"grad_norm": 2.7910547256469727, |
|
"learning_rate": 5.067014252925676e-06, |
|
"loss": 0.7458, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"grad_norm": 3.118992567062378, |
|
"learning_rate": 5.038678416593466e-06, |
|
"loss": 0.8259, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"grad_norm": 2.306201696395874, |
|
"learning_rate": 5.010342580261257e-06, |
|
"loss": 0.7895, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"grad_norm": 2.723785638809204, |
|
"learning_rate": 4.982006743929047e-06, |
|
"loss": 0.8224, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 5.79103422164917, |
|
"learning_rate": 4.953670907596838e-06, |
|
"loss": 0.7698, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"grad_norm": 2.896071195602417, |
|
"learning_rate": 4.925335071264628e-06, |
|
"loss": 0.8117, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"grad_norm": 4.713535308837891, |
|
"learning_rate": 4.89699923493242e-06, |
|
"loss": 0.8028, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"grad_norm": 3.329005241394043, |
|
"learning_rate": 4.86866339860021e-06, |
|
"loss": 0.762, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"grad_norm": 2.388073444366455, |
|
"learning_rate": 4.840327562268001e-06, |
|
"loss": 0.7489, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"grad_norm": 2.9465584754943848, |
|
"learning_rate": 4.811991725935791e-06, |
|
"loss": 0.7755, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"grad_norm": 4.375637531280518, |
|
"learning_rate": 4.783655889603582e-06, |
|
"loss": 0.7994, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"grad_norm": 3.1931121349334717, |
|
"learning_rate": 4.755320053271373e-06, |
|
"loss": 0.8111, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"grad_norm": 4.697356700897217, |
|
"learning_rate": 4.726984216939163e-06, |
|
"loss": 0.7568, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"grad_norm": 3.1443934440612793, |
|
"learning_rate": 4.698648380606954e-06, |
|
"loss": 0.7723, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"grad_norm": 4.355726718902588, |
|
"learning_rate": 4.670312544274745e-06, |
|
"loss": 0.7451, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 3.7708301544189453, |
|
"learning_rate": 4.641976707942535e-06, |
|
"loss": 0.7662, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"grad_norm": 3.0741500854492188, |
|
"learning_rate": 4.613640871610326e-06, |
|
"loss": 0.7939, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"grad_norm": 4.382043361663818, |
|
"learning_rate": 4.585305035278116e-06, |
|
"loss": 0.7949, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"grad_norm": 0.0032790284603834152, |
|
"learning_rate": 4.556969198945907e-06, |
|
"loss": 0.776, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"grad_norm": 4.499131679534912, |
|
"learning_rate": 4.528633362613698e-06, |
|
"loss": 0.7683, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"grad_norm": 2.200047016143799, |
|
"learning_rate": 4.500297526281488e-06, |
|
"loss": 0.8006, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"grad_norm": 3.781383752822876, |
|
"learning_rate": 4.471961689949279e-06, |
|
"loss": 0.7454, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"grad_norm": 3.239480972290039, |
|
"learning_rate": 4.44362585361707e-06, |
|
"loss": 0.7665, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"grad_norm": 0.20325414836406708, |
|
"learning_rate": 4.415290017284861e-06, |
|
"loss": 0.8246, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"grad_norm": 3.527724504470825, |
|
"learning_rate": 4.386954180952651e-06, |
|
"loss": 0.7981, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"grad_norm": 4.077188968658447, |
|
"learning_rate": 4.358618344620442e-06, |
|
"loss": 0.7873, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"grad_norm": 2.584451675415039, |
|
"learning_rate": 4.330282508288233e-06, |
|
"loss": 0.8097, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"grad_norm": 3.91054368019104, |
|
"learning_rate": 4.301946671956023e-06, |
|
"loss": 0.781, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"grad_norm": 1.5081390142440796, |
|
"learning_rate": 4.273610835623814e-06, |
|
"loss": 0.7504, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"grad_norm": 2.7629621028900146, |
|
"learning_rate": 4.245274999291604e-06, |
|
"loss": 0.7698, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"grad_norm": 0.04077434912323952, |
|
"learning_rate": 4.216939162959395e-06, |
|
"loss": 0.794, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"grad_norm": 3.088022232055664, |
|
"learning_rate": 4.188603326627186e-06, |
|
"loss": 0.7957, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 0.009913645684719086, |
|
"learning_rate": 4.160267490294976e-06, |
|
"loss": 0.7477, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"grad_norm": 3.2786879539489746, |
|
"learning_rate": 4.131931653962767e-06, |
|
"loss": 0.7746, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"grad_norm": 4.847500801086426, |
|
"learning_rate": 4.103595817630558e-06, |
|
"loss": 0.7769, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"grad_norm": 3.0839521884918213, |
|
"learning_rate": 4.075259981298348e-06, |
|
"loss": 0.7969, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"grad_norm": 5.6063337326049805, |
|
"learning_rate": 4.046924144966139e-06, |
|
"loss": 0.8007, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"grad_norm": 2.7218992710113525, |
|
"learning_rate": 4.018588308633929e-06, |
|
"loss": 0.777, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.7773267737033928, |
|
"eval_loss": 0.7822893857955933, |
|
"eval_runtime": 2744.096, |
|
"eval_samples_per_second": 34.296, |
|
"eval_steps_per_second": 34.296, |
|
"step": 282331 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 3.1049580574035645, |
|
"learning_rate": 3.990252472301721e-06, |
|
"loss": 0.792, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"grad_norm": 2.2240233421325684, |
|
"learning_rate": 3.961916635969511e-06, |
|
"loss": 0.7679, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"grad_norm": 3.7481648921966553, |
|
"learning_rate": 3.933580799637302e-06, |
|
"loss": 0.783, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"grad_norm": 2.1633012294769287, |
|
"learning_rate": 3.905244963305092e-06, |
|
"loss": 0.7946, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"grad_norm": 2.2068793773651123, |
|
"learning_rate": 3.876909126972883e-06, |
|
"loss": 0.739, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"grad_norm": 2.721085548400879, |
|
"learning_rate": 3.848573290640674e-06, |
|
"loss": 0.7119, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"grad_norm": 4.786925792694092, |
|
"learning_rate": 3.820237454308464e-06, |
|
"loss": 0.7804, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"grad_norm": 2.967390537261963, |
|
"learning_rate": 3.7919016179762545e-06, |
|
"loss": 0.8006, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"grad_norm": 3.6855547428131104, |
|
"learning_rate": 3.763565781644046e-06, |
|
"loss": 0.7571, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"grad_norm": 3.163161516189575, |
|
"learning_rate": 3.7352299453118364e-06, |
|
"loss": 0.7706, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"grad_norm": 2.217069387435913, |
|
"learning_rate": 3.706894108979627e-06, |
|
"loss": 0.7855, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"grad_norm": 3.2176852226257324, |
|
"learning_rate": 3.6785582726474174e-06, |
|
"loss": 0.7804, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"grad_norm": 4.42471981048584, |
|
"learning_rate": 3.6502224363152084e-06, |
|
"loss": 0.7368, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"grad_norm": 3.7135393619537354, |
|
"learning_rate": 3.621886599982999e-06, |
|
"loss": 0.7546, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"grad_norm": 1.9810535907745361, |
|
"learning_rate": 3.5935507636507894e-06, |
|
"loss": 0.7748, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"grad_norm": 2.208085060119629, |
|
"learning_rate": 3.56521492731858e-06, |
|
"loss": 0.7382, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"grad_norm": 4.069818019866943, |
|
"learning_rate": 3.536879090986371e-06, |
|
"loss": 0.7717, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"grad_norm": 3.083171844482422, |
|
"learning_rate": 3.5085432546541614e-06, |
|
"loss": 0.7637, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"grad_norm": 1.8702301979064941, |
|
"learning_rate": 3.480207418321952e-06, |
|
"loss": 0.7632, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"grad_norm": 2.7875142097473145, |
|
"learning_rate": 3.4518715819897424e-06, |
|
"loss": 0.7447, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"grad_norm": 2.318441152572632, |
|
"learning_rate": 3.4235357456575333e-06, |
|
"loss": 0.7445, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"grad_norm": 3.098846912384033, |
|
"learning_rate": 3.395199909325324e-06, |
|
"loss": 0.7527, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"grad_norm": 4.272964954376221, |
|
"learning_rate": 3.3668640729931144e-06, |
|
"loss": 0.7604, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"grad_norm": 2.8859071731567383, |
|
"learning_rate": 3.338528236660905e-06, |
|
"loss": 0.7715, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"grad_norm": 2.219268560409546, |
|
"learning_rate": 3.3101924003286963e-06, |
|
"loss": 0.7369, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"grad_norm": 3.0765175819396973, |
|
"learning_rate": 3.2818565639964868e-06, |
|
"loss": 0.7948, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"grad_norm": 3.4554049968719482, |
|
"learning_rate": 3.2535207276642773e-06, |
|
"loss": 0.7596, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"grad_norm": 3.8051280975341797, |
|
"learning_rate": 3.225184891332068e-06, |
|
"loss": 0.792, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"grad_norm": 2.201718330383301, |
|
"learning_rate": 3.1968490549998588e-06, |
|
"loss": 0.7968, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"grad_norm": 3.527601718902588, |
|
"learning_rate": 3.1685132186676493e-06, |
|
"loss": 0.7582, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"grad_norm": 3.275254726409912, |
|
"learning_rate": 3.14017738233544e-06, |
|
"loss": 0.8061, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"grad_norm": 2.419063091278076, |
|
"learning_rate": 3.1118415460032303e-06, |
|
"loss": 0.7874, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"grad_norm": 2.1207940578460693, |
|
"learning_rate": 3.0835057096710213e-06, |
|
"loss": 0.7635, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"grad_norm": 3.7095024585723877, |
|
"learning_rate": 3.0551698733388118e-06, |
|
"loss": 0.7789, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"grad_norm": 3.1537423133850098, |
|
"learning_rate": 3.0268340370066023e-06, |
|
"loss": 0.7847, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"grad_norm": 2.2112996578216553, |
|
"learning_rate": 2.998498200674393e-06, |
|
"loss": 0.7716, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"grad_norm": 2.808809280395508, |
|
"learning_rate": 2.970162364342184e-06, |
|
"loss": 0.7903, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"grad_norm": 2.72741961479187, |
|
"learning_rate": 2.9418265280099747e-06, |
|
"loss": 0.7859, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"grad_norm": 2.9039785861968994, |
|
"learning_rate": 2.9134906916777648e-06, |
|
"loss": 0.7756, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"grad_norm": 0.06406772136688232, |
|
"learning_rate": 2.8851548553455553e-06, |
|
"loss": 0.7672, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"grad_norm": 0.002435769187286496, |
|
"learning_rate": 2.8568190190133467e-06, |
|
"loss": 0.7799, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"grad_norm": 2.7473349571228027, |
|
"learning_rate": 2.828483182681137e-06, |
|
"loss": 0.7607, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"grad_norm": 4.246364116668701, |
|
"learning_rate": 2.8001473463489277e-06, |
|
"loss": 0.7616, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"grad_norm": 4.9080891609191895, |
|
"learning_rate": 2.7718115100167182e-06, |
|
"loss": 0.7813, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"grad_norm": 2.181150197982788, |
|
"learning_rate": 2.743475673684509e-06, |
|
"loss": 0.7604, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"grad_norm": 4.190260410308838, |
|
"learning_rate": 2.7151398373522997e-06, |
|
"loss": 0.7934, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"grad_norm": 3.007934331893921, |
|
"learning_rate": 2.68680400102009e-06, |
|
"loss": 0.7615, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"grad_norm": 3.8845651149749756, |
|
"learning_rate": 2.6584681646878807e-06, |
|
"loss": 0.7446, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"grad_norm": 3.1428420543670654, |
|
"learning_rate": 2.6301323283556717e-06, |
|
"loss": 0.7861, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"grad_norm": 2.3537344932556152, |
|
"learning_rate": 2.601796492023462e-06, |
|
"loss": 0.7959, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"grad_norm": 2.065415143966675, |
|
"learning_rate": 2.5734606556912527e-06, |
|
"loss": 0.7744, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"grad_norm": 2.2324647903442383, |
|
"learning_rate": 2.545124819359043e-06, |
|
"loss": 0.7655, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"grad_norm": 3.919121742248535, |
|
"learning_rate": 2.5167889830268346e-06, |
|
"loss": 0.7631, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"grad_norm": 1.9113390445709229, |
|
"learning_rate": 2.488453146694625e-06, |
|
"loss": 0.7907, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"grad_norm": 3.220702648162842, |
|
"learning_rate": 2.4601173103624156e-06, |
|
"loss": 0.7797, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"grad_norm": 3.0107169151306152, |
|
"learning_rate": 2.431781474030206e-06, |
|
"loss": 0.7633, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 3.342789649963379, |
|
"learning_rate": 2.403445637697997e-06, |
|
"loss": 0.7681, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"grad_norm": 4.381540298461914, |
|
"learning_rate": 2.3751098013657876e-06, |
|
"loss": 0.7389, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"grad_norm": 2.1664342880249023, |
|
"learning_rate": 2.346773965033578e-06, |
|
"loss": 0.7606, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"grad_norm": 1.6208741664886475, |
|
"learning_rate": 2.3184381287013686e-06, |
|
"loss": 0.7427, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"grad_norm": 3.916785955429077, |
|
"learning_rate": 2.2901022923691596e-06, |
|
"loss": 0.7548, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"grad_norm": 4.270482063293457, |
|
"learning_rate": 2.26176645603695e-06, |
|
"loss": 0.7702, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"grad_norm": 2.844374418258667, |
|
"learning_rate": 2.233430619704741e-06, |
|
"loss": 0.7754, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"grad_norm": 5.132666110992432, |
|
"learning_rate": 2.2050947833725315e-06, |
|
"loss": 0.7667, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"grad_norm": 3.2758193016052246, |
|
"learning_rate": 2.176758947040322e-06, |
|
"loss": 0.792, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"grad_norm": 0.15504547953605652, |
|
"learning_rate": 2.1484231107081126e-06, |
|
"loss": 0.7549, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"grad_norm": 4.870330333709717, |
|
"learning_rate": 2.1200872743759035e-06, |
|
"loss": 0.7631, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"grad_norm": 7.014795303344727, |
|
"learning_rate": 2.091751438043694e-06, |
|
"loss": 0.7784, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"grad_norm": 9.451394081115723, |
|
"learning_rate": 2.063415601711485e-06, |
|
"loss": 0.7642, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"grad_norm": 0.6156451106071472, |
|
"learning_rate": 2.0350797653792755e-06, |
|
"loss": 0.7692, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"grad_norm": 4.711402893066406, |
|
"learning_rate": 2.006743929047066e-06, |
|
"loss": 0.8045, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.7766467256750008, |
|
"eval_loss": 0.7740858197212219, |
|
"eval_runtime": 2742.9816, |
|
"eval_samples_per_second": 34.31, |
|
"eval_steps_per_second": 34.31, |
|
"step": 317622 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"grad_norm": 3.5747580528259277, |
|
"learning_rate": 1.9784080927148565e-06, |
|
"loss": 0.75, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"grad_norm": 2.482762575149536, |
|
"learning_rate": 1.9500722563826475e-06, |
|
"loss": 0.7759, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"grad_norm": 3.532433032989502, |
|
"learning_rate": 1.921736420050438e-06, |
|
"loss": 0.79, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"grad_norm": 1.5587611198425293, |
|
"learning_rate": 1.8934005837182287e-06, |
|
"loss": 0.7424, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"grad_norm": 3.8560965061187744, |
|
"learning_rate": 1.8650647473860192e-06, |
|
"loss": 0.7464, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"grad_norm": 3.5473949909210205, |
|
"learning_rate": 1.83672891105381e-06, |
|
"loss": 0.7857, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"grad_norm": 2.0293056964874268, |
|
"learning_rate": 1.8083930747216005e-06, |
|
"loss": 0.7538, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"grad_norm": 5.718314170837402, |
|
"learning_rate": 1.7800572383893912e-06, |
|
"loss": 0.7467, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"grad_norm": 3.7162270545959473, |
|
"learning_rate": 1.7517214020571817e-06, |
|
"loss": 0.7627, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"grad_norm": 3.278439998626709, |
|
"learning_rate": 1.7233855657249727e-06, |
|
"loss": 0.7898, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"grad_norm": 3.3073856830596924, |
|
"learning_rate": 1.6950497293927632e-06, |
|
"loss": 0.7535, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"grad_norm": 0.3235242962837219, |
|
"learning_rate": 1.666713893060554e-06, |
|
"loss": 0.7559, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"grad_norm": 3.050361394882202, |
|
"learning_rate": 1.6383780567283444e-06, |
|
"loss": 0.7876, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"grad_norm": 2.169379472732544, |
|
"learning_rate": 1.6100422203961352e-06, |
|
"loss": 0.7864, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"grad_norm": 2.979144811630249, |
|
"learning_rate": 1.5817063840639257e-06, |
|
"loss": 0.7855, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"grad_norm": 2.187973976135254, |
|
"learning_rate": 1.5533705477317166e-06, |
|
"loss": 0.7744, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"grad_norm": 2.242929220199585, |
|
"learning_rate": 1.525034711399507e-06, |
|
"loss": 0.7929, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"grad_norm": 3.240936756134033, |
|
"learning_rate": 1.4966988750672979e-06, |
|
"loss": 0.7504, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"grad_norm": 5.772107124328613, |
|
"learning_rate": 1.4683630387350884e-06, |
|
"loss": 0.7653, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"grad_norm": 3.1591999530792236, |
|
"learning_rate": 1.4400272024028791e-06, |
|
"loss": 0.7102, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"grad_norm": 2.9955251216888428, |
|
"learning_rate": 1.4116913660706696e-06, |
|
"loss": 0.7689, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"grad_norm": 3.1016738414764404, |
|
"learning_rate": 1.3833555297384604e-06, |
|
"loss": 0.7729, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"grad_norm": 4.220294952392578, |
|
"learning_rate": 1.3550196934062509e-06, |
|
"loss": 0.7901, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"grad_norm": 2.13430118560791, |
|
"learning_rate": 1.3266838570740418e-06, |
|
"loss": 0.764, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"grad_norm": 3.9360506534576416, |
|
"learning_rate": 1.2983480207418323e-06, |
|
"loss": 0.7443, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"grad_norm": 2.117910623550415, |
|
"learning_rate": 1.270012184409623e-06, |
|
"loss": 0.7349, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"grad_norm": 4.577652931213379, |
|
"learning_rate": 1.2416763480774136e-06, |
|
"loss": 0.7891, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"grad_norm": 4.693737506866455, |
|
"learning_rate": 1.2133405117452041e-06, |
|
"loss": 0.7812, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"grad_norm": 3.70219349861145, |
|
"learning_rate": 1.1850046754129948e-06, |
|
"loss": 0.77, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"grad_norm": 3.6610214710235596, |
|
"learning_rate": 1.1566688390807856e-06, |
|
"loss": 0.7, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"grad_norm": 2.40402889251709, |
|
"learning_rate": 1.128333002748576e-06, |
|
"loss": 0.7968, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"grad_norm": 3.7406198978424072, |
|
"learning_rate": 1.0999971664163668e-06, |
|
"loss": 0.7786, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"grad_norm": 4.944614410400391, |
|
"learning_rate": 1.0716613300841576e-06, |
|
"loss": 0.7554, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"grad_norm": 0.21894210577011108, |
|
"learning_rate": 1.043325493751948e-06, |
|
"loss": 0.7585, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"grad_norm": 4.879526138305664, |
|
"learning_rate": 1.0149896574197388e-06, |
|
"loss": 0.767, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"grad_norm": 3.0120060443878174, |
|
"learning_rate": 9.866538210875293e-07, |
|
"loss": 0.7692, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 2.9848334789276123, |
|
"learning_rate": 9.5831798475532e-07, |
|
"loss": 0.8005, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"grad_norm": 4.2996110916137695, |
|
"learning_rate": 9.299821484231109e-07, |
|
"loss": 0.7887, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"grad_norm": 3.8129289150238037, |
|
"learning_rate": 9.016463120909015e-07, |
|
"loss": 0.7608, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 3.8413565158843994, |
|
"learning_rate": 8.733104757586921e-07, |
|
"loss": 0.7477, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"grad_norm": 2.2064590454101562, |
|
"learning_rate": 8.449746394264829e-07, |
|
"loss": 0.7587, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"grad_norm": 5.598100662231445, |
|
"learning_rate": 8.166388030942735e-07, |
|
"loss": 0.782, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"grad_norm": 3.09802508354187, |
|
"learning_rate": 7.883029667620641e-07, |
|
"loss": 0.7799, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"grad_norm": 2.217261791229248, |
|
"learning_rate": 7.599671304298547e-07, |
|
"loss": 0.744, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"grad_norm": 2.2989118099212646, |
|
"learning_rate": 7.316312940976455e-07, |
|
"loss": 0.7724, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"grad_norm": 4.708602428436279, |
|
"learning_rate": 7.032954577654361e-07, |
|
"loss": 0.7564, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"grad_norm": 2.177694797515869, |
|
"learning_rate": 6.749596214332267e-07, |
|
"loss": 0.7935, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 3.017601728439331, |
|
"learning_rate": 6.466237851010173e-07, |
|
"loss": 0.7764, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"grad_norm": 4.6326494216918945, |
|
"learning_rate": 6.18287948768808e-07, |
|
"loss": 0.7858, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"grad_norm": 3.898557424545288, |
|
"learning_rate": 5.899521124365986e-07, |
|
"loss": 0.7539, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"grad_norm": 3.546487808227539, |
|
"learning_rate": 5.616162761043892e-07, |
|
"loss": 0.7912, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"grad_norm": 3.868692398071289, |
|
"learning_rate": 5.332804397721799e-07, |
|
"loss": 0.7661, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"grad_norm": 4.169196605682373, |
|
"learning_rate": 5.049446034399706e-07, |
|
"loss": 0.7642, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"grad_norm": 0.08999128639698029, |
|
"learning_rate": 4.766087671077612e-07, |
|
"loss": 0.7434, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"grad_norm": 3.686347007751465, |
|
"learning_rate": 4.4827293077555185e-07, |
|
"loss": 0.7124, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"grad_norm": 4.009047031402588, |
|
"learning_rate": 4.1993709444334253e-07, |
|
"loss": 0.75, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"grad_norm": 3.0990748405456543, |
|
"learning_rate": 3.9160125811113315e-07, |
|
"loss": 0.7539, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"grad_norm": 2.179380178451538, |
|
"learning_rate": 3.6326542177892383e-07, |
|
"loss": 0.7654, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"grad_norm": 4.667028903961182, |
|
"learning_rate": 3.3492958544671445e-07, |
|
"loss": 0.7645, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"grad_norm": 0.0834590345621109, |
|
"learning_rate": 3.0659374911450513e-07, |
|
"loss": 0.74, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"grad_norm": 1.7832657098770142, |
|
"learning_rate": 2.782579127822958e-07, |
|
"loss": 0.7738, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"grad_norm": 3.8339791297912598, |
|
"learning_rate": 2.4992207645008643e-07, |
|
"loss": 0.7494, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 4.224211692810059, |
|
"learning_rate": 2.215862401178771e-07, |
|
"loss": 0.7532, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"grad_norm": 4.752499580383301, |
|
"learning_rate": 1.9325040378566776e-07, |
|
"loss": 0.7239, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"grad_norm": 2.172379732131958, |
|
"learning_rate": 1.649145674534584e-07, |
|
"loss": 0.7724, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"grad_norm": 2.954164743423462, |
|
"learning_rate": 1.3657873112124906e-07, |
|
"loss": 0.7716, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"grad_norm": 3.167682647705078, |
|
"learning_rate": 1.082428947890397e-07, |
|
"loss": 0.7789, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"grad_norm": 3.5074291229248047, |
|
"learning_rate": 7.990705845683036e-08, |
|
"loss": 0.7563, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"grad_norm": 2.857052803039551, |
|
"learning_rate": 5.1571222124621016e-08, |
|
"loss": 0.7174, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"grad_norm": 4.333151340484619, |
|
"learning_rate": 2.3235385792411667e-08, |
|
"loss": 0.7338, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.7761048124023759, |
|
"eval_loss": 0.7718502879142761, |
|
"eval_runtime": 2734.4878, |
|
"eval_samples_per_second": 34.416, |
|
"eval_steps_per_second": 34.416, |
|
"step": 352910 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 352910, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 1.4322010384662528e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |