{
  "best_metric": 0.5985733866691589,
  "best_model_checkpoint": "data/Llama-31-8B_task-2_60-samples_config-2_auto/checkpoint-25",
  "epoch": 16.0,
  "eval_steps": 500,
  "global_step": 46,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 0.3837566673755646,
      "learning_rate": 1e-05,
      "loss": 0.8962,
      "step": 1
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 0.3758746385574341,
      "learning_rate": 2e-05,
      "loss": 0.8574,
      "step": 2
    },
    {
      "epoch": 0.6956521739130435,
      "eval_loss": 0.8903820514678955,
      "eval_runtime": 13.347,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 2
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 0.38522425293922424,
      "learning_rate": 4e-05,
      "loss": 0.8746,
      "step": 4
    },
    {
      "epoch": 1.7391304347826086,
      "eval_loss": 0.8490483164787292,
      "eval_runtime": 13.3435,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 5
    },
    {
      "epoch": 2.0869565217391304,
      "grad_norm": 0.34568658471107483,
      "learning_rate": 6e-05,
      "loss": 0.8584,
      "step": 6
    },
    {
      "epoch": 2.782608695652174,
      "grad_norm": 0.31414464116096497,
      "learning_rate": 8e-05,
      "loss": 0.7842,
      "step": 8
    },
    {
      "epoch": 2.782608695652174,
      "eval_loss": 0.771674394607544,
      "eval_runtime": 13.3469,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 8
    },
    {
      "epoch": 3.4782608695652173,
      "grad_norm": 0.32403564453125,
      "learning_rate": 0.0001,
      "loss": 0.728,
      "step": 10
    },
    {
      "epoch": 3.8260869565217392,
      "eval_loss": 0.7139511108398438,
      "eval_runtime": 13.3421,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 11
    },
    {
      "epoch": 4.173913043478261,
      "grad_norm": 0.3469023108482361,
      "learning_rate": 9.987820251299122e-05,
      "loss": 0.6997,
      "step": 12
    },
    {
      "epoch": 4.869565217391305,
      "grad_norm": 0.31055331230163574,
      "learning_rate": 9.951340343707852e-05,
      "loss": 0.6405,
      "step": 14
    },
    {
      "epoch": 4.869565217391305,
      "eval_loss": 0.6586517691612244,
      "eval_runtime": 13.3436,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 14
    },
    {
      "epoch": 5.565217391304348,
      "grad_norm": 0.2623463273048401,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.5991,
      "step": 16
    },
    {
      "epoch": 5.913043478260869,
      "eval_loss": 0.6365043520927429,
      "eval_runtime": 13.3397,
      "eval_samples_per_second": 0.9,
      "eval_steps_per_second": 0.9,
      "step": 17
    },
    {
      "epoch": 6.260869565217392,
      "grad_norm": 0.26579177379608154,
      "learning_rate": 9.806308479691595e-05,
      "loss": 0.5578,
      "step": 18
    },
    {
      "epoch": 6.956521739130435,
      "grad_norm": 0.26274964213371277,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.5371,
      "step": 20
    },
    {
      "epoch": 6.956521739130435,
      "eval_loss": 0.6146900057792664,
      "eval_runtime": 13.3427,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 20
    },
    {
      "epoch": 7.6521739130434785,
      "grad_norm": 0.2567163407802582,
      "learning_rate": 9.567727288213005e-05,
      "loss": 0.5007,
      "step": 22
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.6017631888389587,
      "eval_runtime": 13.3431,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 23
    },
    {
      "epoch": 8.347826086956522,
      "grad_norm": 0.2583542466163635,
      "learning_rate": 9.414737964294636e-05,
      "loss": 0.4783,
      "step": 24
    },
    {
      "epoch": 8.695652173913043,
      "eval_loss": 0.5985733866691589,
      "eval_runtime": 13.346,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 25
    },
    {
      "epoch": 9.043478260869565,
      "grad_norm": 0.26865774393081665,
      "learning_rate": 9.24024048078213e-05,
      "loss": 0.459,
      "step": 26
    },
    {
      "epoch": 9.73913043478261,
      "grad_norm": 0.2833639979362488,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.4118,
      "step": 28
    },
    {
      "epoch": 9.73913043478261,
      "eval_loss": 0.6050412058830261,
      "eval_runtime": 13.3451,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 28
    },
    {
      "epoch": 10.434782608695652,
      "grad_norm": 0.290280818939209,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.4002,
      "step": 30
    },
    {
      "epoch": 10.782608695652174,
      "eval_loss": 0.6277570128440857,
      "eval_runtime": 13.3488,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 31
    },
    {
      "epoch": 11.130434782608695,
      "grad_norm": 0.3147587776184082,
      "learning_rate": 8.596699001693255e-05,
      "loss": 0.3665,
      "step": 32
    },
    {
      "epoch": 11.826086956521738,
      "grad_norm": 0.31184399127960205,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.3234,
      "step": 34
    },
    {
      "epoch": 11.826086956521738,
      "eval_loss": 0.6697214245796204,
      "eval_runtime": 13.3457,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 34
    },
    {
      "epoch": 12.521739130434783,
      "grad_norm": 0.347219854593277,
      "learning_rate": 8.07830737662829e-05,
      "loss": 0.2816,
      "step": 36
    },
    {
      "epoch": 12.869565217391305,
      "eval_loss": 0.7073009610176086,
      "eval_runtime": 13.3437,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 37
    },
    {
      "epoch": 13.217391304347826,
      "grad_norm": 0.47505876421928406,
      "learning_rate": 7.795964517353735e-05,
      "loss": 0.2706,
      "step": 38
    },
    {
      "epoch": 13.91304347826087,
      "grad_norm": 0.3857143223285675,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.2159,
      "step": 40
    },
    {
      "epoch": 13.91304347826087,
      "eval_loss": 0.719702959060669,
      "eval_runtime": 13.3447,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 40
    },
    {
      "epoch": 14.608695652173914,
      "grad_norm": 0.3344462513923645,
      "learning_rate": 7.191855733945387e-05,
      "loss": 0.1785,
      "step": 42
    },
    {
      "epoch": 14.956521739130435,
      "eval_loss": 0.7951470017433167,
      "eval_runtime": 13.3459,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 43
    },
    {
      "epoch": 15.304347826086957,
      "grad_norm": 0.3927725553512573,
      "learning_rate": 6.873032967079561e-05,
      "loss": 0.1504,
      "step": 44
    },
    {
      "epoch": 16.0,
      "grad_norm": 0.42083150148391724,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.1209,
      "step": 46
    },
    {
      "epoch": 16.0,
      "eval_loss": 0.9000644683837891,
      "eval_runtime": 13.3444,
      "eval_samples_per_second": 0.899,
      "eval_steps_per_second": 0.899,
      "step": 46
    },
    {
      "epoch": 16.0,
      "step": 46,
      "total_flos": 7.404745154730394e+16,
      "train_loss": 0.49191281426212063,
      "train_runtime": 2654.7246,
      "train_samples_per_second": 0.866,
      "train_steps_per_second": 0.038
    }
  ],
  "logging_steps": 2,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 25,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 7,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.404745154730394e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}