{
  "epoch": 2.998027613412229,
  "total_flos": 287426369617920.0,
  "train_loss": 0.5032803327368017,
  "train_runtime": 76434.0426,
  "train_samples_per_second": 1.433,
  "train_steps_per_second": 0.024
}
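
These fields match the training summary a Hugging Face `transformers` Trainer typically writes: roughly 3 epochs, a final training loss of about 0.50, and about 21.2 hours of wall-clock training time (76,434 seconds). A minimal sketch of reading the file, assuming it is saved locally under the hypothetical name `train_results.json`:

```python
import json

# Load the training summary; the file name "train_results.json" is an
# assumption -- substitute the actual path of this file in the repository.
with open("train_results.json") as f:
    results = json.load(f)

# Convert the raw metrics into more readable figures.
hours = results["train_runtime"] / 3600  # wall-clock seconds -> hours

print(f"epochs completed:    {results['epoch']:.2f}")
print(f"final train loss:    {results['train_loss']:.4f}")
print(f"wall-clock time:     {hours:.1f} h")
print(f"samples per second:  {results['train_samples_per_second']:.3f}")
```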