Training in progress, step 2432, checkpoint
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a9fb6ca324bbc0d4a10be7c0350b30be0f913204c23a469349b3b17a18bcc4e8
 size 891644712
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7c8d159b5dee730df7336003b85ecea019dce682761839b81884861f1a4d673d
 size 1783444794
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3904070ba4a4b6edf1dbf51afc14e663b106f8e8c79f79b87ba5d9600f008cda
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a7aa87f2f27bdf14618fcb9838c6fc76e75fdc1bff232aaca8a077a7f724abaf
 size 1064
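Each pointer diff above follows the Git LFS pointer format: a spec version line, the SHA-256 ("oid") of the stored object, and its size in bytes. As an illustrative sketch only (not part of this commit), a locally downloaded checkpoint file could be checked against the oid and size recorded in its pointer; the local path below is an assumption for the example.

import hashlib
import os

def verify_lfs_object(object_path, expected_sha256, expected_size):
    # Compare the on-disk file against the oid/size from the LFS pointer.
    if os.path.getsize(object_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(object_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Example using the new model.safetensors pointer values shown above
# (the local path is hypothetical):
print(verify_lfs_object(
    "last-checkpoint/model.safetensors",
    "a9fb6ca324bbc0d4a10be7c0350b30be0f913204c23a469349b3b17a18bcc4e8",
    891644712,
))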
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -6055,6 +6055,2478 @@
       "learning_rate": 0.0001116076390155682,
       "loss": 0.4248,
       "step": 1728
     }
   ],
   "logging_steps": 2,
@@ -6074,7 +8546,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 1.348863006100943,
   "eval_steps": 500,
+  "global_step": 2432,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -6055,6 +6055,2478 @@
       "learning_rate": 0.0001116076390155682,
       "loss": 0.4248,
       "step": 1728
6058 |
+
},
|
6059 |
+
{
|
6060 |
+
"epoch": 0.9595119245701609,
|
6061 |
+
"grad_norm": 0.24086622893810272,
|
6062 |
+
"learning_rate": 0.00011142916133073948,
|
6063 |
+
"loss": 0.3039,
|
6064 |
+
"step": 1730
|
6065 |
+
},
|
6066 |
+
{
|
6067 |
+
"epoch": 0.9606211869107044,
|
6068 |
+
"grad_norm": 0.25115931034088135,
|
6069 |
+
"learning_rate": 0.00011125064674961913,
|
6070 |
+
"loss": 0.3836,
|
6071 |
+
"step": 1732
|
6072 |
+
},
|
6073 |
+
{
|
6074 |
+
"epoch": 0.961730449251248,
|
6075 |
+
"grad_norm": 0.274597704410553,
|
6076 |
+
"learning_rate": 0.00011107209584849845,
|
6077 |
+
"loss": 0.393,
|
6078 |
+
"step": 1734
|
6079 |
+
},
|
6080 |
+
{
|
6081 |
+
"epoch": 0.9628397115917915,
|
6082 |
+
"grad_norm": 0.2955986559391022,
|
6083 |
+
"learning_rate": 0.00011089350920378592,
|
6084 |
+
"loss": 0.3905,
|
6085 |
+
"step": 1736
|
6086 |
+
},
|
6087 |
+
{
|
6088 |
+
"epoch": 0.963948973932335,
|
6089 |
+
"grad_norm": 0.3580799400806427,
|
6090 |
+
"learning_rate": 0.00011071488739200551,
|
6091 |
+
"loss": 0.3933,
|
6092 |
+
"step": 1738
|
6093 |
+
},
|
6094 |
+
{
|
6095 |
+
"epoch": 0.9650582362728786,
|
6096 |
+
"grad_norm": 0.3148622512817383,
|
6097 |
+
"learning_rate": 0.00011053623098979465,
|
6098 |
+
"loss": 0.5094,
|
6099 |
+
"step": 1740
|
6100 |
+
},
|
6101 |
+
{
|
6102 |
+
"epoch": 0.9661674986134221,
|
6103 |
+
"grad_norm": 0.23635894060134888,
|
6104 |
+
"learning_rate": 0.00011035754057390247,
|
6105 |
+
"loss": 0.4101,
|
6106 |
+
"step": 1742
|
6107 |
+
},
|
6108 |
+
{
|
6109 |
+
"epoch": 0.9672767609539656,
|
6110 |
+
"grad_norm": 0.3264128863811493,
|
6111 |
+
"learning_rate": 0.00011017881672118786,
|
6112 |
+
"loss": 0.3513,
|
6113 |
+
"step": 1744
|
6114 |
+
},
|
6115 |
+
{
|
6116 |
+
"epoch": 0.9683860232945092,
|
6117 |
+
"grad_norm": 0.2713950276374817,
|
6118 |
+
"learning_rate": 0.00011000006000861771,
|
6119 |
+
"loss": 0.41,
|
6120 |
+
"step": 1746
|
6121 |
+
},
|
6122 |
+
{
|
6123 |
+
"epoch": 0.9694952856350527,
|
6124 |
+
"grad_norm": 0.4210665822029114,
|
6125 |
+
"learning_rate": 0.00010982127101326498,
|
6126 |
+
"loss": 0.3747,
|
6127 |
+
"step": 1748
|
6128 |
+
},
|
6129 |
+
{
|
6130 |
+
"epoch": 0.9706045479755963,
|
6131 |
+
"grad_norm": 0.21227708458900452,
|
6132 |
+
"learning_rate": 0.00010964245031230684,
|
6133 |
+
"loss": 0.4087,
|
6134 |
+
"step": 1750
|
6135 |
+
},
|
6136 |
+
{
|
6137 |
+
"epoch": 0.9717138103161398,
|
6138 |
+
"grad_norm": 0.4090538024902344,
|
6139 |
+
"learning_rate": 0.00010946359848302275,
|
6140 |
+
"loss": 0.5652,
|
6141 |
+
"step": 1752
|
6142 |
+
},
|
6143 |
+
{
|
6144 |
+
"epoch": 0.9728230726566833,
|
6145 |
+
"grad_norm": 0.2974146604537964,
|
6146 |
+
"learning_rate": 0.00010928471610279278,
|
6147 |
+
"loss": 0.3913,
|
6148 |
+
"step": 1754
|
6149 |
+
},
|
6150 |
+
{
|
6151 |
+
"epoch": 0.9739323349972269,
|
6152 |
+
"grad_norm": 0.2675114870071411,
|
6153 |
+
"learning_rate": 0.00010910580374909551,
|
6154 |
+
"loss": 0.4146,
|
6155 |
+
"step": 1756
|
6156 |
+
},
|
6157 |
+
{
|
6158 |
+
"epoch": 0.9750415973377704,
|
6159 |
+
"grad_norm": 0.2641230821609497,
|
6160 |
+
"learning_rate": 0.0001089268619995064,
|
6161 |
+
"loss": 0.4093,
|
6162 |
+
"step": 1758
|
6163 |
+
},
|
6164 |
+
{
|
6165 |
+
"epoch": 0.9761508596783139,
|
6166 |
+
"grad_norm": 0.30527427792549133,
|
6167 |
+
"learning_rate": 0.00010874789143169568,
|
6168 |
+
"loss": 0.4269,
|
6169 |
+
"step": 1760
|
6170 |
+
},
|
6171 |
+
{
|
6172 |
+
"epoch": 0.9772601220188575,
|
6173 |
+
"grad_norm": 0.25456637144088745,
|
6174 |
+
"learning_rate": 0.0001085688926234267,
|
6175 |
+
"loss": 0.4085,
|
6176 |
+
"step": 1762
|
6177 |
+
},
|
6178 |
+
{
|
6179 |
+
"epoch": 0.978369384359401,
|
6180 |
+
"grad_norm": 0.2829885184764862,
|
6181 |
+
"learning_rate": 0.00010838986615255397,
|
6182 |
+
"loss": 0.4017,
|
6183 |
+
"step": 1764
|
6184 |
+
},
|
6185 |
+
{
|
6186 |
+
"epoch": 0.9794786466999446,
|
6187 |
+
"grad_norm": 0.31530699133872986,
|
6188 |
+
"learning_rate": 0.00010821081259702128,
|
6189 |
+
"loss": 0.3994,
|
6190 |
+
"step": 1766
|
6191 |
+
},
|
6192 |
+
{
|
6193 |
+
"epoch": 0.9805879090404881,
|
6194 |
+
"grad_norm": 0.26515138149261475,
|
6195 |
+
"learning_rate": 0.00010803173253485983,
|
6196 |
+
"loss": 0.3345,
|
6197 |
+
"step": 1768
|
6198 |
+
},
|
6199 |
+
{
|
6200 |
+
"epoch": 0.9816971713810316,
|
6201 |
+
"grad_norm": 0.26404955983161926,
|
6202 |
+
"learning_rate": 0.00010785262654418647,
|
6203 |
+
"loss": 0.3681,
|
6204 |
+
"step": 1770
|
6205 |
+
},
|
6206 |
+
{
|
6207 |
+
"epoch": 0.9828064337215752,
|
6208 |
+
"grad_norm": 0.35473427176475525,
|
6209 |
+
"learning_rate": 0.00010767349520320167,
|
6210 |
+
"loss": 0.4161,
|
6211 |
+
"step": 1772
|
6212 |
+
},
|
6213 |
+
{
|
6214 |
+
"epoch": 0.9839156960621187,
|
6215 |
+
"grad_norm": 0.26969531178474426,
|
6216 |
+
"learning_rate": 0.00010749433909018778,
|
6217 |
+
"loss": 0.4034,
|
6218 |
+
"step": 1774
|
6219 |
+
},
|
6220 |
+
{
|
6221 |
+
"epoch": 0.9850249584026622,
|
6222 |
+
"grad_norm": 0.949966311454773,
|
6223 |
+
"learning_rate": 0.0001073151587835071,
|
6224 |
+
"loss": 0.4101,
|
6225 |
+
"step": 1776
|
6226 |
+
},
|
6227 |
+
{
|
6228 |
+
"epoch": 0.9861342207432058,
|
6229 |
+
"grad_norm": 0.26887133717536926,
|
6230 |
+
"learning_rate": 0.00010713595486160013,
|
6231 |
+
"loss": 0.365,
|
6232 |
+
"step": 1778
|
6233 |
+
},
|
6234 |
+
{
|
6235 |
+
"epoch": 0.9872434830837493,
|
6236 |
+
"grad_norm": 0.2992730140686035,
|
6237 |
+
"learning_rate": 0.00010695672790298341,
|
6238 |
+
"loss": 0.4104,
|
6239 |
+
"step": 1780
|
6240 |
+
},
|
6241 |
+
{
|
6242 |
+
"epoch": 0.9883527454242929,
|
6243 |
+
"grad_norm": 0.29282405972480774,
|
6244 |
+
"learning_rate": 0.00010677747848624804,
|
6245 |
+
"loss": 0.3768,
|
6246 |
+
"step": 1782
|
6247 |
+
},
|
6248 |
+
{
|
6249 |
+
"epoch": 0.9894620077648364,
|
6250 |
+
"grad_norm": 0.3003996014595032,
|
6251 |
+
"learning_rate": 0.00010659820719005748,
|
6252 |
+
"loss": 0.3454,
|
6253 |
+
"step": 1784
|
6254 |
+
},
|
6255 |
+
{
|
6256 |
+
"epoch": 0.9905712701053799,
|
6257 |
+
"grad_norm": 0.3009726107120514,
|
6258 |
+
"learning_rate": 0.00010641891459314597,
|
6259 |
+
"loss": 0.3189,
|
6260 |
+
"step": 1786
|
6261 |
+
},
|
6262 |
+
{
|
6263 |
+
"epoch": 0.9916805324459235,
|
6264 |
+
"grad_norm": 0.32993796467781067,
|
6265 |
+
"learning_rate": 0.00010623960127431636,
|
6266 |
+
"loss": 0.3873,
|
6267 |
+
"step": 1788
|
6268 |
+
},
|
6269 |
+
{
|
6270 |
+
"epoch": 0.992789794786467,
|
6271 |
+
"grad_norm": 0.28225094079971313,
|
6272 |
+
"learning_rate": 0.00010606026781243847,
|
6273 |
+
"loss": 0.3549,
|
6274 |
+
"step": 1790
|
6275 |
+
},
|
6276 |
+
{
|
6277 |
+
"epoch": 0.9938990571270105,
|
6278 |
+
"grad_norm": 0.30678993463516235,
|
6279 |
+
"learning_rate": 0.00010588091478644715,
|
6280 |
+
"loss": 0.3856,
|
6281 |
+
"step": 1792
|
6282 |
+
},
|
6283 |
+
{
|
6284 |
+
"epoch": 0.9950083194675541,
|
6285 |
+
"grad_norm": 0.25429633259773254,
|
6286 |
+
"learning_rate": 0.00010570154277534042,
|
6287 |
+
"loss": 0.3276,
|
6288 |
+
"step": 1794
|
6289 |
+
},
|
6290 |
+
{
|
6291 |
+
"epoch": 0.9961175818080976,
|
6292 |
+
"grad_norm": 0.20127154886722565,
|
6293 |
+
"learning_rate": 0.00010552215235817754,
|
6294 |
+
"loss": 0.3675,
|
6295 |
+
"step": 1796
|
6296 |
+
},
|
6297 |
+
{
|
6298 |
+
"epoch": 0.9972268441486412,
|
6299 |
+
"grad_norm": 0.2773531973361969,
|
6300 |
+
"learning_rate": 0.00010534274411407725,
|
6301 |
+
"loss": 0.3993,
|
6302 |
+
"step": 1798
|
6303 |
+
},
|
6304 |
+
{
|
6305 |
+
"epoch": 0.9983361064891847,
|
6306 |
+
"grad_norm": 0.3768630921840668,
|
6307 |
+
"learning_rate": 0.00010516331862221582,
|
6308 |
+
"loss": 0.4841,
|
6309 |
+
"step": 1800
|
6310 |
+
},
|
6311 |
+
{
|
6312 |
+
"epoch": 0.9994453688297282,
|
6313 |
+
"grad_norm": 0.3159618079662323,
|
6314 |
+
"learning_rate": 0.00010498387646182512,
|
6315 |
+
"loss": 0.4443,
|
6316 |
+
"step": 1802
|
6317 |
+
},
|
6318 |
+
{
|
6319 |
+
"epoch": 1.0,
|
6320 |
+
"eval_loss": 0.38768309354782104,
|
6321 |
+
"eval_runtime": 38.6863,
|
6322 |
+
"eval_samples_per_second": 58.031,
|
6323 |
+
"eval_steps_per_second": 29.028,
|
6324 |
+
"step": 1803
|
6325 |
+
},
|
6326 |
+
{
|
6327 |
+
"epoch": 1.0005546311702718,
|
6328 |
+
"grad_norm": 0.18592089414596558,
|
6329 |
+
"learning_rate": 0.00010480441821219096,
|
6330 |
+
"loss": 0.3043,
|
6331 |
+
"step": 1804
|
6332 |
+
},
|
6333 |
+
{
|
6334 |
+
"epoch": 1.0016638935108153,
|
6335 |
+
"grad_norm": 0.23864133656024933,
|
6336 |
+
"learning_rate": 0.00010462494445265098,
|
6337 |
+
"loss": 0.3108,
|
6338 |
+
"step": 1806
|
6339 |
+
},
|
6340 |
+
{
|
6341 |
+
"epoch": 1.0027731558513588,
|
6342 |
+
"grad_norm": 0.33824145793914795,
|
6343 |
+
"learning_rate": 0.00010444545576259304,
|
6344 |
+
"loss": 0.3777,
|
6345 |
+
"step": 1808
|
6346 |
+
},
|
6347 |
+
{
|
6348 |
+
"epoch": 1.0038824181919024,
|
6349 |
+
"grad_norm": 0.2805129289627075,
|
6350 |
+
"learning_rate": 0.00010426595272145296,
|
6351 |
+
"loss": 0.3282,
|
6352 |
+
"step": 1810
|
6353 |
+
},
|
6354 |
+
{
|
6355 |
+
"epoch": 1.004991680532446,
|
6356 |
+
"grad_norm": 0.2824130058288574,
|
6357 |
+
"learning_rate": 0.00010408643590871312,
|
6358 |
+
"loss": 0.3395,
|
6359 |
+
"step": 1812
|
6360 |
+
},
|
6361 |
+
{
|
6362 |
+
"epoch": 1.0061009428729895,
|
6363 |
+
"grad_norm": 0.2482519894838333,
|
6364 |
+
"learning_rate": 0.00010390690590390023,
|
6365 |
+
"loss": 0.3296,
|
6366 |
+
"step": 1814
|
6367 |
+
},
|
6368 |
+
{
|
6369 |
+
"epoch": 1.007210205213533,
|
6370 |
+
"grad_norm": 0.31125134229660034,
|
6371 |
+
"learning_rate": 0.00010372736328658363,
|
6372 |
+
"loss": 0.3626,
|
6373 |
+
"step": 1816
|
6374 |
+
},
|
6375 |
+
{
|
6376 |
+
"epoch": 1.0083194675540765,
|
6377 |
+
"grad_norm": 0.2817044258117676,
|
6378 |
+
"learning_rate": 0.00010354780863637339,
|
6379 |
+
"loss": 0.3466,
|
6380 |
+
"step": 1818
|
6381 |
+
},
|
6382 |
+
{
|
6383 |
+
"epoch": 1.00942872989462,
|
6384 |
+
"grad_norm": 0.28125834465026855,
|
6385 |
+
"learning_rate": 0.00010336824253291837,
|
6386 |
+
"loss": 0.3274,
|
6387 |
+
"step": 1820
|
6388 |
+
},
|
6389 |
+
{
|
6390 |
+
"epoch": 1.0105379922351636,
|
6391 |
+
"grad_norm": 0.18501298129558563,
|
6392 |
+
"learning_rate": 0.0001031886655559045,
|
6393 |
+
"loss": 0.2943,
|
6394 |
+
"step": 1822
|
6395 |
+
},
|
6396 |
+
{
|
6397 |
+
"epoch": 1.0116472545757071,
|
6398 |
+
"grad_norm": 0.3577679395675659,
|
6399 |
+
"learning_rate": 0.00010300907828505269,
|
6400 |
+
"loss": 0.3546,
|
6401 |
+
"step": 1824
|
6402 |
+
},
|
6403 |
+
{
|
6404 |
+
"epoch": 1.0127565169162507,
|
6405 |
+
"grad_norm": 0.26521244645118713,
|
6406 |
+
"learning_rate": 0.00010282948130011715,
|
6407 |
+
"loss": 0.3626,
|
6408 |
+
"step": 1826
|
6409 |
+
},
|
6410 |
+
{
|
6411 |
+
"epoch": 1.0138657792567942,
|
6412 |
+
"grad_norm": 0.25927355885505676,
|
6413 |
+
"learning_rate": 0.00010264987518088347,
|
6414 |
+
"loss": 0.4119,
|
6415 |
+
"step": 1828
|
6416 |
+
},
|
6417 |
+
{
|
6418 |
+
"epoch": 1.0149750415973378,
|
6419 |
+
"grad_norm": 0.2581847608089447,
|
6420 |
+
"learning_rate": 0.0001024702605071667,
|
6421 |
+
"loss": 0.3306,
|
6422 |
+
"step": 1830
|
6423 |
+
},
|
6424 |
+
{
|
6425 |
+
"epoch": 1.0160843039378813,
|
6426 |
+
"grad_norm": 0.2600712478160858,
|
6427 |
+
"learning_rate": 0.00010229063785880948,
|
6428 |
+
"loss": 0.3486,
|
6429 |
+
"step": 1832
|
6430 |
+
},
|
6431 |
+
{
|
6432 |
+
"epoch": 1.0171935662784248,
|
6433 |
+
"grad_norm": 0.2579622268676758,
|
6434 |
+
"learning_rate": 0.00010211100781568024,
|
6435 |
+
"loss": 0.3191,
|
6436 |
+
"step": 1834
|
6437 |
+
},
|
6438 |
+
{
|
6439 |
+
"epoch": 1.0183028286189684,
|
6440 |
+
"grad_norm": 0.3741399645805359,
|
6441 |
+
"learning_rate": 0.00010193137095767125,
|
6442 |
+
"loss": 0.4502,
|
6443 |
+
"step": 1836
|
6444 |
+
},
|
6445 |
+
{
|
6446 |
+
"epoch": 1.019412090959512,
|
6447 |
+
"grad_norm": 0.31590569019317627,
|
6448 |
+
"learning_rate": 0.0001017517278646968,
|
6449 |
+
"loss": 0.3766,
|
6450 |
+
"step": 1838
|
6451 |
+
},
|
6452 |
+
{
|
6453 |
+
"epoch": 1.0205213533000554,
|
6454 |
+
"grad_norm": 0.2550656199455261,
|
6455 |
+
"learning_rate": 0.00010157207911669132,
|
6456 |
+
"loss": 0.3151,
|
6457 |
+
"step": 1840
|
6458 |
+
},
|
6459 |
+
{
|
6460 |
+
"epoch": 1.021630615640599,
|
6461 |
+
"grad_norm": 0.28483080863952637,
|
6462 |
+
"learning_rate": 0.00010139242529360744,
|
6463 |
+
"loss": 0.3957,
|
6464 |
+
"step": 1842
|
6465 |
+
},
|
6466 |
+
{
|
6467 |
+
"epoch": 1.0227398779811425,
|
6468 |
+
"grad_norm": 0.27653104066848755,
|
6469 |
+
"learning_rate": 0.00010121276697541427,
|
6470 |
+
"loss": 0.4507,
|
6471 |
+
"step": 1844
|
6472 |
+
},
|
6473 |
+
{
|
6474 |
+
"epoch": 1.023849140321686,
|
6475 |
+
"grad_norm": 0.24907124042510986,
|
6476 |
+
"learning_rate": 0.00010103310474209528,
|
6477 |
+
"loss": 0.4098,
|
6478 |
+
"step": 1846
|
6479 |
+
},
|
6480 |
+
{
|
6481 |
+
"epoch": 1.0249584026622296,
|
6482 |
+
"grad_norm": 0.26746734976768494,
|
6483 |
+
"learning_rate": 0.00010085343917364675,
|
6484 |
+
"loss": 0.376,
|
6485 |
+
"step": 1848
|
6486 |
+
},
|
6487 |
+
{
|
6488 |
+
"epoch": 1.0260676650027731,
|
6489 |
+
"grad_norm": 0.3076433539390564,
|
6490 |
+
"learning_rate": 0.00010067377085007561,
|
6491 |
+
"loss": 0.4063,
|
6492 |
+
"step": 1850
|
6493 |
+
},
|
6494 |
+
{
|
6495 |
+
"epoch": 1.0271769273433167,
|
6496 |
+
"grad_norm": 0.2736207842826843,
|
6497 |
+
"learning_rate": 0.00010049410035139774,
|
6498 |
+
"loss": 0.3646,
|
6499 |
+
"step": 1852
|
6500 |
+
},
|
6501 |
+
{
|
6502 |
+
"epoch": 1.0282861896838602,
|
6503 |
+
"grad_norm": 0.2720719277858734,
|
6504 |
+
"learning_rate": 0.00010031442825763603,
|
6505 |
+
"loss": 0.361,
|
6506 |
+
"step": 1854
|
6507 |
+
},
|
6508 |
+
{
|
6509 |
+
"epoch": 1.0293954520244037,
|
6510 |
+
"grad_norm": 0.2995122969150543,
|
6511 |
+
"learning_rate": 0.00010013475514881852,
|
6512 |
+
"loss": 0.3633,
|
6513 |
+
"step": 1856
|
6514 |
+
},
|
6515 |
+
{
|
6516 |
+
"epoch": 1.0305047143649473,
|
6517 |
+
"grad_norm": 0.3213959038257599,
|
6518 |
+
"learning_rate": 9.99550816049765e-05,
|
6519 |
+
"loss": 0.393,
|
6520 |
+
"step": 1858
|
6521 |
+
},
|
6522 |
+
{
|
6523 |
+
"epoch": 1.0316139767054908,
|
6524 |
+
"grad_norm": 0.2832462191581726,
|
6525 |
+
"learning_rate": 9.977540820614266e-05,
|
6526 |
+
"loss": 0.372,
|
6527 |
+
"step": 1860
|
6528 |
+
},
|
6529 |
+
{
|
6530 |
+
"epoch": 1.0327232390460344,
|
6531 |
+
"grad_norm": 0.2622850239276886,
|
6532 |
+
"learning_rate": 9.959573553234931e-05,
|
6533 |
+
"loss": 0.3433,
|
6534 |
+
"step": 1862
|
6535 |
+
},
|
6536 |
+
{
|
6537 |
+
"epoch": 1.033832501386578,
|
6538 |
+
"grad_norm": 0.2624775469303131,
|
6539 |
+
"learning_rate": 9.941606416362629e-05,
|
6540 |
+
"loss": 0.2845,
|
6541 |
+
"step": 1864
|
6542 |
+
},
|
6543 |
+
{
|
6544 |
+
"epoch": 1.0349417637271214,
|
6545 |
+
"grad_norm": 0.2570095956325531,
|
6546 |
+
"learning_rate": 9.92363946799993e-05,
|
6547 |
+
"loss": 0.3519,
|
6548 |
+
"step": 1866
|
6549 |
+
},
|
6550 |
+
{
|
6551 |
+
"epoch": 1.036051026067665,
|
6552 |
+
"grad_norm": 0.342472642660141,
|
6553 |
+
"learning_rate": 9.9056727661488e-05,
|
6554 |
+
"loss": 0.4085,
|
6555 |
+
"step": 1868
|
6556 |
+
},
|
6557 |
+
{
|
6558 |
+
"epoch": 1.0371602884082085,
|
6559 |
+
"grad_norm": 0.22180654108524323,
|
6560 |
+
"learning_rate": 9.887706368810403e-05,
|
6561 |
+
"loss": 0.3064,
|
6562 |
+
"step": 1870
|
6563 |
+
},
|
6564 |
+
{
|
6565 |
+
"epoch": 1.038269550748752,
|
6566 |
+
"grad_norm": 0.2561916410923004,
|
6567 |
+
"learning_rate": 9.869740333984917e-05,
|
6568 |
+
"loss": 0.469,
|
6569 |
+
"step": 1872
|
6570 |
+
},
|
6571 |
+
{
|
6572 |
+
"epoch": 1.0393788130892956,
|
6573 |
+
"grad_norm": 0.278629332780838,
|
6574 |
+
"learning_rate": 9.851774719671355e-05,
|
6575 |
+
"loss": 0.3506,
|
6576 |
+
"step": 1874
|
6577 |
+
},
|
6578 |
+
{
|
6579 |
+
"epoch": 1.0404880754298391,
|
6580 |
+
"grad_norm": 0.2417076975107193,
|
6581 |
+
"learning_rate": 9.833809583867374e-05,
|
6582 |
+
"loss": 0.3787,
|
6583 |
+
"step": 1876
|
6584 |
+
},
|
6585 |
+
{
|
6586 |
+
"epoch": 1.0415973377703827,
|
6587 |
+
"grad_norm": 0.3406512141227722,
|
6588 |
+
"learning_rate": 9.815844984569079e-05,
|
6589 |
+
"loss": 0.4375,
|
6590 |
+
"step": 1878
|
6591 |
+
},
|
6592 |
+
{
|
6593 |
+
"epoch": 1.0427066001109262,
|
6594 |
+
"grad_norm": 0.4049924612045288,
|
6595 |
+
"learning_rate": 9.797880979770845e-05,
|
6596 |
+
"loss": 0.3977,
|
6597 |
+
"step": 1880
|
6598 |
+
},
|
6599 |
+
{
|
6600 |
+
"epoch": 1.0438158624514697,
|
6601 |
+
"grad_norm": 0.2261233627796173,
|
6602 |
+
"learning_rate": 9.779917627465139e-05,
|
6603 |
+
"loss": 0.3227,
|
6604 |
+
"step": 1882
|
6605 |
+
},
|
6606 |
+
{
|
6607 |
+
"epoch": 1.0449251247920133,
|
6608 |
+
"grad_norm": 0.4143763780593872,
|
6609 |
+
"learning_rate": 9.761954985642308e-05,
|
6610 |
+
"loss": 0.4072,
|
6611 |
+
"step": 1884
|
6612 |
+
},
|
6613 |
+
{
|
6614 |
+
"epoch": 1.0460343871325568,
|
6615 |
+
"grad_norm": 0.42435726523399353,
|
6616 |
+
"learning_rate": 9.743993112290408e-05,
|
6617 |
+
"loss": 0.4488,
|
6618 |
+
"step": 1886
|
6619 |
+
},
|
6620 |
+
{
|
6621 |
+
"epoch": 1.0471436494731003,
|
6622 |
+
"grad_norm": 0.3034514784812927,
|
6623 |
+
"learning_rate": 9.726032065395022e-05,
|
6624 |
+
"loss": 0.4157,
|
6625 |
+
"step": 1888
|
6626 |
+
},
|
6627 |
+
{
|
6628 |
+
"epoch": 1.0482529118136439,
|
6629 |
+
"grad_norm": 0.34837406873703003,
|
6630 |
+
"learning_rate": 9.708071902939054e-05,
|
6631 |
+
"loss": 0.4582,
|
6632 |
+
"step": 1890
|
6633 |
+
},
|
6634 |
+
{
|
6635 |
+
"epoch": 1.0493621741541874,
|
6636 |
+
"grad_norm": 16.98454475402832,
|
6637 |
+
"learning_rate": 9.690112682902558e-05,
|
6638 |
+
"loss": 0.487,
|
6639 |
+
"step": 1892
|
6640 |
+
},
|
6641 |
+
{
|
6642 |
+
"epoch": 1.050471436494731,
|
6643 |
+
"grad_norm": 0.32965266704559326,
|
6644 |
+
"learning_rate": 9.672154463262545e-05,
|
6645 |
+
"loss": 0.4454,
|
6646 |
+
"step": 1894
|
6647 |
+
},
|
6648 |
+
{
|
6649 |
+
"epoch": 1.0515806988352745,
|
6650 |
+
"grad_norm": 0.3266843855381012,
|
6651 |
+
"learning_rate": 9.654197301992805e-05,
|
6652 |
+
"loss": 0.4016,
|
6653 |
+
"step": 1896
|
6654 |
+
},
|
6655 |
+
{
|
6656 |
+
"epoch": 1.052689961175818,
|
6657 |
+
"grad_norm": 0.28000080585479736,
|
6658 |
+
"learning_rate": 9.636241257063697e-05,
|
6659 |
+
"loss": 0.2961,
|
6660 |
+
"step": 1898
|
6661 |
+
},
|
6662 |
+
{
|
6663 |
+
"epoch": 1.0537992235163616,
|
6664 |
+
"grad_norm": 0.26942679286003113,
|
6665 |
+
"learning_rate": 9.618286386441981e-05,
|
6666 |
+
"loss": 0.4189,
|
6667 |
+
"step": 1900
|
6668 |
+
},
|
6669 |
+
{
|
6670 |
+
"epoch": 1.054908485856905,
|
6671 |
+
"grad_norm": 0.30495479702949524,
|
6672 |
+
"learning_rate": 9.600332748090633e-05,
|
6673 |
+
"loss": 0.3766,
|
6674 |
+
"step": 1902
|
6675 |
+
},
|
6676 |
+
{
|
6677 |
+
"epoch": 1.0560177481974486,
|
6678 |
+
"grad_norm": 0.41964420676231384,
|
6679 |
+
"learning_rate": 9.582380399968643e-05,
|
6680 |
+
"loss": 0.454,
|
6681 |
+
"step": 1904
|
6682 |
+
},
|
6683 |
+
{
|
6684 |
+
"epoch": 1.0571270105379922,
|
6685 |
+
"grad_norm": 0.28320369124412537,
|
6686 |
+
"learning_rate": 9.564429400030837e-05,
|
6687 |
+
"loss": 0.3732,
|
6688 |
+
"step": 1906
|
6689 |
+
},
|
6690 |
+
{
|
6691 |
+
"epoch": 1.0582362728785357,
|
6692 |
+
"grad_norm": 0.2792145907878876,
|
6693 |
+
"learning_rate": 9.546479806227691e-05,
|
6694 |
+
"loss": 0.356,
|
6695 |
+
"step": 1908
|
6696 |
+
},
|
6697 |
+
{
|
6698 |
+
"epoch": 1.0593455352190793,
|
6699 |
+
"grad_norm": 0.29180654883384705,
|
6700 |
+
"learning_rate": 9.52853167650514e-05,
|
6701 |
+
"loss": 0.436,
|
6702 |
+
"step": 1910
|
6703 |
+
},
|
6704 |
+
{
|
6705 |
+
"epoch": 1.0604547975596228,
|
6706 |
+
"grad_norm": 0.23264792561531067,
|
6707 |
+
"learning_rate": 9.510585068804394e-05,
|
6708 |
+
"loss": 0.3064,
|
6709 |
+
"step": 1912
|
6710 |
+
},
|
6711 |
+
{
|
6712 |
+
"epoch": 1.0615640599001663,
|
6713 |
+
"grad_norm": 0.3678199350833893,
|
6714 |
+
"learning_rate": 9.492640041061752e-05,
|
6715 |
+
"loss": 0.4138,
|
6716 |
+
"step": 1914
|
6717 |
+
},
|
6718 |
+
{
|
6719 |
+
"epoch": 1.0626733222407099,
|
6720 |
+
"grad_norm": 0.4804568886756897,
|
6721 |
+
"learning_rate": 9.474696651208406e-05,
|
6722 |
+
"loss": 0.4117,
|
6723 |
+
"step": 1916
|
6724 |
+
},
|
6725 |
+
{
|
6726 |
+
"epoch": 1.0637825845812534,
|
6727 |
+
"grad_norm": 0.30506354570388794,
|
6728 |
+
"learning_rate": 9.456754957170262e-05,
|
6729 |
+
"loss": 0.328,
|
6730 |
+
"step": 1918
|
6731 |
+
},
|
6732 |
+
{
|
6733 |
+
"epoch": 1.064891846921797,
|
6734 |
+
"grad_norm": 0.28367388248443604,
|
6735 |
+
"learning_rate": 9.438815016867757e-05,
|
6736 |
+
"loss": 0.3548,
|
6737 |
+
"step": 1920
|
6738 |
+
},
|
6739 |
+
{
|
6740 |
+
"epoch": 1.0660011092623405,
|
6741 |
+
"grad_norm": 0.2342025488615036,
|
6742 |
+
"learning_rate": 9.420876888215661e-05,
|
6743 |
+
"loss": 0.3227,
|
6744 |
+
"step": 1922
|
6745 |
+
},
|
6746 |
+
{
|
6747 |
+
"epoch": 1.067110371602884,
|
6748 |
+
"grad_norm": 0.3206954598426819,
|
6749 |
+
"learning_rate": 9.402940629122894e-05,
|
6750 |
+
"loss": 0.4336,
|
6751 |
+
"step": 1924
|
6752 |
+
},
|
6753 |
+
{
|
6754 |
+
"epoch": 1.0682196339434276,
|
6755 |
+
"grad_norm": 0.35998135805130005,
|
6756 |
+
"learning_rate": 9.38500629749235e-05,
|
6757 |
+
"loss": 0.4726,
|
6758 |
+
"step": 1926
|
6759 |
+
},
|
6760 |
+
{
|
6761 |
+
"epoch": 1.069328896283971,
|
6762 |
+
"grad_norm": 0.25793230533599854,
|
6763 |
+
"learning_rate": 9.367073951220693e-05,
|
6764 |
+
"loss": 0.3685,
|
6765 |
+
"step": 1928
|
6766 |
+
},
|
6767 |
+
{
|
6768 |
+
"epoch": 1.0704381586245146,
|
6769 |
+
"grad_norm": 0.27258947491645813,
|
6770 |
+
"learning_rate": 9.349143648198176e-05,
|
6771 |
+
"loss": 0.3911,
|
6772 |
+
"step": 1930
|
6773 |
+
},
|
6774 |
+
{
|
6775 |
+
"epoch": 1.0715474209650582,
|
6776 |
+
"grad_norm": 0.31704410910606384,
|
6777 |
+
"learning_rate": 9.33121544630846e-05,
|
6778 |
+
"loss": 0.4008,
|
6779 |
+
"step": 1932
|
6780 |
+
},
|
6781 |
+
{
|
6782 |
+
"epoch": 1.0726566833056017,
|
6783 |
+
"grad_norm": 0.28461799025535583,
|
6784 |
+
"learning_rate": 9.313289403428427e-05,
|
6785 |
+
"loss": 0.4084,
|
6786 |
+
"step": 1934
|
6787 |
+
},
|
6788 |
+
{
|
6789 |
+
"epoch": 1.0737659456461452,
|
6790 |
+
"grad_norm": 0.32281526923179626,
|
6791 |
+
"learning_rate": 9.295365577427976e-05,
|
6792 |
+
"loss": 0.3423,
|
6793 |
+
"step": 1936
|
6794 |
+
},
|
6795 |
+
{
|
6796 |
+
"epoch": 1.0748752079866888,
|
6797 |
+
"grad_norm": 0.2618202865123749,
|
6798 |
+
"learning_rate": 9.27744402616986e-05,
|
6799 |
+
"loss": 0.4544,
|
6800 |
+
"step": 1938
|
6801 |
+
},
|
6802 |
+
{
|
6803 |
+
"epoch": 1.0759844703272323,
|
6804 |
+
"grad_norm": 0.24186968803405762,
|
6805 |
+
"learning_rate": 9.259524807509491e-05,
|
6806 |
+
"loss": 0.2999,
|
6807 |
+
"step": 1940
|
6808 |
+
},
|
6809 |
+
{
|
6810 |
+
"epoch": 1.0770937326677759,
|
6811 |
+
"grad_norm": 0.3159604072570801,
|
6812 |
+
"learning_rate": 9.241607979294745e-05,
|
6813 |
+
"loss": 0.3966,
|
6814 |
+
"step": 1942
|
6815 |
+
},
|
6816 |
+
{
|
6817 |
+
"epoch": 1.0782029950083194,
|
6818 |
+
"grad_norm": 0.2324291318655014,
|
6819 |
+
"learning_rate": 9.223693599365777e-05,
|
6820 |
+
"loss": 0.3959,
|
6821 |
+
"step": 1944
|
6822 |
+
},
|
6823 |
+
{
|
6824 |
+
"epoch": 1.079312257348863,
|
6825 |
+
"grad_norm": 0.3115401566028595,
|
6826 |
+
"learning_rate": 9.205781725554849e-05,
|
6827 |
+
"loss": 0.4472,
|
6828 |
+
"step": 1946
|
6829 |
+
},
|
6830 |
+
{
|
6831 |
+
"epoch": 1.0804215196894065,
|
6832 |
+
"grad_norm": 0.23825664818286896,
|
6833 |
+
"learning_rate": 9.18787241568612e-05,
|
6834 |
+
"loss": 0.3472,
|
6835 |
+
"step": 1948
|
6836 |
+
},
|
6837 |
+
{
|
6838 |
+
"epoch": 1.08153078202995,
|
6839 |
+
"grad_norm": 0.22805361449718475,
|
6840 |
+
"learning_rate": 9.169965727575482e-05,
|
6841 |
+
"loss": 0.3072,
|
6842 |
+
"step": 1950
|
6843 |
+
},
|
6844 |
+
{
|
6845 |
+
"epoch": 1.0826400443704935,
|
6846 |
+
"grad_norm": 0.26474443078041077,
|
6847 |
+
"learning_rate": 9.152061719030364e-05,
|
6848 |
+
"loss": 0.394,
|
6849 |
+
"step": 1952
|
6850 |
+
},
|
6851 |
+
{
|
6852 |
+
"epoch": 1.083749306711037,
|
6853 |
+
"grad_norm": 0.3261571526527405,
|
6854 |
+
"learning_rate": 9.134160447849534e-05,
|
6855 |
+
"loss": 0.4263,
|
6856 |
+
"step": 1954
|
6857 |
+
},
|
6858 |
+
{
|
6859 |
+
"epoch": 1.0848585690515806,
|
6860 |
+
"grad_norm": 0.22368104755878448,
|
6861 |
+
"learning_rate": 9.116261971822932e-05,
|
6862 |
+
"loss": 0.2821,
|
6863 |
+
"step": 1956
|
6864 |
+
},
|
6865 |
+
{
|
6866 |
+
"epoch": 1.0859678313921242,
|
6867 |
+
"grad_norm": 0.2612870931625366,
|
6868 |
+
"learning_rate": 9.098366348731476e-05,
|
6869 |
+
"loss": 0.3488,
|
6870 |
+
"step": 1958
|
6871 |
+
},
|
6872 |
+
{
|
6873 |
+
"epoch": 1.0870770937326677,
|
6874 |
+
"grad_norm": 0.3046448230743408,
|
6875 |
+
"learning_rate": 9.080473636346869e-05,
|
6876 |
+
"loss": 0.3153,
|
6877 |
+
"step": 1960
|
6878 |
+
},
|
6879 |
+
{
|
6880 |
+
"epoch": 1.0881863560732112,
|
6881 |
+
"grad_norm": 0.2403235137462616,
|
6882 |
+
"learning_rate": 9.062583892431414e-05,
|
6883 |
+
"loss": 0.301,
|
6884 |
+
"step": 1962
|
6885 |
+
},
|
6886 |
+
{
|
6887 |
+
"epoch": 1.0892956184137548,
|
6888 |
+
"grad_norm": 0.28794658184051514,
|
6889 |
+
"learning_rate": 9.044697174737843e-05,
|
6890 |
+
"loss": 0.3576,
|
6891 |
+
"step": 1964
|
6892 |
+
},
|
6893 |
+
{
|
6894 |
+
"epoch": 1.0904048807542983,
|
6895 |
+
"grad_norm": 0.2567608654499054,
|
6896 |
+
"learning_rate": 9.026813541009104e-05,
|
6897 |
+
"loss": 0.3865,
|
6898 |
+
"step": 1966
|
6899 |
+
},
|
6900 |
+
{
|
6901 |
+
"epoch": 1.0915141430948418,
|
6902 |
+
"grad_norm": 0.2786438465118408,
|
6903 |
+
"learning_rate": 9.008933048978198e-05,
|
6904 |
+
"loss": 0.2924,
|
6905 |
+
"step": 1968
|
6906 |
+
},
|
6907 |
+
{
|
6908 |
+
"epoch": 1.0926234054353854,
|
6909 |
+
"grad_norm": 0.31907153129577637,
|
6910 |
+
"learning_rate": 8.991055756367988e-05,
|
6911 |
+
"loss": 0.3104,
|
6912 |
+
"step": 1970
|
6913 |
+
},
|
6914 |
+
{
|
6915 |
+
"epoch": 1.093732667775929,
|
6916 |
+
"grad_norm": 0.24627485871315002,
|
6917 |
+
"learning_rate": 8.973181720891e-05,
|
6918 |
+
"loss": 0.3708,
|
6919 |
+
"step": 1972
|
6920 |
+
},
|
6921 |
+
{
|
6922 |
+
"epoch": 1.0948419301164725,
|
6923 |
+
"grad_norm": 0.3072524070739746,
|
6924 |
+
"learning_rate": 8.955311000249244e-05,
|
6925 |
+
"loss": 0.3699,
|
6926 |
+
"step": 1974
|
6927 |
+
},
|
6928 |
+
{
|
6929 |
+
"epoch": 1.095951192457016,
|
6930 |
+
"grad_norm": 0.3048143684864044,
|
6931 |
+
"learning_rate": 8.937443652134037e-05,
|
6932 |
+
"loss": 0.5596,
|
6933 |
+
"step": 1976
|
6934 |
+
},
|
6935 |
+
{
|
6936 |
+
"epoch": 1.0970604547975595,
|
6937 |
+
"grad_norm": 0.24488745629787445,
|
6938 |
+
"learning_rate": 8.919579734225803e-05,
|
6939 |
+
"loss": 0.327,
|
6940 |
+
"step": 1978
|
6941 |
+
},
|
6942 |
+
{
|
6943 |
+
"epoch": 1.098169717138103,
|
6944 |
+
"grad_norm": 0.30899879336357117,
|
6945 |
+
"learning_rate": 8.901719304193894e-05,
|
6946 |
+
"loss": 0.3743,
|
6947 |
+
"step": 1980
|
6948 |
+
},
|
6949 |
+
{
|
6950 |
+
"epoch": 1.0992789794786466,
|
6951 |
+
"grad_norm": 0.3292604982852936,
|
6952 |
+
"learning_rate": 8.883862419696398e-05,
|
6953 |
+
"loss": 0.414,
|
6954 |
+
"step": 1982
|
6955 |
+
},
|
6956 |
+
{
|
6957 |
+
"epoch": 1.1003882418191901,
|
6958 |
+
"grad_norm": 0.339619517326355,
|
6959 |
+
"learning_rate": 8.866009138379967e-05,
|
6960 |
+
"loss": 0.3926,
|
6961 |
+
"step": 1984
|
6962 |
+
},
|
6963 |
+
{
|
6964 |
+
"epoch": 1.1014975041597337,
|
6965 |
+
"grad_norm": 0.23006640374660492,
|
6966 |
+
"learning_rate": 8.848159517879616e-05,
|
6967 |
+
"loss": 0.318,
|
6968 |
+
"step": 1986
|
6969 |
+
},
|
6970 |
+
{
|
6971 |
+
"epoch": 1.1026067665002772,
|
6972 |
+
"grad_norm": 0.24744057655334473,
|
6973 |
+
"learning_rate": 8.830313615818535e-05,
|
6974 |
+
"loss": 0.3816,
|
6975 |
+
"step": 1988
|
6976 |
+
},
|
6977 |
+
{
|
6978 |
+
"epoch": 1.1037160288408208,
|
6979 |
+
"grad_norm": 0.2574522793292999,
|
6980 |
+
"learning_rate": 8.812471489807921e-05,
|
6981 |
+
"loss": 0.3559,
|
6982 |
+
"step": 1990
|
6983 |
+
},
|
6984 |
+
{
|
6985 |
+
"epoch": 1.1048252911813643,
|
6986 |
+
"grad_norm": 0.27559810876846313,
|
6987 |
+
"learning_rate": 8.79463319744677e-05,
|
6988 |
+
"loss": 0.3162,
|
6989 |
+
"step": 1992
|
6990 |
+
},
|
6991 |
+
{
|
6992 |
+
"epoch": 1.1059345535219078,
|
6993 |
+
"grad_norm": 0.2662334144115448,
|
6994 |
+
"learning_rate": 8.776798796321715e-05,
|
6995 |
+
"loss": 0.3153,
|
6996 |
+
"step": 1994
|
6997 |
+
},
|
6998 |
+
{
|
6999 |
+
"epoch": 1.1070438158624514,
|
7000 |
+
"grad_norm": 0.5994970798492432,
|
7001 |
+
"learning_rate": 8.758968344006812e-05,
|
7002 |
+
"loss": 0.4635,
|
7003 |
+
"step": 1996
|
7004 |
+
},
|
7005 |
+
{
|
7006 |
+
"epoch": 1.108153078202995,
|
7007 |
+
"grad_norm": 0.24415504932403564,
|
7008 |
+
"learning_rate": 8.741141898063386e-05,
|
7009 |
+
"loss": 0.2697,
|
7010 |
+
"step": 1998
|
7011 |
+
},
|
7012 |
+
{
|
7013 |
+
"epoch": 1.1092623405435384,
|
7014 |
+
"grad_norm": 0.26829585433006287,
|
7015 |
+
"learning_rate": 8.723319516039813e-05,
|
7016 |
+
"loss": 0.3761,
|
7017 |
+
"step": 2000
|
7018 |
+
},
|
7019 |
+
{
|
7020 |
+
"epoch": 1.110371602884082,
|
7021 |
+
"grad_norm": 0.2905944585800171,
|
7022 |
+
"learning_rate": 8.705501255471357e-05,
|
7023 |
+
"loss": 0.295,
|
7024 |
+
"step": 2002
|
7025 |
+
},
|
7026 |
+
{
|
7027 |
+
"epoch": 1.1114808652246255,
|
7028 |
+
"grad_norm": 0.2600457966327667,
|
7029 |
+
"learning_rate": 8.687687173879981e-05,
|
7030 |
+
"loss": 0.3804,
|
7031 |
+
"step": 2004
|
7032 |
+
},
|
7033 |
+
{
|
7034 |
+
"epoch": 1.112590127565169,
|
7035 |
+
"grad_norm": 0.2356429547071457,
|
7036 |
+
"learning_rate": 8.669877328774146e-05,
|
7037 |
+
"loss": 0.3239,
|
7038 |
+
"step": 2006
|
7039 |
+
},
|
7040 |
+
{
|
7041 |
+
"epoch": 1.1136993899057126,
|
7042 |
+
"grad_norm": 0.3630295991897583,
|
7043 |
+
"learning_rate": 8.652071777648646e-05,
|
7044 |
+
"loss": 0.3918,
|
7045 |
+
"step": 2008
|
7046 |
+
},
|
7047 |
+
{
|
7048 |
+
"epoch": 1.1148086522462561,
|
7049 |
+
"grad_norm": 0.26642143726348877,
|
7050 |
+
"learning_rate": 8.634270577984411e-05,
|
7051 |
+
"loss": 0.2999,
|
7052 |
+
"step": 2010
|
7053 |
+
},
|
7054 |
+
{
|
7055 |
+
"epoch": 1.1159179145867997,
|
7056 |
+
"grad_norm": 0.18832941353321075,
|
7057 |
+
"learning_rate": 8.616473787248314e-05,
|
7058 |
+
"loss": 0.3064,
|
7059 |
+
"step": 2012
|
7060 |
+
},
|
7061 |
+
{
|
7062 |
+
"epoch": 1.1170271769273432,
|
7063 |
+
"grad_norm": 0.3464590013027191,
|
7064 |
+
"learning_rate": 8.598681462893012e-05,
|
7065 |
+
"loss": 0.4163,
|
7066 |
+
"step": 2014
|
7067 |
+
},
|
7068 |
+
{
|
7069 |
+
"epoch": 1.1181364392678868,
|
7070 |
+
"grad_norm": 0.2680515646934509,
|
7071 |
+
"learning_rate": 8.580893662356731e-05,
|
7072 |
+
"loss": 0.4601,
|
7073 |
+
"step": 2016
|
7074 |
+
},
|
7075 |
+
{
|
7076 |
+
"epoch": 1.1192457016084303,
|
7077 |
+
"grad_norm": 0.30981600284576416,
|
7078 |
+
"learning_rate": 8.563110443063098e-05,
|
7079 |
+
"loss": 0.3951,
|
7080 |
+
"step": 2018
|
7081 |
+
},
|
7082 |
+
{
|
7083 |
+
"epoch": 1.1203549639489738,
|
7084 |
+
"grad_norm": 0.26390036940574646,
|
7085 |
+
"learning_rate": 8.545331862420944e-05,
|
7086 |
+
"loss": 0.3546,
|
7087 |
+
"step": 2020
|
7088 |
+
},
|
7089 |
+
{
|
7090 |
+
"epoch": 1.1214642262895174,
|
7091 |
+
"grad_norm": 0.2888481616973877,
|
7092 |
+
"learning_rate": 8.527557977824137e-05,
|
7093 |
+
"loss": 0.317,
|
7094 |
+
"step": 2022
|
7095 |
+
},
|
7096 |
+
{
|
7097 |
+
"epoch": 1.122573488630061,
|
7098 |
+
"grad_norm": 0.2808952331542969,
|
7099 |
+
"learning_rate": 8.50978884665137e-05,
|
7100 |
+
"loss": 0.4164,
|
7101 |
+
"step": 2024
|
7102 |
+
},
|
7103 |
+
{
|
7104 |
+
"epoch": 1.1236827509706044,
|
7105 |
+
"grad_norm": 0.19364522397518158,
|
7106 |
+
"learning_rate": 8.492024526265999e-05,
|
7107 |
+
"loss": 0.3127,
|
7108 |
+
"step": 2026
|
7109 |
+
},
|
7110 |
+
{
|
7111 |
+
"epoch": 1.124792013311148,
|
7112 |
+
"grad_norm": 0.28427523374557495,
|
7113 |
+
"learning_rate": 8.474265074015857e-05,
|
7114 |
+
"loss": 0.3471,
|
7115 |
+
"step": 2028
|
7116 |
+
},
|
7117 |
+
{
|
7118 |
+
"epoch": 1.1259012756516915,
|
7119 |
+
"grad_norm": 0.37454891204833984,
|
7120 |
+
"learning_rate": 8.45651054723305e-05,
|
7121 |
+
"loss": 0.3614,
|
7122 |
+
"step": 2030
|
7123 |
+
},
|
7124 |
+
{
|
7125 |
+
"epoch": 1.127010537992235,
|
7126 |
+
"grad_norm": 0.33179330825805664,
|
7127 |
+
"learning_rate": 8.438761003233784e-05,
|
7128 |
+
"loss": 0.3646,
|
7129 |
+
"step": 2032
|
7130 |
+
},
|
7131 |
+
{
|
7132 |
+
"epoch": 1.1281198003327786,
|
7133 |
+
"grad_norm": 0.35652342438697815,
|
7134 |
+
"learning_rate": 8.421016499318185e-05,
|
7135 |
+
"loss": 0.4751,
|
7136 |
+
"step": 2034
|
7137 |
+
},
|
7138 |
+
{
|
7139 |
+
"epoch": 1.1292290626733221,
|
7140 |
+
"grad_norm": 0.26760151982307434,
|
7141 |
+
"learning_rate": 8.403277092770106e-05,
|
7142 |
+
"loss": 0.3846,
|
7143 |
+
"step": 2036
|
7144 |
+
},
|
7145 |
+
{
|
7146 |
+
"epoch": 1.1303383250138657,
|
7147 |
+
"grad_norm": 0.3007453978061676,
|
7148 |
+
"learning_rate": 8.385542840856939e-05,
|
7149 |
+
"loss": 0.3132,
|
7150 |
+
"step": 2038
|
7151 |
+
},
|
7152 |
+
{
|
7153 |
+
"epoch": 1.1314475873544092,
|
7154 |
+
"grad_norm": 0.24367092549800873,
|
7155 |
+
"learning_rate": 8.367813800829443e-05,
|
7156 |
+
"loss": 0.3935,
|
7157 |
+
"step": 2040
|
7158 |
+
},
|
7159 |
+
{
|
7160 |
+
"epoch": 1.1325568496949527,
|
7161 |
+
"grad_norm": 0.27487999200820923,
|
7162 |
+
"learning_rate": 8.35009002992155e-05,
|
7163 |
+
"loss": 0.3476,
|
7164 |
+
"step": 2042
|
7165 |
+
},
|
7166 |
+
{
|
7167 |
+
"epoch": 1.1336661120354963,
|
7168 |
+
"grad_norm": 0.29086869955062866,
|
7169 |
+
"learning_rate": 8.332371585350186e-05,
|
7170 |
+
"loss": 0.3752,
|
7171 |
+
"step": 2044
|
7172 |
+
},
|
7173 |
+
{
|
7174 |
+
"epoch": 1.1347753743760398,
|
7175 |
+
"grad_norm": 0.29050594568252563,
|
7176 |
+
"learning_rate": 8.314658524315069e-05,
|
7177 |
+
"loss": 0.3457,
|
7178 |
+
"step": 2046
|
7179 |
+
},
|
7180 |
+
{
|
7181 |
+
"epoch": 1.1358846367165834,
|
7182 |
+
"grad_norm": 0.36133047938346863,
|
7183 |
+
"learning_rate": 8.29695090399855e-05,
|
7184 |
+
"loss": 0.3433,
|
7185 |
+
"step": 2048
|
7186 |
+
},
|
7187 |
+
{
|
7188 |
+
"epoch": 1.1369938990571269,
|
7189 |
+
"grad_norm": 0.25438520312309265,
|
7190 |
+
"learning_rate": 8.279248781565407e-05,
|
7191 |
+
"loss": 0.3761,
|
7192 |
+
"step": 2050
|
7193 |
+
},
|
7194 |
+
{
|
7195 |
+
"epoch": 1.1381031613976704,
|
7196 |
+
"grad_norm": 0.28709906339645386,
|
7197 |
+
"learning_rate": 8.261552214162679e-05,
|
7198 |
+
"loss": 0.3383,
|
7199 |
+
"step": 2052
|
7200 |
+
},
|
7201 |
+
{
|
7202 |
+
"epoch": 1.139212423738214,
|
7203 |
+
"grad_norm": 0.36977148056030273,
|
7204 |
+
"learning_rate": 8.243861258919466e-05,
|
7205 |
+
"loss": 0.3919,
|
7206 |
+
"step": 2054
|
7207 |
+
},
|
7208 |
+
{
|
7209 |
+
"epoch": 1.1403216860787575,
|
7210 |
+
"grad_norm": 0.2531401216983795,
|
7211 |
+
"learning_rate": 8.226175972946746e-05,
|
7212 |
+
"loss": 0.349,
|
7213 |
+
"step": 2056
|
7214 |
+
},
|
7215 |
+
{
|
7216 |
+
"epoch": 1.1414309484193013,
|
7217 |
+
"grad_norm": 0.2995207607746124,
|
7218 |
+
"learning_rate": 8.20849641333721e-05,
|
7219 |
+
"loss": 0.4156,
|
7220 |
+
"step": 2058
|
7221 |
+
},
|
7222 |
+
{
|
7223 |
+
"epoch": 1.1425402107598448,
|
7224 |
+
"grad_norm": 0.27246829867362976,
|
7225 |
+
"learning_rate": 8.190822637165047e-05,
|
7226 |
+
"loss": 0.4034,
|
7227 |
+
"step": 2060
|
7228 |
+
},
|
7229 |
+
{
|
7230 |
+
"epoch": 1.1436494731003883,
|
7231 |
+
"grad_norm": 0.3243946135044098,
|
7232 |
+
"learning_rate": 8.173154701485787e-05,
|
7233 |
+
"loss": 0.3872,
|
7234 |
+
"step": 2062
|
7235 |
+
},
|
7236 |
+
{
|
7237 |
+
"epoch": 1.1447587354409319,
|
7238 |
+
"grad_norm": 0.4028877317905426,
|
7239 |
+
"learning_rate": 8.155492663336094e-05,
|
7240 |
+
"loss": 0.4174,
|
7241 |
+
"step": 2064
|
7242 |
+
},
|
7243 |
+
{
|
7244 |
+
"epoch": 1.1458679977814754,
|
7245 |
+
"grad_norm": 0.23149579763412476,
|
7246 |
+
"learning_rate": 8.137836579733606e-05,
|
7247 |
+
"loss": 0.3133,
|
7248 |
+
"step": 2066
|
7249 |
+
},
|
7250 |
+
{
|
7251 |
+
"epoch": 1.146977260122019,
|
7252 |
+
"grad_norm": 0.36684414744377136,
|
7253 |
+
"learning_rate": 8.120186507676724e-05,
|
7254 |
+
"loss": 0.4227,
|
7255 |
+
"step": 2068
|
7256 |
+
},
|
7257 |
+
{
|
7258 |
+
"epoch": 1.1480865224625625,
|
7259 |
+
"grad_norm": 0.3168555498123169,
|
7260 |
+
"learning_rate": 8.102542504144455e-05,
|
7261 |
+
"loss": 0.3852,
|
7262 |
+
"step": 2070
|
7263 |
+
},
|
7264 |
+
{
|
7265 |
+
"epoch": 1.149195784803106,
|
7266 |
+
"grad_norm": 0.27771732211112976,
|
7267 |
+
"learning_rate": 8.08490462609621e-05,
|
7268 |
+
"loss": 0.3955,
|
7269 |
+
"step": 2072
|
7270 |
+
},
|
7271 |
+
{
|
7272 |
+
"epoch": 1.1503050471436496,
|
7273 |
+
"grad_norm": 0.3797549605369568,
|
7274 |
+
"learning_rate": 8.06727293047163e-05,
|
7275 |
+
"loss": 0.3989,
|
7276 |
+
"step": 2074
|
7277 |
+
},
|
7278 |
+
{
|
7279 |
+
"epoch": 1.151414309484193,
|
7280 |
+
"grad_norm": 0.28603595495224,
|
7281 |
+
"learning_rate": 8.049647474190384e-05,
|
7282 |
+
"loss": 0.3518,
|
7283 |
+
"step": 2076
|
7284 |
+
},
|
7285 |
+
{
|
7286 |
+
"epoch": 1.1525235718247366,
|
7287 |
+
"grad_norm": 0.3149471879005432,
|
7288 |
+
"learning_rate": 8.032028314152013e-05,
|
7289 |
+
"loss": 0.4822,
|
7290 |
+
"step": 2078
|
7291 |
+
},
|
7292 |
+
{
|
7293 |
+
"epoch": 1.1536328341652802,
|
7294 |
+
"grad_norm": 0.307466059923172,
|
7295 |
+
"learning_rate": 8.014415507235728e-05,
|
7296 |
+
"loss": 0.4285,
|
7297 |
+
"step": 2080
|
7298 |
+
},
|
7299 |
+
{
|
7300 |
+
"epoch": 1.1547420965058237,
|
7301 |
+
"grad_norm": 0.27271440625190735,
|
7302 |
+
"learning_rate": 7.996809110300226e-05,
|
7303 |
+
"loss": 0.3527,
|
7304 |
+
"step": 2082
|
7305 |
+
},
|
7306 |
+
{
|
7307 |
+
"epoch": 1.1558513588463672,
|
7308 |
+
"grad_norm": 0.2561948299407959,
|
7309 |
+
"learning_rate": 7.979209180183515e-05,
|
7310 |
+
"loss": 0.3103,
|
7311 |
+
"step": 2084
|
7312 |
+
},
|
7313 |
+
{
|
7314 |
+
"epoch": 1.1569606211869108,
|
7315 |
+
"grad_norm": 0.3454659879207611,
|
7316 |
+
"learning_rate": 7.961615773702727e-05,
|
7317 |
+
"loss": 0.4885,
|
7318 |
+
"step": 2086
|
7319 |
+
},
|
7320 |
+
{
|
7321 |
+
"epoch": 1.1580698835274543,
|
7322 |
+
"grad_norm": 0.41174525022506714,
|
7323 |
+
"learning_rate": 7.944028947653936e-05,
|
7324 |
+
"loss": 0.4334,
|
7325 |
+
"step": 2088
|
7326 |
+
},
|
7327 |
+
{
|
7328 |
+
"epoch": 1.1591791458679979,
|
7329 |
+
"grad_norm": 0.346825510263443,
|
7330 |
+
"learning_rate": 7.926448758811964e-05,
|
7331 |
+
"loss": 0.5005,
|
7332 |
+
"step": 2090
|
7333 |
+
},
|
7334 |
+
{
|
7335 |
+
"epoch": 1.1602884082085414,
|
7336 |
+
"grad_norm": 0.2304445505142212,
|
7337 |
+
"learning_rate": 7.908875263930214e-05,
|
7338 |
+
"loss": 0.3255,
|
7339 |
+
"step": 2092
|
7340 |
+
},
|
7341 |
+
{
|
7342 |
+
"epoch": 1.161397670549085,
|
7343 |
+
"grad_norm": 0.32904666662216187,
|
7344 |
+
"learning_rate": 7.891308519740479e-05,
|
7345 |
+
"loss": 0.3873,
|
7346 |
+
"step": 2094
|
7347 |
+
},
|
7348 |
+
{
|
7349 |
+
"epoch": 1.1625069328896285,
|
7350 |
+
"grad_norm": 0.31631243228912354,
|
7351 |
+
"learning_rate": 7.873748582952753e-05,
|
7352 |
+
"loss": 0.3743,
|
7353 |
+
"step": 2096
|
7354 |
+
},
|
7355 |
+
{
|
7356 |
+
"epoch": 1.163616195230172,
|
7357 |
+
"grad_norm": 0.2747056484222412,
|
7358 |
+
"learning_rate": 7.856195510255059e-05,
|
7359 |
+
"loss": 0.3566,
|
7360 |
+
"step": 2098
|
7361 |
+
},
|
7362 |
+
{
|
7363 |
+
"epoch": 1.1647254575707155,
|
7364 |
+
"grad_norm": 0.25638052821159363,
|
7365 |
+
"learning_rate": 7.838649358313262e-05,
|
7366 |
+
"loss": 0.3426,
|
7367 |
+
"step": 2100
|
7368 |
+
},
|
7369 |
+
{
|
7370 |
+
"epoch": 1.165834719911259,
|
7371 |
+
"grad_norm": 0.2546238303184509,
|
7372 |
+
"learning_rate": 7.821110183770884e-05,
|
7373 |
+
"loss": 0.3575,
|
7374 |
+
"step": 2102
|
7375 |
+
},
|
7376 |
+
{
|
7377 |
+
"epoch": 1.1669439822518026,
|
7378 |
+
"grad_norm": 0.25636041164398193,
|
7379 |
+
"learning_rate": 7.803578043248918e-05,
|
7380 |
+
"loss": 0.3488,
|
7381 |
+
"step": 2104
|
7382 |
+
},
|
7383 |
+
{
|
7384 |
+
"epoch": 1.1680532445923462,
|
7385 |
+
"grad_norm": 0.4820869266986847,
|
7386 |
+
"learning_rate": 7.786052993345656e-05,
|
7387 |
+
"loss": 0.5524,
|
7388 |
+
"step": 2106
|
7389 |
+
},
|
7390 |
+
{
|
7391 |
+
"epoch": 1.1691625069328897,
|
7392 |
+
"grad_norm": 0.23651207983493805,
|
7393 |
+
"learning_rate": 7.76853509063649e-05,
|
7394 |
+
"loss": 0.3091,
|
7395 |
+
"step": 2108
|
7396 |
+
},
|
7397 |
+
{
|
7398 |
+
"epoch": 1.1702717692734332,
|
7399 |
+
"grad_norm": 0.3688579797744751,
|
7400 |
+
"learning_rate": 7.75102439167375e-05,
|
7401 |
+
"loss": 0.5254,
|
7402 |
+
"step": 2110
|
7403 |
+
},
|
7404 |
+
{
|
7405 |
+
"epoch": 1.1713810316139768,
|
7406 |
+
"grad_norm": 0.2631542384624481,
|
7407 |
+
"learning_rate": 7.733520952986506e-05,
|
7408 |
+
"loss": 0.3707,
|
7409 |
+
"step": 2112
|
7410 |
+
},
|
7411 |
+
{
|
7412 |
+
"epoch": 1.1724902939545203,
|
7413 |
+
"grad_norm": 0.2712019383907318,
|
7414 |
+
"learning_rate": 7.716024831080383e-05,
|
7415 |
+
"loss": 0.2887,
|
7416 |
+
"step": 2114
|
7417 |
+
},
|
7418 |
+
{
|
7419 |
+
"epoch": 1.1735995562950639,
|
7420 |
+
"grad_norm": 0.3026323616504669,
|
7421 |
+
"learning_rate": 7.6985360824374e-05,
|
7422 |
+
"loss": 0.3424,
|
7423 |
+
"step": 2116
|
7424 |
+
},
|
7425 |
+
{
|
7426 |
+
"epoch": 1.1747088186356074,
|
7427 |
+
"grad_norm": 0.3098190426826477,
|
7428 |
+
"learning_rate": 7.68105476351576e-05,
|
7429 |
+
"loss": 0.4582,
|
7430 |
+
"step": 2118
|
7431 |
+
},
|
7432 |
+
{
|
7433 |
+
"epoch": 1.175818080976151,
|
7434 |
+
"grad_norm": 0.2829030454158783,
|
7435 |
+
"learning_rate": 7.663580930749693e-05,
|
7436 |
+
"loss": 0.481,
|
7437 |
+
"step": 2120
|
7438 |
+
},
|
7439 |
+
{
|
7440 |
+
"epoch": 1.1769273433166945,
|
7441 |
+
"grad_norm": 0.35570159554481506,
|
7442 |
+
"learning_rate": 7.646114640549246e-05,
|
7443 |
+
"loss": 0.3777,
|
7444 |
+
"step": 2122
|
7445 |
+
},
|
7446 |
+
{
|
7447 |
+
"epoch": 1.178036605657238,
|
7448 |
+
"grad_norm": 0.30424460768699646,
|
7449 |
+
"learning_rate": 7.628655949300133e-05,
|
7450 |
+
"loss": 0.3827,
|
7451 |
+
"step": 2124
|
7452 |
+
},
|
7453 |
+
{
|
7454 |
+
"epoch": 1.1791458679977815,
|
7455 |
+
"grad_norm": 0.35098132491111755,
|
7456 |
+
"learning_rate": 7.611204913363524e-05,
|
7457 |
+
"loss": 0.3636,
|
7458 |
+
"step": 2126
|
7459 |
+
},
|
7460 |
+
{
|
7461 |
+
"epoch": 1.180255130338325,
|
7462 |
+
"grad_norm": 0.27268752455711365,
|
7463 |
+
"learning_rate": 7.593761589075879e-05,
|
7464 |
+
"loss": 0.2896,
|
7465 |
+
"step": 2128
|
7466 |
+
},
|
7467 |
+
{
|
7468 |
+
"epoch": 1.1813643926788686,
|
7469 |
+
"grad_norm": 0.31950294971466064,
|
7470 |
+
"learning_rate": 7.576326032748772e-05,
|
7471 |
+
"loss": 0.4369,
|
7472 |
+
"step": 2130
|
7473 |
+
},
|
7474 |
+
{
|
7475 |
+
"epoch": 1.1824736550194122,
|
7476 |
+
"grad_norm": 0.3007744252681732,
|
7477 |
+
"learning_rate": 7.558898300668691e-05,
|
7478 |
+
"loss": 0.3927,
|
7479 |
+
"step": 2132
|
7480 |
+
},
|
7481 |
+
{
|
7482 |
+
"epoch": 1.1835829173599557,
|
7483 |
+
"grad_norm": 0.3327229917049408,
|
7484 |
+
"learning_rate": 7.541478449096861e-05,
|
7485 |
+
"loss": 0.4977,
|
7486 |
+
"step": 2134
|
7487 |
+
},
|
7488 |
+
{
|
7489 |
+
"epoch": 1.1846921797004992,
|
7490 |
+
"grad_norm": 0.2749979794025421,
|
7491 |
+
"learning_rate": 7.524066534269079e-05,
|
7492 |
+
"loss": 0.3875,
|
7493 |
+
"step": 2136
|
7494 |
+
},
|
7495 |
+
{
|
7496 |
+
"epoch": 1.1858014420410428,
|
7497 |
+
"grad_norm": 0.325305312871933,
|
7498 |
+
"learning_rate": 7.506662612395514e-05,
|
7499 |
+
"loss": 0.4653,
|
7500 |
+
"step": 2138
|
7501 |
+
},
|
7502 |
+
{
|
7503 |
+
"epoch": 1.1869107043815863,
|
7504 |
+
"grad_norm": 0.2727961242198944,
|
7505 |
+
"learning_rate": 7.489266739660524e-05,
|
7506 |
+
"loss": 0.357,
|
7507 |
+
"step": 2140
|
7508 |
+
},
|
7509 |
+
{
|
7510 |
+
"epoch": 1.1880199667221298,
|
7511 |
+
"grad_norm": 0.2543491721153259,
|
7512 |
+
"learning_rate": 7.471878972222495e-05,
|
7513 |
+
"loss": 0.3108,
|
7514 |
+
"step": 2142
|
7515 |
+
},
|
7516 |
+
{
|
7517 |
+
"epoch": 1.1891292290626734,
|
7518 |
+
"grad_norm": 0.30424436926841736,
|
7519 |
+
"learning_rate": 7.454499366213642e-05,
|
7520 |
+
"loss": 0.4161,
|
7521 |
+
"step": 2144
|
7522 |
+
},
|
7523 |
+
{
|
7524 |
+
"epoch": 1.190238491403217,
|
7525 |
+
"grad_norm": 0.35166969895362854,
|
7526 |
+
"learning_rate": 7.437127977739835e-05,
|
7527 |
+
"loss": 0.3729,
|
7528 |
+
"step": 2146
|
7529 |
+
},
|
7530 |
+
{
|
7531 |
+
"epoch": 1.1913477537437605,
|
7532 |
+
"grad_norm": 0.3051074743270874,
|
7533 |
+
"learning_rate": 7.419764862880408e-05,
|
7534 |
+
"loss": 0.3194,
|
7535 |
+
"step": 2148
|
7536 |
+
},
|
7537 |
+
{
|
7538 |
+
"epoch": 1.192457016084304,
|
7539 |
+
"grad_norm": 0.42905622720718384,
|
7540 |
+
"learning_rate": 7.402410077687993e-05,
|
7541 |
+
"loss": 0.4673,
|
7542 |
+
"step": 2150
|
7543 |
+
},
|
7544 |
+
{
|
7545 |
+
"epoch": 1.1935662784248475,
|
7546 |
+
"grad_norm": 0.273519903421402,
|
7547 |
+
"learning_rate": 7.38506367818833e-05,
|
7548 |
+
"loss": 0.4068,
|
7549 |
+
"step": 2152
|
7550 |
+
},
|
7551 |
+
{
|
7552 |
+
"epoch": 1.194675540765391,
|
7553 |
+
"grad_norm": 0.35866236686706543,
|
7554 |
+
"learning_rate": 7.367725720380087e-05,
|
7555 |
+
"loss": 0.4065,
|
7556 |
+
"step": 2154
|
7557 |
+
},
|
7558 |
+
{
|
7559 |
+
"epoch": 1.1957848031059346,
|
7560 |
+
"grad_norm": 0.23525385558605194,
|
7561 |
+
"learning_rate": 7.350396260234681e-05,
|
7562 |
+
"loss": 0.4038,
|
7563 |
+
"step": 2156
|
7564 |
+
},
|
7565 |
+
{
|
7566 |
+
"epoch": 1.1968940654464781,
|
7567 |
+
"grad_norm": 0.2770351767539978,
|
7568 |
+
"learning_rate": 7.333075353696089e-05,
|
7569 |
+
"loss": 0.3598,
|
7570 |
+
"step": 2158
|
7571 |
+
},
|
7572 |
+
{
|
7573 |
+
"epoch": 1.1980033277870217,
|
7574 |
+
"grad_norm": 0.2863808870315552,
|
7575 |
+
"learning_rate": 7.315763056680694e-05,
|
7576 |
+
"loss": 0.4592,
|
7577 |
+
"step": 2160
|
7578 |
+
},
|
7579 |
+
{
|
7580 |
+
"epoch": 1.1991125901275652,
|
7581 |
+
"grad_norm": 0.2790552079677582,
|
7582 |
+
"learning_rate": 7.298459425077064e-05,
|
7583 |
+
"loss": 0.3431,
|
7584 |
+
"step": 2162
|
7585 |
+
},
|
7586 |
+
{
|
7587 |
+
"epoch": 1.2002218524681088,
|
7588 |
+
"grad_norm": 0.3501036763191223,
|
7589 |
+
"learning_rate": 7.281164514745806e-05,
|
7590 |
+
"loss": 0.4216,
|
7591 |
+
"step": 2164
|
7592 |
+
},
|
7593 |
+
{
|
7594 |
+
"epoch": 1.2013311148086523,
|
7595 |
+
"grad_norm": 0.35109075903892517,
|
7596 |
+
"learning_rate": 7.263878381519365e-05,
|
7597 |
+
"loss": 0.3759,
|
7598 |
+
"step": 2166
|
7599 |
+
},
|
7600 |
+
{
|
7601 |
+
"epoch": 1.2024403771491958,
|
7602 |
+
"grad_norm": 0.23262017965316772,
|
7603 |
+
"learning_rate": 7.246601081201851e-05,
|
7604 |
+
"loss": 0.3056,
|
7605 |
+
"step": 2168
|
7606 |
+
},
|
7607 |
+
{
|
7608 |
+
"epoch": 1.2035496394897394,
|
7609 |
+
"grad_norm": 0.2854996919631958,
|
7610 |
+
"learning_rate": 7.229332669568871e-05,
|
7611 |
+
"loss": 0.3539,
|
7612 |
+
"step": 2170
|
7613 |
+
},
|
7614 |
+
{
|
7615 |
+
"epoch": 1.204658901830283,
|
7616 |
+
"grad_norm": 0.31174349784851074,
|
7617 |
+
"learning_rate": 7.21207320236732e-05,
|
7618 |
+
"loss": 0.4587,
|
7619 |
+
"step": 2172
|
7620 |
+
},
|
7621 |
+
{
|
7622 |
+
"epoch": 1.2057681641708264,
|
7623 |
+
"grad_norm": 0.23841983079910278,
|
7624 |
+
"learning_rate": 7.194822735315229e-05,
|
7625 |
+
"loss": 0.428,
|
7626 |
+
"step": 2174
|
7627 |
+
},
|
7628 |
+
{
|
7629 |
+
"epoch": 1.20687742651137,
|
7630 |
+
"grad_norm": 0.2881643772125244,
|
7631 |
+
"learning_rate": 7.177581324101576e-05,
|
7632 |
+
"loss": 0.3366,
|
7633 |
+
"step": 2176
|
7634 |
+
},
|
7635 |
+
{
|
7636 |
+
"epoch": 1.2079866888519135,
|
7637 |
+
"grad_norm": 0.25862303376197815,
|
7638 |
+
"learning_rate": 7.160349024386095e-05,
|
7639 |
+
"loss": 0.3604,
|
7640 |
+
"step": 2178
|
7641 |
+
},
|
7642 |
+
{
|
7643 |
+
"epoch": 1.209095951192457,
|
7644 |
+
"grad_norm": 0.2757657468318939,
|
7645 |
+
"learning_rate": 7.143125891799112e-05,
|
7646 |
+
"loss": 0.3398,
|
7647 |
+
"step": 2180
|
7648 |
+
},
|
7649 |
+
{
|
7650 |
+
"epoch": 1.2102052135330006,
|
7651 |
+
"grad_norm": 0.30355626344680786,
|
7652 |
+
"learning_rate": 7.12591198194136e-05,
|
7653 |
+
"loss": 0.3497,
|
7654 |
+
"step": 2182
|
7655 |
+
},
|
7656 |
+
{
|
7657 |
+
"epoch": 1.2113144758735441,
|
7658 |
+
"grad_norm": 0.2825808525085449,
|
7659 |
+
"learning_rate": 7.108707350383792e-05,
|
7660 |
+
"loss": 0.3918,
|
7661 |
+
"step": 2184
|
7662 |
+
},
|
7663 |
+
{
|
7664 |
+
"epoch": 1.2124237382140877,
|
7665 |
+
"grad_norm": 0.22622592747211456,
|
7666 |
+
"learning_rate": 7.091512052667413e-05,
|
7667 |
+
"loss": 0.2614,
|
7668 |
+
"step": 2186
|
7669 |
+
},
|
7670 |
+
{
|
7671 |
+
"epoch": 1.2135330005546312,
|
7672 |
+
"grad_norm": 0.23999890685081482,
|
7673 |
+
"learning_rate": 7.074326144303101e-05,
|
7674 |
+
"loss": 0.339,
|
7675 |
+
"step": 2188
|
7676 |
+
},
|
7677 |
+
{
|
7678 |
+
"epoch": 1.2146422628951747,
|
7679 |
+
"grad_norm": 0.3575845956802368,
|
7680 |
+
"learning_rate": 7.057149680771413e-05,
|
7681 |
+
"loss": 0.3956,
|
7682 |
+
"step": 2190
|
7683 |
+
},
|
7684 |
+
{
|
7685 |
+
"epoch": 1.2157515252357183,
|
7686 |
+
"grad_norm": 0.2398807853460312,
|
7687 |
+
"learning_rate": 7.039982717522422e-05,
|
7688 |
+
"loss": 0.3376,
|
7689 |
+
"step": 2192
|
7690 |
+
},
|
7691 |
+
{
|
7692 |
+
"epoch": 1.2168607875762618,
|
7693 |
+
"grad_norm": 0.2710171043872833,
|
7694 |
+
"learning_rate": 7.02282530997553e-05,
|
7695 |
+
"loss": 0.3466,
|
7696 |
+
"step": 2194
|
7697 |
+
},
|
7698 |
+
{
|
7699 |
+
"epoch": 1.2179700499168054,
|
7700 |
+
"grad_norm": 0.3714485764503479,
|
7701 |
+
"learning_rate": 7.005677513519288e-05,
|
7702 |
+
"loss": 0.3518,
|
7703 |
+
"step": 2196
|
7704 |
+
},
|
7705 |
+
{
|
7706 |
+
"epoch": 1.219079312257349,
|
7707 |
+
"grad_norm": 0.3260788321495056,
|
7708 |
+
"learning_rate": 6.988539383511224e-05,
|
7709 |
+
"loss": 0.4465,
|
7710 |
+
"step": 2198
|
7711 |
+
},
|
7712 |
+
{
|
7713 |
+
"epoch": 1.2201885745978924,
|
7714 |
+
"grad_norm": 0.34494900703430176,
|
7715 |
+
"learning_rate": 6.971410975277655e-05,
|
7716 |
+
"loss": 0.4824,
|
7717 |
+
"step": 2200
|
7718 |
+
},
|
7719 |
+
{
|
7720 |
+
"epoch": 1.221297836938436,
|
7721 |
+
"grad_norm": 0.3417797386646271,
|
7722 |
+
"learning_rate": 6.954292344113522e-05,
|
7723 |
+
"loss": 0.4585,
|
7724 |
+
"step": 2202
|
7725 |
+
},
|
7726 |
+
{
|
7727 |
+
"epoch": 1.2224070992789795,
|
7728 |
+
"grad_norm": 0.3638070821762085,
|
7729 |
+
"learning_rate": 6.937183545282199e-05,
|
7730 |
+
"loss": 0.4599,
|
7731 |
+
"step": 2204
|
7732 |
+
},
|
7733 |
+
{
|
7734 |
+
"epoch": 1.223516361619523,
|
7735 |
+
"grad_norm": 0.30623239278793335,
|
7736 |
+
"learning_rate": 6.920084634015314e-05,
|
7737 |
+
"loss": 0.2944,
|
7738 |
+
"step": 2206
|
7739 |
+
},
|
7740 |
+
{
|
7741 |
+
"epoch": 1.2246256239600666,
|
7742 |
+
"grad_norm": 0.26289814710617065,
|
7743 |
+
"learning_rate": 6.902995665512581e-05,
|
7744 |
+
"loss": 0.2996,
|
7745 |
+
"step": 2208
|
7746 |
+
},
|
7747 |
+
{
|
7748 |
+
"epoch": 1.2257348863006101,
|
7749 |
+
"grad_norm": 0.30422243475914,
|
7750 |
+
"learning_rate": 6.885916694941612e-05,
|
7751 |
+
"loss": 0.3768,
|
7752 |
+
"step": 2210
|
7753 |
+
},
|
7754 |
+
{
|
7755 |
+
"epoch": 1.2268441486411537,
|
7756 |
+
"grad_norm": 0.3084448575973511,
|
7757 |
+
"learning_rate": 6.868847777437748e-05,
|
7758 |
+
"loss": 0.3898,
|
7759 |
+
"step": 2212
|
7760 |
+
},
|
7761 |
+
{
|
7762 |
+
"epoch": 1.2279534109816972,
|
7763 |
+
"grad_norm": 0.27117034792900085,
|
7764 |
+
"learning_rate": 6.851788968103876e-05,
|
7765 |
+
"loss": 0.32,
|
7766 |
+
"step": 2214
|
7767 |
+
},
|
7768 |
+
{
|
7769 |
+
"epoch": 1.2290626733222407,
|
7770 |
+
"grad_norm": 0.31548282504081726,
|
7771 |
+
"learning_rate": 6.834740322010241e-05,
|
7772 |
+
"loss": 0.4279,
|
7773 |
+
"step": 2216
|
7774 |
+
},
|
7775 |
+
{
|
7776 |
+
"epoch": 1.2301719356627843,
|
7777 |
+
"grad_norm": 0.2990844249725342,
|
7778 |
+
"learning_rate": 6.817701894194294e-05,
|
7779 |
+
"loss": 0.3003,
|
7780 |
+
"step": 2218
|
7781 |
+
},
|
7782 |
+
{
|
7783 |
+
"epoch": 1.2312811980033278,
|
7784 |
+
"grad_norm": 0.31102094054222107,
|
7785 |
+
"learning_rate": 6.800673739660488e-05,
|
7786 |
+
"loss": 0.526,
|
7787 |
+
"step": 2220
|
7788 |
+
},
|
7789 |
+
{
|
7790 |
+
"epoch": 1.2323904603438713,
|
7791 |
+
"grad_norm": 0.3252849876880646,
|
7792 |
+
"learning_rate": 6.783655913380115e-05,
|
7793 |
+
"loss": 0.3848,
|
7794 |
+
"step": 2222
|
7795 |
+
},
|
7796 |
+
{
|
7797 |
+
"epoch": 1.2334997226844149,
|
7798 |
+
"grad_norm": 0.296875536441803,
|
7799 |
+
"learning_rate": 6.766648470291124e-05,
|
7800 |
+
"loss": 0.3791,
|
7801 |
+
"step": 2224
|
7802 |
+
},
|
7803 |
+
{
|
7804 |
+
"epoch": 1.2346089850249584,
|
7805 |
+
"grad_norm": 0.251607209444046,
|
7806 |
+
"learning_rate": 6.749651465297943e-05,
|
7807 |
+
"loss": 0.4348,
|
7808 |
+
"step": 2226
|
7809 |
+
},
|
7810 |
+
{
|
7811 |
+
"epoch": 1.235718247365502,
|
7812 |
+
"grad_norm": 0.3017038106918335,
|
7813 |
+
"learning_rate": 6.732664953271306e-05,
|
7814 |
+
"loss": 0.4041,
|
7815 |
+
"step": 2228
|
7816 |
+
},
|
7817 |
+
{
|
7818 |
+
"epoch": 1.2368275097060455,
|
7819 |
+
"grad_norm": 0.4208067059516907,
|
7820 |
+
"learning_rate": 6.715688989048066e-05,
|
7821 |
+
"loss": 0.4914,
|
7822 |
+
"step": 2230
|
7823 |
+
},
|
7824 |
+
{
|
7825 |
+
"epoch": 1.237936772046589,
|
7826 |
+
"grad_norm": 0.2521570324897766,
|
7827 |
+
"learning_rate": 6.698723627431038e-05,
|
7828 |
+
"loss": 0.2871,
|
7829 |
+
"step": 2232
|
7830 |
+
},
|
7831 |
+
{
|
7832 |
+
"epoch": 1.2390460343871326,
|
7833 |
+
"grad_norm": 0.2669265866279602,
|
7834 |
+
"learning_rate": 6.681768923188799e-05,
|
7835 |
+
"loss": 0.3533,
|
7836 |
+
"step": 2234
|
7837 |
+
},
|
7838 |
+
{
|
7839 |
+
"epoch": 1.240155296727676,
|
7840 |
+
"grad_norm": 0.27364587783813477,
|
7841 |
+
"learning_rate": 6.664824931055522e-05,
|
7842 |
+
"loss": 0.3058,
|
7843 |
+
"step": 2236
|
7844 |
+
},
|
7845 |
+
{
|
7846 |
+
"epoch": 1.2412645590682196,
|
7847 |
+
"grad_norm": 0.24516661465168,
|
7848 |
+
"learning_rate": 6.647891705730802e-05,
|
7849 |
+
"loss": 0.3059,
|
7850 |
+
"step": 2238
|
7851 |
+
},
|
7852 |
+
{
|
7853 |
+
"epoch": 1.2423738214087632,
|
7854 |
+
"grad_norm": 0.346921443939209,
|
7855 |
+
"learning_rate": 6.630969301879474e-05,
|
7856 |
+
"loss": 0.4405,
|
7857 |
+
"step": 2240
|
7858 |
+
},
|
7859 |
+
{
|
7860 |
+
"epoch": 1.2434830837493067,
|
7861 |
+
"grad_norm": 0.36792489886283875,
|
7862 |
+
"learning_rate": 6.614057774131437e-05,
|
7863 |
+
"loss": 0.5472,
|
7864 |
+
"step": 2242
|
7865 |
+
},
|
7866 |
+
{
|
7867 |
+
"epoch": 1.2445923460898503,
|
7868 |
+
"grad_norm": 0.3431490361690521,
|
7869 |
+
"learning_rate": 6.597157177081477e-05,
|
7870 |
+
"loss": 0.454,
|
7871 |
+
"step": 2244
|
7872 |
+
},
|
7873 |
+
{
|
7874 |
+
"epoch": 1.2457016084303938,
|
7875 |
+
"grad_norm": 0.3037920594215393,
|
7876 |
+
"learning_rate": 6.580267565289106e-05,
|
7877 |
+
"loss": 0.4218,
|
7878 |
+
"step": 2246
|
7879 |
+
},
|
7880 |
+
{
|
7881 |
+
"epoch": 1.2468108707709373,
|
7882 |
+
"grad_norm": 0.30732932686805725,
|
7883 |
+
"learning_rate": 6.56338899327836e-05,
|
7884 |
+
"loss": 0.3228,
|
7885 |
+
"step": 2248
|
7886 |
+
},
|
7887 |
+
{
|
7888 |
+
"epoch": 1.2479201331114809,
|
7889 |
+
"grad_norm": 0.28394436836242676,
|
7890 |
+
"learning_rate": 6.546521515537636e-05,
|
7891 |
+
"loss": 0.3824,
|
7892 |
+
"step": 2250
|
7893 |
+
},
|
7894 |
+
{
|
7895 |
+
"epoch": 1.2490293954520244,
|
7896 |
+
"grad_norm": 0.3070717751979828,
|
7897 |
+
"learning_rate": 6.52966518651952e-05,
|
7898 |
+
"loss": 0.4145,
|
7899 |
+
"step": 2252
|
7900 |
+
},
|
7901 |
+
{
|
7902 |
+
"epoch": 1.250138657792568,
|
7903 |
+
"grad_norm": 0.2168722301721573,
|
7904 |
+
"learning_rate": 6.512820060640607e-05,
|
7905 |
+
"loss": 0.3676,
|
7906 |
+
"step": 2254
|
7907 |
+
},
|
7908 |
+
{
|
7909 |
+
"epoch": 1.2512479201331115,
|
7910 |
+
"grad_norm": 0.3195607364177704,
|
7911 |
+
"learning_rate": 6.495986192281325e-05,
|
7912 |
+
"loss": 0.3998,
|
7913 |
+
"step": 2256
|
7914 |
+
},
|
7915 |
+
{
|
7916 |
+
"epoch": 1.252357182473655,
|
7917 |
+
"grad_norm": 0.23793111741542816,
|
7918 |
+
"learning_rate": 6.479163635785759e-05,
|
7919 |
+
"loss": 0.2888,
|
7920 |
+
"step": 2258
|
7921 |
+
},
|
7922 |
+
{
|
7923 |
+
"epoch": 1.2534664448141986,
|
7924 |
+
"grad_norm": 0.32127097249031067,
|
7925 |
+
"learning_rate": 6.462352445461469e-05,
|
7926 |
+
"loss": 0.3023,
|
7927 |
+
"step": 2260
|
7928 |
+
},
|
7929 |
+
{
|
7930 |
+
"epoch": 1.254575707154742,
|
7931 |
+
"grad_norm": 0.3184230625629425,
|
7932 |
+
"learning_rate": 6.445552675579341e-05,
|
7933 |
+
"loss": 0.3412,
|
7934 |
+
"step": 2262
|
7935 |
+
},
|
7936 |
+
{
|
7937 |
+
"epoch": 1.2556849694952856,
|
7938 |
+
"grad_norm": 0.3016446828842163,
|
7939 |
+
"learning_rate": 6.428764380373376e-05,
|
7940 |
+
"loss": 0.3669,
|
7941 |
+
"step": 2264
|
7942 |
+
},
|
7943 |
+
{
|
7944 |
+
"epoch": 1.2567942318358292,
|
7945 |
+
"grad_norm": 0.39701253175735474,
|
7946 |
+
"learning_rate": 6.41198761404054e-05,
|
7947 |
+
"loss": 0.515,
|
7948 |
+
"step": 2266
|
7949 |
+
},
|
7950 |
+
{
|
7951 |
+
"epoch": 1.2579034941763727,
|
7952 |
+
"grad_norm": 0.23390258848667145,
|
7953 |
+
"learning_rate": 6.395222430740573e-05,
|
7954 |
+
"loss": 0.3064,
|
7955 |
+
"step": 2268
|
7956 |
+
},
|
7957 |
+
{
|
7958 |
+
"epoch": 1.2590127565169162,
|
7959 |
+
"grad_norm": 0.3082529604434967,
|
7960 |
+
"learning_rate": 6.37846888459583e-05,
|
7961 |
+
"loss": 0.4432,
|
7962 |
+
"step": 2270
|
7963 |
+
},
|
7964 |
+
{
|
7965 |
+
"epoch": 1.2601220188574598,
|
7966 |
+
"grad_norm": 0.34247317910194397,
|
7967 |
+
"learning_rate": 6.361727029691096e-05,
|
7968 |
+
"loss": 0.4047,
|
7969 |
+
"step": 2272
|
7970 |
+
},
|
7971 |
+
{
|
7972 |
+
"epoch": 1.2612312811980033,
|
7973 |
+
"grad_norm": 0.37517714500427246,
|
7974 |
+
"learning_rate": 6.34499692007341e-05,
|
7975 |
+
"loss": 0.466,
|
7976 |
+
"step": 2274
|
7977 |
+
},
|
7978 |
+
{
|
7979 |
+
"epoch": 1.2623405435385469,
|
7980 |
+
"grad_norm": 0.2715705931186676,
|
7981 |
+
"learning_rate": 6.328278609751898e-05,
|
7982 |
+
"loss": 0.4771,
|
7983 |
+
"step": 2276
|
7984 |
+
},
|
7985 |
+
{
|
7986 |
+
"epoch": 1.2634498058790904,
|
7987 |
+
"grad_norm": 0.2877407670021057,
|
7988 |
+
"learning_rate": 6.311572152697598e-05,
|
7989 |
+
"loss": 0.4258,
|
7990 |
+
"step": 2278
|
7991 |
+
},
|
7992 |
+
{
|
7993 |
+
"epoch": 1.264559068219634,
|
7994 |
+
"grad_norm": 0.3554646074771881,
|
7995 |
+
"learning_rate": 6.294877602843275e-05,
|
7996 |
+
"loss": 0.5173,
|
7997 |
+
"step": 2280
|
7998 |
+
},
|
7999 |
+
{
|
8000 |
+
"epoch": 1.2656683305601775,
|
8001 |
+
"grad_norm": 0.27891042828559875,
|
8002 |
+
"learning_rate": 6.278195014083257e-05,
|
8003 |
+
"loss": 0.3525,
|
8004 |
+
"step": 2282
|
8005 |
+
},
|
8006 |
+
{
|
8007 |
+
"epoch": 1.266777592900721,
|
8008 |
+
"grad_norm": 0.27851444482803345,
|
8009 |
+
"learning_rate": 6.261524440273263e-05,
|
8010 |
+
"loss": 0.3509,
|
8011 |
+
"step": 2284
|
8012 |
+
},
|
8013 |
+
{
|
8014 |
+
"epoch": 1.2678868552412645,
|
8015 |
+
"grad_norm": 0.23373687267303467,
|
8016 |
+
"learning_rate": 6.244865935230215e-05,
|
8017 |
+
"loss": 0.3873,
|
8018 |
+
"step": 2286
|
8019 |
+
},
|
8020 |
+
{
|
8021 |
+
"epoch": 1.268996117581808,
|
8022 |
+
"grad_norm": 0.32285311818122864,
|
8023 |
+
"learning_rate": 6.228219552732083e-05,
|
8024 |
+
"loss": 0.4167,
|
8025 |
+
"step": 2288
|
8026 |
+
},
|
8027 |
+
{
|
8028 |
+
"epoch": 1.2701053799223516,
|
8029 |
+
"grad_norm": 0.30920663475990295,
|
8030 |
+
"learning_rate": 6.211585346517701e-05,
|
8031 |
+
"loss": 0.3945,
|
8032 |
+
"step": 2290
|
8033 |
+
},
|
8034 |
+
{
|
8035 |
+
"epoch": 1.2712146422628952,
|
8036 |
+
"grad_norm": 0.3399757444858551,
|
8037 |
+
"learning_rate": 6.194963370286595e-05,
|
8038 |
+
"loss": 0.481,
|
8039 |
+
"step": 2292
|
8040 |
+
},
|
8041 |
+
{
|
8042 |
+
"epoch": 1.2723239046034387,
|
8043 |
+
"grad_norm": 0.3406103849411011,
|
8044 |
+
"learning_rate": 6.178353677698801e-05,
|
8045 |
+
"loss": 0.3728,
|
8046 |
+
"step": 2294
|
8047 |
+
},
|
8048 |
+
{
|
8049 |
+
"epoch": 1.2734331669439822,
|
8050 |
+
"grad_norm": 0.28863847255706787,
|
8051 |
+
"learning_rate": 6.16175632237471e-05,
|
8052 |
+
"loss": 0.3582,
|
8053 |
+
"step": 2296
|
8054 |
+
},
|
8055 |
+
{
|
8056 |
+
"epoch": 1.2745424292845258,
|
8057 |
+
"grad_norm": 0.2774694859981537,
|
8058 |
+
"learning_rate": 6.145171357894885e-05,
|
8059 |
+
"loss": 0.2823,
|
8060 |
+
"step": 2298
|
8061 |
+
},
|
8062 |
+
{
|
8063 |
+
"epoch": 1.2756516916250693,
|
8064 |
+
"grad_norm": 0.312102735042572,
|
8065 |
+
"learning_rate": 6.12859883779988e-05,
|
8066 |
+
"loss": 0.3745,
|
8067 |
+
"step": 2300
|
8068 |
+
},
|
8069 |
+
{
|
8070 |
+
"epoch": 1.2767609539656128,
|
8071 |
+
"grad_norm": 0.2844535708427429,
|
8072 |
+
"learning_rate": 6.11203881559008e-05,
|
8073 |
+
"loss": 0.3743,
|
8074 |
+
"step": 2302
|
8075 |
+
},
|
8076 |
+
{
|
8077 |
+
"epoch": 1.2778702163061564,
|
8078 |
+
"grad_norm": 0.348458856344223,
|
8079 |
+
"learning_rate": 6.095491344725527e-05,
|
8080 |
+
"loss": 0.3885,
|
8081 |
+
"step": 2304
|
8082 |
+
},
|
8083 |
+
{
|
8084 |
+
"epoch": 1.2789794786467,
|
8085 |
+
"grad_norm": 0.2553083300590515,
|
8086 |
+
"learning_rate": 6.0789564786257425e-05,
|
8087 |
+
"loss": 0.3227,
|
8088 |
+
"step": 2306
|
8089 |
+
},
|
8090 |
+
{
|
8091 |
+
"epoch": 1.2800887409872435,
|
8092 |
+
"grad_norm": 0.32947540283203125,
|
8093 |
+
"learning_rate": 6.06243427066955e-05,
|
8094 |
+
"loss": 0.3981,
|
8095 |
+
"step": 2308
|
8096 |
+
},
|
8097 |
+
{
|
8098 |
+
"epoch": 1.281198003327787,
|
8099 |
+
"grad_norm": 0.3036400377750397,
|
8100 |
+
"learning_rate": 6.0459247741949166e-05,
|
8101 |
+
"loss": 0.4151,
|
8102 |
+
"step": 2310
|
8103 |
+
},
|
8104 |
+
{
|
8105 |
+
"epoch": 1.2823072656683305,
|
8106 |
+
"grad_norm": 0.3431527316570282,
|
8107 |
+
"learning_rate": 6.0294280424987724e-05,
|
8108 |
+
"loss": 0.363,
|
8109 |
+
"step": 2312
|
8110 |
+
},
|
8111 |
+
{
|
8112 |
+
"epoch": 1.283416528008874,
|
8113 |
+
"grad_norm": 0.3487553000450134,
|
8114 |
+
"learning_rate": 6.012944128836835e-05,
|
8115 |
+
"loss": 0.3893,
|
8116 |
+
"step": 2314
|
8117 |
+
},
|
8118 |
+
{
|
8119 |
+
"epoch": 1.2845257903494176,
|
8120 |
+
"grad_norm": 0.35408324003219604,
|
8121 |
+
"learning_rate": 5.99647308642345e-05,
|
8122 |
+
"loss": 0.5589,
|
8123 |
+
"step": 2316
|
8124 |
+
},
|
8125 |
+
{
|
8126 |
+
"epoch": 1.2856350526899611,
|
8127 |
+
"grad_norm": 0.1972445398569107,
|
8128 |
+
"learning_rate": 5.980014968431396e-05,
|
8129 |
+
"loss": 0.3754,
|
8130 |
+
"step": 2318
|
8131 |
+
},
|
8132 |
+
{
|
8133 |
+
"epoch": 1.2867443150305047,
|
8134 |
+
"grad_norm": 0.3481127619743347,
|
8135 |
+
"learning_rate": 5.963569827991752e-05,
|
8136 |
+
"loss": 0.4485,
|
8137 |
+
"step": 2320
|
8138 |
+
},
|
8139 |
+
{
|
8140 |
+
"epoch": 1.2878535773710482,
|
8141 |
+
"grad_norm": 0.2831405699253082,
|
8142 |
+
"learning_rate": 5.947137718193681e-05,
|
8143 |
+
"loss": 0.4482,
|
8144 |
+
"step": 2322
|
8145 |
+
},
|
8146 |
+
{
|
8147 |
+
"epoch": 1.2889628397115918,
|
8148 |
+
"grad_norm": 0.3052551746368408,
|
8149 |
+
"learning_rate": 5.930718692084289e-05,
|
8150 |
+
"loss": 0.4148,
|
8151 |
+
"step": 2324
|
8152 |
+
},
|
8153 |
+
{
|
8154 |
+
"epoch": 1.2900721020521353,
|
8155 |
+
"grad_norm": 0.2522166967391968,
|
8156 |
+
"learning_rate": 5.914312802668445e-05,
|
8157 |
+
"loss": 0.3882,
|
8158 |
+
"step": 2326
|
8159 |
+
},
|
8160 |
+
{
|
8161 |
+
"epoch": 1.2911813643926788,
|
8162 |
+
"grad_norm": 0.28503984212875366,
|
8163 |
+
"learning_rate": 5.897920102908603e-05,
|
8164 |
+
"loss": 0.3439,
|
8165 |
+
"step": 2328
|
8166 |
+
},
|
8167 |
+
{
|
8168 |
+
"epoch": 1.2922906267332224,
|
8169 |
+
"grad_norm": 0.36347198486328125,
|
8170 |
+
"learning_rate": 5.881540645724646e-05,
|
8171 |
+
"loss": 0.315,
|
8172 |
+
"step": 2330
|
8173 |
+
},
|
8174 |
+
{
|
8175 |
+
"epoch": 1.293399889073766,
|
8176 |
+
"grad_norm": 0.300930380821228,
|
8177 |
+
"learning_rate": 5.865174483993696e-05,
|
8178 |
+
"loss": 0.4299,
|
8179 |
+
"step": 2332
|
8180 |
+
},
|
8181 |
+
{
|
8182 |
+
"epoch": 1.2945091514143094,
|
8183 |
+
"grad_norm": 0.37335050106048584,
|
8184 |
+
"learning_rate": 5.8488216705499675e-05,
|
8185 |
+
"loss": 0.438,
|
8186 |
+
"step": 2334
|
8187 |
+
},
|
8188 |
+
{
|
8189 |
+
"epoch": 1.295618413754853,
|
8190 |
+
"grad_norm": 0.2799355983734131,
|
8191 |
+
"learning_rate": 5.832482258184575e-05,
|
8192 |
+
"loss": 0.3494,
|
8193 |
+
"step": 2336
|
8194 |
+
},
|
8195 |
+
{
|
8196 |
+
"epoch": 1.2967276760953965,
|
8197 |
+
"grad_norm": 0.33066707849502563,
|
8198 |
+
"learning_rate": 5.816156299645364e-05,
|
8199 |
+
"loss": 0.351,
|
8200 |
+
"step": 2338
|
8201 |
+
},
|
8202 |
+
{
|
8203 |
+
"epoch": 1.29783693843594,
|
8204 |
+
"grad_norm": 0.3257594704627991,
|
8205 |
+
"learning_rate": 5.799843847636766e-05,
|
8206 |
+
"loss": 0.3275,
|
8207 |
+
"step": 2340
|
8208 |
+
},
|
8209 |
+
{
|
8210 |
+
"epoch": 1.2989462007764836,
|
8211 |
+
"grad_norm": 0.31791582703590393,
|
8212 |
+
"learning_rate": 5.783544954819592e-05,
|
8213 |
+
"loss": 0.32,
|
8214 |
+
"step": 2342
|
8215 |
+
},
|
8216 |
+
{
|
8217 |
+
"epoch": 1.3000554631170271,
|
8218 |
+
"grad_norm": 0.18980185687541962,
|
8219 |
+
"learning_rate": 5.76725967381089e-05,
|
8220 |
+
"loss": 0.3309,
|
8221 |
+
"step": 2344
|
8222 |
+
},
|
8223 |
+
{
|
8224 |
+
"epoch": 1.3011647254575707,
|
8225 |
+
"grad_norm": 0.243652805685997,
|
8226 |
+
"learning_rate": 5.750988057183755e-05,
|
8227 |
+
"loss": 0.4156,
|
8228 |
+
"step": 2346
|
8229 |
+
},
|
8230 |
+
{
|
8231 |
+
"epoch": 1.3022739877981142,
|
8232 |
+
"grad_norm": 0.2872309386730194,
|
8233 |
+
"learning_rate": 5.7347301574671944e-05,
|
8234 |
+
"loss": 0.4077,
|
8235 |
+
"step": 2348
|
8236 |
+
},
|
8237 |
+
{
|
8238 |
+
"epoch": 1.3033832501386577,
|
8239 |
+
"grad_norm": 0.24327610433101654,
|
8240 |
+
"learning_rate": 5.718486027145906e-05,
|
8241 |
+
"loss": 0.348,
|
8242 |
+
"step": 2350
|
8243 |
+
},
|
8244 |
+
{
|
8245 |
+
"epoch": 1.3044925124792013,
|
8246 |
+
"grad_norm": 0.3590594530105591,
|
8247 |
+
"learning_rate": 5.702255718660149e-05,
|
8248 |
+
"loss": 0.4491,
|
8249 |
+
"step": 2352
|
8250 |
+
},
|
8251 |
+
{
|
8252 |
+
"epoch": 1.3056017748197448,
|
8253 |
+
"grad_norm": 0.27354371547698975,
|
8254 |
+
"learning_rate": 5.686039284405564e-05,
|
8255 |
+
"loss": 0.3492,
|
8256 |
+
"step": 2354
|
8257 |
+
},
|
8258 |
+
{
|
8259 |
+
"epoch": 1.3067110371602884,
|
8260 |
+
"grad_norm": 0.2964899241924286,
|
8261 |
+
"learning_rate": 5.6698367767329995e-05,
|
8262 |
+
"loss": 0.4393,
|
8263 |
+
"step": 2356
|
8264 |
+
},
|
8265 |
+
{
|
8266 |
+
"epoch": 1.307820299500832,
|
8267 |
+
"grad_norm": 0.2882802188396454,
|
8268 |
+
"learning_rate": 5.6536482479483424e-05,
|
8269 |
+
"loss": 0.3667,
|
8270 |
+
"step": 2358
|
8271 |
+
},
|
8272 |
+
{
|
8273 |
+
"epoch": 1.3089295618413754,
|
8274 |
+
"grad_norm": 0.288278728723526,
|
8275 |
+
"learning_rate": 5.63747375031235e-05,
|
8276 |
+
"loss": 0.3295,
|
8277 |
+
"step": 2360
|
8278 |
+
},
|
8279 |
+
{
|
8280 |
+
"epoch": 1.310038824181919,
|
8281 |
+
"grad_norm": 0.30691561102867126,
|
8282 |
+
"learning_rate": 5.6213133360404946e-05,
|
8283 |
+
"loss": 0.3473,
|
8284 |
+
"step": 2362
|
8285 |
+
},
|
8286 |
+
{
|
8287 |
+
"epoch": 1.3111480865224625,
|
8288 |
+
"grad_norm": 0.3028589189052582,
|
8289 |
+
"learning_rate": 5.605167057302778e-05,
|
8290 |
+
"loss": 0.3748,
|
8291 |
+
"step": 2364
|
8292 |
+
},
|
8293 |
+
{
|
8294 |
+
"epoch": 1.312257348863006,
|
8295 |
+
"grad_norm": 0.24918228387832642,
|
8296 |
+
"learning_rate": 5.589034966223568e-05,
|
8297 |
+
"loss": 0.4768,
|
8298 |
+
"step": 2366
|
8299 |
+
},
|
8300 |
+
{
|
8301 |
+
"epoch": 1.3133666112035496,
|
8302 |
+
"grad_norm": 0.24106858670711517,
|
8303 |
+
"learning_rate": 5.572917114881422e-05,
|
8304 |
+
"loss": 0.3659,
|
8305 |
+
"step": 2368
|
8306 |
+
},
|
8307 |
+
{
|
8308 |
+
"epoch": 1.3144758735440931,
|
8309 |
+
"grad_norm": 0.2618839740753174,
|
8310 |
+
"learning_rate": 5.5568135553089485e-05,
|
8311 |
+
"loss": 0.3333,
|
8312 |
+
"step": 2370
|
8313 |
+
},
|
8314 |
+
{
|
8315 |
+
"epoch": 1.3155851358846367,
|
8316 |
+
"grad_norm": 0.245490163564682,
|
8317 |
+
"learning_rate": 5.540724339492602e-05,
|
8318 |
+
"loss": 0.2983,
|
8319 |
+
"step": 2372
|
8320 |
+
},
|
8321 |
+
{
|
8322 |
+
"epoch": 1.3166943982251802,
|
8323 |
+
"grad_norm": 0.3315598666667938,
|
8324 |
+
"learning_rate": 5.5246495193725326e-05,
|
8325 |
+
"loss": 0.4043,
|
8326 |
+
"step": 2374
|
8327 |
+
},
|
8328 |
+
{
|
8329 |
+
"epoch": 1.3178036605657237,
|
8330 |
+
"grad_norm": 0.27348592877388,
|
8331 |
+
"learning_rate": 5.5085891468424245e-05,
|
8332 |
+
"loss": 0.4301,
|
8333 |
+
"step": 2376
|
8334 |
+
},
|
8335 |
+
{
|
8336 |
+
"epoch": 1.3189129229062673,
|
8337 |
+
"grad_norm": 0.2549262046813965,
|
8338 |
+
"learning_rate": 5.492543273749322e-05,
|
8339 |
+
"loss": 0.3471,
|
8340 |
+
"step": 2378
|
8341 |
+
},
|
8342 |
+
{
|
8343 |
+
"epoch": 1.3200221852468108,
|
8344 |
+
"grad_norm": 0.2704502046108246,
|
8345 |
+
"learning_rate": 5.476511951893454e-05,
|
8346 |
+
"loss": 0.3546,
|
8347 |
+
"step": 2380
|
8348 |
+
},
|
8349 |
+
{
|
8350 |
+
"epoch": 1.3211314475873543,
|
8351 |
+
"grad_norm": 0.3413357138633728,
|
8352 |
+
"learning_rate": 5.460495233028074e-05,
|
8353 |
+
"loss": 0.4929,
|
8354 |
+
"step": 2382
|
8355 |
+
},
|
8356 |
+
{
|
8357 |
+
"epoch": 1.3222407099278979,
|
8358 |
+
"grad_norm": 0.4276898205280304,
|
8359 |
+
"learning_rate": 5.444493168859304e-05,
|
8360 |
+
"loss": 0.4312,
|
8361 |
+
"step": 2384
|
8362 |
+
},
|
8363 |
+
{
|
8364 |
+
"epoch": 1.3233499722684414,
|
8365 |
+
"grad_norm": 0.36291375756263733,
|
8366 |
+
"learning_rate": 5.428505811045948e-05,
|
8367 |
+
"loss": 0.5678,
|
8368 |
+
"step": 2386
|
8369 |
+
},
|
8370 |
+
{
|
8371 |
+
"epoch": 1.324459234608985,
|
8372 |
+
"grad_norm": 0.18385890126228333,
|
8373 |
+
"learning_rate": 5.412533211199329e-05,
|
8374 |
+
"loss": 0.2451,
|
8375 |
+
"step": 2388
|
8376 |
+
},
|
8377 |
+
{
|
8378 |
+
"epoch": 1.3255684969495285,
|
8379 |
+
"grad_norm": 0.372714102268219,
|
8380 |
+
"learning_rate": 5.396575420883141e-05,
|
8381 |
+
"loss": 0.3532,
|
8382 |
+
"step": 2390
|
8383 |
+
},
|
8384 |
+
{
|
8385 |
+
"epoch": 1.326677759290072,
|
8386 |
+
"grad_norm": 0.4298381805419922,
|
8387 |
+
"learning_rate": 5.380632491613265e-05,
|
8388 |
+
"loss": 0.3826,
|
8389 |
+
"step": 2392
|
8390 |
+
},
|
8391 |
+
{
|
8392 |
+
"epoch": 1.3277870216306156,
|
8393 |
+
"grad_norm": 0.27926844358444214,
|
8394 |
+
"learning_rate": 5.3647044748576e-05,
|
8395 |
+
"loss": 0.3808,
|
8396 |
+
"step": 2394
|
8397 |
+
},
|
8398 |
+
{
|
8399 |
+
"epoch": 1.328896283971159,
|
8400 |
+
"grad_norm": 0.528834342956543,
|
8401 |
+
"learning_rate": 5.3487914220359035e-05,
|
8402 |
+
"loss": 0.3296,
|
8403 |
+
"step": 2396
|
8404 |
+
},
|
8405 |
+
{
|
8406 |
+
"epoch": 1.3300055463117026,
|
8407 |
+
"grad_norm": 0.32054948806762695,
|
8408 |
+
"learning_rate": 5.332893384519639e-05,
|
8409 |
+
"loss": 0.4404,
|
8410 |
+
"step": 2398
|
8411 |
+
},
|
8412 |
+
{
|
8413 |
+
"epoch": 1.3311148086522462,
|
8414 |
+
"grad_norm": 0.29870322346687317,
|
8415 |
+
"learning_rate": 5.317010413631782e-05,
|
8416 |
+
"loss": 0.3545,
|
8417 |
+
"step": 2400
|
8418 |
+
},
|
8419 |
+
{
|
8420 |
+
"epoch": 1.3322240709927897,
|
8421 |
+
"grad_norm": 2.708609104156494,
|
8422 |
+
"learning_rate": 5.301142560646677e-05,
|
8423 |
+
"loss": 0.3227,
|
8424 |
+
"step": 2402
|
8425 |
+
},
|
8426 |
+
{
|
8427 |
+
"epoch": 1.3333333333333333,
|
8428 |
+
"grad_norm": 0.2612524628639221,
|
8429 |
+
"learning_rate": 5.285289876789849e-05,
|
8430 |
+
"loss": 0.3167,
|
8431 |
+
"step": 2404
|
8432 |
+
},
|
8433 |
+
{
|
8434 |
+
"epoch": 1.3344425956738768,
|
8435 |
+
"grad_norm": 0.2819161117076874,
|
8436 |
+
"learning_rate": 5.269452413237885e-05,
|
8437 |
+
"loss": 0.3429,
|
8438 |
+
"step": 2406
|
8439 |
+
},
|
8440 |
+
{
|
8441 |
+
"epoch": 1.3355518580144203,
|
8442 |
+
"grad_norm": 0.3466659486293793,
|
8443 |
+
"learning_rate": 5.25363022111821e-05,
|
8444 |
+
"loss": 0.3428,
|
8445 |
+
"step": 2408
|
8446 |
+
},
|
8447 |
+
{
|
8448 |
+
"epoch": 1.3366611203549639,
|
8449 |
+
"grad_norm": 0.25839924812316895,
|
8450 |
+
"learning_rate": 5.237823351508953e-05,
|
8451 |
+
"loss": 0.2604,
|
8452 |
+
"step": 2410
|
8453 |
+
},
|
8454 |
+
{
|
8455 |
+
"epoch": 1.3377703826955074,
|
8456 |
+
"grad_norm": 0.30123212933540344,
|
8457 |
+
"learning_rate": 5.22203185543878e-05,
|
8458 |
+
"loss": 0.3768,
|
8459 |
+
"step": 2412
|
8460 |
+
},
|
8461 |
+
{
|
8462 |
+
"epoch": 1.338879645036051,
|
8463 |
+
"grad_norm": 0.30337926745414734,
|
8464 |
+
"learning_rate": 5.2062557838867354e-05,
|
8465 |
+
"loss": 0.3955,
|
8466 |
+
"step": 2414
|
8467 |
+
},
|
8468 |
+
{
|
8469 |
+
"epoch": 1.3399889073765945,
|
8470 |
+
"grad_norm": 0.29621419310569763,
|
8471 |
+
"learning_rate": 5.190495187782059e-05,
|
8472 |
+
"loss": 0.3334,
|
8473 |
+
"step": 2416
|
8474 |
+
},
|
8475 |
+
{
|
8476 |
+
"epoch": 1.341098169717138,
|
8477 |
+
"grad_norm": 0.3272341191768646,
|
8478 |
+
"learning_rate": 5.174750118004029e-05,
|
8479 |
+
"loss": 0.4122,
|
8480 |
+
"step": 2418
|
8481 |
+
},
|
8482 |
+
{
|
8483 |
+
"epoch": 1.3422074320576816,
|
8484 |
+
"grad_norm": 0.2955860197544098,
|
8485 |
+
"learning_rate": 5.159020625381814e-05,
|
8486 |
+
"loss": 0.3571,
|
8487 |
+
"step": 2420
|
8488 |
+
},
|
8489 |
+
{
|
8490 |
+
"epoch": 1.343316694398225,
|
8491 |
+
"grad_norm": 0.3072221875190735,
|
8492 |
+
"learning_rate": 5.1433067606942905e-05,
|
8493 |
+
"loss": 0.3084,
|
8494 |
+
"step": 2422
|
8495 |
+
},
|
8496 |
+
{
|
8497 |
+
"epoch": 1.3444259567387689,
|
8498 |
+
"grad_norm": 0.3217550814151764,
|
8499 |
+
"learning_rate": 5.12760857466988e-05,
|
8500 |
+
"loss": 0.4189,
|
8501 |
+
"step": 2424
|
8502 |
+
},
|
8503 |
+
{
|
8504 |
+
"epoch": 1.3455352190793124,
|
8505 |
+
"grad_norm": 0.2994224727153778,
|
8506 |
+
"learning_rate": 5.111926117986383e-05,
|
8507 |
+
"loss": 0.3359,
|
8508 |
+
"step": 2426
|
8509 |
+
},
|
8510 |
+
{
|
8511 |
+
"epoch": 1.346644481419856,
|
8512 |
+
"grad_norm": 0.35402461886405945,
|
8513 |
+
"learning_rate": 5.096259441270842e-05,
|
8514 |
+
"loss": 0.4055,
|
8515 |
+
"step": 2428
|
8516 |
+
},
|
8517 |
+
{
|
8518 |
+
"epoch": 1.3477537437603995,
|
8519 |
+
"grad_norm": 0.32511135935783386,
|
8520 |
+
"learning_rate": 5.080608595099339e-05,
|
8521 |
+
"loss": 0.4424,
|
8522 |
+
"step": 2430
|
8523 |
+
},
|
8524 |
+
{
|
8525 |
+
"epoch": 1.348863006100943,
|
8526 |
+
"grad_norm": 0.30743321776390076,
|
8527 |
+
"learning_rate": 5.064973629996853e-05,
|
8528 |
+
"loss": 0.3594,
|
8529 |
+
"step": 2432
|
8530 |
}
|
8531 |
],
|
8532 |
"logging_steps": 2,
|
|
|
8546 |
"attributes": {}
|
8547 |
}
|
8548 |
},
|
8549 |
+
"total_flos": 5923485640949760.0,
|
8550 |
"train_batch_size": 8,
|
8551 |
"trial_name": null,
|
8552 |
"trial_params": null
|
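The added log_history records above each carry "epoch", "grad_norm", "learning_rate", "loss", and "step". A minimal sketch for inspecting them locally, assuming the checkpoint layout shown in this commit (the plotting choices are illustrative, not part of the checkpoint):

```python
# Minimal sketch: plot training loss from this checkpoint's trainer_state.json
import json
import matplotlib.pyplot as plt

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Keep only records that logged a training loss
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"global_step={state['global_step']}")
plt.show()
```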