End of training

Files changed:
- README.md +4 -2
- all_results.json +10 -10
- eval_results.json +5 -5
- p_object.json +0 -0
- prediction_reference.json +0 -0
- train_results.json +6 -6
- trainer_state.json +1405 -10
README.md
CHANGED
@@ -2,6 +2,8 @@
  license: apache-2.0
  base_model: google/vit-base-patch16-224-in21k
  tags:
+ - image-classification
+ - vision
  - generated_from_trainer
  metrics:
  - f1
@@ -17,8 +19,8 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.
- - F1: 0.
+ - Loss: 0.0410
+ - F1: 0.9461

  ## Model description

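Since the README now reports the final evaluation metrics, a quick way to exercise the trained checkpoint is a plain classification pass. This is only a minimal sketch: the checkpoint path below is a stand-in taken from the Trainer output directory named in trainer_state.json, and the input image is a placeholder.

```python
# Minimal inference sketch for the fine-tuned ViT classifier described in the README.
# "./test_default_model" is a stand-in path from trainer_state.json; replace it with
# the published repo id or your local output directory.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

checkpoint = "./test_default_model"  # assumed path, not confirmed by this commit
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = AutoModelForImageClassification.from_pretrained(checkpoint)
model.eval()

image = Image.open("example.jpg").convert("RGB")  # placeholder input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(-1).item()
print(model.config.id2label.get(pred, str(pred)))
```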
all_results.json
CHANGED
@@ -1,13 +1,13 @@
  {
- "epoch": 
- "eval_f1": 0.
+ "epoch": 5.0,
+ "eval_f1": 0.9460782887783499,
  "eval_loss": 0.04101279005408287,
- "eval_runtime": 
- "eval_samples_per_second": 
- "eval_steps_per_second": 2.
- "total_flos": 
- "train_loss": 0.
- "train_runtime": 
- "train_samples_per_second": 
- "train_steps_per_second": 
+ "eval_runtime": 559.6733,
+ "eval_samples_per_second": 368.54,
+ "eval_steps_per_second": 2.88,
+ "total_flos": 9.820471825285631e+19,
+ "train_loss": 0.011631221487809769,
+ "train_runtime": 2940.6816,
+ "train_samples_per_second": 430.955,
+ "train_steps_per_second": 3.368
  }
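all_results.json is the end-of-run summary the Trainer writes out (eval_results.json and train_results.json below carry the same fields split by phase). A small sketch for reading it back; the directory name is an assumption taken from the checkpoint path in trainer_state.json.

```python
# Read the Trainer's end-of-run summary metrics. The directory name is assumed
# from "best_model_checkpoint" in trainer_state.json; adjust to your output_dir.
import json
from pathlib import Path

run_dir = Path("./test_default_model")  # hypothetical output directory
results = json.loads((run_dir / "all_results.json").read_text())

print(f"epochs:     {results['epoch']}")
print(f"eval F1:    {results['eval_f1']:.4f}")
print(f"eval loss:  {results['eval_loss']:.4f}")
print(f"throughput: {results['train_samples_per_second']:.1f} samples/s "
      f"over {results['train_runtime']:.0f} s")
```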
eval_results.json
CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 
- "eval_f1": 0.
+ "epoch": 5.0,
+ "eval_f1": 0.9460782887783499,
  "eval_loss": 0.04101279005408287,
- "eval_runtime": 
- "eval_samples_per_second": 
- "eval_steps_per_second": 2.
+ "eval_runtime": 559.6733,
+ "eval_samples_per_second": 368.54,
+ "eval_steps_per_second": 2.88
  }
p_object.json
CHANGED
The diff for this file is too large to render.

prediction_reference.json
CHANGED
The diff for this file is too large to render.
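Judging by their names, p_object.json and prediction_reference.json hold the predictions and references behind the eval_f1 reported above, but their schema is not visible in this view. Purely as a hedged sketch, an F1 of this kind is usually recomputed from two parallel label sequences; the keys and the averaging mode below are assumptions, not taken from this repo.

```python
# Hedged sketch: recomputing an F1 score from saved predictions and references.
# The key names and the averaging mode are assumptions; the real schema of
# prediction_reference.json is not shown in this diff.
import json
from sklearn.metrics import f1_score

with open("prediction_reference.json") as f:
    data = json.load(f)

y_pred = data["predictions"]  # hypothetical key
y_true = data["references"]   # hypothetical key

print(f1_score(y_true, y_pred, average="micro"))  # averaging choice is an assumption
```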
train_results.json
CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 
- "total_flos": 
- "train_loss": 0.
- "train_runtime": 
- "train_samples_per_second": 
- "train_steps_per_second": 
+ "epoch": 5.0,
+ "total_flos": 9.820471825285631e+19,
+ "train_loss": 0.011631221487809769,
+ "train_runtime": 2940.6816,
+ "train_samples_per_second": 430.955,
+ "train_steps_per_second": 3.368
  }
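As a quick sanity check (not part of the commit), the summary figures above are consistent with the global_step and train_batch_size recorded in trainer_state.json below:

```python
# Back-of-the-envelope check that the reported throughput figures line up.
# All constants are copied from the diffs in this commit.
global_step = 9905          # trainer_state.json
train_batch_size = 128      # trainer_state.json
train_runtime = 2940.6816   # seconds, from train_results.json

print(global_step / train_runtime)                     # ~3.37 steps/s   (reported 3.368)
print(global_step * train_batch_size / train_runtime)  # ~431 samples/s  (reported 430.955)
```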
trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
  {
  "best_metric": 0.04101279005408287,
  "best_model_checkpoint": "./test_default_model/checkpoint-3962",
- "epoch": 
  "eval_steps": 500,
- "global_step": 
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
@@ -5589,13 +5589,1408 @@
  "step": 7924
  },
  {
- "epoch": 4.
- "
- "
- "
- "
-
-
  }
  ],
  "logging_steps": 10,
@@ -5624,7 +7019,7 @@
  "attributes": {}
  }
  },
- "total_flos": 
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null

  {
  "best_metric": 0.04101279005408287,
  "best_model_checkpoint": "./test_default_model/checkpoint-3962",
+ "epoch": 5.0,
  "eval_steps": 500,
+ "global_step": 9905,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,

5589 |
"step": 7924
|
5590 |
},
|
5591 |
{
|
5592 |
+
"epoch": 4.003028773346794,
|
5593 |
+
"grad_norm": 0.34277331829071045,
|
5594 |
+
"learning_rate": 5.981827359919233e-06,
|
5595 |
+
"loss": 0.0532,
|
5596 |
+
"step": 7930
|
5597 |
+
},
|
5598 |
+
{
|
5599 |
+
"epoch": 4.008076728924785,
|
5600 |
+
"grad_norm": 0.40653395652770996,
|
5601 |
+
"learning_rate": 5.951539626451287e-06,
|
5602 |
+
"loss": 0.0601,
|
5603 |
+
"step": 7940
|
5604 |
+
},
|
5605 |
+
{
|
5606 |
+
"epoch": 4.013124684502777,
|
5607 |
+
"grad_norm": 0.39089271426200867,
|
5608 |
+
"learning_rate": 5.921251892983342e-06,
|
5609 |
+
"loss": 0.0585,
|
5610 |
+
"step": 7950
|
5611 |
+
},
|
5612 |
+
{
|
5613 |
+
"epoch": 4.018172640080767,
|
5614 |
+
"grad_norm": 0.3117099404335022,
|
5615 |
+
"learning_rate": 5.890964159515397e-06,
|
5616 |
+
"loss": 0.0536,
|
5617 |
+
"step": 7960
|
5618 |
+
},
|
5619 |
+
{
|
5620 |
+
"epoch": 4.023220595658758,
|
5621 |
+
"grad_norm": 0.4908514618873596,
|
5622 |
+
"learning_rate": 5.860676426047451e-06,
|
5623 |
+
"loss": 0.0618,
|
5624 |
+
"step": 7970
|
5625 |
+
},
|
5626 |
+
{
|
5627 |
+
"epoch": 4.028268551236749,
|
5628 |
+
"grad_norm": 0.35001102089881897,
|
5629 |
+
"learning_rate": 5.830388692579505e-06,
|
5630 |
+
"loss": 0.0595,
|
5631 |
+
"step": 7980
|
5632 |
+
},
|
5633 |
+
{
|
5634 |
+
"epoch": 4.03331650681474,
|
5635 |
+
"grad_norm": 0.39042168855667114,
|
5636 |
+
"learning_rate": 5.80010095911156e-06,
|
5637 |
+
"loss": 0.0639,
|
5638 |
+
"step": 7990
|
5639 |
+
},
|
5640 |
+
{
|
5641 |
+
"epoch": 4.038364462392731,
|
5642 |
+
"grad_norm": 0.48590323328971863,
|
5643 |
+
"learning_rate": 5.769813225643615e-06,
|
5644 |
+
"loss": 0.0606,
|
5645 |
+
"step": 8000
|
5646 |
+
},
|
5647 |
+
{
|
5648 |
+
"epoch": 4.043412417970722,
|
5649 |
+
"grad_norm": 0.3951605558395386,
|
5650 |
+
"learning_rate": 5.739525492175669e-06,
|
5651 |
+
"loss": 0.0585,
|
5652 |
+
"step": 8010
|
5653 |
+
},
|
5654 |
+
{
|
5655 |
+
"epoch": 4.048460373548712,
|
5656 |
+
"grad_norm": 0.4090045690536499,
|
5657 |
+
"learning_rate": 5.709237758707723e-06,
|
5658 |
+
"loss": 0.064,
|
5659 |
+
"step": 8020
|
5660 |
+
},
|
5661 |
+
{
|
5662 |
+
"epoch": 4.053508329126704,
|
5663 |
+
"grad_norm": 0.5321690440177917,
|
5664 |
+
"learning_rate": 5.6789500252397786e-06,
|
5665 |
+
"loss": 0.0581,
|
5666 |
+
"step": 8030
|
5667 |
+
},
|
5668 |
+
{
|
5669 |
+
"epoch": 4.058556284704695,
|
5670 |
+
"grad_norm": 0.4750302731990814,
|
5671 |
+
"learning_rate": 5.648662291771832e-06,
|
5672 |
+
"loss": 0.066,
|
5673 |
+
"step": 8040
|
5674 |
+
},
|
5675 |
+
{
|
5676 |
+
"epoch": 4.063604240282685,
|
5677 |
+
"grad_norm": 0.36469149589538574,
|
5678 |
+
"learning_rate": 5.618374558303887e-06,
|
5679 |
+
"loss": 0.0604,
|
5680 |
+
"step": 8050
|
5681 |
+
},
|
5682 |
+
{
|
5683 |
+
"epoch": 4.068652195860676,
|
5684 |
+
"grad_norm": 0.35261520743370056,
|
5685 |
+
"learning_rate": 5.5880868248359414e-06,
|
5686 |
+
"loss": 0.061,
|
5687 |
+
"step": 8060
|
5688 |
+
},
|
5689 |
+
{
|
5690 |
+
"epoch": 4.0737001514386675,
|
5691 |
+
"grad_norm": 0.32109716534614563,
|
5692 |
+
"learning_rate": 5.557799091367996e-06,
|
5693 |
+
"loss": 0.0613,
|
5694 |
+
"step": 8070
|
5695 |
+
},
|
5696 |
+
{
|
5697 |
+
"epoch": 4.078748107016659,
|
5698 |
+
"grad_norm": 0.41034355759620667,
|
5699 |
+
"learning_rate": 5.527511357900051e-06,
|
5700 |
+
"loss": 0.0567,
|
5701 |
+
"step": 8080
|
5702 |
+
},
|
5703 |
+
{
|
5704 |
+
"epoch": 4.083796062594649,
|
5705 |
+
"grad_norm": 0.4242144823074341,
|
5706 |
+
"learning_rate": 5.497223624432105e-06,
|
5707 |
+
"loss": 0.0539,
|
5708 |
+
"step": 8090
|
5709 |
+
},
|
5710 |
+
{
|
5711 |
+
"epoch": 4.08884401817264,
|
5712 |
+
"grad_norm": 0.32515600323677063,
|
5713 |
+
"learning_rate": 5.4669358909641595e-06,
|
5714 |
+
"loss": 0.0581,
|
5715 |
+
"step": 8100
|
5716 |
+
},
|
5717 |
+
{
|
5718 |
+
"epoch": 4.093891973750631,
|
5719 |
+
"grad_norm": 0.6698907017707825,
|
5720 |
+
"learning_rate": 5.436648157496214e-06,
|
5721 |
+
"loss": 0.0686,
|
5722 |
+
"step": 8110
|
5723 |
+
},
|
5724 |
+
{
|
5725 |
+
"epoch": 4.098939929328622,
|
5726 |
+
"grad_norm": 0.2780954837799072,
|
5727 |
+
"learning_rate": 5.406360424028269e-06,
|
5728 |
+
"loss": 0.0518,
|
5729 |
+
"step": 8120
|
5730 |
+
},
|
5731 |
+
{
|
5732 |
+
"epoch": 4.103987884906613,
|
5733 |
+
"grad_norm": 0.3639545440673828,
|
5734 |
+
"learning_rate": 5.376072690560323e-06,
|
5735 |
+
"loss": 0.0569,
|
5736 |
+
"step": 8130
|
5737 |
+
},
|
5738 |
+
{
|
5739 |
+
"epoch": 4.109035840484604,
|
5740 |
+
"grad_norm": 0.4723798930644989,
|
5741 |
+
"learning_rate": 5.3457849570923775e-06,
|
5742 |
+
"loss": 0.0596,
|
5743 |
+
"step": 8140
|
5744 |
+
},
|
5745 |
+
{
|
5746 |
+
"epoch": 4.1140837960625944,
|
5747 |
+
"grad_norm": 0.30923640727996826,
|
5748 |
+
"learning_rate": 5.315497223624433e-06,
|
5749 |
+
"loss": 0.0564,
|
5750 |
+
"step": 8150
|
5751 |
+
},
|
5752 |
+
{
|
5753 |
+
"epoch": 4.119131751640586,
|
5754 |
+
"grad_norm": 0.3050035238265991,
|
5755 |
+
"learning_rate": 5.285209490156487e-06,
|
5756 |
+
"loss": 0.0653,
|
5757 |
+
"step": 8160
|
5758 |
+
},
|
5759 |
+
{
|
5760 |
+
"epoch": 4.124179707218577,
|
5761 |
+
"grad_norm": 0.5005570650100708,
|
5762 |
+
"learning_rate": 5.254921756688541e-06,
|
5763 |
+
"loss": 0.0623,
|
5764 |
+
"step": 8170
|
5765 |
+
},
|
5766 |
+
{
|
5767 |
+
"epoch": 4.129227662796567,
|
5768 |
+
"grad_norm": 0.5100895762443542,
|
5769 |
+
"learning_rate": 5.2246340232205955e-06,
|
5770 |
+
"loss": 0.0622,
|
5771 |
+
"step": 8180
|
5772 |
+
},
|
5773 |
+
{
|
5774 |
+
"epoch": 4.134275618374558,
|
5775 |
+
"grad_norm": 0.33904436230659485,
|
5776 |
+
"learning_rate": 5.194346289752651e-06,
|
5777 |
+
"loss": 0.0575,
|
5778 |
+
"step": 8190
|
5779 |
+
},
|
5780 |
+
{
|
5781 |
+
"epoch": 4.13932357395255,
|
5782 |
+
"grad_norm": 0.3320677876472473,
|
5783 |
+
"learning_rate": 5.164058556284704e-06,
|
5784 |
+
"loss": 0.0565,
|
5785 |
+
"step": 8200
|
5786 |
+
},
|
5787 |
+
{
|
5788 |
+
"epoch": 4.14437152953054,
|
5789 |
+
"grad_norm": 0.3176303803920746,
|
5790 |
+
"learning_rate": 5.133770822816759e-06,
|
5791 |
+
"loss": 0.0597,
|
5792 |
+
"step": 8210
|
5793 |
+
},
|
5794 |
+
{
|
5795 |
+
"epoch": 4.149419485108531,
|
5796 |
+
"grad_norm": 0.33052679896354675,
|
5797 |
+
"learning_rate": 5.103483089348814e-06,
|
5798 |
+
"loss": 0.0553,
|
5799 |
+
"step": 8220
|
5800 |
+
},
|
5801 |
+
{
|
5802 |
+
"epoch": 4.154467440686522,
|
5803 |
+
"grad_norm": 0.3024562895298004,
|
5804 |
+
"learning_rate": 5.073195355880868e-06,
|
5805 |
+
"loss": 0.0595,
|
5806 |
+
"step": 8230
|
5807 |
+
},
|
5808 |
+
{
|
5809 |
+
"epoch": 4.159515396264513,
|
5810 |
+
"grad_norm": 0.380520224571228,
|
5811 |
+
"learning_rate": 5.042907622412923e-06,
|
5812 |
+
"loss": 0.048,
|
5813 |
+
"step": 8240
|
5814 |
+
},
|
5815 |
+
{
|
5816 |
+
"epoch": 4.164563351842504,
|
5817 |
+
"grad_norm": 0.47053784132003784,
|
5818 |
+
"learning_rate": 5.012619888944977e-06,
|
5819 |
+
"loss": 0.0616,
|
5820 |
+
"step": 8250
|
5821 |
+
},
|
5822 |
+
{
|
5823 |
+
"epoch": 4.169611307420495,
|
5824 |
+
"grad_norm": 0.5295135378837585,
|
5825 |
+
"learning_rate": 4.982332155477032e-06,
|
5826 |
+
"loss": 0.0579,
|
5827 |
+
"step": 8260
|
5828 |
+
},
|
5829 |
+
{
|
5830 |
+
"epoch": 4.174659262998485,
|
5831 |
+
"grad_norm": 0.3950503468513489,
|
5832 |
+
"learning_rate": 4.952044422009086e-06,
|
5833 |
+
"loss": 0.0594,
|
5834 |
+
"step": 8270
|
5835 |
+
},
|
5836 |
+
{
|
5837 |
+
"epoch": 4.1797072185764765,
|
5838 |
+
"grad_norm": 0.40204277634620667,
|
5839 |
+
"learning_rate": 4.921756688541141e-06,
|
5840 |
+
"loss": 0.0568,
|
5841 |
+
"step": 8280
|
5842 |
+
},
|
5843 |
+
{
|
5844 |
+
"epoch": 4.184755174154468,
|
5845 |
+
"grad_norm": 0.4756285548210144,
|
5846 |
+
"learning_rate": 4.891468955073196e-06,
|
5847 |
+
"loss": 0.0684,
|
5848 |
+
"step": 8290
|
5849 |
+
},
|
5850 |
+
{
|
5851 |
+
"epoch": 4.189803129732458,
|
5852 |
+
"grad_norm": 0.42255735397338867,
|
5853 |
+
"learning_rate": 4.86118122160525e-06,
|
5854 |
+
"loss": 0.0551,
|
5855 |
+
"step": 8300
|
5856 |
+
},
|
5857 |
+
{
|
5858 |
+
"epoch": 4.194851085310449,
|
5859 |
+
"grad_norm": 0.35746055841445923,
|
5860 |
+
"learning_rate": 4.830893488137305e-06,
|
5861 |
+
"loss": 0.0536,
|
5862 |
+
"step": 8310
|
5863 |
+
},
|
5864 |
+
{
|
5865 |
+
"epoch": 4.1998990408884405,
|
5866 |
+
"grad_norm": 0.2798272371292114,
|
5867 |
+
"learning_rate": 4.800605754669359e-06,
|
5868 |
+
"loss": 0.0654,
|
5869 |
+
"step": 8320
|
5870 |
+
},
|
5871 |
+
{
|
5872 |
+
"epoch": 4.204946996466431,
|
5873 |
+
"grad_norm": 0.4099213778972626,
|
5874 |
+
"learning_rate": 4.770318021201413e-06,
|
5875 |
+
"loss": 0.0695,
|
5876 |
+
"step": 8330
|
5877 |
+
},
|
5878 |
+
{
|
5879 |
+
"epoch": 4.209994952044422,
|
5880 |
+
"grad_norm": 0.31809088587760925,
|
5881 |
+
"learning_rate": 4.7400302877334685e-06,
|
5882 |
+
"loss": 0.0567,
|
5883 |
+
"step": 8340
|
5884 |
+
},
|
5885 |
+
{
|
5886 |
+
"epoch": 4.215042907622413,
|
5887 |
+
"grad_norm": 0.3884822726249695,
|
5888 |
+
"learning_rate": 4.709742554265523e-06,
|
5889 |
+
"loss": 0.0621,
|
5890 |
+
"step": 8350
|
5891 |
+
},
|
5892 |
+
{
|
5893 |
+
"epoch": 4.2200908632004035,
|
5894 |
+
"grad_norm": 0.4989534020423889,
|
5895 |
+
"learning_rate": 4.679454820797577e-06,
|
5896 |
+
"loss": 0.0591,
|
5897 |
+
"step": 8360
|
5898 |
+
},
|
5899 |
+
{
|
5900 |
+
"epoch": 4.225138818778395,
|
5901 |
+
"grad_norm": 0.5055777430534363,
|
5902 |
+
"learning_rate": 4.649167087329631e-06,
|
5903 |
+
"loss": 0.0552,
|
5904 |
+
"step": 8370
|
5905 |
+
},
|
5906 |
+
{
|
5907 |
+
"epoch": 4.230186774356386,
|
5908 |
+
"grad_norm": 0.4415469765663147,
|
5909 |
+
"learning_rate": 4.6188793538616865e-06,
|
5910 |
+
"loss": 0.0726,
|
5911 |
+
"step": 8380
|
5912 |
+
},
|
5913 |
+
{
|
5914 |
+
"epoch": 4.235234729934376,
|
5915 |
+
"grad_norm": 0.24666030704975128,
|
5916 |
+
"learning_rate": 4.58859162039374e-06,
|
5917 |
+
"loss": 0.0526,
|
5918 |
+
"step": 8390
|
5919 |
+
},
|
5920 |
+
{
|
5921 |
+
"epoch": 4.240282685512367,
|
5922 |
+
"grad_norm": 0.49552977085113525,
|
5923 |
+
"learning_rate": 4.558303886925795e-06,
|
5924 |
+
"loss": 0.0607,
|
5925 |
+
"step": 8400
|
5926 |
+
},
|
5927 |
+
{
|
5928 |
+
"epoch": 4.245330641090359,
|
5929 |
+
"grad_norm": 0.3048471510410309,
|
5930 |
+
"learning_rate": 4.52801615345785e-06,
|
5931 |
+
"loss": 0.0628,
|
5932 |
+
"step": 8410
|
5933 |
+
},
|
5934 |
+
{
|
5935 |
+
"epoch": 4.250378596668349,
|
5936 |
+
"grad_norm": 0.3662854731082916,
|
5937 |
+
"learning_rate": 4.497728419989904e-06,
|
5938 |
+
"loss": 0.062,
|
5939 |
+
"step": 8420
|
5940 |
+
},
|
5941 |
+
{
|
5942 |
+
"epoch": 4.25542655224634,
|
5943 |
+
"grad_norm": 0.3893071711063385,
|
5944 |
+
"learning_rate": 4.467440686521959e-06,
|
5945 |
+
"loss": 0.0542,
|
5946 |
+
"step": 8430
|
5947 |
+
},
|
5948 |
+
{
|
5949 |
+
"epoch": 4.260474507824331,
|
5950 |
+
"grad_norm": 0.40179580450057983,
|
5951 |
+
"learning_rate": 4.437152953054013e-06,
|
5952 |
+
"loss": 0.0524,
|
5953 |
+
"step": 8440
|
5954 |
+
},
|
5955 |
+
{
|
5956 |
+
"epoch": 4.265522463402322,
|
5957 |
+
"grad_norm": 0.35265469551086426,
|
5958 |
+
"learning_rate": 4.406865219586068e-06,
|
5959 |
+
"loss": 0.0616,
|
5960 |
+
"step": 8450
|
5961 |
+
},
|
5962 |
+
{
|
5963 |
+
"epoch": 4.270570418980313,
|
5964 |
+
"grad_norm": 0.2585351765155792,
|
5965 |
+
"learning_rate": 4.376577486118122e-06,
|
5966 |
+
"loss": 0.058,
|
5967 |
+
"step": 8460
|
5968 |
+
},
|
5969 |
+
{
|
5970 |
+
"epoch": 4.275618374558304,
|
5971 |
+
"grad_norm": 0.4452759325504303,
|
5972 |
+
"learning_rate": 4.346289752650177e-06,
|
5973 |
+
"loss": 0.0533,
|
5974 |
+
"step": 8470
|
5975 |
+
},
|
5976 |
+
{
|
5977 |
+
"epoch": 4.280666330136295,
|
5978 |
+
"grad_norm": 0.40577125549316406,
|
5979 |
+
"learning_rate": 4.316002019182232e-06,
|
5980 |
+
"loss": 0.055,
|
5981 |
+
"step": 8480
|
5982 |
+
},
|
5983 |
+
{
|
5984 |
+
"epoch": 4.285714285714286,
|
5985 |
+
"grad_norm": 0.2692396938800812,
|
5986 |
+
"learning_rate": 4.2857142857142855e-06,
|
5987 |
+
"loss": 0.0616,
|
5988 |
+
"step": 8490
|
5989 |
+
},
|
5990 |
+
{
|
5991 |
+
"epoch": 4.290762241292277,
|
5992 |
+
"grad_norm": 0.47697675228118896,
|
5993 |
+
"learning_rate": 4.255426552246341e-06,
|
5994 |
+
"loss": 0.0596,
|
5995 |
+
"step": 8500
|
5996 |
+
},
|
5997 |
+
{
|
5998 |
+
"epoch": 4.295810196870267,
|
5999 |
+
"grad_norm": 0.4272094964981079,
|
6000 |
+
"learning_rate": 4.225138818778395e-06,
|
6001 |
+
"loss": 0.0571,
|
6002 |
+
"step": 8510
|
6003 |
+
},
|
6004 |
+
{
|
6005 |
+
"epoch": 4.300858152448258,
|
6006 |
+
"grad_norm": 0.5147340297698975,
|
6007 |
+
"learning_rate": 4.194851085310449e-06,
|
6008 |
+
"loss": 0.0432,
|
6009 |
+
"step": 8520
|
6010 |
+
},
|
6011 |
+
{
|
6012 |
+
"epoch": 4.3059061080262495,
|
6013 |
+
"grad_norm": 0.37690308690071106,
|
6014 |
+
"learning_rate": 4.1645633518425035e-06,
|
6015 |
+
"loss": 0.054,
|
6016 |
+
"step": 8530
|
6017 |
+
},
|
6018 |
+
{
|
6019 |
+
"epoch": 4.310954063604241,
|
6020 |
+
"grad_norm": 0.5072263479232788,
|
6021 |
+
"learning_rate": 4.134275618374559e-06,
|
6022 |
+
"loss": 0.0575,
|
6023 |
+
"step": 8540
|
6024 |
+
},
|
6025 |
+
{
|
6026 |
+
"epoch": 4.316002019182231,
|
6027 |
+
"grad_norm": 0.3782062232494354,
|
6028 |
+
"learning_rate": 4.103987884906613e-06,
|
6029 |
+
"loss": 0.0558,
|
6030 |
+
"step": 8550
|
6031 |
+
},
|
6032 |
+
{
|
6033 |
+
"epoch": 4.321049974760222,
|
6034 |
+
"grad_norm": 0.27360981702804565,
|
6035 |
+
"learning_rate": 4.073700151438667e-06,
|
6036 |
+
"loss": 0.0645,
|
6037 |
+
"step": 8560
|
6038 |
+
},
|
6039 |
+
{
|
6040 |
+
"epoch": 4.326097930338213,
|
6041 |
+
"grad_norm": 0.5791490077972412,
|
6042 |
+
"learning_rate": 4.043412417970722e-06,
|
6043 |
+
"loss": 0.0751,
|
6044 |
+
"step": 8570
|
6045 |
+
},
|
6046 |
+
{
|
6047 |
+
"epoch": 4.331145885916204,
|
6048 |
+
"grad_norm": 0.2799968421459198,
|
6049 |
+
"learning_rate": 4.013124684502776e-06,
|
6050 |
+
"loss": 0.0542,
|
6051 |
+
"step": 8580
|
6052 |
+
},
|
6053 |
+
{
|
6054 |
+
"epoch": 4.336193841494195,
|
6055 |
+
"grad_norm": 0.4403197467327118,
|
6056 |
+
"learning_rate": 3.982836951034831e-06,
|
6057 |
+
"loss": 0.0647,
|
6058 |
+
"step": 8590
|
6059 |
+
},
|
6060 |
+
{
|
6061 |
+
"epoch": 4.341241797072186,
|
6062 |
+
"grad_norm": 0.3798120319843292,
|
6063 |
+
"learning_rate": 3.952549217566885e-06,
|
6064 |
+
"loss": 0.0545,
|
6065 |
+
"step": 8600
|
6066 |
+
},
|
6067 |
+
{
|
6068 |
+
"epoch": 4.3462897526501765,
|
6069 |
+
"grad_norm": 0.40195682644844055,
|
6070 |
+
"learning_rate": 3.92226148409894e-06,
|
6071 |
+
"loss": 0.058,
|
6072 |
+
"step": 8610
|
6073 |
+
},
|
6074 |
+
{
|
6075 |
+
"epoch": 4.351337708228168,
|
6076 |
+
"grad_norm": 0.30205094814300537,
|
6077 |
+
"learning_rate": 3.891973750630995e-06,
|
6078 |
+
"loss": 0.0585,
|
6079 |
+
"step": 8620
|
6080 |
+
},
|
6081 |
+
{
|
6082 |
+
"epoch": 4.356385663806159,
|
6083 |
+
"grad_norm": 0.3941998779773712,
|
6084 |
+
"learning_rate": 3.861686017163049e-06,
|
6085 |
+
"loss": 0.0628,
|
6086 |
+
"step": 8630
|
6087 |
+
},
|
6088 |
+
{
|
6089 |
+
"epoch": 4.361433619384149,
|
6090 |
+
"grad_norm": 0.4298538267612457,
|
6091 |
+
"learning_rate": 3.831398283695104e-06,
|
6092 |
+
"loss": 0.0519,
|
6093 |
+
"step": 8640
|
6094 |
+
},
|
6095 |
+
{
|
6096 |
+
"epoch": 4.36648157496214,
|
6097 |
+
"grad_norm": 0.45147988200187683,
|
6098 |
+
"learning_rate": 3.801110550227158e-06,
|
6099 |
+
"loss": 0.0555,
|
6100 |
+
"step": 8650
|
6101 |
+
},
|
6102 |
+
{
|
6103 |
+
"epoch": 4.371529530540132,
|
6104 |
+
"grad_norm": 0.3213054835796356,
|
6105 |
+
"learning_rate": 3.7708228167592127e-06,
|
6106 |
+
"loss": 0.0573,
|
6107 |
+
"step": 8660
|
6108 |
+
},
|
6109 |
+
{
|
6110 |
+
"epoch": 4.376577486118122,
|
6111 |
+
"grad_norm": 0.3924931287765503,
|
6112 |
+
"learning_rate": 3.740535083291267e-06,
|
6113 |
+
"loss": 0.0609,
|
6114 |
+
"step": 8670
|
6115 |
+
},
|
6116 |
+
{
|
6117 |
+
"epoch": 4.381625441696113,
|
6118 |
+
"grad_norm": 0.3347417116165161,
|
6119 |
+
"learning_rate": 3.7102473498233217e-06,
|
6120 |
+
"loss": 0.0573,
|
6121 |
+
"step": 8680
|
6122 |
+
},
|
6123 |
+
{
|
6124 |
+
"epoch": 4.386673397274104,
|
6125 |
+
"grad_norm": 0.5916124582290649,
|
6126 |
+
"learning_rate": 3.679959616355376e-06,
|
6127 |
+
"loss": 0.0631,
|
6128 |
+
"step": 8690
|
6129 |
+
},
|
6130 |
+
{
|
6131 |
+
"epoch": 4.391721352852095,
|
6132 |
+
"grad_norm": 0.4623749852180481,
|
6133 |
+
"learning_rate": 3.6496718828874303e-06,
|
6134 |
+
"loss": 0.0603,
|
6135 |
+
"step": 8700
|
6136 |
+
},
|
6137 |
+
{
|
6138 |
+
"epoch": 4.396769308430086,
|
6139 |
+
"grad_norm": 0.3337404727935791,
|
6140 |
+
"learning_rate": 3.6193841494194855e-06,
|
6141 |
+
"loss": 0.0559,
|
6142 |
+
"step": 8710
|
6143 |
+
},
|
6144 |
+
{
|
6145 |
+
"epoch": 4.401817264008077,
|
6146 |
+
"grad_norm": 0.4419994652271271,
|
6147 |
+
"learning_rate": 3.5890964159515398e-06,
|
6148 |
+
"loss": 0.0574,
|
6149 |
+
"step": 8720
|
6150 |
+
},
|
6151 |
+
{
|
6152 |
+
"epoch": 4.406865219586067,
|
6153 |
+
"grad_norm": 0.47578585147857666,
|
6154 |
+
"learning_rate": 3.5588086824835945e-06,
|
6155 |
+
"loss": 0.0554,
|
6156 |
+
"step": 8730
|
6157 |
+
},
|
6158 |
+
{
|
6159 |
+
"epoch": 4.411913175164059,
|
6160 |
+
"grad_norm": 0.3991304337978363,
|
6161 |
+
"learning_rate": 3.5285209490156488e-06,
|
6162 |
+
"loss": 0.0522,
|
6163 |
+
"step": 8740
|
6164 |
+
},
|
6165 |
+
{
|
6166 |
+
"epoch": 4.41696113074205,
|
6167 |
+
"grad_norm": 0.2646455764770508,
|
6168 |
+
"learning_rate": 3.498233215547703e-06,
|
6169 |
+
"loss": 0.053,
|
6170 |
+
"step": 8750
|
6171 |
+
},
|
6172 |
+
{
|
6173 |
+
"epoch": 4.42200908632004,
|
6174 |
+
"grad_norm": 0.38998502492904663,
|
6175 |
+
"learning_rate": 3.4679454820797578e-06,
|
6176 |
+
"loss": 0.0697,
|
6177 |
+
"step": 8760
|
6178 |
+
},
|
6179 |
+
{
|
6180 |
+
"epoch": 4.427057041898031,
|
6181 |
+
"grad_norm": 0.39025184512138367,
|
6182 |
+
"learning_rate": 3.437657748611812e-06,
|
6183 |
+
"loss": 0.0564,
|
6184 |
+
"step": 8770
|
6185 |
+
},
|
6186 |
+
{
|
6187 |
+
"epoch": 4.4321049974760225,
|
6188 |
+
"grad_norm": 0.36179178953170776,
|
6189 |
+
"learning_rate": 3.407370015143867e-06,
|
6190 |
+
"loss": 0.0695,
|
6191 |
+
"step": 8780
|
6192 |
+
},
|
6193 |
+
{
|
6194 |
+
"epoch": 4.437152953054013,
|
6195 |
+
"grad_norm": 0.47754356265068054,
|
6196 |
+
"learning_rate": 3.3770822816759215e-06,
|
6197 |
+
"loss": 0.0599,
|
6198 |
+
"step": 8790
|
6199 |
+
},
|
6200 |
+
{
|
6201 |
+
"epoch": 4.442200908632004,
|
6202 |
+
"grad_norm": 0.3687341511249542,
|
6203 |
+
"learning_rate": 3.346794548207976e-06,
|
6204 |
+
"loss": 0.0577,
|
6205 |
+
"step": 8800
|
6206 |
+
},
|
6207 |
+
{
|
6208 |
+
"epoch": 4.447248864209995,
|
6209 |
+
"grad_norm": 0.4395473003387451,
|
6210 |
+
"learning_rate": 3.3165068147400305e-06,
|
6211 |
+
"loss": 0.0559,
|
6212 |
+
"step": 8810
|
6213 |
+
},
|
6214 |
+
{
|
6215 |
+
"epoch": 4.4522968197879855,
|
6216 |
+
"grad_norm": 0.3659065365791321,
|
6217 |
+
"learning_rate": 3.286219081272085e-06,
|
6218 |
+
"loss": 0.0591,
|
6219 |
+
"step": 8820
|
6220 |
+
},
|
6221 |
+
{
|
6222 |
+
"epoch": 4.457344775365977,
|
6223 |
+
"grad_norm": 0.47786960005760193,
|
6224 |
+
"learning_rate": 3.255931347804139e-06,
|
6225 |
+
"loss": 0.0591,
|
6226 |
+
"step": 8830
|
6227 |
+
},
|
6228 |
+
{
|
6229 |
+
"epoch": 4.462392730943968,
|
6230 |
+
"grad_norm": 0.44323790073394775,
|
6231 |
+
"learning_rate": 3.2256436143361943e-06,
|
6232 |
+
"loss": 0.0508,
|
6233 |
+
"step": 8840
|
6234 |
+
},
|
6235 |
+
{
|
6236 |
+
"epoch": 4.467440686521958,
|
6237 |
+
"grad_norm": 0.3510769307613373,
|
6238 |
+
"learning_rate": 3.1953558808682486e-06,
|
6239 |
+
"loss": 0.0554,
|
6240 |
+
"step": 8850
|
6241 |
+
},
|
6242 |
+
{
|
6243 |
+
"epoch": 4.4724886420999495,
|
6244 |
+
"grad_norm": 0.45277318358421326,
|
6245 |
+
"learning_rate": 3.165068147400303e-06,
|
6246 |
+
"loss": 0.0532,
|
6247 |
+
"step": 8860
|
6248 |
+
},
|
6249 |
+
{
|
6250 |
+
"epoch": 4.477536597677941,
|
6251 |
+
"grad_norm": 0.5000207424163818,
|
6252 |
+
"learning_rate": 3.1347804139323576e-06,
|
6253 |
+
"loss": 0.0654,
|
6254 |
+
"step": 8870
|
6255 |
+
},
|
6256 |
+
{
|
6257 |
+
"epoch": 4.482584553255931,
|
6258 |
+
"grad_norm": 0.37949642539024353,
|
6259 |
+
"learning_rate": 3.104492680464412e-06,
|
6260 |
+
"loss": 0.0549,
|
6261 |
+
"step": 8880
|
6262 |
+
},
|
6263 |
+
{
|
6264 |
+
"epoch": 4.487632508833922,
|
6265 |
+
"grad_norm": 0.3000931143760681,
|
6266 |
+
"learning_rate": 3.0742049469964666e-06,
|
6267 |
+
"loss": 0.0544,
|
6268 |
+
"step": 8890
|
6269 |
+
},
|
6270 |
+
{
|
6271 |
+
"epoch": 4.492680464411913,
|
6272 |
+
"grad_norm": 0.512484610080719,
|
6273 |
+
"learning_rate": 3.043917213528521e-06,
|
6274 |
+
"loss": 0.0651,
|
6275 |
+
"step": 8900
|
6276 |
+
},
|
6277 |
+
{
|
6278 |
+
"epoch": 4.497728419989904,
|
6279 |
+
"grad_norm": 0.4052237570285797,
|
6280 |
+
"learning_rate": 3.0136294800605756e-06,
|
6281 |
+
"loss": 0.0601,
|
6282 |
+
"step": 8910
|
6283 |
+
},
|
6284 |
+
{
|
6285 |
+
"epoch": 4.502776375567895,
|
6286 |
+
"grad_norm": 0.3805348873138428,
|
6287 |
+
"learning_rate": 2.9833417465926303e-06,
|
6288 |
+
"loss": 0.0553,
|
6289 |
+
"step": 8920
|
6290 |
+
},
|
6291 |
+
{
|
6292 |
+
"epoch": 4.507824331145886,
|
6293 |
+
"grad_norm": 0.4143049120903015,
|
6294 |
+
"learning_rate": 2.9530540131246846e-06,
|
6295 |
+
"loss": 0.0488,
|
6296 |
+
"step": 8930
|
6297 |
+
},
|
6298 |
+
{
|
6299 |
+
"epoch": 4.512872286723876,
|
6300 |
+
"grad_norm": 0.4691813290119171,
|
6301 |
+
"learning_rate": 2.922766279656739e-06,
|
6302 |
+
"loss": 0.0544,
|
6303 |
+
"step": 8940
|
6304 |
+
},
|
6305 |
+
{
|
6306 |
+
"epoch": 4.517920242301868,
|
6307 |
+
"grad_norm": 0.40783849358558655,
|
6308 |
+
"learning_rate": 2.8924785461887936e-06,
|
6309 |
+
"loss": 0.0678,
|
6310 |
+
"step": 8950
|
6311 |
+
},
|
6312 |
+
{
|
6313 |
+
"epoch": 4.522968197879859,
|
6314 |
+
"grad_norm": 0.36696454882621765,
|
6315 |
+
"learning_rate": 2.862190812720848e-06,
|
6316 |
+
"loss": 0.0591,
|
6317 |
+
"step": 8960
|
6318 |
+
},
|
6319 |
+
{
|
6320 |
+
"epoch": 4.52801615345785,
|
6321 |
+
"grad_norm": 0.43989595770835876,
|
6322 |
+
"learning_rate": 2.8319030792529026e-06,
|
6323 |
+
"loss": 0.0604,
|
6324 |
+
"step": 8970
|
6325 |
+
},
|
6326 |
+
{
|
6327 |
+
"epoch": 4.53306410903584,
|
6328 |
+
"grad_norm": 0.38078877329826355,
|
6329 |
+
"learning_rate": 2.8016153457849574e-06,
|
6330 |
+
"loss": 0.0578,
|
6331 |
+
"step": 8980
|
6332 |
+
},
|
6333 |
+
{
|
6334 |
+
"epoch": 4.5381120646138315,
|
6335 |
+
"grad_norm": 0.3941843807697296,
|
6336 |
+
"learning_rate": 2.7713276123170117e-06,
|
6337 |
+
"loss": 0.0694,
|
6338 |
+
"step": 8990
|
6339 |
+
},
|
6340 |
+
{
|
6341 |
+
"epoch": 4.543160020191822,
|
6342 |
+
"grad_norm": 0.3795044422149658,
|
6343 |
+
"learning_rate": 2.7410398788490664e-06,
|
6344 |
+
"loss": 0.0588,
|
6345 |
+
"step": 9000
|
6346 |
+
},
|
6347 |
+
{
|
6348 |
+
"epoch": 4.548207975769813,
|
6349 |
+
"grad_norm": 0.3949735462665558,
|
6350 |
+
"learning_rate": 2.7107521453811207e-06,
|
6351 |
+
"loss": 0.0623,
|
6352 |
+
"step": 9010
|
6353 |
+
},
|
6354 |
+
{
|
6355 |
+
"epoch": 4.553255931347804,
|
6356 |
+
"grad_norm": 0.5588275194168091,
|
6357 |
+
"learning_rate": 2.680464411913175e-06,
|
6358 |
+
"loss": 0.0588,
|
6359 |
+
"step": 9020
|
6360 |
+
},
|
6361 |
+
{
|
6362 |
+
"epoch": 4.5583038869257955,
|
6363 |
+
"grad_norm": 0.29749733209609985,
|
6364 |
+
"learning_rate": 2.6501766784452297e-06,
|
6365 |
+
"loss": 0.0445,
|
6366 |
+
"step": 9030
|
6367 |
+
},
|
6368 |
+
{
|
6369 |
+
"epoch": 4.563351842503786,
|
6370 |
+
"grad_norm": 0.4993056654930115,
|
6371 |
+
"learning_rate": 2.6198889449772844e-06,
|
6372 |
+
"loss": 0.0595,
|
6373 |
+
"step": 9040
|
6374 |
+
},
|
6375 |
+
{
|
6376 |
+
"epoch": 4.568399798081777,
|
6377 |
+
"grad_norm": 0.5257248878479004,
|
6378 |
+
"learning_rate": 2.589601211509339e-06,
|
6379 |
+
"loss": 0.0469,
|
6380 |
+
"step": 9050
|
6381 |
+
},
|
6382 |
+
{
|
6383 |
+
"epoch": 4.573447753659767,
|
6384 |
+
"grad_norm": 0.35071873664855957,
|
6385 |
+
"learning_rate": 2.5593134780413934e-06,
|
6386 |
+
"loss": 0.056,
|
6387 |
+
"step": 9060
|
6388 |
+
},
|
6389 |
+
{
|
6390 |
+
"epoch": 4.5784957092377585,
|
6391 |
+
"grad_norm": 0.49088719487190247,
|
6392 |
+
"learning_rate": 2.5290257445734477e-06,
|
6393 |
+
"loss": 0.0619,
|
6394 |
+
"step": 9070
|
6395 |
+
},
|
6396 |
+
{
|
6397 |
+
"epoch": 4.58354366481575,
|
6398 |
+
"grad_norm": 0.5432353019714355,
|
6399 |
+
"learning_rate": 2.4987380111055024e-06,
|
6400 |
+
"loss": 0.0583,
|
6401 |
+
"step": 9080
|
6402 |
+
},
|
6403 |
+
{
|
6404 |
+
"epoch": 4.588591620393741,
|
6405 |
+
"grad_norm": 0.5358169674873352,
|
6406 |
+
"learning_rate": 2.4684502776375567e-06,
|
6407 |
+
"loss": 0.0618,
|
6408 |
+
"step": 9090
|
6409 |
+
},
|
6410 |
+
{
|
6411 |
+
"epoch": 4.593639575971731,
|
6412 |
+
"grad_norm": 0.299734890460968,
|
6413 |
+
"learning_rate": 2.438162544169611e-06,
|
6414 |
+
"loss": 0.0587,
|
6415 |
+
"step": 9100
|
6416 |
+
},
|
6417 |
+
{
|
6418 |
+
"epoch": 4.598687531549722,
|
6419 |
+
"grad_norm": 0.28594735264778137,
|
6420 |
+
"learning_rate": 2.407874810701666e-06,
|
6421 |
+
"loss": 0.0552,
|
6422 |
+
"step": 9110
|
6423 |
+
},
|
6424 |
+
{
|
6425 |
+
"epoch": 4.603735487127714,
|
6426 |
+
"grad_norm": 0.440019428730011,
|
6427 |
+
"learning_rate": 2.3775870772337205e-06,
|
6428 |
+
"loss": 0.0616,
|
6429 |
+
"step": 9120
|
6430 |
+
},
|
6431 |
+
{
|
6432 |
+
"epoch": 4.608783442705704,
|
6433 |
+
"grad_norm": 0.3852064311504364,
|
6434 |
+
"learning_rate": 2.347299343765775e-06,
|
6435 |
+
"loss": 0.0544,
|
6436 |
+
"step": 9130
|
6437 |
+
},
|
6438 |
+
{
|
6439 |
+
"epoch": 4.613831398283695,
|
6440 |
+
"grad_norm": 0.47597625851631165,
|
6441 |
+
"learning_rate": 2.3170116102978295e-06,
|
6442 |
+
"loss": 0.0626,
|
6443 |
+
"step": 9140
|
6444 |
+
},
|
6445 |
+
{
|
6446 |
+
"epoch": 4.618879353861686,
|
6447 |
+
"grad_norm": 0.4893425703048706,
|
6448 |
+
"learning_rate": 2.2867238768298838e-06,
|
6449 |
+
"loss": 0.0493,
|
6450 |
+
"step": 9150
|
6451 |
+
},
|
6452 |
+
{
|
6453 |
+
"epoch": 4.623927309439677,
|
6454 |
+
"grad_norm": 0.4313579201698303,
|
6455 |
+
"learning_rate": 2.2564361433619385e-06,
|
6456 |
+
"loss": 0.0533,
|
6457 |
+
"step": 9160
|
6458 |
+
},
|
6459 |
+
{
|
6460 |
+
"epoch": 4.628975265017668,
|
6461 |
+
"grad_norm": 0.31476062536239624,
|
6462 |
+
"learning_rate": 2.2261484098939928e-06,
|
6463 |
+
"loss": 0.0586,
|
6464 |
+
"step": 9170
|
6465 |
+
},
|
6466 |
+
{
|
6467 |
+
"epoch": 4.634023220595659,
|
6468 |
+
"grad_norm": 0.4846239686012268,
|
6469 |
+
"learning_rate": 2.1958606764260475e-06,
|
6470 |
+
"loss": 0.0541,
|
6471 |
+
"step": 9180
|
6472 |
+
},
|
6473 |
+
{
|
6474 |
+
"epoch": 4.639071176173649,
|
6475 |
+
"grad_norm": 0.4027024805545807,
|
6476 |
+
"learning_rate": 2.1655729429581022e-06,
|
6477 |
+
"loss": 0.0532,
|
6478 |
+
"step": 9190
|
6479 |
+
},
|
6480 |
+
{
|
6481 |
+
"epoch": 4.644119131751641,
|
6482 |
+
"grad_norm": 0.43335291743278503,
|
6483 |
+
"learning_rate": 2.1352852094901565e-06,
|
6484 |
+
"loss": 0.0664,
|
6485 |
+
"step": 9200
|
6486 |
+
},
|
6487 |
+
{
|
6488 |
+
"epoch": 4.649167087329632,
|
6489 |
+
"grad_norm": 0.47337576746940613,
|
6490 |
+
"learning_rate": 2.1049974760222112e-06,
|
6491 |
+
"loss": 0.0592,
|
6492 |
+
"step": 9210
|
6493 |
+
},
|
6494 |
+
{
|
6495 |
+
"epoch": 4.654215042907622,
|
6496 |
+
"grad_norm": 0.44911569356918335,
|
6497 |
+
"learning_rate": 2.0747097425542655e-06,
|
6498 |
+
"loss": 0.0642,
|
6499 |
+
"step": 9220
|
6500 |
+
},
|
6501 |
+
{
|
6502 |
+
"epoch": 4.659262998485613,
|
6503 |
+
"grad_norm": 0.47989997267723083,
|
6504 |
+
"learning_rate": 2.04442200908632e-06,
|
6505 |
+
"loss": 0.0558,
|
6506 |
+
"step": 9230
|
6507 |
+
},
|
6508 |
+
{
|
6509 |
+
"epoch": 4.6643109540636045,
|
6510 |
+
"grad_norm": 0.3837885856628418,
|
6511 |
+
"learning_rate": 2.014134275618375e-06,
|
6512 |
+
"loss": 0.0534,
|
6513 |
+
"step": 9240
|
6514 |
+
},
|
6515 |
+
{
|
6516 |
+
"epoch": 4.669358909641595,
|
6517 |
+
"grad_norm": 0.33468201756477356,
|
6518 |
+
"learning_rate": 1.9838465421504293e-06,
|
6519 |
+
"loss": 0.0638,
|
6520 |
+
"step": 9250
|
6521 |
+
},
|
6522 |
+
{
|
6523 |
+
"epoch": 4.674406865219586,
|
6524 |
+
"grad_norm": 0.3218873143196106,
|
6525 |
+
"learning_rate": 1.9535588086824836e-06,
|
6526 |
+
"loss": 0.0562,
|
6527 |
+
"step": 9260
|
6528 |
+
},
|
6529 |
+
{
|
6530 |
+
"epoch": 4.679454820797577,
|
6531 |
+
"grad_norm": 0.4538477659225464,
|
6532 |
+
"learning_rate": 1.9232710752145383e-06,
|
6533 |
+
"loss": 0.0562,
|
6534 |
+
"step": 9270
|
6535 |
+
},
|
6536 |
+
{
|
6537 |
+
"epoch": 4.684502776375568,
|
6538 |
+
"grad_norm": 0.42905497550964355,
|
6539 |
+
"learning_rate": 1.8929833417465926e-06,
|
6540 |
+
"loss": 0.0581,
|
6541 |
+
"step": 9280
|
6542 |
+
},
|
6543 |
+
{
|
6544 |
+
"epoch": 4.689550731953559,
|
6545 |
+
"grad_norm": 0.3783353567123413,
|
6546 |
+
"learning_rate": 1.8626956082786473e-06,
|
6547 |
+
"loss": 0.0486,
|
6548 |
+
"step": 9290
|
6549 |
+
},
|
6550 |
+
{
|
6551 |
+
"epoch": 4.69459868753155,
|
6552 |
+
"grad_norm": 0.42233869433403015,
|
6553 |
+
"learning_rate": 1.8324078748107018e-06,
|
6554 |
+
"loss": 0.0534,
|
6555 |
+
"step": 9300
|
6556 |
+
},
|
6557 |
+
{
|
6558 |
+
"epoch": 4.69964664310954,
|
6559 |
+
"grad_norm": 0.2925800383090973,
|
6560 |
+
"learning_rate": 1.802120141342756e-06,
|
6561 |
+
"loss": 0.0557,
|
6562 |
+
"step": 9310
|
6563 |
+
},
|
6564 |
+
{
|
6565 |
+
"epoch": 4.7046945986875315,
|
6566 |
+
"grad_norm": 0.4508257210254669,
|
6567 |
+
"learning_rate": 1.7718324078748106e-06,
|
6568 |
+
"loss": 0.0615,
|
6569 |
+
"step": 9320
|
6570 |
+
},
|
6571 |
+
{
|
6572 |
+
"epoch": 4.709742554265523,
|
6573 |
+
"grad_norm": 0.5092118382453918,
|
6574 |
+
"learning_rate": 1.7415446744068653e-06,
|
6575 |
+
"loss": 0.0577,
|
6576 |
+
"step": 9330
|
6577 |
+
},
|
6578 |
+
{
|
6579 |
+
"epoch": 4.714790509843513,
|
6580 |
+
"grad_norm": 0.3694470524787903,
|
6581 |
+
"learning_rate": 1.7112569409389198e-06,
|
6582 |
+
"loss": 0.0485,
|
6583 |
+
"step": 9340
|
6584 |
+
},
|
6585 |
+
{
|
6586 |
+
"epoch": 4.719838465421504,
|
6587 |
+
"grad_norm": 0.4794639050960541,
|
6588 |
+
"learning_rate": 1.6809692074709741e-06,
|
6589 |
+
"loss": 0.0699,
|
6590 |
+
"step": 9350
|
6591 |
+
},
|
6592 |
+
{
|
6593 |
+
"epoch": 4.724886420999495,
|
6594 |
+
"grad_norm": 0.4152567982673645,
|
6595 |
+
"learning_rate": 1.6506814740030288e-06,
|
6596 |
+
"loss": 0.0521,
|
6597 |
+
"step": 9360
|
6598 |
+
},
|
6599 |
+
{
|
6600 |
+
"epoch": 4.729934376577486,
|
6601 |
+
"grad_norm": 0.48920056223869324,
|
6602 |
+
"learning_rate": 1.6203937405350833e-06,
|
6603 |
+
"loss": 0.0677,
|
6604 |
+
"step": 9370
|
6605 |
+
},
|
6606 |
+
{
|
6607 |
+
"epoch": 4.734982332155477,
|
6608 |
+
"grad_norm": 0.37886640429496765,
|
6609 |
+
"learning_rate": 1.5901060070671379e-06,
|
6610 |
+
"loss": 0.0575,
|
6611 |
+
"step": 9380
|
6612 |
+
},
|
6613 |
+
{
|
6614 |
+
"epoch": 4.740030287733468,
|
6615 |
+
"grad_norm": 0.5271609425544739,
|
6616 |
+
"learning_rate": 1.5598182735991924e-06,
|
6617 |
+
"loss": 0.0618,
|
6618 |
+
"step": 9390
|
6619 |
+
},
|
6620 |
+
{
|
6621 |
+
"epoch": 4.745078243311459,
|
6622 |
+
"grad_norm": 0.376953125,
|
6623 |
+
"learning_rate": 1.5295305401312469e-06,
|
6624 |
+
"loss": 0.0558,
|
6625 |
+
"step": 9400
|
6626 |
+
},
|
6627 |
+
{
|
6628 |
+
"epoch": 4.75012619888945,
|
6629 |
+
"grad_norm": 0.4146003723144531,
|
6630 |
+
"learning_rate": 1.4992428066633014e-06,
|
6631 |
+
"loss": 0.0567,
|
6632 |
+
"step": 9410
|
6633 |
+
},
|
6634 |
+
{
|
6635 |
+
"epoch": 4.755174154467441,
|
6636 |
+
"grad_norm": 0.5335793495178223,
|
6637 |
+
"learning_rate": 1.4689550731953559e-06,
|
6638 |
+
"loss": 0.0527,
|
6639 |
+
"step": 9420
|
6640 |
+
},
|
6641 |
+
{
|
6642 |
+
"epoch": 4.760222110045431,
|
6643 |
+
"grad_norm": 0.4028931260108948,
|
6644 |
+
"learning_rate": 1.4386673397274104e-06,
|
6645 |
+
"loss": 0.0546,
|
6646 |
+
"step": 9430
|
6647 |
+
},
|
6648 |
+
{
|
6649 |
+
"epoch": 4.765270065623422,
|
6650 |
+
"grad_norm": 0.4504133462905884,
|
6651 |
+
"learning_rate": 1.408379606259465e-06,
|
6652 |
+
"loss": 0.0608,
|
6653 |
+
"step": 9440
|
6654 |
+
},
|
6655 |
+
{
|
6656 |
+
"epoch": 4.770318021201414,
|
6657 |
+
"grad_norm": 0.4923204183578491,
|
6658 |
+
"learning_rate": 1.3780918727915194e-06,
|
6659 |
+
"loss": 0.0621,
|
6660 |
+
"step": 9450
|
6661 |
+
},
|
6662 |
+
{
|
6663 |
+
"epoch": 4.775365976779405,
|
6664 |
+
"grad_norm": 0.29700249433517456,
|
6665 |
+
"learning_rate": 1.3478041393235741e-06,
|
6666 |
+
"loss": 0.055,
|
6667 |
+
"step": 9460
|
6668 |
+
},
|
6669 |
+
{
|
6670 |
+
"epoch": 4.780413932357395,
|
6671 |
+
"grad_norm": 0.4809055030345917,
|
6672 |
+
"learning_rate": 1.3175164058556284e-06,
|
6673 |
+
"loss": 0.0546,
|
6674 |
+
"step": 9470
|
6675 |
+
},
|
6676 |
+
{
|
6677 |
+
"epoch": 4.785461887935386,
|
6678 |
+
"grad_norm": 0.5369795560836792,
|
6679 |
+
"learning_rate": 1.287228672387683e-06,
|
6680 |
+
"loss": 0.059,
|
6681 |
+
"step": 9480
|
6682 |
+
},
|
6683 |
+
{
|
6684 |
+
"epoch": 4.790509843513377,
|
6685 |
+
"grad_norm": 0.4439578652381897,
|
6686 |
+
"learning_rate": 1.2569409389197376e-06,
|
6687 |
+
"loss": 0.0615,
|
6688 |
+
"step": 9490
|
6689 |
+
},
|
6690 |
+
{
|
6691 |
+
"epoch": 4.795557799091368,
|
6692 |
+
"grad_norm": 0.39975985884666443,
|
6693 |
+
"learning_rate": 1.2266532054517921e-06,
|
6694 |
+
"loss": 0.0587,
|
6695 |
+
"step": 9500
|
6696 |
+
},
|
6697 |
+
{
|
6698 |
+
"epoch": 4.800605754669359,
|
6699 |
+
"grad_norm": 0.34285855293273926,
|
6700 |
+
"learning_rate": 1.1963654719838464e-06,
|
6701 |
+
"loss": 0.0497,
|
6702 |
+
"step": 9510
|
6703 |
+
},
|
6704 |
+
{
|
6705 |
+
"epoch": 4.80565371024735,
|
6706 |
+
"grad_norm": 0.3402077257633209,
|
6707 |
+
"learning_rate": 1.166077738515901e-06,
|
6708 |
+
"loss": 0.0579,
|
6709 |
+
"step": 9520
|
6710 |
+
},
|
6711 |
+
{
|
6712 |
+
"epoch": 4.8107016658253405,
|
6713 |
+
"grad_norm": 0.3736449182033539,
|
6714 |
+
"learning_rate": 1.1357900050479557e-06,
|
6715 |
+
"loss": 0.063,
|
6716 |
+
"step": 9530
|
6717 |
+
},
|
6718 |
+
{
|
6719 |
+
"epoch": 4.815749621403332,
|
6720 |
+
"grad_norm": 0.3561767637729645,
|
6721 |
+
"learning_rate": 1.1055022715800102e-06,
|
6722 |
+
"loss": 0.0633,
|
6723 |
+
"step": 9540
|
6724 |
+
},
|
6725 |
+
{
|
6726 |
+
"epoch": 4.820797576981323,
|
6727 |
+
"grad_norm": 0.447592556476593,
|
6728 |
+
"learning_rate": 1.0752145381120645e-06,
|
6729 |
+
"loss": 0.0484,
|
6730 |
+
"step": 9550
|
6731 |
+
},
|
6732 |
+
{
|
6733 |
+
"epoch": 4.825845532559313,
|
6734 |
+
"grad_norm": 0.3960745930671692,
|
6735 |
+
"learning_rate": 1.0449268046441192e-06,
|
6736 |
+
"loss": 0.0631,
|
6737 |
+
"step": 9560
|
6738 |
+
},
|
6739 |
+
{
|
6740 |
+
"epoch": 4.8308934881373045,
|
6741 |
+
"grad_norm": 0.2932693064212799,
|
6742 |
+
"learning_rate": 1.0146390711761737e-06,
|
6743 |
+
"loss": 0.0562,
|
6744 |
+
"step": 9570
|
6745 |
+
},
|
6746 |
+
{
|
6747 |
+
"epoch": 4.835941443715296,
|
6748 |
+
"grad_norm": 0.37769854068756104,
|
6749 |
+
"learning_rate": 9.843513377082282e-07,
|
6750 |
+
"loss": 0.0482,
|
6751 |
+
"step": 9580
|
6752 |
+
},
|
6753 |
+
{
|
6754 |
+
"epoch": 4.840989399293286,
|
6755 |
+
"grad_norm": 0.3415481150150299,
|
6756 |
+
"learning_rate": 9.540636042402827e-07,
|
6757 |
+
"loss": 0.055,
|
6758 |
+
"step": 9590
|
6759 |
+
},
|
6760 |
+
{
|
6761 |
+
"epoch": 4.846037354871277,
|
6762 |
+
"grad_norm": 0.38010311126708984,
|
6763 |
+
"learning_rate": 9.237758707723372e-07,
|
6764 |
+
"loss": 0.0599,
|
6765 |
+
"step": 9600
|
6766 |
+
},
|
6767 |
+
{
|
6768 |
+
"epoch": 4.851085310449268,
|
6769 |
+
"grad_norm": 0.3991403579711914,
|
6770 |
+
"learning_rate": 8.934881373043917e-07,
|
6771 |
+
"loss": 0.0637,
|
6772 |
+
"step": 9610
|
6773 |
+
},
|
6774 |
+
{
|
6775 |
+
"epoch": 4.856133266027259,
|
6776 |
+
"grad_norm": 0.5155503153800964,
|
6777 |
+
"learning_rate": 8.632004038364462e-07,
|
6778 |
+
"loss": 0.0671,
|
6779 |
+
"step": 9620
|
6780 |
+
},
|
6781 |
+
{
|
6782 |
+
"epoch": 4.86118122160525,
|
6783 |
+
"grad_norm": 0.42242443561553955,
|
6784 |
+
"learning_rate": 8.329126703685008e-07,
|
6785 |
+
"loss": 0.0565,
|
6786 |
+
"step": 9630
|
6787 |
+
},
|
6788 |
+
{
|
6789 |
+
"epoch": 4.866229177183241,
|
6790 |
+
"grad_norm": 0.4904538691043854,
|
6791 |
+
"learning_rate": 8.026249369005552e-07,
|
6792 |
+
"loss": 0.0568,
|
6793 |
+
"step": 9640
|
6794 |
+
},
|
6795 |
+
{
|
6796 |
+
"epoch": 4.871277132761231,
|
6797 |
+
"grad_norm": 0.5523189902305603,
|
6798 |
+
"learning_rate": 7.723372034326099e-07,
|
6799 |
+
"loss": 0.0559,
|
6800 |
+
"step": 9650
|
6801 |
+
},
|
6802 |
+
{
|
6803 |
+
"epoch": 4.876325088339223,
|
6804 |
+
"grad_norm": 0.4754299819469452,
|
6805 |
+
"learning_rate": 7.420494699646643e-07,
|
6806 |
+
"loss": 0.0653,
|
6807 |
+
"step": 9660
|
6808 |
+
},
|
6809 |
+
{
|
6810 |
+
"epoch": 4.881373043917214,
|
6811 |
+
"grad_norm": 0.3697846531867981,
|
6812 |
+
"learning_rate": 7.117617364967189e-07,
|
6813 |
+
"loss": 0.0539,
|
6814 |
+
"step": 9670
|
6815 |
+
},
|
6816 |
+
{
|
6817 |
+
"epoch": 4.886420999495204,
|
6818 |
+
"grad_norm": 0.46191075444221497,
|
6819 |
+
"learning_rate": 6.814740030287734e-07,
|
6820 |
+
"loss": 0.0676,
|
6821 |
+
"step": 9680
|
6822 |
+
},
|
6823 |
+
{
|
6824 |
+
"epoch": 4.891468955073195,
|
6825 |
+
"grad_norm": 0.3706737756729126,
|
6826 |
+
"learning_rate": 6.511862695608279e-07,
|
6827 |
+
"loss": 0.0576,
|
6828 |
+
"step": 9690
|
6829 |
+
},
|
6830 |
+
{
|
6831 |
+
"epoch": 4.8965169106511865,
|
6832 |
+
"grad_norm": 0.34824711084365845,
|
6833 |
+
"learning_rate": 6.208985360928824e-07,
|
6834 |
+
"loss": 0.0607,
|
6835 |
+
"step": 9700
|
6836 |
+
},
|
6837 |
+
{
|
6838 |
+
"epoch": 4.901564866229177,
|
6839 |
+
"grad_norm": 0.33516255021095276,
|
6840 |
+
"learning_rate": 5.906108026249369e-07,
|
6841 |
+
"loss": 0.0532,
|
6842 |
+
"step": 9710
|
6843 |
+
},
|
6844 |
+
{
|
6845 |
+
"epoch": 4.906612821807168,
|
6846 |
+
"grad_norm": 0.4216098189353943,
|
6847 |
+
"learning_rate": 5.603230691569914e-07,
|
6848 |
+
"loss": 0.0506,
|
6849 |
+
"step": 9720
|
6850 |
+
},
|
6851 |
+
{
|
6852 |
+
"epoch": 4.911660777385159,
|
6853 |
+
"grad_norm": 0.39393237233161926,
|
6854 |
+
"learning_rate": 5.30035335689046e-07,
|
6855 |
+
"loss": 0.0622,
|
6856 |
+
"step": 9730
|
6857 |
+
},
|
6858 |
+
{
|
6859 |
+
"epoch": 4.91670873296315,
|
6860 |
+
"grad_norm": 0.37353748083114624,
|
6861 |
+
"learning_rate": 4.997476022211004e-07,
|
6862 |
+
"loss": 0.0508,
|
6863 |
+
"step": 9740
|
6864 |
+
},
|
6865 |
+
{
|
6866 |
+
"epoch": 4.921756688541141,
|
6867 |
+
"grad_norm": 0.32179582118988037,
|
6868 |
+
"learning_rate": 4.69459868753155e-07,
|
6869 |
+
"loss": 0.0461,
|
6870 |
+
"step": 9750
|
6871 |
+
},
|
6872 |
+
{
|
6873 |
+
"epoch": 4.926804644119132,
|
6874 |
+
"grad_norm": 0.34863799810409546,
|
6875 |
+
"learning_rate": 4.3917213528520954e-07,
|
6876 |
+
"loss": 0.0513,
|
6877 |
+
"step": 9760
|
6878 |
+
},
|
6879 |
+
{
|
6880 |
+
"epoch": 4.931852599697122,
|
6881 |
+
"grad_norm": 0.4207555651664734,
|
6882 |
+
"learning_rate": 4.0888440181726405e-07,
|
6883 |
+
"loss": 0.0516,
|
6884 |
+
"step": 9770
|
6885 |
+
},
|
6886 |
+
{
|
6887 |
+
"epoch": 4.9369005552751135,
|
6888 |
+
"grad_norm": 0.372896283864975,
|
6889 |
+
"learning_rate": 3.7859666834931856e-07,
|
6890 |
+
"loss": 0.0476,
|
6891 |
+
"step": 9780
|
6892 |
+
},
|
6893 |
+
{
|
6894 |
+
"epoch": 4.941948510853105,
|
6895 |
+
"grad_norm": 0.5434166789054871,
|
6896 |
+
"learning_rate": 3.4830893488137306e-07,
|
6897 |
+
"loss": 0.0646,
|
6898 |
+
"step": 9790
|
6899 |
+
},
|
6900 |
+
{
|
6901 |
+
"epoch": 4.946996466431095,
|
6902 |
+
"grad_norm": 0.5460948348045349,
|
6903 |
+
"learning_rate": 3.1802120141342757e-07,
|
6904 |
+
"loss": 0.0562,
|
6905 |
+
"step": 9800
|
6906 |
+
},
|
6907 |
+
{
|
6908 |
+
"epoch": 4.952044422009086,
|
6909 |
+
"grad_norm": 0.4554930329322815,
|
6910 |
+
"learning_rate": 2.8773346794548213e-07,
|
6911 |
+
"loss": 0.0664,
|
6912 |
+
"step": 9810
|
6913 |
+
},
|
6914 |
+
{
|
6915 |
+
"epoch": 4.957092377587077,
|
6916 |
+
"grad_norm": 0.5326105356216431,
|
6917 |
+
"learning_rate": 2.5744573447753664e-07,
|
6918 |
+
"loss": 0.0536,
|
6919 |
+
"step": 9820
|
6920 |
+
},
|
6921 |
+
{
|
6922 |
+
"epoch": 4.962140333165069,
|
6923 |
+
"grad_norm": 0.3335418999195099,
|
6924 |
+
"learning_rate": 2.2715800100959112e-07,
|
6925 |
+
"loss": 0.0611,
|
6926 |
+
"step": 9830
|
6927 |
+
},
|
6928 |
+
{
|
6929 |
+
"epoch": 4.967188288743059,
|
6930 |
+
"grad_norm": 0.408489465713501,
|
6931 |
+
"learning_rate": 1.9687026754164563e-07,
|
6932 |
+
"loss": 0.056,
|
6933 |
+
"step": 9840
|
6934 |
+
},
|
6935 |
+
{
|
6936 |
+
"epoch": 4.97223624432105,
|
6937 |
+
"grad_norm": 0.49370092153549194,
|
6938 |
+
"learning_rate": 1.6658253407370016e-07,
|
6939 |
+
"loss": 0.0615,
|
6940 |
+
"step": 9850
|
6941 |
+
},
|
6942 |
+
{
|
6943 |
+
"epoch": 4.9772841998990405,
|
6944 |
+
"grad_norm": 0.47176486253738403,
|
6945 |
+
"learning_rate": 1.3629480060575467e-07,
|
6946 |
+
"loss": 0.0534,
|
6947 |
+
"step": 9860
|
6948 |
+
},
|
6949 |
+
{
|
6950 |
+
"epoch": 4.982332155477032,
|
6951 |
+
"grad_norm": 0.3332078158855438,
|
6952 |
+
"learning_rate": 1.0600706713780919e-07,
|
6953 |
+
"loss": 0.0484,
|
6954 |
+
"step": 9870
|
6955 |
+
},
|
6956 |
+
{
|
6957 |
+
"epoch": 4.987380111055023,
|
6958 |
+
"grad_norm": 0.4342339038848877,
|
6959 |
+
"learning_rate": 7.57193336698637e-08,
|
6960 |
+
"loss": 0.0557,
|
6961 |
+
"step": 9880
|
6962 |
+
},
|
6963 |
+
{
|
6964 |
+
"epoch": 4.992428066633014,
|
6965 |
+
"grad_norm": 0.3356720805168152,
|
6966 |
+
"learning_rate": 4.5431600201918226e-08,
|
6967 |
+
"loss": 0.0534,
|
6968 |
+
"step": 9890
|
6969 |
+
},
|
6970 |
+
{
|
6971 |
+
"epoch": 4.997476022211004,
|
6972 |
+
"grad_norm": 0.6361636519432068,
|
6973 |
+
"learning_rate": 1.514386673397274e-08,
|
6974 |
+
"loss": 0.0595,
|
6975 |
+
"step": 9900
|
6976 |
+
},
|
6977 |
+
{
|
6978 |
+
"epoch": 5.0,
|
6979 |
+
"eval_f1": 0.9429269569770486,
|
6980 |
+
"eval_loss": 0.0460049994289875,
|
6981 |
+
"eval_runtime": 555.0466,
|
6982 |
+
"eval_samples_per_second": 371.612,
|
6983 |
+
"eval_steps_per_second": 2.904,
|
6984 |
+
"step": 9905
|
6985 |
+
},
|
6986 |
+
{
|
6987 |
+
"epoch": 5.0,
|
6988 |
+
"step": 9905,
|
6989 |
+
"total_flos": 9.820471825285631e+19,
|
6990 |
+
"train_loss": 0.011631221487809769,
|
6991 |
+
"train_runtime": 2940.6816,
|
6992 |
+
"train_samples_per_second": 430.955,
|
6993 |
+
"train_steps_per_second": 3.368
|
  }
  ],
  "logging_steps": 10,

  "attributes": {}
  }
  },
+ "total_flos": 9.820471825285631e+19,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
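Most of this commit is the extended log_history in trainer_state.json: one record every 10 optimizer steps (logging_steps: 10) with loss, grad_norm and learning_rate, plus per-epoch eval records and a final train summary. A small sketch for pulling the training-loss curve back out of that file; matplotlib is an extra assumption here, not something this repo depends on.

```python
# Extract the training-loss curve from trainer_state.json's log_history.
# Training records carry "loss"; evaluation records carry "eval_loss" instead.
import json
import matplotlib.pyplot as plt  # assumed available, only needed for the plot

with open("trainer_state.json") as f:
    state = json.load(f)

steps, losses = [], []
for record in state["log_history"]:
    if "loss" in record and "step" in record:
        steps.append(record["step"])
        losses.append(record["loss"])

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("loss_curve.png")
```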