Training in progress, epoch 1, checkpoint

Files changed:
- last-checkpoint/adapter_model.safetensors +1 -1
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/rng_state_0.pth +1 -1
- last-checkpoint/rng_state_1.pth +1 -1
- last-checkpoint/rng_state_2.pth +1 -1
- last-checkpoint/rng_state_3.pth +1 -1
- last-checkpoint/rng_state_4.pth +1 -1
- last-checkpoint/rng_state_5.pth +1 -1
- last-checkpoint/rng_state_6.pth +1 -1
- last-checkpoint/rng_state_7.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +1047 -4
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ee4e1aaa9d2db7c3ef3fb2eab4a196046078674952cc7b68a05eba9deda25b1a
 size 319876032
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:29b620fb16c10b4ba337d303168ce2fbeef576ee9e317f632914099fd335f7fe
 size 162933844
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8ebfedca7ab47f0b4cc2430502bcdb600a0665d002982810d9249efbb3855a9c
 size 15984
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bd1092108ebd1fc799073b607e32de389d66ec01448bb0f8705f4c02ab28ab1a
 size 15984
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:25af45ba1bd08d20832a82fe5df9889d219d17d2584402da2a184b868ebb9c0c
 size 15984
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:29dc487127a24cace94f15c78712ca5acec6e7700c1ed93442afd376b9cec1f7
 size 15984
last-checkpoint/rng_state_4.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:dc0c0e190b107593022e23da855aa4e0d2271b180010cb782058b7545ab8f6eb
 size 15984
last-checkpoint/rng_state_5.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6a08505b33440147550305bb5976eee9f64d2f83965e061436fbad3986b65e4f
 size 15984
last-checkpoint/rng_state_6.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c67225715b9dd3c17eed154063111cbf4ed8efae1dd903cd116daafc97231c28
 size 15984
last-checkpoint/rng_state_7.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:03f7f0fe42821f379cf4003607dd3c90056a7afd40bd314e29e49290a9e85636
 size 15984
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a94c53d2356205c284362b3ac76615aad82ee38e39757b75c4658177c95a098c
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
+  "epoch": 1.3281682346430548,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 600,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -3164,6 +3164,1049 @@
       "learning_rate": 4.1121461163733016e-05,
       "loss": 0.0943,
       "step": 451
+    },
+    { "epoch": 1.0005534034311012, "grad_norm": 0.06343565881252289, "learning_rate": 4.061159671119063e-05, "loss": 0.1098, "step": 452 },
+    { "epoch": 1.0027670171555063, "grad_norm": 0.04856511950492859, "learning_rate": 4.010411628940853e-05, "loss": 0.082, "step": 453 },
+    { "epoch": 1.0049806308799114, "grad_norm": 0.04715016856789589, "learning_rate": 3.959903993284488e-05, "loss": 0.0845, "step": 454 },
+    { "epoch": 1.0071942446043165, "grad_norm": 0.053920548409223557, "learning_rate": 3.90963875810494e-05, "loss": 0.0832, "step": 455 },
+    { "epoch": 1.0094078583287216, "grad_norm": 0.04623395577073097, "learning_rate": 3.859617907787625e-05, "loss": 0.0839, "step": 456 },
+    { "epoch": 1.0116214720531267, "grad_norm": 0.04793941602110863, "learning_rate": 3.809843417070065e-05, "loss": 0.0882, "step": 457 },
+    { "epoch": 1.0138350857775318, "grad_norm": 0.05396652594208717, "learning_rate": 3.760317250963926e-05, "loss": 0.0834, "step": 458 },
+    { "epoch": 1.0160486995019369, "grad_norm": 0.051411934196949005, "learning_rate": 3.7110413646774435e-05, "loss": 0.088, "step": 459 },
+    { "epoch": 1.018262313226342, "grad_norm": 0.05102306231856346, "learning_rate": 3.662017703538234e-05, "loss": 0.0849, "step": 460 },
+    { "epoch": 1.020475926950747, "grad_norm": 0.054891008883714676, "learning_rate": 3.6132482029164975e-05, "loss": 0.0906, "step": 461 },
+    { "epoch": 1.0226895406751522, "grad_norm": 0.05452824383974075, "learning_rate": 3.564734788148616e-05, "loss": 0.0882, "step": 462 },
+    { "epoch": 1.0249031543995573, "grad_norm": 0.0523720420897007, "learning_rate": 3.516479374461126e-05, "loss": 0.0833, "step": 463 },
+    { "epoch": 1.0271167681239624, "grad_norm": 0.05288210138678551, "learning_rate": 3.468483866895141e-05, "loss": 0.0839, "step": 464 },
+    { "epoch": 1.0293303818483674, "grad_norm": 0.04852289333939552, "learning_rate": 3.420750160231118e-05, "loss": 0.0777, "step": 465 },
+    { "epoch": 1.0315439955727725, "grad_norm": 0.05111468955874443, "learning_rate": 3.3732801389140596e-05, "loss": 0.0875, "step": 466 },
+    { "epoch": 1.0337576092971776, "grad_norm": 0.05297563970088959, "learning_rate": 3.326075676979128e-05, "loss": 0.0861, "step": 467 },
+    { "epoch": 1.0359712230215827, "grad_norm": 0.04983185604214668, "learning_rate": 3.2791386379776527e-05, "loss": 0.0799, "step": 468 },
+    { "epoch": 1.0381848367459878, "grad_norm": 0.049646344035863876, "learning_rate": 3.232470874903566e-05, "loss": 0.0804, "step": 469 },
+    { "epoch": 1.040398450470393, "grad_norm": 0.05048072710633278, "learning_rate": 3.186074230120244e-05, "loss": 0.0859, "step": 470 },
+    { "epoch": 1.042612064194798, "grad_norm": 0.04904315248131752, "learning_rate": 3.1399505352877826e-05, "loss": 0.0847, "step": 471 },
+    { "epoch": 1.044825677919203, "grad_norm": 0.052079539746046066, "learning_rate": 3.094101611290671e-05, "loss": 0.0834, "step": 472 },
+    { "epoch": 1.0470392916436082, "grad_norm": 0.052563026547431946, "learning_rate": 3.0485292681659277e-05, "loss": 0.0861, "step": 473 },
+    { "epoch": 1.0492529053680133, "grad_norm": 0.0512564554810524, "learning_rate": 3.0032353050316243e-05, "loss": 0.0883, "step": 474 },
+    { "epoch": 1.0514665190924184, "grad_norm": 0.05077826976776123, "learning_rate": 2.9582215100158706e-05, "loss": 0.0907, "step": 475 },
+    { "epoch": 1.0536801328168235, "grad_norm": 0.048108045011758804, "learning_rate": 2.913489660186218e-05, "loss": 0.0803, "step": 476 },
+    { "epoch": 1.0558937465412286, "grad_norm": 0.050823770463466644, "learning_rate": 2.8690415214795046e-05, "loss": 0.0837, "step": 477 },
+    { "epoch": 1.0581073602656337, "grad_norm": 0.051949527114629745, "learning_rate": 2.82487884863214e-05, "loss": 0.0896, "step": 478 },
+    { "epoch": 1.0603209739900388, "grad_norm": 0.05039558187127113, "learning_rate": 2.7810033851108284e-05, "loss": 0.0877, "step": 479 },
+    { "epoch": 1.0625345877144439, "grad_norm": 0.05233335867524147, "learning_rate": 2.7374168630437456e-05, "loss": 0.0844, "step": 480 },
+    { "epoch": 1.064748201438849, "grad_norm": 0.05141540616750717, "learning_rate": 2.6941210031521457e-05, "loss": 0.0875, "step": 481 },
+    { "epoch": 1.066961815163254, "grad_norm": 0.05313284695148468, "learning_rate": 2.6511175146824443e-05, "loss": 0.0765, "step": 482 },
+    { "epoch": 1.0691754288876592, "grad_norm": 0.05009685456752777, "learning_rate": 2.608408095338735e-05, "loss": 0.0799, "step": 483 },
+    { "epoch": 1.0713890426120642, "grad_norm": 0.04916118085384369, "learning_rate": 2.5659944312157606e-05, "loss": 0.0827, "step": 484 },
+    { "epoch": 1.0736026563364693, "grad_norm": 0.05077299475669861, "learning_rate": 2.523878196732358e-05, "loss": 0.0815, "step": 485 },
+    { "epoch": 1.0758162700608744, "grad_norm": 0.05161164328455925, "learning_rate": 2.482061054565351e-05, "loss": 0.0873, "step": 486 },
+    { "epoch": 1.0780298837852795, "grad_norm": 0.05135266110301018, "learning_rate": 2.440544655583909e-05, "loss": 0.0862, "step": 487 },
+    { "epoch": 1.0802434975096846, "grad_norm": 0.0509047694504261, "learning_rate": 2.399330638784375e-05, "loss": 0.0816, "step": 488 },
+    { "epoch": 1.0824571112340897, "grad_norm": 0.052165884524583817, "learning_rate": 2.3584206312255677e-05, "loss": 0.089, "step": 489 },
+    { "epoch": 1.0846707249584948, "grad_norm": 0.054725173860788345, "learning_rate": 2.31781624796453e-05, "loss": 0.0913, "step": 490 },
+    { "epoch": 1.0868843386829, "grad_norm": 0.051129017025232315, "learning_rate": 2.2775190919927896e-05, "loss": 0.0745, "step": 491 },
+    { "epoch": 1.089097952407305, "grad_norm": 0.05222058296203613, "learning_rate": 2.2375307541730643e-05, "loss": 0.0807, "step": 492 },
+    { "epoch": 1.09131156613171, "grad_norm": 0.054515715688467026, "learning_rate": 2.19785281317646e-05, "loss": 0.0777, "step": 493 },
+    { "epoch": 1.0935251798561152, "grad_norm": 0.048953328281641006, "learning_rate": 2.158486835420149e-05, "loss": 0.0788, "step": 494 },
+    { "epoch": 1.0957387935805203, "grad_norm": 0.05130873993039131, "learning_rate": 2.119434375005527e-05, "loss": 0.0818, "step": 495 },
+    { "epoch": 1.0979524073049254, "grad_norm": 0.05241890251636505, "learning_rate": 2.0806969736568627e-05, "loss": 0.0879, "step": 496 },
+    { "epoch": 1.1001660210293305, "grad_norm": 0.05658801272511482, "learning_rate": 2.042276160660432e-05, "loss": 0.0871, "step": 497 },
+    { "epoch": 1.1023796347537356, "grad_norm": 0.050439462065696716, "learning_rate": 2.004173452804145e-05, "loss": 0.0794, "step": 498 },
+    { "epoch": 1.1045932484781407, "grad_norm": 0.04868178814649582, "learning_rate": 1.966390354317669e-05, "loss": 0.0865, "step": 499 },
+    { "epoch": 1.1068068622025455, "grad_norm": 0.05438188835978508, "learning_rate": 1.928928356813032e-05, "loss": 0.0847, "step": 500 },
+    { "epoch": 1.1090204759269509, "grad_norm": 0.057815033942461014, "learning_rate": 1.8917889392257513e-05, "loss": 0.0808, "step": 501 },
+    { "epoch": 1.1112340896513557, "grad_norm": 0.05223087593913078, "learning_rate": 1.854973567756442e-05, "loss": 0.0858, "step": 502 },
+    { "epoch": 1.113447703375761, "grad_norm": 0.04764244705438614, "learning_rate": 1.8184836958129312e-05, "loss": 0.0815, "step": 503 },
+    { "epoch": 1.115661317100166, "grad_norm": 0.05304750055074692, "learning_rate": 1.7823207639528827e-05, "loss": 0.0834, "step": 504 },
+    { "epoch": 1.117874930824571, "grad_norm": 0.05036177486181259, "learning_rate": 1.7464861998269243e-05, "loss": 0.0775, "step": 505 },
+    { "epoch": 1.120088544548976, "grad_norm": 0.05070766806602478, "learning_rate": 1.710981418122291e-05, "loss": 0.0832, "step": 506 },
+    { "epoch": 1.1223021582733812, "grad_norm": 0.05284438282251358, "learning_rate": 1.6758078205069717e-05, "loss": 0.0809, "step": 507 },
+    { "epoch": 1.1245157719977863, "grad_norm": 0.05208960920572281, "learning_rate": 1.6409667955743693e-05, "loss": 0.0857, "step": 508 },
+    { "epoch": 1.1267293857221914, "grad_norm": 0.049338165670633316, "learning_rate": 1.606459718788493e-05, "loss": 0.0774, "step": 509 },
+    { "epoch": 1.1289429994465965, "grad_norm": 0.0496298149228096, "learning_rate": 1.57228795242965e-05, "loss": 0.0811, "step": 510 },
+    { "epoch": 1.1311566131710016, "grad_norm": 0.04850947484374046, "learning_rate": 1.5384528455406615e-05, "loss": 0.0858, "step": 511 },
+    { "epoch": 1.1333702268954067, "grad_norm": 0.0498884953558445, "learning_rate": 1.5049557338736136e-05, "loss": 0.0784, "step": 512 },
+    { "epoch": 1.1355838406198118, "grad_norm": 0.05427051708102226, "learning_rate": 1.47179793983712e-05, "loss": 0.084, "step": 513 },
+    { "epoch": 1.1377974543442169, "grad_norm": 0.053438618779182434, "learning_rate": 1.4389807724441138e-05, "loss": 0.0882, "step": 514 },
+    { "epoch": 1.140011068068622, "grad_norm": 0.049612775444984436, "learning_rate": 1.4065055272601703e-05, "loss": 0.0797, "step": 515 },
+    { "epoch": 1.142224681793027, "grad_norm": 0.04936014115810394, "learning_rate": 1.3743734863523637e-05, "loss": 0.076, "step": 516 },
+    { "epoch": 1.1444382955174321, "grad_norm": 0.05199284479022026, "learning_rate": 1.3425859182386506e-05, "loss": 0.0858, "step": 517 },
+    { "epoch": 1.1466519092418372, "grad_norm": 0.050271183252334595, "learning_rate": 1.3111440778377905e-05, "loss": 0.0779, "step": 518 },
+    { "epoch": 1.1488655229662423, "grad_norm": 0.05064354091882706, "learning_rate": 1.2800492064198088e-05, "loss": 0.0784, "step": 519 },
+    { "epoch": 1.1510791366906474, "grad_norm": 0.05049782246351242, "learning_rate": 1.2493025315569801e-05, "loss": 0.0756, "step": 520 },
+    { "epoch": 1.1532927504150525, "grad_norm": 0.049687910825014114, "learning_rate": 1.2189052670753833e-05, "loss": 0.0787, "step": 521 },
+    { "epoch": 1.1555063641394576, "grad_norm": 0.05238902196288109, "learning_rate": 1.188858613006973e-05, "loss": 0.0841, "step": 522 },
+    { "epoch": 1.1577199778638627, "grad_norm": 0.05193723365664482, "learning_rate": 1.1591637555422032e-05, "loss": 0.0791, "step": 523 },
+    { "epoch": 1.1599335915882678, "grad_norm": 0.053750574588775635, "learning_rate": 1.1298218669832009e-05, "loss": 0.0837, "step": 524 },
+    { "epoch": 1.162147205312673, "grad_norm": 0.051983386278152466, "learning_rate": 1.1008341056974854e-05, "loss": 0.0828, "step": 525 },
+    { "epoch": 1.164360819037078, "grad_norm": 0.0516984649002552, "learning_rate": 1.072201616072236e-05, "loss": 0.0847, "step": 526 },
+    { "epoch": 1.166574432761483, "grad_norm": 0.05225353688001633, "learning_rate": 1.0439255284691176e-05, "loss": 0.0772, "step": 527 },
+    { "epoch": 1.1687880464858882, "grad_norm": 0.05271396040916443, "learning_rate": 1.016006959179652e-05, "loss": 0.0877, "step": 528 },
+    { "epoch": 1.1710016602102933, "grad_norm": 0.048421863466501236, "learning_rate": 9.884470103811524e-06, "loss": 0.0806, "step": 529 },
+    { "epoch": 1.1732152739346984, "grad_norm": 0.05148995667695999, "learning_rate": 9.612467700932045e-06, "loss": 0.0854, "step": 530 },
+    { "epoch": 1.1754288876591035, "grad_norm": 0.051169902086257935, "learning_rate": 9.344073121347194e-06, "loss": 0.0822, "step": 531 },
+    { "epoch": 1.1776425013835086, "grad_norm": 0.051393430680036545, "learning_rate": 9.079296960815439e-06, "loss": 0.0879, "step": 532 },
+    { "epoch": 1.1798561151079137, "grad_norm": 0.053706392645835876, "learning_rate": 8.818149672246222e-06, "loss": 0.0886, "step": 533 },
+    { "epoch": 1.1820697288323188, "grad_norm": 0.05470919609069824, "learning_rate": 8.56064156528733e-06, "loss": 0.0826, "step": 534 },
+    { "epoch": 1.1842833425567239, "grad_norm": 0.05181707814335823, "learning_rate": 8.306782805917904e-06, "loss": 0.082, "step": 535 },
+    { "epoch": 1.186496956281129, "grad_norm": 0.05429249629378319, "learning_rate": 8.056583416047092e-06, "loss": 0.087, "step": 536 },
+    { "epoch": 1.188710570005534, "grad_norm": 0.05111720412969589, "learning_rate": 7.810053273118396e-06, "loss": 0.0831, "step": 537 },
+    { "epoch": 1.1909241837299391, "grad_norm": 0.051177989691495895, "learning_rate": 7.567202109719749e-06, "loss": 0.0814, "step": 538 },
+    { "epoch": 1.1931377974543442, "grad_norm": 0.05209766700863838, "learning_rate": 7.3280395131992125e-06, "loss": 0.0845, "step": 539 },
+    { "epoch": 1.1953514111787493, "grad_norm": 0.05545610189437866, "learning_rate": 7.092574925286614e-06, "loss": 0.0843, "step": 540 },
+    { "epoch": 1.1975650249031544, "grad_norm": 0.05091063678264618, "learning_rate": 6.860817641720694e-06, "loss": 0.084, "step": 541 },
+    { "epoch": 1.1997786386275595, "grad_norm": 0.05152612924575806, "learning_rate": 6.632776811882186e-06, "loss": 0.0838, "step": 542 },
+    { "epoch": 1.2019922523519646, "grad_norm": 0.055114369839429855, "learning_rate": 6.4084614384325795e-06, "loss": 0.0884, "step": 543 },
+    { "epoch": 1.2042058660763697, "grad_norm": 0.05061941221356392, "learning_rate": 6.187880376958719e-06, "loss": 0.0856, "step": 544 },
+    { "epoch": 1.2064194798007748, "grad_norm": 0.052969641983509064, "learning_rate": 5.971042335623229e-06, "loss": 0.0904, "step": 545 },
+    { "epoch": 1.20863309352518, "grad_norm": 0.050591930747032166, "learning_rate": 5.757955874820683e-06, "loss": 0.0849, "step": 546 },
+    { "epoch": 1.210846707249585, "grad_norm": 0.05166866257786751, "learning_rate": 5.5486294068397254e-06, "loss": 0.0863, "step": 547 },
+    { "epoch": 1.21306032097399, "grad_norm": 0.05161614343523979, "learning_rate": 5.3430711955308325e-06, "loss": 0.0817, "step": 548 },
+    { "epoch": 1.2152739346983952, "grad_norm": 0.05152802914381027, "learning_rate": 5.141289355980257e-06, "loss": 0.0876, "step": 549 },
+    { "epoch": 1.2174875484228003, "grad_norm": 0.05258704349398613, "learning_rate": 4.943291854189493e-06, "loss": 0.091, "step": 550 },
+    { "epoch": 1.2197011621472054, "grad_norm": 0.05155012384057045, "learning_rate": 4.749086506760907e-06, "loss": 0.081, "step": 551 },
+    { "epoch": 1.2219147758716105, "grad_norm": 0.052838440984487534, "learning_rate": 4.558680980589062e-06, "loss": 0.0876, "step": 552 },
+    { "epoch": 1.2241283895960156, "grad_norm": 0.055451128631830215, "learning_rate": 4.372082792558115e-06, "loss": 0.0869, "step": 553 },
+    { "epoch": 1.2263420033204206, "grad_norm": 0.04950740560889244, "learning_rate": 4.1892993092450295e-06, "loss": 0.0805, "step": 554 },
+    { "epoch": 1.2285556170448257, "grad_norm": 0.05245472490787506, "learning_rate": 4.010337746628751e-06, "loss": 0.0834, "step": 555 },
+    { "epoch": 1.2307692307692308, "grad_norm": 0.050240740180015564, "learning_rate": 3.835205169805321e-06, "loss": 0.0807, "step": 556 },
+    { "epoch": 1.232982844493636, "grad_norm": 0.05048750340938568, "learning_rate": 3.663908492709019e-06, "loss": 0.0823, "step": 557 },
+    { "epoch": 1.235196458218041, "grad_norm": 0.05028418451547623, "learning_rate": 3.4964544778393383e-06, "loss": 0.0841, "step": 558 },
+    { "epoch": 1.2374100719424461, "grad_norm": 0.048871058970689774, "learning_rate": 3.3328497359940654e-06, "loss": 0.0765, "step": 559 },
+    { "epoch": 1.2396236856668512, "grad_norm": 0.04969479888677597, "learning_rate": 3.1731007260082616e-06, "loss": 0.0817, "step": 560 },
+    { "epoch": 1.2418372993912563, "grad_norm": 0.05241498723626137, "learning_rate": 3.0172137544993147e-06, "loss": 0.0853, "step": 561 },
+    { "epoch": 1.2440509131156614, "grad_norm": 0.05134303495287895, "learning_rate": 2.865194975617929e-06, "loss": 0.0787, "step": 562 },
+    { "epoch": 1.2462645268400665, "grad_norm": 0.05156482383608818, "learning_rate": 2.7170503908052103e-06, "loss": 0.0867, "step": 563 },
+    { "epoch": 1.2484781405644716, "grad_norm": 0.05086381360888481, "learning_rate": 2.572785848555699e-06, "loss": 0.0844, "step": 564 },
+    { "epoch": 1.2506917542888765, "grad_norm": 0.05083329975605011, "learning_rate": 2.432407044186509e-06, "loss": 0.0838, "step": 565 },
+    { "epoch": 1.2529053680132818, "grad_norm": 0.05146085098385811, "learning_rate": 2.2959195196124583e-06, "loss": 0.0813, "step": 566 },
+    { "epoch": 1.2551189817376867, "grad_norm": 0.05358012765645981, "learning_rate": 2.1633286631273213e-06, "loss": 0.0863, "step": 567 },
+    { "epoch": 1.257332595462092, "grad_norm": 0.051807280629873276, "learning_rate": 2.0346397091910673e-06, "loss": 0.0835, "step": 568 },
+    { "epoch": 1.2595462091864968, "grad_norm": 0.05208767205476761, "learning_rate": 1.9098577382232685e-06, "loss": 0.083, "step": 569 },
+    { "epoch": 1.2617598229109022, "grad_norm": 0.05047999694943428, "learning_rate": 1.7889876764024505e-06, "loss": 0.0821, "step": 570 },
+    { "epoch": 1.263973436635307, "grad_norm": 0.050665419548749924, "learning_rate": 1.672034295471709e-06, "loss": 0.084, "step": 571 },
+    { "epoch": 1.2661870503597124, "grad_norm": 0.05311613902449608, "learning_rate": 1.5590022125502616e-06, "loss": 0.0875, "step": 572 },
+    { "epoch": 1.2684006640841172, "grad_norm": 0.053732745349407196, "learning_rate": 1.4498958899511971e-06, "loss": 0.0807, "step": 573 },
+    { "epoch": 1.2706142778085225, "grad_norm": 0.05082022398710251, "learning_rate": 1.3447196350053282e-06, "loss": 0.0861, "step": 574 },
+    { "epoch": 1.2728278915329274, "grad_norm": 0.05043462663888931, "learning_rate": 1.2434775998910964e-06, "loss": 0.083, "step": 575 },
+    { "epoch": 1.2750415052573327, "grad_norm": 0.053700484335422516, "learning_rate": 1.146173781470691e-06, "loss": 0.0851, "step": 576 },
+    { "epoch": 1.2772551189817376, "grad_norm": 0.05222710967063904, "learning_rate": 1.0528120211322557e-06, "loss": 0.0868, "step": 577 },
+    { "epoch": 1.2794687327061427, "grad_norm": 0.04993864893913269, "learning_rate": 9.6339600463823e-07, "loss": 0.0806, "step": 578 },
+    { "epoch": 1.2816823464305478, "grad_norm": 0.05103394389152527, "learning_rate": 8.779292619798456e-07, "loss": 0.0859, "step": 579 },
+    { "epoch": 1.2838959601549529, "grad_norm": 0.04791456460952759, "learning_rate": 7.964151672377458e-07, "loss": 0.0748, "step": 580 },
+    { "epoch": 1.286109573879358, "grad_norm": 0.049576062709093094, "learning_rate": 7.188569384488277e-07, "loss": 0.0835, "step": 581 },
+    { "epoch": 1.288323187603763, "grad_norm": 0.05148833245038986, "learning_rate": 6.452576374791521e-07, "loss": 0.086, "step": 582 },
+    { "epoch": 1.2905368013281682, "grad_norm": 0.050598274916410446, "learning_rate": 5.756201699031087e-07, "loss": 0.0796, "step": 583 },
+    { "epoch": 1.2927504150525733, "grad_norm": 0.05039560794830322, "learning_rate": 5.099472848886825e-07, "loss": 0.084, "step": 584 },
+    { "epoch": 1.2949640287769784, "grad_norm": 0.050472088158130646, "learning_rate": 4.482415750889204e-07, "loss": 0.0772, "step": 585 },
+    { "epoch": 1.2971776425013835, "grad_norm": 0.051853395998477936, "learning_rate": 3.90505476539577e-07, "loss": 0.0833, "step": 586 },
+    { "epoch": 1.2993912562257885, "grad_norm": 0.050441011786460876, "learning_rate": 3.367412685629833e-07, "loss": 0.0879, "step": 587 },
+    { "epoch": 1.3016048699501936, "grad_norm": 0.04932254180312157, "learning_rate": 2.869510736779927e-07, "loss": 0.0798, "step": 588 },
+    { "epoch": 1.3038184836745987, "grad_norm": 0.05223463475704193, "learning_rate": 2.4113685751625216e-07, "loss": 0.0873, "step": 589 },
+    { "epoch": 1.3060320973990038, "grad_norm": 0.05462180823087692, "learning_rate": 1.9930042874457254e-07, "loss": 0.0904, "step": 590 },
+    { "epoch": 1.308245711123409, "grad_norm": 0.04821028560400009, "learning_rate": 1.6144343899352277e-07, "loss": 0.0812, "step": 591 },
+    { "epoch": 1.310459324847814, "grad_norm": 0.048690084367990494, "learning_rate": 1.275673827922358e-07, "loss": 0.0798, "step": 592 },
+    { "epoch": 1.3126729385722191, "grad_norm": 0.05054464191198349, "learning_rate": 9.767359750940463e-08, "loss": 0.085, "step": 593 },
+    { "epoch": 1.3148865522966242, "grad_norm": 0.0533391498029232, "learning_rate": 7.176326330049032e-08, "loss": 0.0856, "step": 594 },
+    { "epoch": 1.3171001660210293, "grad_norm": 0.051400430500507355, "learning_rate": 4.98374030611084e-08, "loss": 0.0808, "step": 595 },
+    { "epoch": 1.3193137797454344, "grad_norm": 0.05192789435386658, "learning_rate": 3.189688238670607e-08, "loss": 0.0873, "step": 596 },
+    { "epoch": 1.3215273934698395, "grad_norm": 0.04824462905526161, "learning_rate": 1.7942409538294514e-08, "loss": 0.0786, "step": 597 },
+    { "epoch": 1.3237410071942446, "grad_norm": 0.052794598042964935, "learning_rate": 7.974535414572715e-09, "loss": 0.0829, "step": 598 },
+    { "epoch": 1.3259546209186497, "grad_norm": 0.053302157670259476, "learning_rate": 1.9936535301513516e-09, "loss": 0.0924, "step": 599 },
+    { "epoch": 1.3281682346430548, "grad_norm": 0.05163723602890968, "learning_rate": 0.0, "loss": 0.0822, "step": 600 }
   ],
   "logging_steps": 1,
@@ -3178,12 +4221,12 @@
         "should_evaluate": false,
         "should_log": false,
        "should_save": true,
-       "should_training_stop":
+       "should_training_stop": true
      },
      "attributes": {}
    }
  },
-  "total_flos":
+  "total_flos": 4.1166765036624937e+18,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null