{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 48.888888888888886,
  "eval_steps": 500,
  "global_step": 2200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 3663.33642578125,
      "learning_rate": 3.6750000000000003e-07,
      "loss": 1356.1239,
      "step": 50
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 3973.032470703125,
      "learning_rate": 7.425000000000001e-07,
      "loss": 1275.5178,
      "step": 100
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 2667.0068359375,
      "learning_rate": 1.1174999999999999e-06,
      "loss": 1123.6059,
      "step": 150
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 2320.48486328125,
      "learning_rate": 1.4925000000000001e-06,
      "loss": 922.8917,
      "step": 200
    },
    {
      "epoch": 5.555555555555555,
      "grad_norm": 1611.3345947265625,
      "learning_rate": 1.8675000000000001e-06,
      "loss": 714.5638,
      "step": 250
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 1395.7064208984375,
      "learning_rate": 2.2425e-06,
      "loss": 542.1251,
      "step": 300
    },
    {
      "epoch": 7.777777777777778,
      "grad_norm": 1019.4860229492188,
      "learning_rate": 2.6175e-06,
      "loss": 411.1387,
      "step": 350
    },
    {
      "epoch": 8.88888888888889,
      "grad_norm": 888.315185546875,
      "learning_rate": 2.9925e-06,
      "loss": 318.4567,
      "step": 400
    },
    {
      "epoch": 10.0,
      "grad_norm": 2590.113525390625,
      "learning_rate": 3.3675000000000004e-06,
      "loss": 261.6995,
      "step": 450
    },
    {
      "epoch": 11.11111111111111,
      "grad_norm": 711.677734375,
      "learning_rate": 3.7425e-06,
      "loss": 220.2936,
      "step": 500
    },
    {
      "epoch": 12.222222222222221,
      "grad_norm": 548.9371948242188,
      "learning_rate": 4.117500000000001e-06,
      "loss": 187.9833,
      "step": 550
    },
    {
      "epoch": 13.333333333333334,
      "grad_norm": 1127.1611328125,
      "learning_rate": 4.4925e-06,
      "loss": 159.1351,
      "step": 600
    },
    {
      "epoch": 14.444444444444445,
      "grad_norm": 426.074951171875,
      "learning_rate": 4.8675e-06,
      "loss": 137.1092,
      "step": 650
    },
    {
      "epoch": 15.555555555555555,
      "grad_norm": 348.842529296875,
      "learning_rate": 5.2425e-06,
      "loss": 119.822,
      "step": 700
    },
    {
      "epoch": 16.666666666666668,
      "grad_norm": 373.2169189453125,
      "learning_rate": 5.6175e-06,
      "loss": 104.3366,
      "step": 750
    },
    {
      "epoch": 17.77777777777778,
      "grad_norm": 324.51702880859375,
      "learning_rate": 5.992500000000001e-06,
      "loss": 90.8788,
      "step": 800
    },
    {
      "epoch": 18.88888888888889,
      "grad_norm": 269.91827392578125,
      "learning_rate": 6.3675e-06,
      "loss": 78.4644,
      "step": 850
    },
    {
      "epoch": 20.0,
      "grad_norm": 1744.54052734375,
      "learning_rate": 6.7425e-06,
      "loss": 70.3526,
      "step": 900
    },
    {
      "epoch": 21.11111111111111,
      "grad_norm": 369.39837646484375,
      "learning_rate": 7.1175e-06,
      "loss": 63.9417,
      "step": 950
    },
    {
      "epoch": 22.22222222222222,
      "grad_norm": 303.95977783203125,
      "learning_rate": 7.4925e-06,
      "loss": 63.4575,
      "step": 1000
    },
    {
      "epoch": 23.333333333333332,
      "grad_norm": 187.462890625,
      "learning_rate": 7.8675e-06,
      "loss": 54.7417,
      "step": 1050
    },
    {
      "epoch": 24.444444444444443,
      "grad_norm": 165.56666564941406,
      "learning_rate": 8.2425e-06,
      "loss": 49.6842,
      "step": 1100
    },
    {
      "epoch": 25.555555555555557,
      "grad_norm": 147.3148193359375,
      "learning_rate": 8.6175e-06,
      "loss": 43.027,
      "step": 1150
    },
    {
      "epoch": 26.666666666666668,
      "grad_norm": 125.53775024414062,
      "learning_rate": 8.9925e-06,
      "loss": 38.2579,
      "step": 1200
    },
    {
      "epoch": 27.77777777777778,
      "grad_norm": 109.85810089111328,
      "learning_rate": 9.367500000000001e-06,
      "loss": 34.3957,
      "step": 1250
    },
    {
      "epoch": 28.88888888888889,
      "grad_norm": 98.328369140625,
      "learning_rate": 9.7425e-06,
      "loss": 31.4378,
      "step": 1300
    },
    {
      "epoch": 30.0,
      "grad_norm": 109.77750396728516,
      "learning_rate": 1.01175e-05,
      "loss": 28.5084,
      "step": 1350
    },
    {
      "epoch": 31.11111111111111,
      "grad_norm": 112.47483825683594,
      "learning_rate": 1.04925e-05,
      "loss": 26.1671,
      "step": 1400
    },
    {
      "epoch": 32.22222222222222,
      "grad_norm": 85.60242462158203,
      "learning_rate": 1.08675e-05,
      "loss": 24.2309,
      "step": 1450
    },
    {
      "epoch": 33.333333333333336,
      "grad_norm": 73.19799041748047,
      "learning_rate": 1.1242500000000001e-05,
      "loss": 22.6248,
      "step": 1500
    },
    {
      "epoch": 34.44444444444444,
      "grad_norm": 80.70884704589844,
      "learning_rate": 1.16175e-05,
      "loss": 21.1187,
      "step": 1550
    },
    {
      "epoch": 35.55555555555556,
      "grad_norm": 110.98326110839844,
      "learning_rate": 1.19925e-05,
      "loss": 20.4828,
      "step": 1600
    },
    {
      "epoch": 36.666666666666664,
      "grad_norm": 113.65286254882812,
      "learning_rate": 1.23675e-05,
      "loss": 19.7366,
      "step": 1650
    },
    {
      "epoch": 37.77777777777778,
      "grad_norm": 77.65855407714844,
      "learning_rate": 1.27425e-05,
      "loss": 18.6632,
      "step": 1700
    },
    {
      "epoch": 38.888888888888886,
      "grad_norm": 88.96723175048828,
      "learning_rate": 1.3117500000000001e-05,
      "loss": 18.0793,
      "step": 1750
    },
    {
      "epoch": 40.0,
      "grad_norm": 79.1690902709961,
      "learning_rate": 1.34925e-05,
      "loss": 17.0667,
      "step": 1800
    },
    {
      "epoch": 41.111111111111114,
      "grad_norm": 93.60108184814453,
      "learning_rate": 1.38675e-05,
      "loss": 16.6106,
      "step": 1850
    },
    {
      "epoch": 42.22222222222222,
      "grad_norm": 66.16129302978516,
      "learning_rate": 1.4242500000000001e-05,
      "loss": 16.4905,
      "step": 1900
    },
    {
      "epoch": 43.333333333333336,
      "grad_norm": 62.41362380981445,
      "learning_rate": 1.46175e-05,
      "loss": 15.6427,
      "step": 1950
    },
    {
      "epoch": 44.44444444444444,
      "grad_norm": 107.30168151855469,
      "learning_rate": 1.4992500000000001e-05,
      "loss": 15.0731,
      "step": 2000
    },
    {
      "epoch": 45.55555555555556,
      "grad_norm": 100.18666076660156,
      "learning_rate": 1.2060000000000001e-05,
      "loss": 14.6973,
      "step": 2050
    },
    {
      "epoch": 46.666666666666664,
      "grad_norm": 68.78209686279297,
      "learning_rate": 9.06e-06,
      "loss": 13.8928,
      "step": 2100
    },
    {
      "epoch": 47.77777777777778,
      "grad_norm": 65.85958099365234,
      "learning_rate": 6.0600000000000004e-06,
      "loss": 13.4318,
      "step": 2150
    },
    {
      "epoch": 48.888888888888886,
      "grad_norm": 72.31432342529297,
      "learning_rate": 3.06e-06,
      "loss": 12.6856,
      "step": 2200
    }
  ],
  "logging_steps": 50,
  "max_steps": 2250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.926395026677432e+19,
  "train_batch_size": 24,
  "trial_name": null,
  "trial_params": null
}