{
  "best_metric": 3.3120391368865967,
  "best_model_checkpoint": "./models/full-finetuning/german-gpt2/checkpoint-58000",
  "epoch": 1.0,
  "eval_steps": 1000,
  "global_step": 59835,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008356313194618534,
      "grad_norm": 7.7337164878845215,
      "learning_rate": 4.97e-05,
      "loss": 5.7213,
      "step": 500
    },
    {
      "epoch": 0.016712626389237067,
      "grad_norm": 6.002913951873779,
      "learning_rate": 4.95811915395635e-05,
      "loss": 4.8528,
      "step": 1000
    },
    {
      "epoch": 0.016712626389237067,
      "eval_loss": 4.570379734039307,
      "eval_runtime": 33.8087,
      "eval_samples_per_second": 131.978,
      "eval_steps_per_second": 16.505,
      "step": 1000
    },
    {
      "epoch": 0.025068939583855605,
      "grad_norm": 5.462852478027344,
      "learning_rate": 4.915985506025112e-05,
      "loss": 4.6135,
      "step": 1500
    },
    {
      "epoch": 0.033425252778474135,
      "grad_norm": 6.940282821655273,
      "learning_rate": 4.873851858093874e-05,
      "loss": 4.4938,
      "step": 2000
    },
    {
      "epoch": 0.033425252778474135,
      "eval_loss": 4.2595720291137695,
      "eval_runtime": 36.1247,
      "eval_samples_per_second": 123.516,
      "eval_steps_per_second": 15.446,
      "step": 2000
    },
    {
      "epoch": 0.04178156597309267,
      "grad_norm": 6.612778663635254,
      "learning_rate": 4.8317182101626365e-05,
      "loss": 4.3683,
      "step": 2500
    },
    {
      "epoch": 0.05013787916771121,
      "grad_norm": 5.179454803466797,
      "learning_rate": 4.789584562231398e-05,
      "loss": 4.2089,
      "step": 3000
    },
    {
      "epoch": 0.05013787916771121,
      "eval_loss": 4.104772567749023,
      "eval_runtime": 33.8319,
      "eval_samples_per_second": 131.887,
      "eval_steps_per_second": 16.493,
      "step": 3000
    },
    {
      "epoch": 0.05849419236232974,
      "grad_norm": 4.007822036743164,
      "learning_rate": 4.74745091430016e-05,
      "loss": 4.2532,
      "step": 3500
    },
    {
      "epoch": 0.06685050555694827,
      "grad_norm": 5.069295883178711,
      "learning_rate": 4.7053172663689226e-05,
      "loss": 4.2046,
      "step": 4000
    },
    {
      "epoch": 0.06685050555694827,
      "eval_loss": 4.0018157958984375,
      "eval_runtime": 33.841,
      "eval_samples_per_second": 131.852,
      "eval_steps_per_second": 16.489,
      "step": 4000
    },
    {
      "epoch": 0.0752068187515668,
      "grad_norm": 8.424234390258789,
      "learning_rate": 4.663183618437685e-05,
      "loss": 4.1513,
      "step": 4500
    },
    {
      "epoch": 0.08356313194618534,
      "grad_norm": 5.6522016525268555,
      "learning_rate": 4.621049970506447e-05,
      "loss": 4.0912,
      "step": 5000
    },
    {
      "epoch": 0.08356313194618534,
      "eval_loss": 3.927964687347412,
      "eval_runtime": 33.8173,
      "eval_samples_per_second": 131.944,
      "eval_steps_per_second": 16.5,
      "step": 5000
    },
    {
      "epoch": 0.09191944514080387,
      "grad_norm": 6.062402248382568,
      "learning_rate": 4.5789163225752086e-05,
      "loss": 4.0602,
      "step": 5500
    },
    {
      "epoch": 0.10027575833542242,
      "grad_norm": 4.522783279418945,
      "learning_rate": 4.536782674643971e-05,
      "loss": 4.0243,
      "step": 6000
    },
    {
      "epoch": 0.10027575833542242,
      "eval_loss": 3.8684792518615723,
      "eval_runtime": 33.8061,
      "eval_samples_per_second": 131.988,
      "eval_steps_per_second": 16.506,
      "step": 6000
    },
    {
      "epoch": 0.10863207153004095,
      "grad_norm": 5.732029914855957,
      "learning_rate": 4.494649026712733e-05,
      "loss": 3.9523,
      "step": 6500
    },
    {
      "epoch": 0.11698838472465949,
      "grad_norm": 5.034173011779785,
      "learning_rate": 4.452599646077358e-05,
      "loss": 3.9991,
      "step": 7000
    },
    {
      "epoch": 0.11698838472465949,
      "eval_loss": 3.818037748336792,
      "eval_runtime": 33.7911,
      "eval_samples_per_second": 132.047,
      "eval_steps_per_second": 16.513,
      "step": 7000
    },
    {
      "epoch": 0.12534469791927802,
      "grad_norm": 6.326226711273193,
      "learning_rate": 4.41046599814612e-05,
      "loss": 3.9681,
      "step": 7500
    },
    {
      "epoch": 0.13370101111389654,
      "grad_norm": 5.3602495193481445,
      "learning_rate": 4.368332350214882e-05,
      "loss": 3.9101,
      "step": 8000
    },
    {
      "epoch": 0.13370101111389654,
      "eval_loss": 3.7742578983306885,
      "eval_runtime": 33.7987,
      "eval_samples_per_second": 132.017,
      "eval_steps_per_second": 16.51,
      "step": 8000
    },
    {
      "epoch": 0.1420573243085151,
      "grad_norm": 6.7508111000061035,
      "learning_rate": 4.326198702283644e-05,
      "loss": 3.9032,
      "step": 8500
    },
    {
      "epoch": 0.1504136375031336,
      "grad_norm": 5.866630554199219,
      "learning_rate": 4.2840650543524055e-05,
      "loss": 3.8962,
      "step": 9000
    },
    {
      "epoch": 0.1504136375031336,
      "eval_loss": 3.7523410320281982,
      "eval_runtime": 33.8046,
      "eval_samples_per_second": 131.994,
      "eval_steps_per_second": 16.507,
      "step": 9000
    },
    {
      "epoch": 0.15876995069775215,
      "grad_norm": 4.693000316619873,
      "learning_rate": 4.241931406421168e-05,
      "loss": 3.853,
      "step": 9500
    },
    {
      "epoch": 0.16712626389237067,
      "grad_norm": 5.653314113616943,
      "learning_rate": 4.199882025785793e-05,
      "loss": 3.8244,
      "step": 10000
    },
    {
      "epoch": 0.16712626389237067,
      "eval_loss": 3.711071014404297,
      "eval_runtime": 33.8163,
      "eval_samples_per_second": 131.948,
      "eval_steps_per_second": 16.501,
      "step": 10000
    },
    {
      "epoch": 0.17548257708698922,
      "grad_norm": 4.951623439788818,
      "learning_rate": 4.157748377854555e-05,
      "loss": 3.8097,
      "step": 10500
    },
    {
      "epoch": 0.18383889028160774,
      "grad_norm": 9.94375228881836,
      "learning_rate": 4.115614729923317e-05,
      "loss": 3.7687,
      "step": 11000
    },
    {
      "epoch": 0.18383889028160774,
      "eval_loss": 3.6785600185394287,
      "eval_runtime": 33.809,
      "eval_samples_per_second": 131.977,
      "eval_steps_per_second": 16.504,
      "step": 11000
    },
    {
      "epoch": 0.1921952034762263,
      "grad_norm": 7.155759334564209,
      "learning_rate": 4.0734810819920794e-05,
      "loss": 3.8008,
      "step": 11500
    },
    {
      "epoch": 0.20055151667084484,
      "grad_norm": 3.9872701168060303,
      "learning_rate": 4.031347434060841e-05,
      "loss": 3.7853,
      "step": 12000
    },
    {
      "epoch": 0.20055151667084484,
      "eval_loss": 3.655622959136963,
      "eval_runtime": 33.807,
      "eval_samples_per_second": 131.985,
      "eval_steps_per_second": 16.505,
      "step": 12000
    },
    {
      "epoch": 0.20890782986546336,
      "grad_norm": 7.3907647132873535,
      "learning_rate": 3.989298053425466e-05,
      "loss": 3.8082,
      "step": 12500
    },
    {
      "epoch": 0.2172641430600819,
      "grad_norm": 4.089804172515869,
      "learning_rate": 3.947164405494228e-05,
      "loss": 3.7216,
      "step": 13000
    },
    {
      "epoch": 0.2172641430600819,
      "eval_loss": 3.641737937927246,
      "eval_runtime": 33.7976,
      "eval_samples_per_second": 132.021,
      "eval_steps_per_second": 16.51,
      "step": 13000
    },
    {
      "epoch": 0.22562045625470042,
      "grad_norm": 3.7814671993255615,
      "learning_rate": 3.90503075756299e-05,
      "loss": 3.7132,
      "step": 13500
    },
    {
      "epoch": 0.23397676944931897,
      "grad_norm": 5.00490665435791,
      "learning_rate": 3.8628971096317526e-05,
      "loss": 3.7508,
      "step": 14000
    },
    {
      "epoch": 0.23397676944931897,
      "eval_loss": 3.6097123622894287,
      "eval_runtime": 33.8097,
      "eval_samples_per_second": 131.974,
      "eval_steps_per_second": 16.504,
      "step": 14000
    },
    {
      "epoch": 0.2423330826439375,
      "grad_norm": 4.1177659034729,
      "learning_rate": 3.820763461700515e-05,
      "loss": 3.7317,
      "step": 14500
    },
    {
      "epoch": 0.25068939583855604,
      "grad_norm": 5.807891845703125,
      "learning_rate": 3.7786298137692763e-05,
      "loss": 3.7186,
      "step": 15000
    },
    {
      "epoch": 0.25068939583855604,
      "eval_loss": 3.5940768718719482,
      "eval_runtime": 33.8398,
      "eval_samples_per_second": 131.856,
      "eval_steps_per_second": 16.489,
      "step": 15000
    },
    {
      "epoch": 0.2590457090331746,
      "grad_norm": 5.580984592437744,
      "learning_rate": 3.736496165838038e-05,
      "loss": 3.7059,
      "step": 15500
    },
    {
      "epoch": 0.2674020222277931,
      "grad_norm": 5.553014278411865,
      "learning_rate": 3.6943625179068e-05,
      "loss": 3.6317,
      "step": 16000
    },
    {
      "epoch": 0.2674020222277931,
      "eval_loss": 3.5815696716308594,
      "eval_runtime": 33.7945,
      "eval_samples_per_second": 132.033,
      "eval_steps_per_second": 16.512,
      "step": 16000
    },
    {
      "epoch": 0.2757583354224116,
      "grad_norm": 7.818964958190918,
      "learning_rate": 3.652313137271425e-05,
      "loss": 3.6837,
      "step": 16500
    },
    {
      "epoch": 0.2841146486170302,
      "grad_norm": 7.232082843780518,
      "learning_rate": 3.610179489340187e-05,
      "loss": 3.6965,
      "step": 17000
    },
    {
      "epoch": 0.2841146486170302,
      "eval_loss": 3.559220552444458,
      "eval_runtime": 33.8032,
      "eval_samples_per_second": 131.999,
      "eval_steps_per_second": 16.507,
      "step": 17000
    },
    {
      "epoch": 0.2924709618116487,
      "grad_norm": 5.058260917663574,
      "learning_rate": 3.5680458414089495e-05,
      "loss": 3.6218,
      "step": 17500
    },
    {
      "epoch": 0.3008272750062672,
      "grad_norm": 6.645322322845459,
      "learning_rate": 3.525912193477712e-05,
      "loss": 3.6203,
      "step": 18000
    },
    {
      "epoch": 0.3008272750062672,
      "eval_loss": 3.5445735454559326,
      "eval_runtime": 33.826,
      "eval_samples_per_second": 131.911,
      "eval_steps_per_second": 16.496,
      "step": 18000
    },
    {
      "epoch": 0.30918358820088576,
      "grad_norm": 6.7204084396362305,
      "learning_rate": 3.483862812842336e-05,
      "loss": 3.6135,
      "step": 18500
    },
    {
      "epoch": 0.3175399013955043,
      "grad_norm": 5.846601486206055,
      "learning_rate": 3.441729164911098e-05,
      "loss": 3.6139,
      "step": 19000
    },
    {
      "epoch": 0.3175399013955043,
      "eval_loss": 3.529482364654541,
      "eval_runtime": 33.8084,
      "eval_samples_per_second": 131.979,
      "eval_steps_per_second": 16.505,
      "step": 19000
    },
    {
      "epoch": 0.32589621459012286,
      "grad_norm": 4.412099838256836,
      "learning_rate": 3.3995955169798604e-05,
      "loss": 3.6202,
      "step": 19500
    },
    {
      "epoch": 0.33425252778474135,
      "grad_norm": 6.884310722351074,
      "learning_rate": 3.357461869048623e-05,
      "loss": 3.5862,
      "step": 20000
    },
    {
      "epoch": 0.33425252778474135,
      "eval_loss": 3.518963098526001,
      "eval_runtime": 33.8217,
      "eval_samples_per_second": 131.927,
      "eval_steps_per_second": 16.498,
      "step": 20000
    },
    {
      "epoch": 0.3426088409793599,
      "grad_norm": 4.762236595153809,
      "learning_rate": 3.315328221117385e-05,
      "loss": 3.613,
      "step": 20500
    },
    {
      "epoch": 0.35096515417397844,
      "grad_norm": 3.5604190826416016,
      "learning_rate": 3.273278840482009e-05,
      "loss": 3.5767,
      "step": 21000
    },
    {
      "epoch": 0.35096515417397844,
      "eval_loss": 3.5067789554595947,
      "eval_runtime": 33.8036,
      "eval_samples_per_second": 131.998,
      "eval_steps_per_second": 16.507,
      "step": 21000
    },
    {
      "epoch": 0.359321467368597,
      "grad_norm": 3.5096914768218994,
      "learning_rate": 3.2311451925507714e-05,
      "loss": 3.5692,
      "step": 21500
    },
    {
      "epoch": 0.3676777805632155,
      "grad_norm": 4.54230260848999,
      "learning_rate": 3.1890115446195336e-05,
      "loss": 3.5914,
      "step": 22000
    },
    {
      "epoch": 0.3676777805632155,
      "eval_loss": 3.491032361984253,
      "eval_runtime": 33.8108,
      "eval_samples_per_second": 131.97,
      "eval_steps_per_second": 16.504,
      "step": 22000
    },
    {
      "epoch": 0.37603409375783403,
      "grad_norm": 5.874061584472656,
      "learning_rate": 3.146877896688296e-05,
      "loss": 3.6494,
      "step": 22500
    },
    {
      "epoch": 0.3843904069524526,
      "grad_norm": 5.265365123748779,
      "learning_rate": 3.10482851605292e-05,
      "loss": 3.5796,
      "step": 23000
    },
    {
      "epoch": 0.3843904069524526,
      "eval_loss": 3.4834258556365967,
      "eval_runtime": 33.7961,
      "eval_samples_per_second": 132.027,
      "eval_steps_per_second": 16.511,
      "step": 23000
    },
    {
      "epoch": 0.3927467201470711,
      "grad_norm": 4.7764129638671875,
      "learning_rate": 3.062694868121682e-05,
      "loss": 3.5603,
      "step": 23500
    },
    {
      "epoch": 0.4011030333416897,
      "grad_norm": 4.89961576461792,
      "learning_rate": 3.0205612201904442e-05,
      "loss": 3.5427,
      "step": 24000
    },
    {
      "epoch": 0.4011030333416897,
      "eval_loss": 3.476203203201294,
      "eval_runtime": 33.8616,
      "eval_samples_per_second": 131.772,
      "eval_steps_per_second": 16.479,
      "step": 24000
    },
    {
      "epoch": 0.40945934653630817,
      "grad_norm": 6.659942150115967,
      "learning_rate": 2.9784275722592064e-05,
      "loss": 3.5559,
      "step": 24500
    },
    {
      "epoch": 0.4178156597309267,
      "grad_norm": 4.591799259185791,
      "learning_rate": 2.936378191623831e-05,
      "loss": 3.5087,
      "step": 25000
    },
    {
      "epoch": 0.4178156597309267,
      "eval_loss": 3.465503454208374,
      "eval_runtime": 33.8191,
      "eval_samples_per_second": 131.937,
      "eval_steps_per_second": 16.5,
      "step": 25000
    },
    {
      "epoch": 0.42617197292554526,
      "grad_norm": 4.84548282623291,
      "learning_rate": 2.894244543692593e-05,
      "loss": 3.5474,
      "step": 25500
    },
    {
      "epoch": 0.4345282861201638,
      "grad_norm": 5.839838027954102,
      "learning_rate": 2.852110895761355e-05,
      "loss": 3.5723,
      "step": 26000
    },
    {
      "epoch": 0.4345282861201638,
      "eval_loss": 3.458843946456909,
      "eval_runtime": 33.8846,
      "eval_samples_per_second": 131.682,
      "eval_steps_per_second": 16.468,
      "step": 26000
    },
    {
      "epoch": 0.4428845993147823,
      "grad_norm": 3.7624571323394775,
      "learning_rate": 2.8099772478301174e-05,
      "loss": 3.4898,
      "step": 26500
    },
    {
      "epoch": 0.45124091250940085,
      "grad_norm": 4.763157844543457,
      "learning_rate": 2.7678435998988793e-05,
      "loss": 3.5493,
      "step": 27000
    },
    {
      "epoch": 0.45124091250940085,
      "eval_loss": 3.4483096599578857,
      "eval_runtime": 33.7859,
      "eval_samples_per_second": 132.067,
      "eval_steps_per_second": 16.516,
      "step": 27000
    },
    {
      "epoch": 0.4595972257040194,
      "grad_norm": 4.820297718048096,
      "learning_rate": 2.725794219263504e-05,
      "loss": 3.5162,
      "step": 27500
    },
    {
      "epoch": 0.46795353889863794,
      "grad_norm": 8.342415809631348,
      "learning_rate": 2.683660571332266e-05,
      "loss": 3.4815,
      "step": 28000
    },
    {
      "epoch": 0.46795353889863794,
      "eval_loss": 3.4425435066223145,
      "eval_runtime": 33.8093,
      "eval_samples_per_second": 131.976,
      "eval_steps_per_second": 16.504,
      "step": 28000
    },
    {
      "epoch": 0.47630985209325644,
      "grad_norm": 3.9816768169403076,
      "learning_rate": 2.641526923401028e-05,
      "loss": 3.5168,
      "step": 28500
    },
    {
      "epoch": 0.484666165287875,
      "grad_norm": 4.662781715393066,
      "learning_rate": 2.5993932754697902e-05,
      "loss": 3.5442,
      "step": 29000
    },
    {
      "epoch": 0.484666165287875,
      "eval_loss": 3.4301340579986572,
      "eval_runtime": 33.7983,
      "eval_samples_per_second": 132.018,
      "eval_steps_per_second": 16.51,
      "step": 29000
    },
    {
      "epoch": 0.49302247848249353,
      "grad_norm": 5.855069160461426,
      "learning_rate": 2.5573438948344148e-05,
      "loss": 3.5048,
      "step": 29500
    },
    {
      "epoch": 0.5013787916771121,
      "grad_norm": 3.8957674503326416,
      "learning_rate": 2.515210246903177e-05,
      "loss": 3.5452,
      "step": 30000
    },
    {
      "epoch": 0.5013787916771121,
      "eval_loss": 3.4219019412994385,
      "eval_runtime": 33.8062,
      "eval_samples_per_second": 131.988,
      "eval_steps_per_second": 16.506,
      "step": 30000
    },
    {
      "epoch": 0.5097351048717306,
      "grad_norm": 5.273784160614014,
      "learning_rate": 2.473076598971939e-05,
      "loss": 3.4951,
      "step": 30500
    },
    {
      "epoch": 0.5180914180663492,
      "grad_norm": 6.109909534454346,
      "learning_rate": 2.430942951040701e-05,
      "loss": 3.4879,
      "step": 31000
    },
    {
      "epoch": 0.5180914180663492,
      "eval_loss": 3.419405698776245,
      "eval_runtime": 33.7875,
      "eval_samples_per_second": 132.061,
      "eval_steps_per_second": 16.515,
      "step": 31000
    },
    {
      "epoch": 0.5264477312609677,
      "grad_norm": 7.194368839263916,
      "learning_rate": 2.3888935704053257e-05,
      "loss": 3.5259,
      "step": 31500
    },
    {
      "epoch": 0.5348040444555862,
      "grad_norm": 4.303890228271484,
      "learning_rate": 2.3468441897699503e-05,
      "loss": 3.4956,
      "step": 32000
    },
    {
      "epoch": 0.5348040444555862,
      "eval_loss": 3.4062435626983643,
      "eval_runtime": 33.8183,
      "eval_samples_per_second": 131.94,
      "eval_steps_per_second": 16.5,
      "step": 32000
    },
    {
      "epoch": 0.5431603576502048,
      "grad_norm": 4.180033206939697,
      "learning_rate": 2.3047105418387125e-05,
      "loss": 3.5137,
      "step": 32500
    },
    {
      "epoch": 0.5515166708448233,
      "grad_norm": 4.65141487121582,
      "learning_rate": 2.2625768939074744e-05,
      "loss": 3.4635,
      "step": 33000
    },
    {
      "epoch": 0.5515166708448233,
      "eval_loss": 3.3979876041412354,
      "eval_runtime": 33.8238,
      "eval_samples_per_second": 131.919,
      "eval_steps_per_second": 16.497,
      "step": 33000
    },
    {
      "epoch": 0.5598729840394417,
      "grad_norm": 7.3677215576171875,
      "learning_rate": 2.2204432459762367e-05,
      "loss": 3.5071,
      "step": 33500
    },
    {
      "epoch": 0.5682292972340603,
      "grad_norm": 4.072124481201172,
      "learning_rate": 2.178309598044999e-05,
      "loss": 3.4836,
      "step": 34000
    },
    {
      "epoch": 0.5682292972340603,
      "eval_loss": 3.3961925506591797,
      "eval_runtime": 33.8074,
      "eval_samples_per_second": 131.983,
      "eval_steps_per_second": 16.505,
      "step": 34000
    },
    {
      "epoch": 0.5765856104286788,
      "grad_norm": 4.479409217834473,
      "learning_rate": 2.136175950113761e-05,
      "loss": 3.4763,
      "step": 34500
    },
    {
      "epoch": 0.5849419236232974,
      "grad_norm": 3.3214428424835205,
      "learning_rate": 2.094042302182523e-05,
      "loss": 3.4378,
      "step": 35000
    },
    {
      "epoch": 0.5849419236232974,
      "eval_loss": 3.389758825302124,
      "eval_runtime": 33.7986,
      "eval_samples_per_second": 132.017,
      "eval_steps_per_second": 16.51,
      "step": 35000
    },
    {
      "epoch": 0.5932982368179159,
      "grad_norm": 6.104400157928467,
      "learning_rate": 2.0519086542512852e-05,
      "loss": 3.4959,
      "step": 35500
    },
    {
      "epoch": 0.6016545500125344,
      "grad_norm": 5.734145641326904,
      "learning_rate": 2.0097750063200475e-05,
      "loss": 3.4673,
      "step": 36000
    },
    {
      "epoch": 0.6016545500125344,
      "eval_loss": 3.3848955631256104,
      "eval_runtime": 33.8025,
      "eval_samples_per_second": 132.002,
      "eval_steps_per_second": 16.508,
      "step": 36000
    },
    {
      "epoch": 0.610010863207153,
      "grad_norm": 4.3256683349609375,
      "learning_rate": 1.9677256256846717e-05,
      "loss": 3.4903,
      "step": 36500
    },
    {
      "epoch": 0.6183671764017715,
      "grad_norm": 5.769290447235107,
      "learning_rate": 1.925591977753434e-05,
      "loss": 3.4722,
      "step": 37000
    },
    {
      "epoch": 0.6183671764017715,
      "eval_loss": 3.3791496753692627,
      "eval_runtime": 33.8066,
      "eval_samples_per_second": 131.986,
      "eval_steps_per_second": 16.506,
      "step": 37000
    },
    {
      "epoch": 0.6267234895963901,
      "grad_norm": 4.285965442657471,
      "learning_rate": 1.8834583298221962e-05,
      "loss": 3.4922,
      "step": 37500
    },
    {
      "epoch": 0.6350798027910086,
      "grad_norm": 4.97435998916626,
      "learning_rate": 1.8413246818909584e-05,
      "loss": 3.4814,
      "step": 38000
    },
    {
      "epoch": 0.6350798027910086,
      "eval_loss": 3.3762271404266357,
      "eval_runtime": 33.8323,
      "eval_samples_per_second": 131.886,
      "eval_steps_per_second": 16.493,
      "step": 38000
    },
    {
      "epoch": 0.6434361159856271,
      "grad_norm": 4.531788349151611,
      "learning_rate": 1.7992753012555827e-05,
      "loss": 3.4144,
      "step": 38500
    },
    {
      "epoch": 0.6517924291802457,
      "grad_norm": 5.197137355804443,
      "learning_rate": 1.757141653324345e-05,
      "loss": 3.4358,
      "step": 39000
    },
    {
      "epoch": 0.6517924291802457,
      "eval_loss": 3.3685717582702637,
      "eval_runtime": 33.7899,
      "eval_samples_per_second": 132.051,
      "eval_steps_per_second": 16.514,
      "step": 39000
    },
    {
      "epoch": 0.6601487423748642,
      "grad_norm": 5.673033714294434,
      "learning_rate": 1.715008005393107e-05,
      "loss": 3.3936,
      "step": 39500
    },
    {
      "epoch": 0.6685050555694827,
      "grad_norm": 4.699774742126465,
      "learning_rate": 1.672874357461869e-05,
      "loss": 3.4711,
      "step": 40000
    },
    {
      "epoch": 0.6685050555694827,
      "eval_loss": 3.3646111488342285,
      "eval_runtime": 33.8073,
      "eval_samples_per_second": 131.983,
      "eval_steps_per_second": 16.505,
      "step": 40000
    },
    {
      "epoch": 0.6768613687641013,
      "grad_norm": 4.5016889572143555,
      "learning_rate": 1.630824976826494e-05,
      "loss": 3.4073,
      "step": 40500
    },
    {
      "epoch": 0.6852176819587198,
      "grad_norm": 9.633309364318848,
      "learning_rate": 1.5886913288952558e-05,
      "loss": 3.4437,
      "step": 41000
    },
    {
      "epoch": 0.6852176819587198,
      "eval_loss": 3.3601598739624023,
      "eval_runtime": 33.7786,
      "eval_samples_per_second": 132.096,
      "eval_steps_per_second": 16.519,
      "step": 41000
    },
    {
      "epoch": 0.6935739951533384,
      "grad_norm": 3.356966733932495,
      "learning_rate": 1.5465576809640177e-05,
      "loss": 3.3706,
      "step": 41500
    },
    {
      "epoch": 0.7019303083479569,
      "grad_norm": 4.3639678955078125,
      "learning_rate": 1.50442403303278e-05,
      "loss": 3.4171,
      "step": 42000
    },
    {
      "epoch": 0.7019303083479569,
      "eval_loss": 3.358569622039795,
      "eval_runtime": 33.8063,
      "eval_samples_per_second": 131.987,
      "eval_steps_per_second": 16.506,
      "step": 42000
    },
    {
      "epoch": 0.7102866215425754,
      "grad_norm": 6.138998985290527,
      "learning_rate": 1.4623746523974047e-05,
      "loss": 3.3822,
      "step": 42500
    },
    {
      "epoch": 0.718642934737194,
      "grad_norm": 5.975950717926025,
      "learning_rate": 1.4202410044661668e-05,
      "loss": 3.442,
      "step": 43000
    },
    {
      "epoch": 0.718642934737194,
      "eval_loss": 3.3521432876586914,
      "eval_runtime": 33.7743,
      "eval_samples_per_second": 132.112,
      "eval_steps_per_second": 16.521,
      "step": 43000
    },
    {
      "epoch": 0.7269992479318125,
      "grad_norm": 5.316736698150635,
      "learning_rate": 1.378107356534929e-05,
      "loss": 3.3837,
      "step": 43500
    },
    {
      "epoch": 0.735355561126431,
      "grad_norm": 4.64693021774292,
      "learning_rate": 1.335973708603691e-05,
      "loss": 3.3846,
      "step": 44000
    },
    {
      "epoch": 0.735355561126431,
      "eval_loss": 3.3473236560821533,
      "eval_runtime": 33.8058,
      "eval_samples_per_second": 131.989,
      "eval_steps_per_second": 16.506,
      "step": 44000
    },
    {
      "epoch": 0.7437118743210496,
      "grad_norm": 7.507499694824219,
      "learning_rate": 1.293840060672453e-05,
      "loss": 3.4229,
      "step": 44500
    },
    {
      "epoch": 0.7520681875156681,
      "grad_norm": 4.695137023925781,
      "learning_rate": 1.2517906800370777e-05,
      "loss": 3.4064,
      "step": 45000
    },
    {
      "epoch": 0.7520681875156681,
      "eval_loss": 3.3478496074676514,
      "eval_runtime": 33.7907,
      "eval_samples_per_second": 132.048,
      "eval_steps_per_second": 16.513,
      "step": 45000
    },
    {
      "epoch": 0.7604245007102867,
      "grad_norm": 6.782580375671387,
      "learning_rate": 1.2096570321058398e-05,
      "loss": 3.4665,
      "step": 45500
    },
    {
      "epoch": 0.7687808139049052,
      "grad_norm": 7.160044193267822,
      "learning_rate": 1.167523384174602e-05,
      "loss": 3.4181,
      "step": 46000
    },
    {
      "epoch": 0.7687808139049052,
      "eval_loss": 3.339061737060547,
      "eval_runtime": 33.8357,
      "eval_samples_per_second": 131.872,
      "eval_steps_per_second": 16.491,
      "step": 46000
    },
    {
      "epoch": 0.7771371270995237,
      "grad_norm": 6.559309482574463,
      "learning_rate": 1.1253897362433639e-05,
      "loss": 3.3691,
      "step": 46500
    },
    {
      "epoch": 0.7854934402941423,
      "grad_norm": 4.2476043701171875,
      "learning_rate": 1.0832560883121261e-05,
      "loss": 3.3825,
      "step": 47000
    },
    {
      "epoch": 0.7854934402941423,
      "eval_loss": 3.3343887329101562,
      "eval_runtime": 33.7813,
      "eval_samples_per_second": 132.085,
      "eval_steps_per_second": 16.518,
      "step": 47000
    },
    {
      "epoch": 0.7938497534887607,
      "grad_norm": 8.163984298706055,
      "learning_rate": 1.0411224403808882e-05,
      "loss": 3.4073,
      "step": 47500
    },
    {
      "epoch": 0.8022060666833793,
      "grad_norm": 4.705440998077393,
      "learning_rate": 9.989887924496504e-06,
      "loss": 3.4277,
      "step": 48000
    },
    {
      "epoch": 0.8022060666833793,
      "eval_loss": 3.334386110305786,
      "eval_runtime": 33.8549,
      "eval_samples_per_second": 131.798,
      "eval_steps_per_second": 16.482,
      "step": 48000
    },
    {
      "epoch": 0.8105623798779978,
      "grad_norm": 5.080317497253418,
      "learning_rate": 9.56939411814275e-06,
      "loss": 3.4014,
      "step": 48500
    },
    {
      "epoch": 0.8189186930726163,
      "grad_norm": 4.523305892944336,
      "learning_rate": 9.14805763883037e-06,
      "loss": 3.331,
      "step": 49000
    },
    {
      "epoch": 0.8189186930726163,
      "eval_loss": 3.33245587348938,
      "eval_runtime": 33.9406,
      "eval_samples_per_second": 131.465,
      "eval_steps_per_second": 16.44,
      "step": 49000
    },
    {
      "epoch": 0.8272750062672349,
      "grad_norm": 5.878600597381592,
      "learning_rate": 8.726721159517993e-06,
      "loss": 3.4097,
      "step": 49500
    },
    {
      "epoch": 0.8356313194618534,
      "grad_norm": 4.23352575302124,
      "learning_rate": 8.305384680205612e-06,
      "loss": 3.4073,
      "step": 50000
    },
    {
      "epoch": 0.8356313194618534,
      "eval_loss": 3.328063488006592,
      "eval_runtime": 33.9523,
      "eval_samples_per_second": 131.42,
      "eval_steps_per_second": 16.435,
      "step": 50000
    },
    {
      "epoch": 0.8439876326564719,
      "grad_norm": 6.142092227935791,
      "learning_rate": 7.884890873851858e-06,
      "loss": 3.3904,
      "step": 50500
    },
    {
      "epoch": 0.8523439458510905,
      "grad_norm": 7.910597324371338,
      "learning_rate": 7.463554394539479e-06,
      "loss": 3.3741,
      "step": 51000
    },
    {
      "epoch": 0.8523439458510905,
      "eval_loss": 3.327683448791504,
      "eval_runtime": 33.9417,
      "eval_samples_per_second": 131.461,
      "eval_steps_per_second": 16.44,
      "step": 51000
    },
    {
      "epoch": 0.860700259045709,
      "grad_norm": 4.868673324584961,
      "learning_rate": 7.0422179152271005e-06,
      "loss": 3.3615,
      "step": 51500
    },
    {
      "epoch": 0.8690565722403276,
      "grad_norm": 4.47282075881958,
      "learning_rate": 6.620881435914722e-06,
      "loss": 3.3467,
      "step": 52000
    },
    {
      "epoch": 0.8690565722403276,
      "eval_loss": 3.3206417560577393,
      "eval_runtime": 33.9544,
      "eval_samples_per_second": 131.412,
      "eval_steps_per_second": 16.434,
      "step": 52000
    },
    {
      "epoch": 0.8774128854349461,
      "grad_norm": 5.413076877593994,
      "learning_rate": 6.200387629560968e-06,
      "loss": 3.3221,
      "step": 52500
    },
    {
      "epoch": 0.8857691986295646,
      "grad_norm": 5.672983169555664,
      "learning_rate": 5.779051150248588e-06,
      "loss": 3.418,
      "step": 53000
    },
    {
      "epoch": 0.8857691986295646,
      "eval_loss": 3.320605516433716,
      "eval_runtime": 33.884,
      "eval_samples_per_second": 131.684,
      "eval_steps_per_second": 16.468,
      "step": 53000
    },
    {
      "epoch": 0.8941255118241832,
      "grad_norm": 8.368507385253906,
      "learning_rate": 5.35771467093621e-06,
      "loss": 3.3747,
      "step": 53500
    },
    {
      "epoch": 0.9024818250188017,
      "grad_norm": 6.918625831604004,
      "learning_rate": 4.936378191623831e-06,
      "loss": 3.416,
      "step": 54000
    },
    {
      "epoch": 0.9024818250188017,
      "eval_loss": 3.317139148712158,
      "eval_runtime": 33.9331,
      "eval_samples_per_second": 131.494,
      "eval_steps_per_second": 16.444,
      "step": 54000
    },
    {
      "epoch": 0.9108381382134202,
      "grad_norm": 6.566315174102783,
      "learning_rate": 4.515884385270077e-06,
      "loss": 3.3985,
      "step": 54500
    },
    {
      "epoch": 0.9191944514080388,
      "grad_norm": 4.790822982788086,
      "learning_rate": 4.094547905957698e-06,
      "loss": 3.3607,
      "step": 55000
    },
    {
      "epoch": 0.9191944514080388,
      "eval_loss": 3.3170626163482666,
      "eval_runtime": 33.9426,
      "eval_samples_per_second": 131.457,
      "eval_steps_per_second": 16.439,
      "step": 55000
    },
    {
      "epoch": 0.9275507646026573,
      "grad_norm": 5.501828193664551,
      "learning_rate": 3.6732114266453192e-06,
      "loss": 3.3586,
      "step": 55500
    },
    {
      "epoch": 0.9359070777972759,
      "grad_norm": 8.011107444763184,
      "learning_rate": 3.2518749473329403e-06,
      "loss": 3.4076,
      "step": 56000
    },
    {
      "epoch": 0.9359070777972759,
      "eval_loss": 3.3138890266418457,
      "eval_runtime": 33.9309,
      "eval_samples_per_second": 131.503,
      "eval_steps_per_second": 16.445,
      "step": 56000
    },
    {
      "epoch": 0.9442633909918944,
      "grad_norm": 5.630584239959717,
      "learning_rate": 2.8305384680205613e-06,
      "loss": 3.4309,
      "step": 56500
    },
    {
      "epoch": 0.9526197041865129,
      "grad_norm": 3.990445137023926,
      "learning_rate": 2.4092019887081824e-06,
      "loss": 3.3257,
      "step": 57000
    },
    {
      "epoch": 0.9526197041865129,
      "eval_loss": 3.3135273456573486,
      "eval_runtime": 33.8948,
      "eval_samples_per_second": 131.643,
      "eval_steps_per_second": 16.463,
      "step": 57000
    },
    {
      "epoch": 0.9609760173811315,
      "grad_norm": 4.2760396003723145,
      "learning_rate": 1.9878655093958034e-06,
      "loss": 3.3362,
      "step": 57500
    },
    {
      "epoch": 0.96933233057575,
      "grad_norm": 4.5603532791137695,
      "learning_rate": 1.5665290300834245e-06,
      "loss": 3.3657,
      "step": 58000
    },
    {
      "epoch": 0.96933233057575,
      "eval_loss": 3.3120391368865967,
      "eval_runtime": 33.957,
      "eval_samples_per_second": 131.401,
      "eval_steps_per_second": 16.433,
      "step": 58000
    },
    {
      "epoch": 0.9776886437703685,
      "grad_norm": 3.6152896881103516,
      "learning_rate": 1.1460352237296705e-06,
      "loss": 3.3847,
      "step": 58500
    },
    {
      "epoch": 0.9860449569649871,
      "grad_norm": 5.7242751121521,
      "learning_rate": 7.246987444172916e-07,
      "loss": 3.3509,
      "step": 59000
    },
    {
      "epoch": 0.9860449569649871,
      "eval_loss": 3.3113863468170166,
      "eval_runtime": 33.9749,
      "eval_samples_per_second": 131.332,
      "eval_steps_per_second": 16.424,
      "step": 59000
    },
    {
      "epoch": 0.9944012701596056,
      "grad_norm": 8.690220832824707,
      "learning_rate": 3.033622651049128e-07,
      "loss": 3.3511,
      "step": 59500
    }
  ],
  "logging_steps": 500,
  "max_steps": 59835,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.50149494587392e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}