{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9690721649484537,
  "eval_steps": 500,
  "global_step": 180,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016494845360824743,
      "grad_norm": 6.510887010044826,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.1235,
      "step": 1
    },
    {
      "epoch": 0.032989690721649485,
      "grad_norm": 6.5268184982716475,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.1258,
      "step": 2
    },
    {
      "epoch": 0.049484536082474224,
      "grad_norm": 4.24927908625525,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.0563,
      "step": 3
    },
    {
      "epoch": 0.06597938144329897,
      "grad_norm": 2.816200291689023,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.009,
      "step": 4
    },
    {
      "epoch": 0.08247422680412371,
      "grad_norm": 6.493905326694169,
      "learning_rate": 2.2222222222222227e-05,
      "loss": 1.013,
      "step": 5
    },
    {
      "epoch": 0.09896907216494845,
      "grad_norm": 7.726008627313003,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.0706,
      "step": 6
    },
    {
      "epoch": 0.1154639175257732,
      "grad_norm": 4.516493110783658,
      "learning_rate": 3.111111111111112e-05,
      "loss": 0.9695,
      "step": 7
    },
    {
      "epoch": 0.13195876288659794,
      "grad_norm": 4.640178402684917,
      "learning_rate": 3.555555555555555e-05,
      "loss": 0.9444,
      "step": 8
    },
    {
      "epoch": 0.14845360824742268,
      "grad_norm": 2.268866723993877,
      "learning_rate": 4e-05,
      "loss": 0.905,
      "step": 9
    },
    {
      "epoch": 0.16494845360824742,
      "grad_norm": 3.54868006526656,
      "learning_rate": 4.444444444444445e-05,
      "loss": 0.8855,
      "step": 10
    },
    {
      "epoch": 0.18144329896907216,
      "grad_norm": 2.702694507430541,
      "learning_rate": 4.88888888888889e-05,
      "loss": 0.8814,
      "step": 11
    },
    {
      "epoch": 0.1979381443298969,
      "grad_norm": 2.4163522339799424,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.8649,
      "step": 12
    },
    {
      "epoch": 0.21443298969072164,
      "grad_norm": 1.7906871535399842,
      "learning_rate": 5.777777777777778e-05,
      "loss": 0.8351,
      "step": 13
    },
    {
      "epoch": 0.2309278350515464,
      "grad_norm": 2.073156178645725,
      "learning_rate": 6.222222222222223e-05,
      "loss": 0.8352,
      "step": 14
    },
    {
      "epoch": 0.24742268041237114,
      "grad_norm": 4.016806278073859,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.8279,
      "step": 15
    },
    {
      "epoch": 0.2639175257731959,
      "grad_norm": 1.5493063583627422,
      "learning_rate": 7.11111111111111e-05,
      "loss": 0.8198,
      "step": 16
    },
    {
      "epoch": 0.2804123711340206,
      "grad_norm": 3.1170782607697487,
      "learning_rate": 7.555555555555556e-05,
      "loss": 0.8298,
      "step": 17
    },
    {
      "epoch": 0.29690721649484536,
      "grad_norm": 1.754906513968234,
      "learning_rate": 8e-05,
      "loss": 0.813,
      "step": 18
    },
    {
      "epoch": 0.3134020618556701,
      "grad_norm": 2.9900814711935846,
      "learning_rate": 7.999247881794007e-05,
      "loss": 0.8145,
      "step": 19
    },
    {
      "epoch": 0.32989690721649484,
      "grad_norm": 2.4128887550257256,
      "learning_rate": 7.996991810016922e-05,
      "loss": 0.8023,
      "step": 20
    },
    {
      "epoch": 0.3463917525773196,
      "grad_norm": 1.573430056889768,
      "learning_rate": 7.993232633085074e-05,
      "loss": 0.7798,
      "step": 21
    },
    {
      "epoch": 0.3628865979381443,
      "grad_norm": 1.7272127576229301,
      "learning_rate": 7.987971764671168e-05,
      "loss": 0.7822,
      "step": 22
    },
    {
      "epoch": 0.37938144329896906,
      "grad_norm": 2.4051614980913256,
      "learning_rate": 7.981211183172663e-05,
      "loss": 0.7858,
      "step": 23
    },
    {
      "epoch": 0.3958762886597938,
      "grad_norm": 1.2504967615335536,
      "learning_rate": 7.972953430967773e-05,
      "loss": 0.773,
      "step": 24
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 1.6023332952213127,
      "learning_rate": 7.963201613459381e-05,
      "loss": 0.7793,
      "step": 25
    },
    {
      "epoch": 0.4288659793814433,
      "grad_norm": 1.6124516785601999,
      "learning_rate": 7.951959397907237e-05,
      "loss": 0.7753,
      "step": 26
    },
    {
      "epoch": 0.44536082474226807,
      "grad_norm": 1.0172041366242313,
      "learning_rate": 7.939231012048833e-05,
      "loss": 0.7498,
      "step": 27
    },
    {
      "epoch": 0.4618556701030928,
      "grad_norm": 2.063022239192394,
      "learning_rate": 7.925021242509539e-05,
      "loss": 0.7668,
      "step": 28
    },
    {
      "epoch": 0.47835051546391755,
      "grad_norm": 1.203906865110656,
      "learning_rate": 7.909335433002543e-05,
      "loss": 0.7522,
      "step": 29
    },
    {
      "epoch": 0.4948453608247423,
      "grad_norm": 1.8963235208653197,
      "learning_rate": 7.892179482319297e-05,
      "loss": 0.7562,
      "step": 30
    },
    {
      "epoch": 0.511340206185567,
      "grad_norm": 1.4857136251084346,
      "learning_rate": 7.873559842111225e-05,
      "loss": 0.758,
      "step": 31
    },
    {
      "epoch": 0.5278350515463918,
      "grad_norm": 1.324299893798851,
      "learning_rate": 7.853483514463521e-05,
      "loss": 0.7392,
      "step": 32
    },
    {
      "epoch": 0.5443298969072164,
      "grad_norm": 1.1480502056057977,
      "learning_rate": 7.831958049261956e-05,
      "loss": 0.7363,
      "step": 33
    },
    {
      "epoch": 0.5608247422680412,
      "grad_norm": 1.0730578557805914,
      "learning_rate": 7.808991541353662e-05,
      "loss": 0.7244,
      "step": 34
    },
    {
      "epoch": 0.5773195876288659,
      "grad_norm": 1.2205423026753601,
      "learning_rate": 7.784592627503004e-05,
      "loss": 0.7307,
      "step": 35
    },
    {
      "epoch": 0.5938144329896907,
      "grad_norm": 1.1023928563893555,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.7178,
      "step": 36
    },
    {
      "epoch": 0.6103092783505155,
      "grad_norm": 1.2767578646872828,
      "learning_rate": 7.731534818928004e-05,
      "loss": 0.7173,
      "step": 37
    },
    {
      "epoch": 0.6268041237113402,
      "grad_norm": 1.682499530220273,
      "learning_rate": 7.702895877075563e-05,
      "loss": 0.733,
      "step": 38
    },
    {
      "epoch": 0.643298969072165,
      "grad_norm": 1.2445811359694992,
      "learning_rate": 7.672864427521097e-05,
      "loss": 0.708,
      "step": 39
    },
    {
      "epoch": 0.6597938144329897,
      "grad_norm": 0.923390898153776,
      "learning_rate": 7.641451763864587e-05,
      "loss": 0.7117,
      "step": 40
    },
    {
      "epoch": 0.6762886597938145,
      "grad_norm": 1.6810293301142893,
      "learning_rate": 7.608669699124153e-05,
      "loss": 0.7164,
      "step": 41
    },
    {
      "epoch": 0.6927835051546392,
      "grad_norm": 0.8665817723805724,
      "learning_rate": 7.57453056129365e-05,
      "loss": 0.7015,
      "step": 42
    },
    {
      "epoch": 0.709278350515464,
      "grad_norm": 1.38899095974282,
      "learning_rate": 7.539047188706631e-05,
      "loss": 0.7138,
      "step": 43
    },
    {
      "epoch": 0.7257731958762886,
      "grad_norm": 0.8370635723618396,
      "learning_rate": 7.502232925208365e-05,
      "loss": 0.6963,
      "step": 44
    },
    {
      "epoch": 0.7422680412371134,
      "grad_norm": 1.2109419876620096,
      "learning_rate": 7.464101615137756e-05,
      "loss": 0.6998,
      "step": 45
    },
    {
      "epoch": 0.7587628865979381,
      "grad_norm": 0.7345771754907713,
      "learning_rate": 7.424667598121067e-05,
      "loss": 0.6895,
      "step": 46
    },
    {
      "epoch": 0.7752577319587629,
      "grad_norm": 0.866583046558764,
      "learning_rate": 7.383945703679365e-05,
      "loss": 0.6853,
      "step": 47
    },
    {
      "epoch": 0.7917525773195876,
      "grad_norm": 0.954971416511012,
      "learning_rate": 7.341951245651747e-05,
      "loss": 0.6924,
      "step": 48
    },
    {
      "epoch": 0.8082474226804124,
      "grad_norm": 0.9698764503436211,
      "learning_rate": 7.298700016436427e-05,
      "loss": 0.692,
      "step": 49
    },
    {
      "epoch": 0.8247422680412371,
      "grad_norm": 1.144548805435363,
      "learning_rate": 7.254208281051871e-05,
      "loss": 0.6928,
      "step": 50
    },
    {
      "epoch": 0.8412371134020619,
      "grad_norm": 0.8399338717507018,
      "learning_rate": 7.208492771020176e-05,
      "loss": 0.685,
      "step": 51
    },
    {
      "epoch": 0.8577319587628865,
      "grad_norm": 0.8519348258795473,
      "learning_rate": 7.161570678075038e-05,
      "loss": 0.679,
      "step": 52
    },
    {
      "epoch": 0.8742268041237113,
      "grad_norm": 0.8682024621779066,
      "learning_rate": 7.113459647696641e-05,
      "loss": 0.6855,
      "step": 53
    },
    {
      "epoch": 0.8907216494845361,
      "grad_norm": 0.9687444841568461,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.679,
      "step": 54
    },
    {
      "epoch": 0.9072164948453608,
      "grad_norm": 1.3660815254644363,
      "learning_rate": 7.013743585310642e-05,
      "loss": 0.675,
      "step": 55
    },
    {
      "epoch": 0.9237113402061856,
      "grad_norm": 0.546131036326861,
      "learning_rate": 6.96217605243602e-05,
      "loss": 0.6711,
      "step": 56
    },
    {
      "epoch": 0.9402061855670103,
      "grad_norm": 1.1202176159006922,
      "learning_rate": 6.909494566292195e-05,
      "loss": 0.6688,
      "step": 57
    },
    {
      "epoch": 0.9567010309278351,
      "grad_norm": 1.2767701093146733,
      "learning_rate": 6.855718938231597e-05,
      "loss": 0.6707,
      "step": 58
    },
    {
      "epoch": 0.9731958762886598,
      "grad_norm": 0.5478022664863074,
      "learning_rate": 6.800869391068674e-05,
      "loss": 0.661,
      "step": 59
    },
    {
      "epoch": 0.9896907216494846,
      "grad_norm": 1.0121489489469924,
      "learning_rate": 6.744966551474936e-05,
      "loss": 0.6668,
      "step": 60
    },
    {
      "epoch": 1.0061855670103093,
      "grad_norm": 1.4780025517613948,
      "learning_rate": 6.688031442222091e-05,
      "loss": 0.9023,
      "step": 61
    },
    {
      "epoch": 1.022680412371134,
      "grad_norm": 0.8312783075179766,
      "learning_rate": 6.630085474276256e-05,
      "loss": 0.6324,
      "step": 62
    },
    {
      "epoch": 1.0391752577319588,
      "grad_norm": 0.8089531767122732,
      "learning_rate": 6.571150438746157e-05,
      "loss": 0.6264,
      "step": 63
    },
    {
      "epoch": 1.0556701030927835,
      "grad_norm": 0.5525157311375302,
      "learning_rate": 6.511248498688396e-05,
      "loss": 0.6245,
      "step": 64
    },
    {
      "epoch": 1.0721649484536082,
      "grad_norm": 0.6641634808727606,
      "learning_rate": 6.450402180772811e-05,
      "loss": 0.6239,
      "step": 65
    },
    {
      "epoch": 1.088659793814433,
      "grad_norm": 0.6945453989138148,
      "learning_rate": 6.388634366811146e-05,
      "loss": 0.6218,
      "step": 66
    },
    {
      "epoch": 1.1051546391752578,
      "grad_norm": 0.467762743943545,
      "learning_rate": 6.325968285152107e-05,
      "loss": 0.6191,
      "step": 67
    },
    {
      "epoch": 1.1216494845360825,
      "grad_norm": 0.5960377140822906,
      "learning_rate": 6.262427501946155e-05,
      "loss": 0.6196,
      "step": 68
    },
    {
      "epoch": 1.1381443298969072,
      "grad_norm": 0.6012136643477979,
      "learning_rate": 6.198035912283225e-05,
      "loss": 0.6156,
      "step": 69
    },
    {
      "epoch": 1.1546391752577319,
      "grad_norm": 0.3830555698329603,
      "learning_rate": 6.132817731206766e-05,
      "loss": 0.6164,
      "step": 70
    },
    {
      "epoch": 1.1711340206185568,
      "grad_norm": 0.5255708619248807,
      "learning_rate": 6.0667974846074524e-05,
      "loss": 0.6116,
      "step": 71
    },
    {
      "epoch": 1.1876288659793814,
      "grad_norm": 0.34124652573381775,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.6112,
      "step": 72
    },
    {
      "epoch": 1.2041237113402061,
      "grad_norm": 0.36719307010241353,
      "learning_rate": 5.9324503971865545e-05,
      "loss": 0.6087,
      "step": 73
    },
    {
      "epoch": 1.220618556701031,
      "grad_norm": 0.3744904944376666,
      "learning_rate": 5.8641740788101566e-05,
      "loss": 0.6178,
      "step": 74
    },
    {
      "epoch": 1.2371134020618557,
      "grad_norm": 0.40861995730924344,
      "learning_rate": 5.79519672080185e-05,
      "loss": 0.6078,
      "step": 75
    },
    {
      "epoch": 1.2536082474226804,
      "grad_norm": 0.4191817335171689,
      "learning_rate": 5.7255442627250146e-05,
      "loss": 0.6093,
      "step": 76
    },
    {
      "epoch": 1.270103092783505,
      "grad_norm": 0.4279620084135946,
      "learning_rate": 5.6552428980205575e-05,
      "loss": 0.6036,
      "step": 77
    },
    {
      "epoch": 1.2865979381443298,
      "grad_norm": 0.43387459801090117,
      "learning_rate": 5.584319064156628e-05,
      "loss": 0.6122,
      "step": 78
    },
    {
      "epoch": 1.3030927835051547,
      "grad_norm": 0.36347036101765934,
      "learning_rate": 5.5127994326865706e-05,
      "loss": 0.6095,
      "step": 79
    },
    {
      "epoch": 1.3195876288659794,
      "grad_norm": 0.4126765641672846,
      "learning_rate": 5.440710899218842e-05,
      "loss": 0.6066,
      "step": 80
    },
    {
      "epoch": 1.3360824742268043,
      "grad_norm": 0.4342669224320681,
      "learning_rate": 5.368080573302676e-05,
      "loss": 0.6049,
      "step": 81
    },
    {
      "epoch": 1.352577319587629,
      "grad_norm": 0.5247434320363652,
      "learning_rate": 5.294935768233285e-05,
      "loss": 0.6096,
      "step": 82
    },
    {
      "epoch": 1.3690721649484536,
      "grad_norm": 0.5346000474514714,
      "learning_rate": 5.2213039907804535e-05,
      "loss": 0.6103,
      "step": 83
    },
    {
      "epoch": 1.3855670103092783,
      "grad_norm": 0.4704610145658556,
      "learning_rate": 5.1472129308443616e-05,
      "loss": 0.6031,
      "step": 84
    },
    {
      "epoch": 1.402061855670103,
      "grad_norm": 0.39776693880011527,
      "learning_rate": 5.07269045104255e-05,
      "loss": 0.6037,
      "step": 85
    },
    {
      "epoch": 1.418556701030928,
      "grad_norm": 0.4551461417385109,
      "learning_rate": 4.9977645762319255e-05,
      "loss": 0.5964,
      "step": 86
    },
    {
      "epoch": 1.4350515463917526,
      "grad_norm": 0.49160111934260997,
      "learning_rate": 4.922463482969761e-05,
      "loss": 0.6041,
      "step": 87
    },
    {
      "epoch": 1.4515463917525773,
      "grad_norm": 0.4569400444284779,
      "learning_rate": 4.846815488917644e-05,
      "loss": 0.6038,
      "step": 88
    },
    {
      "epoch": 1.4680412371134022,
      "grad_norm": 0.37114067381878696,
      "learning_rate": 4.7708490421923596e-05,
      "loss": 0.6002,
      "step": 89
    },
    {
      "epoch": 1.4845360824742269,
      "grad_norm": 0.28286469224476896,
      "learning_rate": 4.694592710667723e-05,
      "loss": 0.5999,
      "step": 90
    },
    {
      "epoch": 1.5010309278350515,
      "grad_norm": 0.2544980684276211,
      "learning_rate": 4.618075171231363e-05,
      "loss": 0.5975,
      "step": 91
    },
    {
      "epoch": 1.5175257731958762,
      "grad_norm": 0.3294943971867536,
      "learning_rate": 4.541325199000525e-05,
      "loss": 0.6029,
      "step": 92
    },
    {
      "epoch": 1.534020618556701,
      "grad_norm": 0.41072515728050296,
      "learning_rate": 4.464371656500921e-05,
      "loss": 0.5921,
      "step": 93
    },
    {
      "epoch": 1.5505154639175258,
      "grad_norm": 0.35351679633113386,
      "learning_rate": 4.387243482812717e-05,
      "loss": 0.5949,
      "step": 94
    },
    {
      "epoch": 1.5670103092783505,
      "grad_norm": 0.26711294822182974,
      "learning_rate": 4.309969682687724e-05,
      "loss": 0.5994,
      "step": 95
    },
    {
      "epoch": 1.5835051546391754,
      "grad_norm": 0.32816659581517543,
      "learning_rate": 4.2325793156419035e-05,
      "loss": 0.5932,
      "step": 96
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.3504348660738673,
      "learning_rate": 4.155101485027268e-05,
      "loss": 0.5987,
      "step": 97
    },
    {
      "epoch": 1.6164948453608248,
      "grad_norm": 0.31517917046437344,
      "learning_rate": 4.077565327087298e-05,
      "loss": 0.5971,
      "step": 98
    },
    {
      "epoch": 1.6329896907216495,
      "grad_norm": 0.2535047071826055,
      "learning_rate": 4e-05,
      "loss": 0.5948,
      "step": 99
    },
    {
      "epoch": 1.6494845360824741,
      "grad_norm": 0.2375012914581539,
      "learning_rate": 3.9224346729127034e-05,
      "loss": 0.5949,
      "step": 100
    },
    {
      "epoch": 1.6659793814432988,
      "grad_norm": 0.2623717476380207,
      "learning_rate": 3.844898514972733e-05,
      "loss": 0.5947,
      "step": 101
    },
    {
      "epoch": 1.6824742268041237,
      "grad_norm": 0.2512637608509031,
      "learning_rate": 3.767420684358097e-05,
      "loss": 0.5988,
      "step": 102
    },
    {
      "epoch": 1.6989690721649484,
      "grad_norm": 0.18029355738972075,
      "learning_rate": 3.690030317312277e-05,
      "loss": 0.5927,
      "step": 103
    },
    {
      "epoch": 1.7154639175257733,
      "grad_norm": 0.20901651442289526,
      "learning_rate": 3.612756517187284e-05,
      "loss": 0.5963,
      "step": 104
    },
    {
      "epoch": 1.731958762886598,
      "grad_norm": 0.23100136098283308,
      "learning_rate": 3.535628343499079e-05,
      "loss": 0.5957,
      "step": 105
    },
    {
      "epoch": 1.7484536082474227,
      "grad_norm": 0.18935083808209807,
      "learning_rate": 3.458674800999477e-05,
      "loss": 0.5926,
      "step": 106
    },
    {
      "epoch": 1.7649484536082474,
      "grad_norm": 0.16393645527535064,
      "learning_rate": 3.3819248287686386e-05,
      "loss": 0.5949,
      "step": 107
    },
    {
      "epoch": 1.781443298969072,
      "grad_norm": 0.190745251922808,
      "learning_rate": 3.305407289332279e-05,
      "loss": 0.5891,
      "step": 108
    },
    {
      "epoch": 1.797938144329897,
      "grad_norm": 0.16188706039772582,
      "learning_rate": 3.229150957807641e-05,
      "loss": 0.5892,
      "step": 109
    },
    {
      "epoch": 1.8144329896907216,
      "grad_norm": 0.18778980243793414,
      "learning_rate": 3.153184511082359e-05,
      "loss": 0.592,
      "step": 110
    },
    {
      "epoch": 1.8309278350515465,
      "grad_norm": 0.16922028732307035,
      "learning_rate": 3.07753651703024e-05,
      "loss": 0.5926,
      "step": 111
    },
    {
      "epoch": 1.8474226804123712,
      "grad_norm": 0.2075008066134838,
      "learning_rate": 3.0022354237680752e-05,
      "loss": 0.5915,
      "step": 112
    },
    {
      "epoch": 1.863917525773196,
      "grad_norm": 0.15945271606226596,
      "learning_rate": 2.9273095489574502e-05,
      "loss": 0.5917,
      "step": 113
    },
    {
      "epoch": 1.8804123711340206,
      "grad_norm": 0.20924714994332358,
      "learning_rate": 2.8527870691556404e-05,
      "loss": 0.589,
      "step": 114
    },
    {
      "epoch": 1.8969072164948453,
      "grad_norm": 0.15413768093132926,
      "learning_rate": 2.778696009219548e-05,
      "loss": 0.5895,
      "step": 115
    },
    {
      "epoch": 1.91340206185567,
      "grad_norm": 0.17340241637334963,
      "learning_rate": 2.7050642317667164e-05,
      "loss": 0.5908,
      "step": 116
    },
    {
      "epoch": 1.9298969072164949,
      "grad_norm": 0.13700935671950965,
      "learning_rate": 2.6319194266973256e-05,
      "loss": 0.5881,
      "step": 117
    },
    {
      "epoch": 1.9463917525773196,
      "grad_norm": 0.16121450857357886,
      "learning_rate": 2.5592891007811594e-05,
      "loss": 0.5856,
      "step": 118
    },
    {
      "epoch": 1.9628865979381445,
      "grad_norm": 0.13760640745884387,
      "learning_rate": 2.4872005673134307e-05,
      "loss": 0.5888,
      "step": 119
    },
    {
      "epoch": 1.9793814432989691,
      "grad_norm": 0.136153453631026,
      "learning_rate": 2.4156809358433728e-05,
      "loss": 0.5866,
      "step": 120
    },
    {
      "epoch": 1.9958762886597938,
      "grad_norm": 0.1740518146800063,
      "learning_rate": 2.3447571019794438e-05,
      "loss": 0.7221,
      "step": 121
    },
    {
      "epoch": 2.0123711340206185,
      "grad_norm": 0.1913924294175378,
      "learning_rate": 2.274455737274987e-05,
      "loss": 0.649,
      "step": 122
    },
    {
      "epoch": 2.028865979381443,
      "grad_norm": 0.16283149279005063,
      "learning_rate": 2.2048032791981515e-05,
      "loss": 0.5525,
      "step": 123
    },
    {
      "epoch": 2.045360824742268,
      "grad_norm": 0.14595606592277305,
      "learning_rate": 2.135825921189846e-05,
      "loss": 0.5518,
      "step": 124
    },
    {
      "epoch": 2.0618556701030926,
      "grad_norm": 0.17091723837273506,
      "learning_rate": 2.067549602813446e-05,
      "loss": 0.5537,
      "step": 125
    },
    {
      "epoch": 2.0783505154639177,
      "grad_norm": 0.1472669016069401,
      "learning_rate": 2.0000000000000012e-05,
      "loss": 0.5468,
      "step": 126
    },
    {
      "epoch": 2.0948453608247424,
      "grad_norm": 0.1383718741274944,
      "learning_rate": 1.9332025153925486e-05,
      "loss": 0.554,
      "step": 127
    },
    {
      "epoch": 2.111340206185567,
      "grad_norm": 0.1449682321168763,
      "learning_rate": 1.867182268793236e-05,
      "loss": 0.5481,
      "step": 128
    },
    {
      "epoch": 2.1278350515463917,
      "grad_norm": 0.14268562258623765,
      "learning_rate": 1.8019640877167763e-05,
      "loss": 0.551,
      "step": 129
    },
    {
      "epoch": 2.1443298969072164,
      "grad_norm": 0.14398336699894976,
      "learning_rate": 1.7375724980538465e-05,
      "loss": 0.5485,
      "step": 130
    },
    {
      "epoch": 2.160824742268041,
      "grad_norm": 0.13632304217235544,
      "learning_rate": 1.6740317148478932e-05,
      "loss": 0.5513,
      "step": 131
    },
    {
      "epoch": 2.177319587628866,
      "grad_norm": 0.13802596151410262,
      "learning_rate": 1.6113656331888563e-05,
      "loss": 0.546,
      "step": 132
    },
    {
      "epoch": 2.193814432989691,
      "grad_norm": 0.1367651968028799,
      "learning_rate": 1.5495978192271887e-05,
      "loss": 0.5507,
      "step": 133
    },
    {
      "epoch": 2.2103092783505156,
      "grad_norm": 0.14319896957050066,
      "learning_rate": 1.4887515013116067e-05,
      "loss": 0.5474,
      "step": 134
    },
    {
      "epoch": 2.2268041237113403,
      "grad_norm": 0.13563813749833395,
      "learning_rate": 1.4288495612538427e-05,
      "loss": 0.5466,
      "step": 135
    },
    {
      "epoch": 2.243298969072165,
      "grad_norm": 0.13299637580848364,
      "learning_rate": 1.369914525723746e-05,
      "loss": 0.5546,
      "step": 136
    },
    {
      "epoch": 2.2597938144329897,
      "grad_norm": 0.12840209113989406,
      "learning_rate": 1.3119685577779105e-05,
      "loss": 0.5443,
      "step": 137
    },
    {
      "epoch": 2.2762886597938143,
      "grad_norm": 0.12810536536017894,
      "learning_rate": 1.2550334485250661e-05,
      "loss": 0.5499,
      "step": 138
    },
    {
      "epoch": 2.292783505154639,
      "grad_norm": 0.11355419047013178,
      "learning_rate": 1.1991306089313261e-05,
      "loss": 0.5479,
      "step": 139
    },
    {
      "epoch": 2.3092783505154637,
      "grad_norm": 0.1295630499886211,
      "learning_rate": 1.1442810617684046e-05,
      "loss": 0.5489,
      "step": 140
    },
    {
      "epoch": 2.325773195876289,
      "grad_norm": 0.1221697199562323,
      "learning_rate": 1.0905054337078051e-05,
      "loss": 0.5499,
      "step": 141
    },
    {
      "epoch": 2.3422680412371135,
      "grad_norm": 0.10736730884852848,
      "learning_rate": 1.0378239475639823e-05,
      "loss": 0.5469,
      "step": 142
    },
    {
      "epoch": 2.358762886597938,
      "grad_norm": 0.11911805451017765,
      "learning_rate": 9.862564146893571e-06,
      "loss": 0.5533,
      "step": 143
    },
    {
      "epoch": 2.375257731958763,
      "grad_norm": 0.10719682452107784,
      "learning_rate": 9.358222275240884e-06,
      "loss": 0.5492,
      "step": 144
    },
    {
      "epoch": 2.3917525773195876,
      "grad_norm": 0.1062158910238879,
      "learning_rate": 8.8654035230336e-06,
      "loss": 0.549,
      "step": 145
    },
    {
      "epoch": 2.4082474226804123,
      "grad_norm": 0.10542623562823075,
      "learning_rate": 8.384293219249633e-06,
      "loss": 0.5472,
      "step": 146
    },
    {
      "epoch": 2.424742268041237,
      "grad_norm": 0.10737625408939123,
      "learning_rate": 7.915072289798247e-06,
      "loss": 0.5439,
      "step": 147
    },
    {
      "epoch": 2.441237113402062,
      "grad_norm": 0.113365548855691,
      "learning_rate": 7.457917189481301e-06,
      "loss": 0.5465,
      "step": 148
    },
    {
      "epoch": 2.4577319587628867,
      "grad_norm": 0.09803128955615437,
      "learning_rate": 7.0129998356357295e-06,
      "loss": 0.5454,
      "step": 149
    },
    {
      "epoch": 2.4742268041237114,
      "grad_norm": 0.10740236378002757,
      "learning_rate": 6.58048754348255e-06,
      "loss": 0.5532,
      "step": 150
    },
    {
      "epoch": 2.490721649484536,
      "grad_norm": 0.10615779921255894,
      "learning_rate": 6.160542963206357e-06,
      "loss": 0.5462,
      "step": 151
    },
    {
      "epoch": 2.507216494845361,
      "grad_norm": 0.10941467555755459,
      "learning_rate": 5.753324018789346e-06,
      "loss": 0.5467,
      "step": 152
    },
    {
      "epoch": 2.5237113402061855,
      "grad_norm": 0.09824679682670868,
      "learning_rate": 5.358983848622452e-06,
      "loss": 0.5464,
      "step": 153
    },
    {
      "epoch": 2.54020618556701,
      "grad_norm": 0.09719860472925167,
      "learning_rate": 4.97767074791637e-06,
      "loss": 0.5437,
      "step": 154
    },
    {
      "epoch": 2.556701030927835,
      "grad_norm": 0.09683396959072503,
      "learning_rate": 4.609528112933688e-06,
      "loss": 0.5477,
      "step": 155
    },
    {
      "epoch": 2.5731958762886595,
      "grad_norm": 0.09836422675496619,
      "learning_rate": 4.254694387063514e-06,
      "loss": 0.5469,
      "step": 156
    },
    {
      "epoch": 2.5896907216494847,
      "grad_norm": 0.0995681618956925,
      "learning_rate": 3.913303008758491e-06,
      "loss": 0.5482,
      "step": 157
    },
    {
      "epoch": 2.6061855670103093,
      "grad_norm": 0.09559938988699572,
      "learning_rate": 3.585482361354138e-06,
      "loss": 0.5463,
      "step": 158
    },
    {
      "epoch": 2.622680412371134,
      "grad_norm": 0.09220251838124616,
      "learning_rate": 3.2713557247890447e-06,
      "loss": 0.549,
      "step": 159
    },
    {
      "epoch": 2.6391752577319587,
      "grad_norm": 0.09182371190140297,
      "learning_rate": 2.9710412292443868e-06,
      "loss": 0.5447,
      "step": 160
    },
    {
      "epoch": 2.6556701030927834,
      "grad_norm": 0.09285117312965371,
      "learning_rate": 2.6846518107199782e-06,
      "loss": 0.5499,
      "step": 161
    },
    {
      "epoch": 2.6721649484536085,
      "grad_norm": 0.08888916031023086,
      "learning_rate": 2.4122951685636674e-06,
      "loss": 0.5476,
      "step": 162
    },
    {
      "epoch": 2.688659793814433,
      "grad_norm": 0.09108536955799308,
      "learning_rate": 2.1540737249699893e-06,
      "loss": 0.5468,
      "step": 163
    },
    {
      "epoch": 2.705154639175258,
      "grad_norm": 0.09161991297859239,
      "learning_rate": 1.9100845864633875e-06,
      "loss": 0.5447,
      "step": 164
    },
    {
      "epoch": 2.7216494845360826,
      "grad_norm": 0.08993683350116187,
      "learning_rate": 1.6804195073804442e-06,
      "loss": 0.547,
      "step": 165
    },
    {
      "epoch": 2.7381443298969073,
      "grad_norm": 0.08768348095869663,
      "learning_rate": 1.4651648553647869e-06,
      "loss": 0.548,
      "step": 166
    },
    {
      "epoch": 2.754639175257732,
      "grad_norm": 0.08908278856482474,
      "learning_rate": 1.2644015788877684e-06,
      "loss": 0.5424,
      "step": 167
    },
    {
      "epoch": 2.7711340206185566,
      "grad_norm": 0.08772182232275079,
      "learning_rate": 1.0782051768070477e-06,
      "loss": 0.5426,
      "step": 168
    },
    {
      "epoch": 2.7876288659793813,
      "grad_norm": 0.09014271400131238,
      "learning_rate": 9.066456699745774e-07,
      "loss": 0.5423,
      "step": 169
    },
    {
      "epoch": 2.804123711340206,
      "grad_norm": 0.08846808941818018,
      "learning_rate": 7.497875749046124e-07,
      "loss": 0.5491,
      "step": 170
    },
    {
      "epoch": 2.8206185567010307,
      "grad_norm": 0.09003919031348395,
      "learning_rate": 6.076898795116792e-07,
      "loss": 0.5466,
      "step": 171
    },
    {
      "epoch": 2.837113402061856,
      "grad_norm": 0.0860255355801196,
      "learning_rate": 4.804060209276396e-07,
      "loss": 0.5481,
      "step": 172
    },
    {
      "epoch": 2.8536082474226805,
      "grad_norm": 0.0863084170468407,
      "learning_rate": 3.679838654061874e-07,
      "loss": 0.5466,
      "step": 173
    },
    {
      "epoch": 2.870103092783505,
      "grad_norm": 0.08494132877549186,
      "learning_rate": 2.704656903222791e-07,
      "loss": 0.5481,
      "step": 174
    },
    {
      "epoch": 2.88659793814433,
      "grad_norm": 0.08696339412244179,
      "learning_rate": 1.8788816827336686e-07,
      "loss": 0.5488,
      "step": 175
    },
    {
      "epoch": 2.9030927835051545,
      "grad_norm": 0.08479502393635585,
      "learning_rate": 1.2028235328831906e-07,
      "loss": 0.5457,
      "step": 176
    },
    {
      "epoch": 2.9195876288659792,
      "grad_norm": 0.08544111428797167,
      "learning_rate": 6.767366914927298e-08,
      "loss": 0.5482,
      "step": 177
    },
    {
      "epoch": 2.9360824742268044,
      "grad_norm": 0.08423838312148532,
      "learning_rate": 3.0081899830798345e-08,
      "loss": 0.5468,
      "step": 178
    },
    {
      "epoch": 2.952577319587629,
      "grad_norm": 0.08547740551858984,
      "learning_rate": 7.521182059946342e-09,
      "loss": 0.5457,
      "step": 179
    },
    {
      "epoch": 2.9690721649484537,
      "grad_norm": 0.08570261529447104,
      "learning_rate": 0.0,
      "loss": 0.5454,
      "step": 180
    },
    {
      "epoch": 2.9690721649484537,
      "step": 180,
      "total_flos": 4.323339643798946e+18,
      "train_loss": 0.6483473062515259,
      "train_runtime": 10930.2571,
      "train_samples_per_second": 8.519,
      "train_steps_per_second": 0.016
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.323339643798946e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}