|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9991829458515933, |
|
"eval_steps": 500, |
|
"global_step": 2242, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.000891331798261903, |
|
"grad_norm": 1.9037784957265278, |
|
"learning_rate": 4.444444444444445e-08, |
|
"loss": 2.0985, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.004456658991309515, |
|
"grad_norm": 2.041848097696368, |
|
"learning_rate": 2.2222222222222224e-07, |
|
"loss": 2.1176, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.00891331798261903, |
|
"grad_norm": 1.948478886522995, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 2.1163, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013369976973928544, |
|
"grad_norm": 1.2537062422839984, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 1.9595, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01782663596523806, |
|
"grad_norm": 0.7428497382948752, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 1.753, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.022283294956547576, |
|
"grad_norm": 0.8283681781487435, |
|
"learning_rate": 1.111111111111111e-06, |
|
"loss": 1.3909, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02673995394785709, |
|
"grad_norm": 0.36636499270519984, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 1.0722, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.031196612939166604, |
|
"grad_norm": 0.26139683469044633, |
|
"learning_rate": 1.5555555555555558e-06, |
|
"loss": 0.8856, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03565327193047612, |
|
"grad_norm": 0.25123645705965264, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 0.7828, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.040109930921785636, |
|
"grad_norm": 0.21827339412974361, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.7277, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04456658991309515, |
|
"grad_norm": 0.184310768975577, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 0.6973, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04902324890440467, |
|
"grad_norm": 0.15756594573320623, |
|
"learning_rate": 2.4444444444444447e-06, |
|
"loss": 0.6115, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05347990789571418, |
|
"grad_norm": 0.1495421231449033, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 0.5526, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05793656688702369, |
|
"grad_norm": 0.11563000336226048, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 0.553, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06239322587833321, |
|
"grad_norm": 0.11747617404049002, |
|
"learning_rate": 3.1111111111111116e-06, |
|
"loss": 0.5422, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06684988486964273, |
|
"grad_norm": 0.124912316735415, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.5342, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07130654386095224, |
|
"grad_norm": 0.11219727658791885, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 0.506, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07576320285226175, |
|
"grad_norm": 0.13707586211382092, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 0.4776, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08021986184357127, |
|
"grad_norm": 0.14849136857556636, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.4592, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08467652083488078, |
|
"grad_norm": 0.13619871874828954, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.4622, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.0891331798261903, |
|
"grad_norm": 0.11869279856425284, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.4518, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09358983881749981, |
|
"grad_norm": 0.12992883840039074, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.4115, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09804649780880934, |
|
"grad_norm": 0.1023382558423108, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 0.4397, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.10250315680011884, |
|
"grad_norm": 0.12320266344355456, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 0.4409, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.10695981579142835, |
|
"grad_norm": 0.11198962480183935, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.4436, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11141647478273788, |
|
"grad_norm": 0.10145699193523927, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 0.4338, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11587313377404738, |
|
"grad_norm": 0.09900502716310512, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 0.4208, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.12032979276535691, |
|
"grad_norm": 0.08614271297265043, |
|
"learning_rate": 6e-06, |
|
"loss": 0.4089, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12478645175666642, |
|
"grad_norm": 0.07806255577197463, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.3945, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12924311074797593, |
|
"grad_norm": 0.07178671408430405, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.3889, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13369976973928546, |
|
"grad_norm": 0.08445699462456596, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.4131, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.13815642873059497, |
|
"grad_norm": 0.0853269964638284, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.4053, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14261308772190448, |
|
"grad_norm": 0.0776450305961035, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.3882, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.147069746713214, |
|
"grad_norm": 0.08066465881766263, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.4148, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.1515264057045235, |
|
"grad_norm": 0.08075299908764268, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.4014, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15598306469583303, |
|
"grad_norm": 0.08146641337838845, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.4067, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16043972368714254, |
|
"grad_norm": 0.06545943489756188, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.3913, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16489638267845205, |
|
"grad_norm": 0.07220259387677776, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.4169, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.16935304166976156, |
|
"grad_norm": 0.0666745164016267, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.4203, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.17380970066107107, |
|
"grad_norm": 0.08104266749063946, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.3586, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1782663596523806, |
|
"grad_norm": 0.06500061012271417, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.3844, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.18272301864369012, |
|
"grad_norm": 0.06438217840061684, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.4154, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.18717967763499962, |
|
"grad_norm": 0.07013299670767151, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.3874, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.19163633662630913, |
|
"grad_norm": 0.07168242323487646, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.3849, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.19609299561761867, |
|
"grad_norm": 0.05932657135991667, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.3672, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.20054965460892818, |
|
"grad_norm": 0.07058592680145803, |
|
"learning_rate": 1e-05, |
|
"loss": 0.387, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2050063136002377, |
|
"grad_norm": 0.051067467997344705, |
|
"learning_rate": 9.999848376760494e-06, |
|
"loss": 0.3826, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2094629725915472, |
|
"grad_norm": 0.06816795236315125, |
|
"learning_rate": 9.999393516237815e-06, |
|
"loss": 0.3931, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2139196315828567, |
|
"grad_norm": 0.06395745969231853, |
|
"learning_rate": 9.998635446018936e-06, |
|
"loss": 0.4009, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21837629057416624, |
|
"grad_norm": 0.0713711836562236, |
|
"learning_rate": 9.997574212080282e-06, |
|
"loss": 0.3793, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.22283294956547575, |
|
"grad_norm": 0.0698163755701484, |
|
"learning_rate": 9.996209878784942e-06, |
|
"loss": 0.397, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.22728960855678526, |
|
"grad_norm": 0.0682706528712552, |
|
"learning_rate": 9.99454252887877e-06, |
|
"loss": 0.405, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.23174626754809477, |
|
"grad_norm": 0.06445770501449549, |
|
"learning_rate": 9.992572263485365e-06, |
|
"loss": 0.3502, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2362029265394043, |
|
"grad_norm": 0.06470981088114686, |
|
"learning_rate": 9.990299202099934e-06, |
|
"loss": 0.415, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.24065958553071382, |
|
"grad_norm": 0.05884297180052234, |
|
"learning_rate": 9.987723482582052e-06, |
|
"loss": 0.3891, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.24511624452202332, |
|
"grad_norm": 0.05013087545524401, |
|
"learning_rate": 9.98484526114729e-06, |
|
"loss": 0.4081, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.24957290351333283, |
|
"grad_norm": 0.05965101051743905, |
|
"learning_rate": 9.981664712357756e-06, |
|
"loss": 0.367, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.25402956250464237, |
|
"grad_norm": 0.06332324890343168, |
|
"learning_rate": 9.97818202911149e-06, |
|
"loss": 0.3821, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.25848622149595185, |
|
"grad_norm": 0.06434539140174649, |
|
"learning_rate": 9.974397422630781e-06, |
|
"loss": 0.4017, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2629428804872614, |
|
"grad_norm": 0.047968388544284014, |
|
"learning_rate": 9.970311122449348e-06, |
|
"loss": 0.3785, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.2673995394785709, |
|
"grad_norm": 0.05355639391449654, |
|
"learning_rate": 9.965923376398414e-06, |
|
"loss": 0.3829, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2718561984698804, |
|
"grad_norm": 0.05997351126022962, |
|
"learning_rate": 9.961234450591692e-06, |
|
"loss": 0.3834, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.27631285746118994, |
|
"grad_norm": 0.06439261526987958, |
|
"learning_rate": 9.956244629409231e-06, |
|
"loss": 0.3473, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2807695164524994, |
|
"grad_norm": 0.048664469272370746, |
|
"learning_rate": 9.950954215480168e-06, |
|
"loss": 0.3883, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.28522617544380896, |
|
"grad_norm": 0.06106240669926854, |
|
"learning_rate": 9.945363529664385e-06, |
|
"loss": 0.3671, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.2896828344351185, |
|
"grad_norm": 0.06560525566792794, |
|
"learning_rate": 9.93947291103304e-06, |
|
"loss": 0.3872, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.294139493426428, |
|
"grad_norm": 0.051890916918650776, |
|
"learning_rate": 9.933282716848001e-06, |
|
"loss": 0.3881, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2985961524177375, |
|
"grad_norm": 0.05177493540298683, |
|
"learning_rate": 9.92679332254019e-06, |
|
"loss": 0.3688, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.303052811409047, |
|
"grad_norm": 0.05169465457227038, |
|
"learning_rate": 9.920005121686802e-06, |
|
"loss": 0.3742, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.30750947040035653, |
|
"grad_norm": 0.05433225521681918, |
|
"learning_rate": 9.912918525987437e-06, |
|
"loss": 0.3672, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.31196612939166607, |
|
"grad_norm": 0.05227372498869847, |
|
"learning_rate": 9.905533965239133e-06, |
|
"loss": 0.3674, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.31642278838297555, |
|
"grad_norm": 0.04827464212259707, |
|
"learning_rate": 9.897851887310304e-06, |
|
"loss": 0.386, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3208794473742851, |
|
"grad_norm": 0.04786866328513665, |
|
"learning_rate": 9.88987275811356e-06, |
|
"loss": 0.3888, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.32533610636559457, |
|
"grad_norm": 0.05117403335251474, |
|
"learning_rate": 9.881597061577473e-06, |
|
"loss": 0.371, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3297927653569041, |
|
"grad_norm": 0.05209023655644307, |
|
"learning_rate": 9.873025299617207e-06, |
|
"loss": 0.3554, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.33424942434821364, |
|
"grad_norm": 0.04620493946724755, |
|
"learning_rate": 9.86415799210409e-06, |
|
"loss": 0.339, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3387060833395231, |
|
"grad_norm": 0.04972442705458662, |
|
"learning_rate": 9.854995676834078e-06, |
|
"loss": 0.3381, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.34316274233083266, |
|
"grad_norm": 0.04902183301719761, |
|
"learning_rate": 9.845538909495142e-06, |
|
"loss": 0.3473, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.34761940132214214, |
|
"grad_norm": 0.048588732980178, |
|
"learning_rate": 9.835788263633559e-06, |
|
"loss": 0.3715, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.3520760603134517, |
|
"grad_norm": 0.0518211854230867, |
|
"learning_rate": 9.825744330619136e-06, |
|
"loss": 0.335, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3565327193047612, |
|
"grad_norm": 0.04470995877402061, |
|
"learning_rate": 9.815407719609336e-06, |
|
"loss": 0.3576, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3609893782960707, |
|
"grad_norm": 0.046718917610627575, |
|
"learning_rate": 9.804779057512337e-06, |
|
"loss": 0.3844, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.36544603728738023, |
|
"grad_norm": 0.04487868027034623, |
|
"learning_rate": 9.793858988949013e-06, |
|
"loss": 0.3651, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.36990269627868977, |
|
"grad_norm": 0.04508769734592565, |
|
"learning_rate": 9.782648176213832e-06, |
|
"loss": 0.3327, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.37435935526999925, |
|
"grad_norm": 0.047550358889489414, |
|
"learning_rate": 9.77114729923469e-06, |
|
"loss": 0.3845, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3788160142613088, |
|
"grad_norm": 0.04522521563643231, |
|
"learning_rate": 9.759357055531677e-06, |
|
"loss": 0.3758, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.38327267325261827, |
|
"grad_norm": 0.046807592924263536, |
|
"learning_rate": 9.747278160174773e-06, |
|
"loss": 0.3684, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3877293322439278, |
|
"grad_norm": 0.04945346042475437, |
|
"learning_rate": 9.734911345740476e-06, |
|
"loss": 0.374, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.39218599123523734, |
|
"grad_norm": 0.04507829141189117, |
|
"learning_rate": 9.72225736226737e-06, |
|
"loss": 0.3719, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.3966426502265468, |
|
"grad_norm": 0.05375175462388905, |
|
"learning_rate": 9.709316977210641e-06, |
|
"loss": 0.3738, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.40109930921785636, |
|
"grad_norm": 0.04178366229734565, |
|
"learning_rate": 9.696090975395535e-06, |
|
"loss": 0.35, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.40555596820916584, |
|
"grad_norm": 0.04307772924393916, |
|
"learning_rate": 9.682580158969744e-06, |
|
"loss": 0.3647, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.4100126272004754, |
|
"grad_norm": 0.04607844523886918, |
|
"learning_rate": 9.66878534735477e-06, |
|
"loss": 0.3422, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4144692861917849, |
|
"grad_norm": 0.042361601709353026, |
|
"learning_rate": 9.654707377196226e-06, |
|
"loss": 0.3282, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.4189259451830944, |
|
"grad_norm": 0.05012304389062508, |
|
"learning_rate": 9.640347102313086e-06, |
|
"loss": 0.3872, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.42338260417440393, |
|
"grad_norm": 0.04177143585248632, |
|
"learning_rate": 9.625705393645913e-06, |
|
"loss": 0.332, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4278392631657134, |
|
"grad_norm": 0.04105439567860185, |
|
"learning_rate": 9.61078313920402e-06, |
|
"loss": 0.3683, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.43229592215702295, |
|
"grad_norm": 0.04188596881105404, |
|
"learning_rate": 9.595581244011638e-06, |
|
"loss": 0.3764, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.4367525811483325, |
|
"grad_norm": 0.04374077499179829, |
|
"learning_rate": 9.580100630053e-06, |
|
"loss": 0.3585, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.44120924013964197, |
|
"grad_norm": 0.0435642556725744, |
|
"learning_rate": 9.564342236216444e-06, |
|
"loss": 0.3673, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4456658991309515, |
|
"grad_norm": 0.04616615791848655, |
|
"learning_rate": 9.54830701823746e-06, |
|
"loss": 0.343, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.450122558122261, |
|
"grad_norm": 0.05116268160745142, |
|
"learning_rate": 9.531995948640722e-06, |
|
"loss": 0.3682, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.4545792171135705, |
|
"grad_norm": 0.045000388036651476, |
|
"learning_rate": 9.515410016681119e-06, |
|
"loss": 0.3673, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.45903587610488006, |
|
"grad_norm": 0.039981904676462336, |
|
"learning_rate": 9.498550228283745e-06, |
|
"loss": 0.3573, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.46349253509618954, |
|
"grad_norm": 0.05079247509194735, |
|
"learning_rate": 9.48141760598289e-06, |
|
"loss": 0.3358, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4679491940874991, |
|
"grad_norm": 0.04339754696474626, |
|
"learning_rate": 9.464013188860034e-06, |
|
"loss": 0.3697, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4724058530788086, |
|
"grad_norm": 0.05672032368874533, |
|
"learning_rate": 9.44633803248082e-06, |
|
"loss": 0.3609, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4768625120701181, |
|
"grad_norm": 0.03968689567244294, |
|
"learning_rate": 9.428393208831035e-06, |
|
"loss": 0.3782, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.48131917106142763, |
|
"grad_norm": 0.03823591428746807, |
|
"learning_rate": 9.410179806251597e-06, |
|
"loss": 0.3676, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4857758300527371, |
|
"grad_norm": 0.04953625328978311, |
|
"learning_rate": 9.391698929372545e-06, |
|
"loss": 0.3843, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.49023248904404665, |
|
"grad_norm": 0.043174962416537774, |
|
"learning_rate": 9.372951699046048e-06, |
|
"loss": 0.3537, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.4946891480353562, |
|
"grad_norm": 0.037737887971063026, |
|
"learning_rate": 9.353939252278425e-06, |
|
"loss": 0.3559, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.49914580702666567, |
|
"grad_norm": 0.03944313396062169, |
|
"learning_rate": 9.334662742161185e-06, |
|
"loss": 0.3486, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5036024660179752, |
|
"grad_norm": 0.040982285416120506, |
|
"learning_rate": 9.315123337801088e-06, |
|
"loss": 0.3833, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5080591250092847, |
|
"grad_norm": 0.05250003442053583, |
|
"learning_rate": 9.295322224249251e-06, |
|
"loss": 0.3835, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5125157840005943, |
|
"grad_norm": 0.04762096881101235, |
|
"learning_rate": 9.275260602429268e-06, |
|
"loss": 0.3655, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5169724429919037, |
|
"grad_norm": 0.04042638366958117, |
|
"learning_rate": 9.254939689064374e-06, |
|
"loss": 0.3524, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5214291019832132, |
|
"grad_norm": 0.03853478933299392, |
|
"learning_rate": 9.234360716603654e-06, |
|
"loss": 0.3344, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5258857609745228, |
|
"grad_norm": 0.04253231208243496, |
|
"learning_rate": 9.213524933147297e-06, |
|
"loss": 0.3518, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5303424199658323, |
|
"grad_norm": 0.03781916769626258, |
|
"learning_rate": 9.1924336023709e-06, |
|
"loss": 0.3581, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5347990789571418, |
|
"grad_norm": 0.042956152801003476, |
|
"learning_rate": 9.171088003448816e-06, |
|
"loss": 0.3751, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5392557379484513, |
|
"grad_norm": 0.042733495391241834, |
|
"learning_rate": 9.149489430976592e-06, |
|
"loss": 0.3735, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5437123969397608, |
|
"grad_norm": 0.038555605729189334, |
|
"learning_rate": 9.12763919489244e-06, |
|
"loss": 0.3343, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5481690559310703, |
|
"grad_norm": 0.04878126396745322, |
|
"learning_rate": 9.105538620397786e-06, |
|
"loss": 0.3693, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5526257149223799, |
|
"grad_norm": 0.0388842791468199, |
|
"learning_rate": 9.083189047876917e-06, |
|
"loss": 0.3319, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5570823739136894, |
|
"grad_norm": 0.045281525903800586, |
|
"learning_rate": 9.060591832815663e-06, |
|
"loss": 0.3483, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5615390329049988, |
|
"grad_norm": 0.037306006153875235, |
|
"learning_rate": 9.037748345719206e-06, |
|
"loss": 0.3715, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5659956918963084, |
|
"grad_norm": 0.04232483585848596, |
|
"learning_rate": 9.014659972028954e-06, |
|
"loss": 0.3784, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5704523508876179, |
|
"grad_norm": 0.04366418393202621, |
|
"learning_rate": 8.991328112038508e-06, |
|
"loss": 0.3421, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5749090098789275, |
|
"grad_norm": 0.042775684521190016, |
|
"learning_rate": 8.96775418080875e-06, |
|
"loss": 0.3641, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.579365668870237, |
|
"grad_norm": 0.03905324730933948, |
|
"learning_rate": 8.943939608082008e-06, |
|
"loss": 0.326, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5838223278615464, |
|
"grad_norm": 0.0439109485141227, |
|
"learning_rate": 8.919885838195346e-06, |
|
"loss": 0.38, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.588278986852856, |
|
"grad_norm": 0.040278188616367915, |
|
"learning_rate": 8.895594329992971e-06, |
|
"loss": 0.3554, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5927356458441655, |
|
"grad_norm": 0.04296489937369988, |
|
"learning_rate": 8.871066556737749e-06, |
|
"loss": 0.3515, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.597192304835475, |
|
"grad_norm": 0.03219193900520065, |
|
"learning_rate": 8.846304006021853e-06, |
|
"loss": 0.3065, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.6016489638267846, |
|
"grad_norm": 0.03750987603517473, |
|
"learning_rate": 8.82130817967655e-06, |
|
"loss": 0.3753, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.606105622818094, |
|
"grad_norm": 0.045258173403080484, |
|
"learning_rate": 8.796080593681104e-06, |
|
"loss": 0.375, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6105622818094035, |
|
"grad_norm": 0.03803993220538302, |
|
"learning_rate": 8.77062277807084e-06, |
|
"loss": 0.356, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6150189408007131, |
|
"grad_norm": 0.04095600232061625, |
|
"learning_rate": 8.744936276844348e-06, |
|
"loss": 0.3482, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6194755997920226, |
|
"grad_norm": 0.04108431793653308, |
|
"learning_rate": 8.719022647869839e-06, |
|
"loss": 0.3558, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6239322587833321, |
|
"grad_norm": 0.04300826921850201, |
|
"learning_rate": 8.692883462790664e-06, |
|
"loss": 0.347, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6283889177746416, |
|
"grad_norm": 0.04147196857931913, |
|
"learning_rate": 8.666520306929987e-06, |
|
"loss": 0.3391, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6328455767659511, |
|
"grad_norm": 0.03487134011785261, |
|
"learning_rate": 8.639934779194647e-06, |
|
"loss": 0.3232, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6373022357572606, |
|
"grad_norm": 0.03649852892645952, |
|
"learning_rate": 8.613128491978181e-06, |
|
"loss": 0.3223, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6417588947485702, |
|
"grad_norm": 0.04596879876204484, |
|
"learning_rate": 8.586103071063034e-06, |
|
"loss": 0.3733, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6462155537398797, |
|
"grad_norm": 0.04225087710012865, |
|
"learning_rate": 8.558860155521948e-06, |
|
"loss": 0.3584, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6506722127311891, |
|
"grad_norm": 0.040614068313829625, |
|
"learning_rate": 8.53140139761857e-06, |
|
"loss": 0.3778, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6551288717224987, |
|
"grad_norm": 0.038923901123470606, |
|
"learning_rate": 8.50372846270723e-06, |
|
"loss": 0.355, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6595855307138082, |
|
"grad_norm": 0.04003805383028198, |
|
"learning_rate": 8.475843029131942e-06, |
|
"loss": 0.3446, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6640421897051177, |
|
"grad_norm": 0.03817113093765007, |
|
"learning_rate": 8.447746788124618e-06, |
|
"loss": 0.3533, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6684988486964273, |
|
"grad_norm": 0.03946386120647576, |
|
"learning_rate": 8.419441443702485e-06, |
|
"loss": 0.3385, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6729555076877367, |
|
"grad_norm": 0.04594980639856022, |
|
"learning_rate": 8.390928712564754e-06, |
|
"loss": 0.3468, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6774121666790462, |
|
"grad_norm": 0.037734409170981406, |
|
"learning_rate": 8.362210323988486e-06, |
|
"loss": 0.3142, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6818688256703558, |
|
"grad_norm": 0.04135989239417575, |
|
"learning_rate": 8.33328801972373e-06, |
|
"loss": 0.3512, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.6863254846616653, |
|
"grad_norm": 0.040399706529750154, |
|
"learning_rate": 8.30416355388787e-06, |
|
"loss": 0.3441, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6907821436529749, |
|
"grad_norm": 0.04632767114532932, |
|
"learning_rate": 8.274838692859249e-06, |
|
"loss": 0.3492, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.6952388026442843, |
|
"grad_norm": 0.0466278822946521, |
|
"learning_rate": 8.245315215170036e-06, |
|
"loss": 0.3634, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.6996954616355938, |
|
"grad_norm": 0.03854497108096806, |
|
"learning_rate": 8.215594911398366e-06, |
|
"loss": 0.3818, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7041521206269034, |
|
"grad_norm": 0.03807528541379026, |
|
"learning_rate": 8.185679584059733e-06, |
|
"loss": 0.3399, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7086087796182129, |
|
"grad_norm": 0.039364528432836096, |
|
"learning_rate": 8.155571047497675e-06, |
|
"loss": 0.3471, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7130654386095224, |
|
"grad_norm": 0.04009146302797121, |
|
"learning_rate": 8.125271127773728e-06, |
|
"loss": 0.317, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.717522097600832, |
|
"grad_norm": 0.037371893337503514, |
|
"learning_rate": 8.09478166255669e-06, |
|
"loss": 0.3371, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7219787565921414, |
|
"grad_norm": 0.034113185631526655, |
|
"learning_rate": 8.064104501011154e-06, |
|
"loss": 0.3316, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.7264354155834509, |
|
"grad_norm": 0.041522627293234436, |
|
"learning_rate": 8.033241503685366e-06, |
|
"loss": 0.346, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7308920745747605, |
|
"grad_norm": 0.04352859369847954, |
|
"learning_rate": 8.00219454239838e-06, |
|
"loss": 0.3493, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.73534873356607, |
|
"grad_norm": 0.040392710281690065, |
|
"learning_rate": 7.970965500126534e-06, |
|
"loss": 0.3506, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7398053925573795, |
|
"grad_norm": 0.037788817713187726, |
|
"learning_rate": 7.93955627088925e-06, |
|
"loss": 0.3449, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.744262051548689, |
|
"grad_norm": 0.03913978671424209, |
|
"learning_rate": 7.907968759634165e-06, |
|
"loss": 0.3416, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7487187105399985, |
|
"grad_norm": 0.03647678211671544, |
|
"learning_rate": 7.876204882121592e-06, |
|
"loss": 0.3632, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.753175369531308, |
|
"grad_norm": 0.03824752767359855, |
|
"learning_rate": 7.844266564808333e-06, |
|
"loss": 0.3506, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7576320285226176, |
|
"grad_norm": 0.04193412196826429, |
|
"learning_rate": 7.812155744730842e-06, |
|
"loss": 0.3514, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7620886875139271, |
|
"grad_norm": 0.03719881876463841, |
|
"learning_rate": 7.779874369387747e-06, |
|
"loss": 0.3744, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.7665453465052365, |
|
"grad_norm": 0.039523606403741154, |
|
"learning_rate": 7.747424396621728e-06, |
|
"loss": 0.3464, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7710020054965461, |
|
"grad_norm": 0.038967790741718966, |
|
"learning_rate": 7.71480779450078e-06, |
|
"loss": 0.3522, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.7754586644878556, |
|
"grad_norm": 0.0338399453339563, |
|
"learning_rate": 7.682026541198859e-06, |
|
"loss": 0.3175, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7799153234791651, |
|
"grad_norm": 0.03838893499453238, |
|
"learning_rate": 7.649082624875889e-06, |
|
"loss": 0.371, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7843719824704747, |
|
"grad_norm": 0.03582979505427013, |
|
"learning_rate": 7.615978043557195e-06, |
|
"loss": 0.3545, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7888286414617841, |
|
"grad_norm": 0.03627985350213129, |
|
"learning_rate": 7.582714805012326e-06, |
|
"loss": 0.3474, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.7932853004530936, |
|
"grad_norm": 0.03850554835854682, |
|
"learning_rate": 7.549294926633273e-06, |
|
"loss": 0.3464, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.7977419594444032, |
|
"grad_norm": 0.03875905385241387, |
|
"learning_rate": 7.515720435312124e-06, |
|
"loss": 0.3536, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8021986184357127, |
|
"grad_norm": 0.03902115614104229, |
|
"learning_rate": 7.481993367318139e-06, |
|
"loss": 0.3494, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8066552774270223, |
|
"grad_norm": 0.042663509463480305, |
|
"learning_rate": 7.4481157681742376e-06, |
|
"loss": 0.3693, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8111119364183317, |
|
"grad_norm": 0.03463878522652425, |
|
"learning_rate": 7.414089692532953e-06, |
|
"loss": 0.3597, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8155685954096412, |
|
"grad_norm": 0.04096150483153926, |
|
"learning_rate": 7.379917204051812e-06, |
|
"loss": 0.3594, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8200252544009508, |
|
"grad_norm": 0.04402866398417241, |
|
"learning_rate": 7.3456003752681755e-06, |
|
"loss": 0.3414, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8244819133922603, |
|
"grad_norm": 0.04139811223309286, |
|
"learning_rate": 7.311141287473545e-06, |
|
"loss": 0.3224, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8289385723835698, |
|
"grad_norm": 0.06645909647569262, |
|
"learning_rate": 7.276542030587328e-06, |
|
"loss": 0.3456, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8333952313748793, |
|
"grad_norm": 0.044636161342359676, |
|
"learning_rate": 7.2418047030300905e-06, |
|
"loss": 0.3353, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8378518903661888, |
|
"grad_norm": 0.035318060235713856, |
|
"learning_rate": 7.206931411596288e-06, |
|
"loss": 0.3288, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8423085493574983, |
|
"grad_norm": 0.03828050355505084, |
|
"learning_rate": 7.171924271326483e-06, |
|
"loss": 0.3707, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8467652083488079, |
|
"grad_norm": 0.042224528093265754, |
|
"learning_rate": 7.136785405379088e-06, |
|
"loss": 0.3595, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8512218673401174, |
|
"grad_norm": 0.04480625231480598, |
|
"learning_rate": 7.101516944901576e-06, |
|
"loss": 0.3619, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8556785263314268, |
|
"grad_norm": 0.03576964869540681, |
|
"learning_rate": 7.066121028901236e-06, |
|
"loss": 0.3464, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8601351853227364, |
|
"grad_norm": 0.030991978553480678, |
|
"learning_rate": 7.030599804115452e-06, |
|
"loss": 0.3607, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.8645918443140459, |
|
"grad_norm": 0.03650657693648493, |
|
"learning_rate": 6.99495542488149e-06, |
|
"loss": 0.3547, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8690485033053554, |
|
"grad_norm": 0.039958685112108004, |
|
"learning_rate": 6.95919005300585e-06, |
|
"loss": 0.3435, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.873505162296665, |
|
"grad_norm": 0.04077064091002527, |
|
"learning_rate": 6.923305857633153e-06, |
|
"loss": 0.3351, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8779618212879744, |
|
"grad_norm": 0.03587466267665625, |
|
"learning_rate": 6.887305015114576e-06, |
|
"loss": 0.3512, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8824184802792839, |
|
"grad_norm": 0.034539337349791706, |
|
"learning_rate": 6.851189708875867e-06, |
|
"loss": 0.3696, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.8868751392705935, |
|
"grad_norm": 0.038997841115035806, |
|
"learning_rate": 6.8149621292849175e-06, |
|
"loss": 0.3254, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.891331798261903, |
|
"grad_norm": 0.038220707196931394, |
|
"learning_rate": 6.7786244735189186e-06, |
|
"loss": 0.3629, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8957884572532125, |
|
"grad_norm": 0.0434042294031072, |
|
"learning_rate": 6.742178945431101e-06, |
|
"loss": 0.3354, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.900245116244522, |
|
"grad_norm": 0.03433600018730854, |
|
"learning_rate": 6.705627755417083e-06, |
|
"loss": 0.3963, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.9047017752358315, |
|
"grad_norm": 0.03903725290243943, |
|
"learning_rate": 6.668973120280797e-06, |
|
"loss": 0.3546, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.909158434227141, |
|
"grad_norm": 0.038365695036849674, |
|
"learning_rate": 6.632217263100053e-06, |
|
"loss": 0.3375, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9136150932184506, |
|
"grad_norm": 0.03990475305692419, |
|
"learning_rate": 6.5953624130917035e-06, |
|
"loss": 0.3331, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.9180717522097601, |
|
"grad_norm": 0.040794399359075834, |
|
"learning_rate": 6.558410805476451e-06, |
|
"loss": 0.3458, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.9225284112010697, |
|
"grad_norm": 0.03912702525863214, |
|
"learning_rate": 6.521364681343273e-06, |
|
"loss": 0.3524, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.9269850701923791, |
|
"grad_norm": 0.03661850369975111, |
|
"learning_rate": 6.484226287513512e-06, |
|
"loss": 0.3718, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.9314417291836886, |
|
"grad_norm": 0.03440847203779226, |
|
"learning_rate": 6.446997876404603e-06, |
|
"loss": 0.3533, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.9358983881749982, |
|
"grad_norm": 0.037084675468021515, |
|
"learning_rate": 6.409681705893461e-06, |
|
"loss": 0.3707, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9403550471663077, |
|
"grad_norm": 0.03106927125420908, |
|
"learning_rate": 6.37228003917955e-06, |
|
"loss": 0.3331, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9448117061576172, |
|
"grad_norm": 0.043893802285259104, |
|
"learning_rate": 6.3347951446476194e-06, |
|
"loss": 0.3375, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9492683651489267, |
|
"grad_norm": 0.03789346876693591, |
|
"learning_rate": 6.297229295730125e-06, |
|
"loss": 0.3574, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9537250241402362, |
|
"grad_norm": 0.04123920807743081, |
|
"learning_rate": 6.2595847707693505e-06, |
|
"loss": 0.3607, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.9581816831315457, |
|
"grad_norm": 0.03746001686079855, |
|
"learning_rate": 6.221863852879224e-06, |
|
"loss": 0.3497, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.9626383421228553, |
|
"grad_norm": 0.037332308666625603, |
|
"learning_rate": 6.184068829806853e-06, |
|
"loss": 0.3406, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.9670950011141648, |
|
"grad_norm": 0.03600032344614023, |
|
"learning_rate": 6.146201993793772e-06, |
|
"loss": 0.334, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.9715516601054742, |
|
"grad_norm": 0.03913531342479123, |
|
"learning_rate": 6.10826564143692e-06, |
|
"loss": 0.3239, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.9760083190967838, |
|
"grad_norm": 0.044657313605693603, |
|
"learning_rate": 6.0702620735493535e-06, |
|
"loss": 0.3517, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.9804649780880933, |
|
"grad_norm": 0.036697071260256314, |
|
"learning_rate": 6.0321935950207e-06, |
|
"loss": 0.364, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.9849216370794028, |
|
"grad_norm": 0.047007092034447036, |
|
"learning_rate": 5.994062514677377e-06, |
|
"loss": 0.3365, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.9893782960707124, |
|
"grad_norm": 0.038567817770514705, |
|
"learning_rate": 5.9558711451425555e-06, |
|
"loss": 0.3437, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.9938349550620218, |
|
"grad_norm": 0.046535006533488465, |
|
"learning_rate": 5.917621802695903e-06, |
|
"loss": 0.3674, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.9982916140533313, |
|
"grad_norm": 0.044782400881786595, |
|
"learning_rate": 5.879316807133103e-06, |
|
"loss": 0.3539, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9991829458515933, |
|
"eval_loss": 0.3121582567691803, |
|
"eval_runtime": 28.1501, |
|
"eval_samples_per_second": 19.325, |
|
"eval_steps_per_second": 4.831, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.0035653271930476, |
|
"grad_norm": 0.035018958725684814, |
|
"learning_rate": 5.840958481625163e-06, |
|
"loss": 0.3768, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.0080219861843571, |
|
"grad_norm": 0.08275303281391562, |
|
"learning_rate": 5.8025491525775125e-06, |
|
"loss": 0.3373, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.0124786451756667, |
|
"grad_norm": 0.036690917598704145, |
|
"learning_rate": 5.764091149488912e-06, |
|
"loss": 0.3191, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.0169353041669762, |
|
"grad_norm": 0.038089309373433904, |
|
"learning_rate": 5.725586804810166e-06, |
|
"loss": 0.3377, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.0213919631582857, |
|
"grad_norm": 0.04219155515782442, |
|
"learning_rate": 5.687038453802663e-06, |
|
"loss": 0.33, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.0258486221495953, |
|
"grad_norm": 0.03643779050650904, |
|
"learning_rate": 5.648448434396747e-06, |
|
"loss": 0.3352, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.0303052811409048, |
|
"grad_norm": 0.044990286569605006, |
|
"learning_rate": 5.609819087049923e-06, |
|
"loss": 0.3233, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.0347619401322141, |
|
"grad_norm": 0.0673142495663998, |
|
"learning_rate": 5.571152754604901e-06, |
|
"loss": 0.3164, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.0392185991235237, |
|
"grad_norm": 0.044924920507469504, |
|
"learning_rate": 5.5324517821475165e-06, |
|
"loss": 0.3234, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.0436752581148332, |
|
"grad_norm": 0.04520885855234551, |
|
"learning_rate": 5.493718516864496e-06, |
|
"loss": 0.3572, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.0481319171061427, |
|
"grad_norm": 0.0488015684618307, |
|
"learning_rate": 5.454955307901103e-06, |
|
"loss": 0.3286, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.0525885760974523, |
|
"grad_norm": 0.043929046851137146, |
|
"learning_rate": 5.416164506218662e-06, |
|
"loss": 0.3347, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.0570452350887618, |
|
"grad_norm": 0.039430384622077694, |
|
"learning_rate": 5.3773484644519825e-06, |
|
"loss": 0.346, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.0615018940800713, |
|
"grad_norm": 0.050917021723215446, |
|
"learning_rate": 5.338509536766662e-06, |
|
"loss": 0.3502, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.0659585530713809, |
|
"grad_norm": 0.0432190687785534, |
|
"learning_rate": 5.299650078716313e-06, |
|
"loss": 0.343, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.0704152120626904, |
|
"grad_norm": 0.05235380991950498, |
|
"learning_rate": 5.260772447099704e-06, |
|
"loss": 0.3179, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.074871871054, |
|
"grad_norm": 0.043917180636865144, |
|
"learning_rate": 5.2218789998178125e-06, |
|
"loss": 0.3098, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.0793285300453093, |
|
"grad_norm": 0.039598233352694974, |
|
"learning_rate": 5.182972095730828e-06, |
|
"loss": 0.3299, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.0837851890366188, |
|
"grad_norm": 0.03914796341413229, |
|
"learning_rate": 5.144054094515088e-06, |
|
"loss": 0.3191, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.0882418480279283, |
|
"grad_norm": 0.04563116912150565, |
|
"learning_rate": 5.1051273565199564e-06, |
|
"loss": 0.3465, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.0926985070192379, |
|
"grad_norm": 0.04488310022064193, |
|
"learning_rate": 5.0661942426246825e-06, |
|
"loss": 0.3336, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.0971551660105474, |
|
"grad_norm": 0.045624300677554144, |
|
"learning_rate": 5.0272571140952076e-06, |
|
"loss": 0.3366, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.101611825001857, |
|
"grad_norm": 0.04547698828207826, |
|
"learning_rate": 4.988318332440958e-06, |
|
"loss": 0.3401, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.1060684839931665, |
|
"grad_norm": 0.0352328063030309, |
|
"learning_rate": 4.949380259271618e-06, |
|
"loss": 0.3429, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.110525142984476, |
|
"grad_norm": 0.03628153680973482, |
|
"learning_rate": 4.910445256153909e-06, |
|
"loss": 0.3685, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.1149818019757856, |
|
"grad_norm": 0.05406284923018293, |
|
"learning_rate": 4.871515684468348e-06, |
|
"loss": 0.3255, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.119438460967095, |
|
"grad_norm": 0.04417976412734551, |
|
"learning_rate": 4.832593905266045e-06, |
|
"loss": 0.3255, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.1238951199584046, |
|
"grad_norm": 0.04882094138393501, |
|
"learning_rate": 4.793682279125504e-06, |
|
"loss": 0.3233, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.128351778949714, |
|
"grad_norm": 0.04666808909953559, |
|
"learning_rate": 4.754783166009443e-06, |
|
"loss": 0.3381, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.1328084379410235, |
|
"grad_norm": 0.0393273395144626, |
|
"learning_rate": 4.715898925121683e-06, |
|
"loss": 0.3362, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.137265096932333, |
|
"grad_norm": 0.042614792670262726, |
|
"learning_rate": 4.677031914764052e-06, |
|
"loss": 0.3565, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.1417217559236426, |
|
"grad_norm": 0.04265242813984368, |
|
"learning_rate": 4.638184492193357e-06, |
|
"loss": 0.3391, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.146178414914952, |
|
"grad_norm": 0.05253572445021966, |
|
"learning_rate": 4.59935901347842e-06, |
|
"loss": 0.3444, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.1506350739062616, |
|
"grad_norm": 0.04661513504414531, |
|
"learning_rate": 4.560557833357188e-06, |
|
"loss": 0.3324, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.1550917328975712, |
|
"grad_norm": 0.05181632502738739, |
|
"learning_rate": 4.521783305093906e-06, |
|
"loss": 0.3593, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.1595483918888807, |
|
"grad_norm": 0.043190087583387604, |
|
"learning_rate": 4.483037780336411e-06, |
|
"loss": 0.3313, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.1640050508801902, |
|
"grad_norm": 0.04699548343789582, |
|
"learning_rate": 4.444323608973498e-06, |
|
"loss": 0.3235, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.1684617098714996, |
|
"grad_norm": 0.04832144947499993, |
|
"learning_rate": 4.4056431389923925e-06, |
|
"loss": 0.3249, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.172918368862809, |
|
"grad_norm": 0.049877024800225496, |
|
"learning_rate": 4.366998716336364e-06, |
|
"loss": 0.3289, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.1773750278541186, |
|
"grad_norm": 0.050518314153578, |
|
"learning_rate": 4.328392684762432e-06, |
|
"loss": 0.3227, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.1818316868454282, |
|
"grad_norm": 0.05274930662575011, |
|
"learning_rate": 4.289827385699225e-06, |
|
"loss": 0.3389, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.1862883458367377, |
|
"grad_norm": 0.04314769892456159, |
|
"learning_rate": 4.251305158104973e-06, |
|
"loss": 0.3325, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.1907450048280472, |
|
"grad_norm": 0.05744060663520273, |
|
"learning_rate": 4.212828338325655e-06, |
|
"loss": 0.3501, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.1952016638193568, |
|
"grad_norm": 0.04788383703548173, |
|
"learning_rate": 4.174399259953292e-06, |
|
"loss": 0.3182, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.1996583228106663, |
|
"grad_norm": 0.05322839124311334, |
|
"learning_rate": 4.136020253684426e-06, |
|
"loss": 0.3236, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.2041149818019758, |
|
"grad_norm": 0.04192717948800928, |
|
"learning_rate": 4.097693647178765e-06, |
|
"loss": 0.3116, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.2085716407932854, |
|
"grad_norm": 0.07466924907391985, |
|
"learning_rate": 4.059421764917998e-06, |
|
"loss": 0.336, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.213028299784595, |
|
"grad_norm": 0.047779612339586844, |
|
"learning_rate": 4.021206928064837e-06, |
|
"loss": 0.3199, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.2174849587759042, |
|
"grad_norm": 0.04890716812809991, |
|
"learning_rate": 3.983051454322227e-06, |
|
"loss": 0.3042, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.2219416177672138, |
|
"grad_norm": 0.05622057359534205, |
|
"learning_rate": 3.944957657792779e-06, |
|
"loss": 0.315, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.2263982767585233, |
|
"grad_norm": 0.05816972903766055, |
|
"learning_rate": 3.906927848838428e-06, |
|
"loss": 0.3447, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.2308549357498328, |
|
"grad_norm": 0.0521170519743598, |
|
"learning_rate": 3.868964333940308e-06, |
|
"loss": 0.3287, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.2353115947411424, |
|
"grad_norm": 0.05613111708237626, |
|
"learning_rate": 3.83106941555886e-06, |
|
"loss": 0.3514, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.239768253732452, |
|
"grad_norm": 0.05079823220287822, |
|
"learning_rate": 3.7932453919942002e-06, |
|
"loss": 0.2976, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.2442249127237615, |
|
"grad_norm": 0.04588237737238143, |
|
"learning_rate": 3.7554945572467262e-06, |
|
"loss": 0.3429, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.248681571715071, |
|
"grad_norm": 0.06835690002137525, |
|
"learning_rate": 3.7178192008779746e-06, |
|
"loss": 0.3251, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.2531382307063805, |
|
"grad_norm": 0.050609979224703276, |
|
"learning_rate": 3.6802216078717825e-06, |
|
"loss": 0.3403, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.2575948896976898, |
|
"grad_norm": 0.053908001625159505, |
|
"learning_rate": 3.642704058495691e-06, |
|
"loss": 0.3237, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.2620515486889996, |
|
"grad_norm": 0.06722754734917484, |
|
"learning_rate": 3.6052688281626448e-06, |
|
"loss": 0.3298, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.266508207680309, |
|
"grad_norm": 0.04804813484670096, |
|
"learning_rate": 3.5679181872930067e-06, |
|
"loss": 0.3137, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.2709648666716185, |
|
"grad_norm": 0.052833907520638754, |
|
"learning_rate": 3.5306544011768414e-06, |
|
"loss": 0.3503, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.275421525662928, |
|
"grad_norm": 0.05272400381575242, |
|
"learning_rate": 3.493479729836534e-06, |
|
"loss": 0.3093, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.2798781846542375, |
|
"grad_norm": 0.0580236610090227, |
|
"learning_rate": 3.4563964278897267e-06, |
|
"loss": 0.3576, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.284334843645547, |
|
"grad_norm": 0.06013873751374385, |
|
"learning_rate": 3.4194067444125645e-06, |
|
"loss": 0.3231, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.2887915026368566, |
|
"grad_norm": 0.056386412770539907, |
|
"learning_rate": 3.3825129228033043e-06, |
|
"loss": 0.3109, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.2932481616281661, |
|
"grad_norm": 0.05917179185232147, |
|
"learning_rate": 3.3457172006462486e-06, |
|
"loss": 0.3454, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.2977048206194757, |
|
"grad_norm": 0.051639399606407455, |
|
"learning_rate": 3.3090218095760296e-06, |
|
"loss": 0.3448, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.3021614796107852, |
|
"grad_norm": 0.04783319813503841, |
|
"learning_rate": 3.272428975142279e-06, |
|
"loss": 0.2983, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.3066181386020945, |
|
"grad_norm": 0.05087706067935069, |
|
"learning_rate": 3.235940916674636e-06, |
|
"loss": 0.3224, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.311074797593404, |
|
"grad_norm": 0.0531382115416805, |
|
"learning_rate": 3.19955984714815e-06, |
|
"loss": 0.3081, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.3155314565847136, |
|
"grad_norm": 0.060724547940211995, |
|
"learning_rate": 3.1632879730490705e-06, |
|
"loss": 0.309, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.3199881155760231, |
|
"grad_norm": 0.05180410376030714, |
|
"learning_rate": 3.127127494241018e-06, |
|
"loss": 0.3412, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.3244447745673327, |
|
"grad_norm": 0.06436906484669133, |
|
"learning_rate": 3.091080603831568e-06, |
|
"loss": 0.3304, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.3289014335586422, |
|
"grad_norm": 0.06141323563691381, |
|
"learning_rate": 3.055149488039239e-06, |
|
"loss": 0.3482, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.3333580925499517, |
|
"grad_norm": 0.05073605621235636, |
|
"learning_rate": 3.0193363260609045e-06, |
|
"loss": 0.3118, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.3378147515412613, |
|
"grad_norm": 0.06151762054710644, |
|
"learning_rate": 2.983643289939614e-06, |
|
"loss": 0.353, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.3422714105325708, |
|
"grad_norm": 0.0812926316126211, |
|
"learning_rate": 2.9480725444328763e-06, |
|
"loss": 0.3642, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.3467280695238801, |
|
"grad_norm": 0.05677065462915271, |
|
"learning_rate": 2.9126262468813602e-06, |
|
"loss": 0.329, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.35118472851519, |
|
"grad_norm": 0.05518140834166454, |
|
"learning_rate": 2.877306547078045e-06, |
|
"loss": 0.2899, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.3556413875064992, |
|
"grad_norm": 0.056017531551750585, |
|
"learning_rate": 2.842115587137855e-06, |
|
"loss": 0.3043, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.3600980464978087, |
|
"grad_norm": 0.06536315700251481, |
|
"learning_rate": 2.8070555013677305e-06, |
|
"loss": 0.3018, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.3645547054891183, |
|
"grad_norm": 0.06137584376324279, |
|
"learning_rate": 2.772128416137181e-06, |
|
"loss": 0.2912, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.3690113644804278, |
|
"grad_norm": 0.058100040167798406, |
|
"learning_rate": 2.7373364497493315e-06, |
|
"loss": 0.2794, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.3734680234717374, |
|
"grad_norm": 0.07619482744511352, |
|
"learning_rate": 2.7026817123124456e-06, |
|
"loss": 0.309, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.377924682463047, |
|
"grad_norm": 0.0602222443408102, |
|
"learning_rate": 2.6681663056119413e-06, |
|
"loss": 0.3248, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.3823813414543564, |
|
"grad_norm": 0.06509968172306801, |
|
"learning_rate": 2.63379232298293e-06, |
|
"loss": 0.3449, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.386838000445666, |
|
"grad_norm": 0.06683684546365645, |
|
"learning_rate": 2.599561849183255e-06, |
|
"loss": 0.3022, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.3912946594369755, |
|
"grad_norm": 0.0796435434481212, |
|
"learning_rate": 2.5654769602670438e-06, |
|
"loss": 0.3372, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.3957513184282848, |
|
"grad_norm": 0.071737403463626, |
|
"learning_rate": 2.531539723458808e-06, |
|
"loss": 0.3226, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.4002079774195946, |
|
"grad_norm": 0.0628814888060204, |
|
"learning_rate": 2.4977521970280632e-06, |
|
"loss": 0.3054, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.4046646364109039, |
|
"grad_norm": 0.0664126755234312, |
|
"learning_rate": 2.4641164301644916e-06, |
|
"loss": 0.3163, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.4091212954022134, |
|
"grad_norm": 0.0691268006065538, |
|
"learning_rate": 2.4306344628536677e-06, |
|
"loss": 0.292, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.413577954393523, |
|
"grad_norm": 0.08254717009005777, |
|
"learning_rate": 2.3973083257533337e-06, |
|
"loss": 0.2964, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.4180346133848325, |
|
"grad_norm": 0.0664448970280554, |
|
"learning_rate": 2.364140040070233e-06, |
|
"loss": 0.3013, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.422491272376142, |
|
"grad_norm": 0.06641276103365941, |
|
"learning_rate": 2.3311316174375385e-06, |
|
"loss": 0.3088, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.4269479313674516, |
|
"grad_norm": 0.08218794135414882, |
|
"learning_rate": 2.2982850597928353e-06, |
|
"loss": 0.3015, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.431404590358761, |
|
"grad_norm": 0.06405949653118427, |
|
"learning_rate": 2.2656023592567168e-06, |
|
"loss": 0.3161, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.4358612493500706, |
|
"grad_norm": 0.07949199345356144, |
|
"learning_rate": 2.2330854980119524e-06, |
|
"loss": 0.3018, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.4403179083413802, |
|
"grad_norm": 0.074973629780883, |
|
"learning_rate": 2.2007364481832827e-06, |
|
"loss": 0.286, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.4447745673326895, |
|
"grad_norm": 0.06978140375268374, |
|
"learning_rate": 2.168557171717795e-06, |
|
"loss": 0.3029, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.449231226323999, |
|
"grad_norm": 0.060658080118647636, |
|
"learning_rate": 2.1365496202659503e-06, |
|
"loss": 0.2996, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.4536878853153086, |
|
"grad_norm": 0.07246627865043749, |
|
"learning_rate": 2.104715735063201e-06, |
|
"loss": 0.2961, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.458144544306618, |
|
"grad_norm": 0.07791853687083404, |
|
"learning_rate": 2.0730574468122683e-06, |
|
"loss": 0.3244, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.4626012032979276, |
|
"grad_norm": 0.083758493641139, |
|
"learning_rate": 2.0415766755660444e-06, |
|
"loss": 0.3449, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.4670578622892372, |
|
"grad_norm": 0.08124328287993272, |
|
"learning_rate": 2.010275330611132e-06, |
|
"loss": 0.3335, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.4715145212805467, |
|
"grad_norm": 0.0699592628852485, |
|
"learning_rate": 1.9791553103520624e-06, |
|
"loss": 0.3101, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.4759711802718563, |
|
"grad_norm": 0.05596234972263472, |
|
"learning_rate": 1.9482185021961506e-06, |
|
"loss": 0.3062, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.4804278392631658, |
|
"grad_norm": 0.06327709166815214, |
|
"learning_rate": 1.917466782439024e-06, |
|
"loss": 0.2714, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.484884498254475, |
|
"grad_norm": 0.09193245610034326, |
|
"learning_rate": 1.88690201615083e-06, |
|
"loss": 0.3015, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.4893411572457849, |
|
"grad_norm": 0.116065810706921, |
|
"learning_rate": 1.8565260570631244e-06, |
|
"loss": 0.3023, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.4937978162370942, |
|
"grad_norm": 0.06085779115348225, |
|
"learning_rate": 1.82634074745643e-06, |
|
"loss": 0.2793, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.4982544752284037, |
|
"grad_norm": 0.07726951500485736, |
|
"learning_rate": 1.7963479180485204e-06, |
|
"loss": 0.2952, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.5027111342197133, |
|
"grad_norm": 0.08737288468078919, |
|
"learning_rate": 1.7665493878833807e-06, |
|
"loss": 0.3074, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.5071677932110228, |
|
"grad_norm": 0.07717042291948013, |
|
"learning_rate": 1.736946964220877e-06, |
|
"loss": 0.3265, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.5116244522023323, |
|
"grad_norm": 0.09959612155824546, |
|
"learning_rate": 1.7075424424271604e-06, |
|
"loss": 0.2923, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.5160811111936419, |
|
"grad_norm": 0.07789019553417695, |
|
"learning_rate": 1.678337605865773e-06, |
|
"loss": 0.3185, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.5205377701849514, |
|
"grad_norm": 0.09414331699116586, |
|
"learning_rate": 1.6493342257894845e-06, |
|
"loss": 0.3291, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.5249944291762607, |
|
"grad_norm": 0.09880867896025543, |
|
"learning_rate": 1.6205340612328707e-06, |
|
"loss": 0.311, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.5294510881675705, |
|
"grad_norm": 0.07951318628953609, |
|
"learning_rate": 1.591938858905634e-06, |
|
"loss": 0.303, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.5339077471588798, |
|
"grad_norm": 0.07982777318332517, |
|
"learning_rate": 1.5635503530866563e-06, |
|
"loss": 0.2811, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.5383644061501895, |
|
"grad_norm": 0.08550606994115802, |
|
"learning_rate": 1.5353702655188257e-06, |
|
"loss": 0.2993, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.5428210651414989, |
|
"grad_norm": 0.08986128570256782, |
|
"learning_rate": 1.5074003053046105e-06, |
|
"loss": 0.3038, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.5472777241328084, |
|
"grad_norm": 0.06401529887840299, |
|
"learning_rate": 1.4796421688023975e-06, |
|
"loss": 0.2757, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.551734383124118, |
|
"grad_norm": 0.09741957746810867, |
|
"learning_rate": 1.4520975395236215e-06, |
|
"loss": 0.3062, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.5561910421154275, |
|
"grad_norm": 0.07760428621549359, |
|
"learning_rate": 1.4247680880306513e-06, |
|
"loss": 0.3317, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.560647701106737, |
|
"grad_norm": 0.08909029154183626, |
|
"learning_rate": 1.3976554718354723e-06, |
|
"loss": 0.318, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.5651043600980465, |
|
"grad_norm": 0.10011564881446526, |
|
"learning_rate": 1.3707613352991656e-06, |
|
"loss": 0.2931, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.569561019089356, |
|
"grad_norm": 0.08176423100104935, |
|
"learning_rate": 1.344087309532175e-06, |
|
"loss": 0.2894, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.5740176780806654, |
|
"grad_norm": 0.07307560261614855, |
|
"learning_rate": 1.3176350122953763e-06, |
|
"loss": 0.2867, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.5784743370719752, |
|
"grad_norm": 0.12191262928713528, |
|
"learning_rate": 1.2914060479019707e-06, |
|
"loss": 0.3202, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.5829309960632845, |
|
"grad_norm": 0.09026660529628, |
|
"learning_rate": 1.2654020071201795e-06, |
|
"loss": 0.3106, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.5873876550545942, |
|
"grad_norm": 0.09053383432158335, |
|
"learning_rate": 1.239624467076762e-06, |
|
"loss": 0.2984, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.5918443140459035, |
|
"grad_norm": 0.10384705796359629, |
|
"learning_rate": 1.2140749911613703e-06, |
|
"loss": 0.3184, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.596300973037213, |
|
"grad_norm": 0.09677144301192052, |
|
"learning_rate": 1.1887551289317284e-06, |
|
"loss": 0.3159, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.6007576320285226, |
|
"grad_norm": 0.08758929393810426, |
|
"learning_rate": 1.163666416019647e-06, |
|
"loss": 0.2917, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.6052142910198322, |
|
"grad_norm": 0.08976099626976211, |
|
"learning_rate": 1.1388103740379009e-06, |
|
"loss": 0.3137, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.6096709500111417, |
|
"grad_norm": 0.09589145787287572, |
|
"learning_rate": 1.114188510487928e-06, |
|
"loss": 0.2672, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.614127609002451, |
|
"grad_norm": 0.0898571951688524, |
|
"learning_rate": 1.0898023186684188e-06, |
|
"loss": 0.2821, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.6185842679937608, |
|
"grad_norm": 0.10084905972281838, |
|
"learning_rate": 1.06565327758473e-06, |
|
"loss": 0.3108, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.62304092698507, |
|
"grad_norm": 0.09625652044207637, |
|
"learning_rate": 1.0417428518592015e-06, |
|
"loss": 0.2797, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.6274975859763798, |
|
"grad_norm": 0.13668108404773008, |
|
"learning_rate": 1.018072491642313e-06, |
|
"loss": 0.3249, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.6319542449676891, |
|
"grad_norm": 0.1094276650717538, |
|
"learning_rate": 9.946436325247438e-07, |
|
"loss": 0.3072, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.6364109039589987, |
|
"grad_norm": 0.09417653715028669, |
|
"learning_rate": 9.71457695450303e-07, |
|
"loss": 0.2896, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.6408675629503082, |
|
"grad_norm": 0.10106662883357638, |
|
"learning_rate": 9.485160866297433e-07, |
|
"loss": 0.2978, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.6453242219416178, |
|
"grad_norm": 0.10438310063745738, |
|
"learning_rate": 9.25820197455487e-07, |
|
"loss": 0.2821, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.6497808809329273, |
|
"grad_norm": 0.10535335881946876, |
|
"learning_rate": 9.033714044172281e-07, |
|
"loss": 0.294, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.6542375399242368, |
|
"grad_norm": 0.0889200936563808, |
|
"learning_rate": 8.811710690184566e-07, |
|
"loss": 0.262, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.6586941989155464, |
|
"grad_norm": 0.10455279564530415, |
|
"learning_rate": 8.592205376938834e-07, |
|
"loss": 0.2817, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.6631508579068557, |
|
"grad_norm": 0.11318390537590481, |
|
"learning_rate": 8.375211417277717e-07, |
|
"loss": 0.3045, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.6676075168981654, |
|
"grad_norm": 0.088743004174387, |
|
"learning_rate": 8.160741971732084e-07, |
|
"loss": 0.2697, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.6720641758894748, |
|
"grad_norm": 0.12499336666614728, |
|
"learning_rate": 7.948810047722793e-07, |
|
"loss": 0.2856, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.6765208348807845, |
|
"grad_norm": 0.11172684998831532, |
|
"learning_rate": 7.739428498771761e-07, |
|
"loss": 0.2658, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.6809774938720938, |
|
"grad_norm": 0.12595234706288227, |
|
"learning_rate": 7.532610023722509e-07, |
|
"loss": 0.2732, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.6854341528634034, |
|
"grad_norm": 0.09689191794827777, |
|
"learning_rate": 7.328367165969907e-07, |
|
"loss": 0.3044, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.689890811854713, |
|
"grad_norm": 0.11874605920811564, |
|
"learning_rate": 7.126712312699435e-07, |
|
"loss": 0.3221, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.6943474708460224, |
|
"grad_norm": 0.08766218268129858, |
|
"learning_rate": 6.927657694135959e-07, |
|
"loss": 0.2814, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.698804129837332, |
|
"grad_norm": 0.10403913380960049, |
|
"learning_rate": 6.731215382801903e-07, |
|
"loss": 0.3007, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7032607888286415, |
|
"grad_norm": 0.12394080523133466, |
|
"learning_rate": 6.53739729278513e-07, |
|
"loss": 0.2685, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.707717447819951, |
|
"grad_norm": 0.09220124360987444, |
|
"learning_rate": 6.346215179016297e-07, |
|
"loss": 0.2999, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7121741068112604, |
|
"grad_norm": 0.09256443041488645, |
|
"learning_rate": 6.157680636555996e-07, |
|
"loss": 0.2921, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.7166307658025701, |
|
"grad_norm": 0.11801980704813507, |
|
"learning_rate": 5.971805099891437e-07, |
|
"loss": 0.2952, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.7210874247938794, |
|
"grad_norm": 0.11500014463950178, |
|
"learning_rate": 5.788599842243043e-07, |
|
"loss": 0.2757, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.7255440837851892, |
|
"grad_norm": 0.12909046401480123, |
|
"learning_rate": 5.608075974880678e-07, |
|
"loss": 0.2783, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.7300007427764985, |
|
"grad_norm": 0.10409707004186307, |
|
"learning_rate": 5.430244446449762e-07, |
|
"loss": 0.2853, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.734457401767808, |
|
"grad_norm": 0.11009184273146726, |
|
"learning_rate": 5.255116042307267e-07, |
|
"loss": 0.2712, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.7389140607591176, |
|
"grad_norm": 0.16597411522897149, |
|
"learning_rate": 5.0827013838676e-07, |
|
"loss": 0.3097, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.7433707197504271, |
|
"grad_norm": 0.10555291444735147, |
|
"learning_rate": 4.91301092795835e-07, |
|
"loss": 0.3001, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.7478273787417367, |
|
"grad_norm": 0.12464546891723725, |
|
"learning_rate": 4.746054966186192e-07, |
|
"loss": 0.2955, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.752284037733046, |
|
"grad_norm": 0.11277180986490198, |
|
"learning_rate": 4.581843624312643e-07, |
|
"loss": 0.2643, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.7567406967243557, |
|
"grad_norm": 0.09867908839152027, |
|
"learning_rate": 4.4203868616399305e-07, |
|
"loss": 0.312, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.761197355715665, |
|
"grad_norm": 0.11312294922723523, |
|
"learning_rate": 4.261694470407024e-07, |
|
"loss": 0.2754, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.7656540147069748, |
|
"grad_norm": 0.10862426439391781, |
|
"learning_rate": 4.1057760751957056e-07, |
|
"loss": 0.291, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.7701106736982841, |
|
"grad_norm": 0.13833325552109443, |
|
"learning_rate": 3.9526411323468403e-07, |
|
"loss": 0.3009, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.7745673326895937, |
|
"grad_norm": 0.11648729127760897, |
|
"learning_rate": 3.802298929386872e-07, |
|
"loss": 0.2948, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.7790239916809032, |
|
"grad_norm": 0.10832697002551062, |
|
"learning_rate": 3.6547585844645574e-07, |
|
"loss": 0.264, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.7834806506722127, |
|
"grad_norm": 0.1129159601097704, |
|
"learning_rate": 3.510029045797897e-07, |
|
"loss": 0.2814, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.7879373096635223, |
|
"grad_norm": 0.12881883038121175, |
|
"learning_rate": 3.368119091131511e-07, |
|
"loss": 0.2961, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.7923939686548318, |
|
"grad_norm": 0.11915967567506883, |
|
"learning_rate": 3.2290373272041984e-07, |
|
"loss": 0.2847, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.7968506276461413, |
|
"grad_norm": 0.14883545803777695, |
|
"learning_rate": 3.0927921892270173e-07, |
|
"loss": 0.3072, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8013072866374507, |
|
"grad_norm": 0.14935341192789242, |
|
"learning_rate": 2.9593919403716254e-07, |
|
"loss": 0.2585, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8057639456287604, |
|
"grad_norm": 0.10091752925214101, |
|
"learning_rate": 2.8288446712691985e-07, |
|
"loss": 0.2927, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8102206046200697, |
|
"grad_norm": 0.1272686505517032, |
|
"learning_rate": 2.701158299519652e-07, |
|
"loss": 0.297, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.8146772636113795, |
|
"grad_norm": 0.11235244093503088, |
|
"learning_rate": 2.5763405692115326e-07, |
|
"loss": 0.2834, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.8191339226026888, |
|
"grad_norm": 0.13526604911761567, |
|
"learning_rate": 2.454399050452283e-07, |
|
"loss": 0.296, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.8235905815939983, |
|
"grad_norm": 0.13280050999221021, |
|
"learning_rate": 2.335341138909142e-07, |
|
"loss": 0.2717, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.8280472405853079, |
|
"grad_norm": 0.12814019839952007, |
|
"learning_rate": 2.219174055360612e-07, |
|
"loss": 0.2717, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.8325038995766174, |
|
"grad_norm": 0.11817894924930601, |
|
"learning_rate": 2.1059048452584996e-07, |
|
"loss": 0.2735, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.836960558567927, |
|
"grad_norm": 0.12820739048926352, |
|
"learning_rate": 1.9955403783006255e-07, |
|
"loss": 0.2726, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.8414172175592363, |
|
"grad_norm": 0.14570257294552771, |
|
"learning_rate": 1.888087348014217e-07, |
|
"loss": 0.2526, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.845873876550546, |
|
"grad_norm": 0.09484685178822737, |
|
"learning_rate": 1.7835522713498644e-07, |
|
"loss": 0.2592, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.8503305355418553, |
|
"grad_norm": 0.11168612744194842, |
|
"learning_rate": 1.6819414882863704e-07, |
|
"loss": 0.3027, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.854787194533165, |
|
"grad_norm": 0.13169293315087485, |
|
"learning_rate": 1.5832611614461757e-07, |
|
"loss": 0.2576, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.8592438535244744, |
|
"grad_norm": 0.13264409398535437, |
|
"learning_rate": 1.4875172757216006e-07, |
|
"loss": 0.2808, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.863700512515784, |
|
"grad_norm": 0.11349693321653709, |
|
"learning_rate": 1.394715637911903e-07, |
|
"loss": 0.2588, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.8681571715070935, |
|
"grad_norm": 0.14216886029418724, |
|
"learning_rate": 1.3048618763710609e-07, |
|
"loss": 0.2886, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.872613830498403, |
|
"grad_norm": 0.11190825927177947, |
|
"learning_rate": 1.2179614406664374e-07, |
|
"loss": 0.2806, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.8770704894897126, |
|
"grad_norm": 0.13139481283283222, |
|
"learning_rate": 1.1340196012482652e-07, |
|
"loss": 0.2701, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.881527148481022, |
|
"grad_norm": 0.13212149531161754, |
|
"learning_rate": 1.0530414491299823e-07, |
|
"loss": 0.2976, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.8859838074723316, |
|
"grad_norm": 0.14761056178281307, |
|
"learning_rate": 9.75031895579509e-08, |
|
"loss": 0.2851, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.890440466463641, |
|
"grad_norm": 0.11807804154726727, |
|
"learning_rate": 8.999956718213187e-08, |
|
"loss": 0.2675, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.8948971254549507, |
|
"grad_norm": 0.13153250275786837, |
|
"learning_rate": 8.279373287495452e-08, |
|
"loss": 0.2756, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.89935378444626, |
|
"grad_norm": 0.12010506744054632, |
|
"learning_rate": 7.588612366519588e-08, |
|
"loss": 0.2755, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9038104434375698, |
|
"grad_norm": 0.13595444122630035, |
|
"learning_rate": 6.927715849448902e-08, |
|
"loss": 0.2621, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.908267102428879, |
|
"grad_norm": 0.11969702875665357, |
|
"learning_rate": 6.296723819191886e-08, |
|
"loss": 0.2619, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9127237614201886, |
|
"grad_norm": 0.12713767895060166, |
|
"learning_rate": 5.695674544970775e-08, |
|
"loss": 0.2646, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 1.9171804204114982, |
|
"grad_norm": 0.1512438174845061, |
|
"learning_rate": 5.124604480000739e-08, |
|
"loss": 0.2861, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.9216370794028077, |
|
"grad_norm": 0.15063267518774118, |
|
"learning_rate": 4.5835482592792026e-08, |
|
"loss": 0.276, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 1.9260937383941172, |
|
"grad_norm": 0.12516539105766297, |
|
"learning_rate": 4.072538697484807e-08, |
|
"loss": 0.288, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.9305503973854268, |
|
"grad_norm": 0.12187069090426104, |
|
"learning_rate": 3.591606786987667e-08, |
|
"loss": 0.2864, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 1.9350070563767363, |
|
"grad_norm": 0.10855277743154071, |
|
"learning_rate": 3.1407816959695416e-08, |
|
"loss": 0.2978, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.9394637153680456, |
|
"grad_norm": 0.133314659297753, |
|
"learning_rate": 2.720090766654693e-08, |
|
"loss": 0.2864, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.9439203743593554, |
|
"grad_norm": 0.1263650702337354, |
|
"learning_rate": 2.3295595136517136e-08, |
|
"loss": 0.2737, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.9483770333506647, |
|
"grad_norm": 0.1343875450088774, |
|
"learning_rate": 1.9692116224060976e-08, |
|
"loss": 0.2624, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 1.9528336923419745, |
|
"grad_norm": 0.12031351113756965, |
|
"learning_rate": 1.639068947763722e-08, |
|
"loss": 0.277, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.9572903513332838, |
|
"grad_norm": 0.12928344033539446, |
|
"learning_rate": 1.3391515126452426e-08, |
|
"loss": 0.2739, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 1.9617470103245933, |
|
"grad_norm": 0.12874880789823828, |
|
"learning_rate": 1.0694775068320063e-08, |
|
"loss": 0.2622, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.9662036693159028, |
|
"grad_norm": 0.17186304444380732, |
|
"learning_rate": 8.30063285862437e-09, |
|
"loss": 0.2909, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 1.9706603283072124, |
|
"grad_norm": 0.1309907572097109, |
|
"learning_rate": 6.209233700406048e-09, |
|
"loss": 0.277, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.975116987298522, |
|
"grad_norm": 0.1365092145131148, |
|
"learning_rate": 4.4207044355493164e-09, |
|
"loss": 0.2629, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 1.9795736462898312, |
|
"grad_norm": 0.12156842653941813, |
|
"learning_rate": 2.935153537096391e-09, |
|
"loss": 0.2658, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.984030305281141, |
|
"grad_norm": 0.1263668031483968, |
|
"learning_rate": 1.7526711026621957e-09, |
|
"loss": 0.2805, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.9884869642724503, |
|
"grad_norm": 0.13239230121316822, |
|
"learning_rate": 8.733288489737312e-10, |
|
"loss": 0.2822, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.99294362326376, |
|
"grad_norm": 0.13212067705172323, |
|
"learning_rate": 2.9718010751966784e-10, |
|
"loss": 0.2625, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 1.9974002822550694, |
|
"grad_norm": 0.13697820005606215, |
|
"learning_rate": 2.4259821315153476e-11, |
|
"loss": 0.2635, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.9991829458515933, |
|
"eval_loss": 0.2551692724227905, |
|
"eval_runtime": 27.6419, |
|
"eval_samples_per_second": 19.68, |
|
"eval_steps_per_second": 4.92, |
|
"step": 2242 |
|
} |
|
],
"logging_steps": 5,
"max_steps": 2242,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.790417620582793e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}