{
  "best_global_step": 1000,
  "best_metric": 1.0913933515548706,
  "best_model_checkpoint": "output/reasoning-model_v11/checkpoint-1000",
  "epoch": 0.37512895057676077,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0018756447528838038, "grad_norm": 17.375, "learning_rate": 1.0000000000000002e-06, "loss": 1.5042, "step": 5 },
    { "epoch": 0.0037512895057676076, "grad_norm": 14.875, "learning_rate": 2.25e-06, "loss": 1.413, "step": 10 },
    { "epoch": 0.005626934258651411, "grad_norm": 13.375, "learning_rate": 3.5e-06, "loss": 1.5803, "step": 15 },
    { "epoch": 0.007502579011535215, "grad_norm": 9.1875, "learning_rate": 4.75e-06, "loss": 1.5081, "step": 20 },
    { "epoch": 0.00937822376441902, "grad_norm": 9.5, "learning_rate": 6e-06, "loss": 1.3924, "step": 25 },
    { "epoch": 0.011253868517302822, "grad_norm": 7.96875, "learning_rate": 7.25e-06, "loss": 1.4063, "step": 30 },
    { "epoch": 0.013129513270186627, "grad_norm": 7.78125, "learning_rate": 8.5e-06, "loss": 1.2718, "step": 35 },
    { "epoch": 0.01500515802307043, "grad_norm": 10.4375, "learning_rate": 9.75e-06, "loss": 1.2445, "step": 40 },
    { "epoch": 0.016880802775954235, "grad_norm": 7.59375, "learning_rate": 1.1000000000000001e-05, "loss": 1.1666, "step": 45 },
    { "epoch": 0.01875644752883804, "grad_norm": 7.0, "learning_rate": 1.2250000000000001e-05, "loss": 1.2257, "step": 50 },
    { "epoch": 0.02063209228172184, "grad_norm": 8.5625, "learning_rate": 1.3500000000000001e-05, "loss": 1.1071, "step": 55 },
    { "epoch": 0.022507737034605645, "grad_norm": 7.5625, "learning_rate": 1.4750000000000003e-05, "loss": 1.2369, "step": 60 },
    { "epoch": 0.02438338178748945, "grad_norm": 8.5625, "learning_rate": 1.6000000000000003e-05, "loss": 1.2355, "step": 65 },
    { "epoch": 0.026259026540373254, "grad_norm": 9.8125, "learning_rate": 1.7250000000000003e-05, "loss": 1.0905, "step": 70 },
    { "epoch": 0.028134671293257058, "grad_norm": 6.6875, "learning_rate": 1.8500000000000002e-05, "loss": 1.1362, "step": 75 },
    { "epoch": 0.03001031604614086, "grad_norm": 12.875, "learning_rate": 1.9750000000000002e-05, "loss": 1.1355, "step": 80 },
    { "epoch": 0.031885960799024664, "grad_norm": 8.0625, "learning_rate": 1.999988193210057e-05, "loss": 1.2262, "step": 85 },
    { "epoch": 0.03376160555190847, "grad_norm": 6.40625, "learning_rate": 1.9999402286037404e-05, "loss": 1.0978, "step": 90 },
    { "epoch": 0.03563725030479227, "grad_norm": 9.375, "learning_rate": 1.9998553700250286e-05, "loss": 1.098, "step": 95 },
    { "epoch": 0.03751289505767608, "grad_norm": 7.21875, "learning_rate": 1.9997336206048778e-05, "loss": 1.2503, "step": 100 },
    { "epoch": 0.03938853981055988, "grad_norm": 8.1875, "learning_rate": 1.999574984835377e-05, "loss": 1.0663, "step": 105 },
    { "epoch": 0.04126418456344368, "grad_norm": 7.46875, "learning_rate": 1.9993794685695792e-05, "loss": 1.3083, "step": 110 },
    { "epoch": 0.04313982931632749, "grad_norm": 6.9375, "learning_rate": 1.9991470790212877e-05, "loss": 1.1906, "step": 115 },
    { "epoch": 0.04501547406921129, "grad_norm": 9.8125, "learning_rate": 1.9988778247647887e-05, "loss": 1.1358, "step": 120 },
    { "epoch": 0.046891118822095096, "grad_norm": 9.375, "learning_rate": 1.9985717157345346e-05, "loss": 1.1421, "step": 125 },
    { "epoch": 0.0487667635749789, "grad_norm": 7.125, "learning_rate": 1.998228763224779e-05, "loss": 1.1236, "step": 130 },
    { "epoch": 0.0506424083278627, "grad_norm": 6.1875, "learning_rate": 1.9978489798891584e-05, "loss": 1.2223, "step": 135 },
    { "epoch": 0.05251805308074651, "grad_norm": 6.875, "learning_rate": 1.9974323797402264e-05, "loss": 1.1818, "step": 140 },
    { "epoch": 0.05439369783363031, "grad_norm": 5.78125, "learning_rate": 1.996978978148936e-05, "loss": 1.0341, "step": 145 },
    { "epoch": 0.056269342586514115, "grad_norm": 7.40625, "learning_rate": 1.9964887918440735e-05, "loss": 1.118, "step": 150 },
    { "epoch": 0.058144987339397915, "grad_norm": 7.71875, "learning_rate": 1.995961838911639e-05, "loss": 1.2993, "step": 155 },
    { "epoch": 0.06002063209228172, "grad_norm": 7.03125, "learning_rate": 1.995398138794182e-05, "loss": 1.1533, "step": 160 },
    { "epoch": 0.06189627684516553, "grad_norm": 7.40625, "learning_rate": 1.9947977122900825e-05, "loss": 1.0802, "step": 165 },
    { "epoch": 0.06377192159804933, "grad_norm": 7.15625, "learning_rate": 1.9941605815527827e-05, "loss": 1.0135, "step": 170 },
    { "epoch": 0.06564756635093313, "grad_norm": 6.25, "learning_rate": 1.9934867700899724e-05, "loss": 1.074, "step": 175 },
    { "epoch": 0.06752321110381694, "grad_norm": 6.9375, "learning_rate": 1.9927763027627184e-05, "loss": 1.0574, "step": 180 },
    { "epoch": 0.06939885585670075, "grad_norm": 8.4375, "learning_rate": 1.99202920578455e-05, "loss": 1.0316, "step": 185 },
    { "epoch": 0.07127450060958454, "grad_norm": 7.28125, "learning_rate": 1.9912455067204898e-05, "loss": 1.0653, "step": 190 },
    { "epoch": 0.07315014536246835, "grad_norm": 10.4375, "learning_rate": 1.990425234486038e-05, "loss": 1.1119, "step": 195 },
    { "epoch": 0.07502579011535215, "grad_norm": 7.71875, "learning_rate": 1.9895684193461047e-05, "loss": 1.28, "step": 200 },
    { "epoch": 0.07690143486823596, "grad_norm": 7.6875, "learning_rate": 1.9886750929138935e-05, "loss": 1.1392, "step": 205 },
    { "epoch": 0.07877707962111977, "grad_norm": 5.90625, "learning_rate": 1.987745288149735e-05, "loss": 1.1955, "step": 210 },
    { "epoch": 0.08065272437400356, "grad_norm": 6.625, "learning_rate": 1.986779039359871e-05, "loss": 1.1513, "step": 215 },
    { "epoch": 0.08252836912688737, "grad_norm": 7.75, "learning_rate": 1.985776382195189e-05, "loss": 1.0088, "step": 220 },
    { "epoch": 0.08440401387977117, "grad_norm": 7.5625, "learning_rate": 1.984737353649906e-05, "loss": 0.9908, "step": 225 },
    { "epoch": 0.08627965863265498, "grad_norm": 8.5, "learning_rate": 1.9836619920602032e-05, "loss": 1.1958, "step": 230 },
    { "epoch": 0.08815530338553879, "grad_norm": 11.3125, "learning_rate": 1.9825503371028136e-05, "loss": 1.1107, "step": 235 },
    { "epoch": 0.09003094813842258, "grad_norm": 6.78125, "learning_rate": 1.981402429793556e-05, "loss": 1.062, "step": 240 },
    { "epoch": 0.09190659289130639, "grad_norm": 7.75, "learning_rate": 1.980218312485822e-05, "loss": 1.1393, "step": 245 },
    { "epoch": 0.09378223764419019, "grad_norm": 9.5625, "learning_rate": 1.978998028869015e-05, "loss": 1.0907, "step": 250 },
    { "epoch": 0.095657882397074, "grad_norm": 9.4375, "learning_rate": 1.977741623966936e-05, "loss": 1.0887, "step": 255 },
    { "epoch": 0.0975335271499578, "grad_norm": 8.125, "learning_rate": 1.9764491441361227e-05, "loss": 1.1234, "step": 260 },
    { "epoch": 0.0994091719028416, "grad_norm": 7.03125, "learning_rate": 1.975120637064142e-05, "loss": 1.0825, "step": 265 },
    { "epoch": 0.1012848166557254, "grad_norm": 7.5, "learning_rate": 1.973756151767826e-05, "loss": 1.0728, "step": 270 },
    { "epoch": 0.10316046140860921, "grad_norm": 7.65625, "learning_rate": 1.972355738591467e-05, "loss": 1.2317, "step": 275 },
    { "epoch": 0.10503610616149302, "grad_norm": 8.5625, "learning_rate": 1.9709194492049585e-05, "loss": 1.1281, "step": 280 },
    { "epoch": 0.10691175091437681, "grad_norm": 5.84375, "learning_rate": 1.9694473366018887e-05, "loss": 0.9848, "step": 285 },
    { "epoch": 0.10878739566726062, "grad_norm": 8.4375, "learning_rate": 1.9679394550975864e-05, "loss": 1.0793, "step": 290 },
    { "epoch": 0.11066304042014442, "grad_norm": 8.875, "learning_rate": 1.9663958603271148e-05, "loss": 1.1765, "step": 295 },
    { "epoch": 0.11253868517302823, "grad_norm": 6.4375, "learning_rate": 1.9648166092432216e-05, "loss": 1.0936, "step": 300 },
    { "epoch": 0.11441432992591204, "grad_norm": 7.21875, "learning_rate": 1.9632017601142353e-05, "loss": 1.0343, "step": 305 },
    { "epoch": 0.11628997467879583, "grad_norm": 8.75, "learning_rate": 1.961551372521916e-05, "loss": 1.2004, "step": 310 },
    { "epoch": 0.11816561943167964, "grad_norm": 8.5, "learning_rate": 1.9598655073592583e-05, "loss": 1.1076, "step": 315 },
    { "epoch": 0.12004126418456344, "grad_norm": 7.0625, "learning_rate": 1.9581442268282426e-05, "loss": 1.1502, "step": 320 },
    { "epoch": 0.12191690893744725, "grad_norm": 7.65625, "learning_rate": 1.956387594437541e-05, "loss": 1.0411, "step": 325 },
    { "epoch": 0.12379255369033106, "grad_norm": 7.59375, "learning_rate": 1.9545956750001744e-05, "loss": 0.9873, "step": 330 },
    { "epoch": 0.12566819844321486, "grad_norm": 6.1875, "learning_rate": 1.952768534631121e-05, "loss": 1.1338, "step": 335 },
    { "epoch": 0.12754384319609866, "grad_norm": 9.125, "learning_rate": 1.950906240744877e-05, "loss": 1.0202, "step": 340 },
    { "epoch": 0.12941948794898248, "grad_norm": 8.5625, "learning_rate": 1.9490088620529678e-05, "loss": 1.1292, "step": 345 },
    { "epoch": 0.13129513270186627, "grad_norm": 5.8125, "learning_rate": 1.9470764685614158e-05, "loss": 1.0699, "step": 350 },
    { "epoch": 0.13317077745475006, "grad_norm": 6.15625, "learning_rate": 1.945109131568154e-05, "loss": 1.1161, "step": 355 },
    { "epoch": 0.13504642220763388, "grad_norm": 6.125, "learning_rate": 1.943106923660398e-05, "loss": 1.099, "step": 360 },
    { "epoch": 0.13692206696051767, "grad_norm": 6.53125, "learning_rate": 1.9410699187119662e-05, "loss": 1.0477, "step": 365 },
    { "epoch": 0.1387977117134015, "grad_norm": 8.9375, "learning_rate": 1.938998191880556e-05, "loss": 1.0746, "step": 370 },
    { "epoch": 0.1406733564662853, "grad_norm": 7.09375, "learning_rate": 1.936891819604968e-05, "loss": 1.0619, "step": 375 },
    { "epoch": 0.14254900121916908, "grad_norm": 6.6875, "learning_rate": 1.9347508796022888e-05, "loss": 1.1807, "step": 380 },
    { "epoch": 0.1444246459720529, "grad_norm": 6.75, "learning_rate": 1.9325754508650208e-05, "loss": 1.0707, "step": 385 },
    { "epoch": 0.1463002907249367, "grad_norm": 7.1875, "learning_rate": 1.9303656136581694e-05, "loss": 1.0464, "step": 390 },
    { "epoch": 0.14817593547782051, "grad_norm": 9.0, "learning_rate": 1.928121449516281e-05, "loss": 1.0882, "step": 395 },
    { "epoch": 0.1500515802307043, "grad_norm": 7.71875, "learning_rate": 1.9258430412404344e-05, "loss": 1.125, "step": 400 },
    { "epoch": 0.1519272249835881, "grad_norm": 6.6875, "learning_rate": 1.9235304728951868e-05, "loss": 1.1356, "step": 405 },
    { "epoch": 0.15380286973647192, "grad_norm": 9.9375, "learning_rate": 1.9211838298054704e-05, "loss": 1.1532, "step": 410 },
    { "epoch": 0.1556785144893557, "grad_norm": 8.5, "learning_rate": 1.918803198553446e-05, "loss": 1.1386, "step": 415 },
    { "epoch": 0.15755415924223953, "grad_norm": 7.96875, "learning_rate": 1.916388666975307e-05, "loss": 1.064, "step": 420 },
    { "epoch": 0.15942980399512333, "grad_norm": 9.0, "learning_rate": 1.9139403241580403e-05, "loss": 1.2345, "step": 425 },
    { "epoch": 0.16130544874800712, "grad_norm": 7.25, "learning_rate": 1.9114582604361368e-05, "loss": 1.1021, "step": 430 },
    { "epoch": 0.16318109350089094, "grad_norm": 9.4375, "learning_rate": 1.9089425673882617e-05, "loss": 1.1105, "step": 435 },
    { "epoch": 0.16505673825377473, "grad_norm": 8.375, "learning_rate": 1.906393337833872e-05, "loss": 1.0213, "step": 440 },
    { "epoch": 0.16693238300665855, "grad_norm": 6.5625, "learning_rate": 1.9038106658297946e-05, "loss": 1.2062, "step": 445 },
    { "epoch": 0.16880802775954235, "grad_norm": 7.6875, "learning_rate": 1.9011946466667553e-05, "loss": 1.2408, "step": 450 },
    { "epoch": 0.17068367251242614, "grad_norm": 6.3125, "learning_rate": 1.8985453768658613e-05, "loss": 1.0678, "step": 455 },
    { "epoch": 0.17255931726530996, "grad_norm": 7.5, "learning_rate": 1.8958629541750422e-05, "loss": 1.071, "step": 460 },
    { "epoch": 0.17443496201819375, "grad_norm": 8.4375, "learning_rate": 1.893147477565443e-05, "loss": 1.2118, "step": 465 },
    { "epoch": 0.17631060677107757, "grad_norm": 7.90625, "learning_rate": 1.8903990472277707e-05, "loss": 1.0279, "step": 470 },
    { "epoch": 0.17818625152396136, "grad_norm": 8.9375, "learning_rate": 1.8876177645685997e-05, "loss": 1.1233, "step": 475 },
    { "epoch": 0.18006189627684516, "grad_norm": 6.78125, "learning_rate": 1.8848037322066295e-05, "loss": 1.0398, "step": 480 },
    { "epoch": 0.18193754102972898, "grad_norm": 7.4375, "learning_rate": 1.881957053968898e-05, "loss": 1.1796, "step": 485 },
    { "epoch": 0.18381318578261277, "grad_norm": 7.15625, "learning_rate": 1.8790778348869516e-05, "loss": 1.0767, "step": 490 },
    { "epoch": 0.1856888305354966, "grad_norm": 8.375, "learning_rate": 1.8761661811929686e-05, "loss": 1.1285, "step": 495 },
    { "epoch": 0.18756447528838038, "grad_norm": 8.875, "learning_rate": 1.8732222003158423e-05, "loss": 1.163, "step": 500 },
    { "epoch": 0.18756447528838038, "eval_loss": 1.1088175773620605, "eval_runtime": 108.1214, "eval_samples_per_second": 20.486, "eval_steps_per_second": 2.562, "step": 500 },
    { "epoch": 0.18944012004126418, "grad_norm": 8.0625, "learning_rate": 1.870246000877214e-05, "loss": 1.031, "step": 505 },
    { "epoch": 0.191315764794148, "grad_norm": 6.84375, "learning_rate": 1.8672376926874668e-05, "loss": 1.1465, "step": 510 },
    { "epoch": 0.1931914095470318, "grad_norm": 6.90625, "learning_rate": 1.8641973867416742e-05, "loss": 1.1154, "step": 515 },
    { "epoch": 0.1950670542999156, "grad_norm": 7.375, "learning_rate": 1.8611251952155057e-05, "loss": 1.0743, "step": 520 },
    { "epoch": 0.1969426990527994, "grad_norm": 7.40625, "learning_rate": 1.8580212314610847e-05, "loss": 1.0347, "step": 525 },
    { "epoch": 0.1988183438056832, "grad_norm": 6.28125, "learning_rate": 1.85488561000281e-05, "loss": 1.0763, "step": 530 },
    { "epoch": 0.20069398855856702, "grad_norm": 5.84375, "learning_rate": 1.8517184465331288e-05, "loss": 1.078, "step": 535 },
    { "epoch": 0.2025696333114508, "grad_norm": 8.1875, "learning_rate": 1.848519857908267e-05, "loss": 1.178, "step": 540 },
    { "epoch": 0.2044452780643346, "grad_norm": 7.375, "learning_rate": 1.845289962143918e-05, "loss": 1.0804, "step": 545 },
    { "epoch": 0.20632092281721842, "grad_norm": 7.96875, "learning_rate": 1.8420288784108917e-05, "loss": 1.1589, "step": 550 },
    { "epoch": 0.20819656757010221, "grad_norm": 7.375, "learning_rate": 1.8387367270307122e-05, "loss": 1.1116, "step": 555 },
    { "epoch": 0.21007221232298603, "grad_norm": 8.1875, "learning_rate": 1.835413629471182e-05, "loss": 0.9735, "step": 560 },
    { "epoch": 0.21194785707586983, "grad_norm": 7.625, "learning_rate": 1.832059708341899e-05, "loss": 1.0991, "step": 565 },
    { "epoch": 0.21382350182875362, "grad_norm": 9.125, "learning_rate": 1.8286750873897338e-05, "loss": 1.1227, "step": 570 },
    { "epoch": 0.21569914658163744, "grad_norm": 10.0, "learning_rate": 1.8252598914942624e-05, "loss": 0.9956, "step": 575 },
    { "epoch": 0.21757479133452123, "grad_norm": 7.8125, "learning_rate": 1.8218142466631595e-05, "loss": 1.0511, "step": 580 },
    { "epoch": 0.21945043608740505, "grad_norm": 7.0, "learning_rate": 1.8183382800275492e-05, "loss": 1.1195, "step": 585 },
    { "epoch": 0.22132608084028885, "grad_norm": 8.125, "learning_rate": 1.8148321198373146e-05, "loss": 1.0831, "step": 590 },
    { "epoch": 0.22320172559317264, "grad_norm": 9.0625, "learning_rate": 1.8112958954563647e-05, "loss": 1.2181, "step": 595 },
    { "epoch": 0.22507737034605646, "grad_norm": 5.6875, "learning_rate": 1.8077297373578625e-05, "loss": 1.0144, "step": 600 },
    { "epoch": 0.22695301509894025, "grad_norm": 7.0, "learning_rate": 1.8041337771194124e-05, "loss": 0.8084, "step": 605 },
    { "epoch": 0.22882865985182407, "grad_norm": 6.46875, "learning_rate": 1.800508147418201e-05, "loss": 1.0292, "step": 610 },
    { "epoch": 0.23070430460470787, "grad_norm": 7.25, "learning_rate": 1.796852982026107e-05, "loss": 1.0705, "step": 615 },
    { "epoch": 0.23257994935759166, "grad_norm": 9.4375, "learning_rate": 1.7931684158047623e-05, "loss": 1.0863, "step": 620 },
    { "epoch": 0.23445559411047548, "grad_norm": 6.15625, "learning_rate": 1.7894545847005764e-05, "loss": 1.1707, "step": 625 },
    { "epoch": 0.23633123886335927, "grad_norm": 7.71875, "learning_rate": 1.7857116257397225e-05, "loss": 0.9151, "step": 630 },
    { "epoch": 0.2382068836162431, "grad_norm": 7.5625, "learning_rate": 1.7819396770230796e-05, "loss": 1.0119, "step": 635 },
    { "epoch": 0.24008252836912689, "grad_norm": 7.21875, "learning_rate": 1.7781388777211374e-05, "loss": 1.1517, "step": 640 },
    { "epoch": 0.24195817312201068, "grad_norm": 7.96875, "learning_rate": 1.7743093680688626e-05, "loss": 1.0162, "step": 645 },
    { "epoch": 0.2438338178748945, "grad_norm": 7.78125, "learning_rate": 1.7704512893605247e-05, "loss": 1.208, "step": 650 },
    { "epoch": 0.2457094626277783, "grad_norm": 6.34375, "learning_rate": 1.7665647839444807e-05, "loss": 1.0252, "step": 655 },
    { "epoch": 0.2475851073806621, "grad_norm": 7.90625, "learning_rate": 1.7626499952179255e-05, "loss": 1.1601, "step": 660 },
    { "epoch": 0.2494607521335459, "grad_norm": 8.625, "learning_rate": 1.7587070676215995e-05, "loss": 1.1795, "step": 665 },
    { "epoch": 0.2513363968864297, "grad_norm": 6.59375, "learning_rate": 1.75473614663446e-05, "loss": 0.9092, "step": 670 },
    { "epoch": 0.2532120416393135, "grad_norm": 12.5625, "learning_rate": 1.750737378768314e-05, "loss": 1.1697, "step": 675 },
    { "epoch": 0.2550876863921973, "grad_norm": 7.59375, "learning_rate": 1.7467109115624113e-05, "loss": 1.1992, "step": 680 },
    { "epoch": 0.25696333114508113, "grad_norm": 7.4375, "learning_rate": 1.7426568935780007e-05, "loss": 1.0365, "step": 685 },
    { "epoch": 0.25883897589796495, "grad_norm": 7.84375, "learning_rate": 1.7385754743928512e-05, "loss": 1.2158, "step": 690 },
    { "epoch": 0.2607146206508487, "grad_norm": 7.5, "learning_rate": 1.7344668045957303e-05, "loss": 1.1652, "step": 695 },
    { "epoch": 0.26259026540373254, "grad_norm": 7.1875, "learning_rate": 1.730331035780849e-05, "loss": 1.1178, "step": 700 },
    { "epoch": 0.26446591015661636, "grad_norm": 10.375, "learning_rate": 1.726168320542269e-05, "loss": 1.0897, "step": 705 },
    { "epoch": 0.2663415549095001, "grad_norm": 7.625, "learning_rate": 1.7219788124682702e-05, "loss": 1.2076, "step": 710 },
    { "epoch": 0.26821719966238394, "grad_norm": 6.15625, "learning_rate": 1.7177626661356885e-05, "loss": 1.0381, "step": 715 },
    { "epoch": 0.27009284441526776, "grad_norm": 8.25, "learning_rate": 1.713520037104208e-05, "loss": 1.0232, "step": 720 },
    { "epoch": 0.27196848916815153, "grad_norm": 7.4375, "learning_rate": 1.709251081910623e-05, "loss": 1.0218, "step": 725 },
    { "epoch": 0.27384413392103535, "grad_norm": 9.625, "learning_rate": 1.704955958063063e-05, "loss": 1.0518, "step": 730 },
    { "epoch": 0.27571977867391917, "grad_norm": 8.3125, "learning_rate": 1.700634824035182e-05, "loss": 1.043, "step": 735 },
    { "epoch": 0.277595423426803, "grad_norm": 7.0, "learning_rate": 1.696287839260308e-05, "loss": 1.0724, "step": 740 },
    { "epoch": 0.27947106817968675, "grad_norm": 5.84375, "learning_rate": 1.6919151641255642e-05, "loss": 0.9938, "step": 745 },
    { "epoch": 0.2813467129325706, "grad_norm": 9.4375, "learning_rate": 1.6875169599659495e-05, "loss": 1.1153, "step": 750 },
    { "epoch": 0.2832223576854544, "grad_norm": 7.375, "learning_rate": 1.6830933890583863e-05, "loss": 1.1801, "step": 755 },
    { "epoch": 0.28509800243833816, "grad_norm": 7.4375, "learning_rate": 1.6786446146157332e-05, "loss": 1.1175, "step": 760 },
    { "epoch": 0.286973647191222, "grad_norm": 8.0, "learning_rate": 1.6741708007807626e-05, "loss": 1.1015, "step": 765 },
    { "epoch": 0.2888492919441058, "grad_norm": 8.1875, "learning_rate": 1.6696721126201048e-05, "loss": 1.2244, "step": 770 },
    { "epoch": 0.29072493669698957, "grad_norm": 9.5625, "learning_rate": 1.6651487161181577e-05, "loss": 1.0448, "step": 775 },
    { "epoch": 0.2926005814498734, "grad_norm": 8.6875, "learning_rate": 1.6606007781709626e-05, "loss": 1.0295, "step": 780 },
    { "epoch": 0.2944762262027572, "grad_norm": 5.625, "learning_rate": 1.6560284665800464e-05, "loss": 0.9881, "step": 785 },
    { "epoch": 0.29635187095564103, "grad_norm": 10.0625, "learning_rate": 1.6514319500462303e-05, "loss": 1.0681, "step": 790 },
    { "epoch": 0.2982275157085248, "grad_norm": 6.71875, "learning_rate": 1.646811398163405e-05, "loss": 1.0571, "step": 795 },
    { "epoch": 0.3001031604614086, "grad_norm": 5.6875, "learning_rate": 1.642166981412274e-05, "loss": 0.9326, "step": 800 },
    { "epoch": 0.30197880521429243, "grad_norm": 6.5625, "learning_rate": 1.6374988711540634e-05, "loss": 1.2067, "step": 805 },
    { "epoch": 0.3038544499671762, "grad_norm": 7.75, "learning_rate": 1.6328072396241993e-05, "loss": 1.1231, "step": 810 },
    { "epoch": 0.30573009472006, "grad_norm": 8.1875, "learning_rate": 1.6280922599259515e-05, "loss": 1.2164, "step": 815 },
    { "epoch": 0.30760573947294384, "grad_norm": 9.5625, "learning_rate": 1.62335410602405e-05, "loss": 1.0431, "step": 820 },
    { "epoch": 0.3094813842258276, "grad_norm": 9.3125, "learning_rate": 1.6185929527382628e-05, "loss": 1.2427, "step": 825 },
    { "epoch": 0.3113570289787114, "grad_norm": 7.125, "learning_rate": 1.6138089757369475e-05, "loss": 1.1003, "step": 830 },
    { "epoch": 0.31323267373159525, "grad_norm": 5.78125, "learning_rate": 1.6090023515305703e-05, "loss": 1.1261, "step": 835 },
    { "epoch": 0.31510831848447907, "grad_norm": 5.96875, "learning_rate": 1.604173257465192e-05, "loss": 0.9375, "step": 840 },
    { "epoch": 0.31698396323736283, "grad_norm": 7.875, "learning_rate": 1.5993218717159253e-05, "loss": 1.0321, "step": 845 },
    { "epoch": 0.31885960799024665, "grad_norm": 8.0625, "learning_rate": 1.5944483732803612e-05, "loss": 1.1149, "step": 850 },
    { "epoch": 0.3207352527431305, "grad_norm": 10.125, "learning_rate": 1.5895529419719645e-05, "loss": 0.8962, "step": 855 },
    { "epoch": 0.32261089749601424, "grad_norm": 7.5, "learning_rate": 1.5846357584134385e-05, "loss": 0.9511, "step": 860 },
    { "epoch": 0.32448654224889806, "grad_norm": 6.375, "learning_rate": 1.579697004030061e-05, "loss": 1.0865, "step": 865 },
    { "epoch": 0.3263621870017819, "grad_norm": 8.0625, "learning_rate": 1.5747368610429933e-05, "loss": 0.9981, "step": 870 },
    { "epoch": 0.32823783175466564, "grad_norm": 6.90625, "learning_rate": 1.569755512462551e-05, "loss": 1.1338, "step": 875 },
    { "epoch": 0.33011347650754946, "grad_norm": 6.6875, "learning_rate": 1.5647531420814574e-05, "loss": 0.9559, "step": 880 },
    { "epoch": 0.3319891212604333, "grad_norm": 7.59375, "learning_rate": 1.559729934468059e-05, "loss": 0.949, "step": 885 },
    { "epoch": 0.3338647660133171, "grad_norm": 8.75, "learning_rate": 1.5546860749595165e-05, "loss": 1.0738, "step": 890 },
    { "epoch": 0.33574041076620087, "grad_norm": 10.6875, "learning_rate": 1.5496217496549673e-05, "loss": 1.2675, "step": 895 },
    { "epoch": 0.3376160555190847, "grad_norm": 8.125, "learning_rate": 1.5445371454086574e-05, "loss": 1.1757, "step": 900 },
    { "epoch": 0.3394917002719685, "grad_norm": 7.59375, "learning_rate": 1.5394324498230487e-05, "loss": 1.1336, "step": 905 },
    { "epoch": 0.3413673450248523, "grad_norm": 7.65625, "learning_rate": 1.5343078512418977e-05, "loss": 1.008, "step": 910 },
    { "epoch": 0.3432429897777361, "grad_norm": 7.40625, "learning_rate": 1.529163538743303e-05, "loss": 1.1445, "step": 915 },
    { "epoch": 0.3451186345306199, "grad_norm": 6.5, "learning_rate": 1.5239997021327343e-05, "loss": 1.0209, "step": 920 },
    { "epoch": 0.3469942792835037, "grad_norm": 6.59375, "learning_rate": 1.518816531936024e-05, "loss": 1.0617, "step": 925 },
    { "epoch": 0.3488699240363875, "grad_norm": 7.90625, "learning_rate": 1.5136142193923413e-05, "loss": 1.0885, "step": 930 },
    { "epoch": 0.3507455687892713, "grad_norm": 7.875, "learning_rate": 1.5083929564471344e-05, "loss": 0.9534, "step": 935 },
    { "epoch": 0.35262121354215514, "grad_norm": 6.8125, "learning_rate": 1.5031529357450487e-05, "loss": 0.9802, "step": 940 },
    { "epoch": 0.3544968582950389, "grad_norm": 11.375, "learning_rate": 1.4978943506228198e-05, "loss": 1.149, "step": 945 },
    { "epoch": 0.35637250304792273, "grad_norm": 6.25, "learning_rate": 1.4926173951021384e-05, "loss": 1.179, "step": 950 },
    { "epoch": 0.35824814780080655, "grad_norm": 9.0625, "learning_rate": 1.4873222638824938e-05, "loss": 0.9677, "step": 955 },
    { "epoch": 0.3601237925536903, "grad_norm": 7.75, "learning_rate": 1.4820091523339883e-05, "loss": 1.0542, "step": 960 },
    { "epoch": 0.36199943730657413, "grad_norm": 7.0, "learning_rate": 1.4766782564901299e-05, "loss": 0.9912, "step": 965 },
    { "epoch": 0.36387508205945795, "grad_norm": 8.5, "learning_rate": 1.471329773040599e-05, "loss": 1.1461, "step": 970 },
    { "epoch": 0.3657507268123417, "grad_norm": 9.25, "learning_rate": 1.465963899323992e-05, "loss": 1.1274, "step": 975 },
    { "epoch": 0.36762637156522554, "grad_norm": 7.03125, "learning_rate": 1.4605808333205387e-05, "loss": 1.0867, "step": 980 },
    { "epoch": 0.36950201631810936, "grad_norm": 8.875, "learning_rate": 1.4551807736447996e-05, "loss": 1.0081, "step": 985 },
    { "epoch": 0.3713776610709932, "grad_norm": 6.96875, "learning_rate": 1.4497639195383362e-05, "loss": 0.9978, "step": 990 },
    { "epoch": 0.37325330582387695, "grad_norm": 7.875, "learning_rate": 1.4443304708623598e-05, "loss": 0.9439, "step": 995 },
    { "epoch": 0.37512895057676077, "grad_norm": 7.1875, "learning_rate": 1.4388806280903591e-05, "loss": 0.9749, "step": 1000 },
    { "epoch": 0.37512895057676077, "eval_loss": 1.0913933515548706, "eval_runtime": 107.8156, "eval_samples_per_second": 20.544, "eval_steps_per_second": 2.569, "step": 1000 }
  ],
  "logging_steps": 5,
  "max_steps": 2666,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 3,
        "early_stopping_threshold": 0.001
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.3299712794624e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}