{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.0657781302803545, |
|
"eval_steps": 500, |
|
"global_step": 7630, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0002672505948480982, |
|
"grad_norm": 2.519429922103882, |
|
"learning_rate": 1.0157273918741808e-06, |
|
"loss": 1.398, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0005345011896961964, |
|
"grad_norm": 2.11808705329895, |
|
"learning_rate": 2.0314547837483616e-06, |
|
"loss": 1.3113, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0008017517845442946, |
|
"grad_norm": 2.91418719291687, |
|
"learning_rate": 3.0471821756225426e-06, |
|
"loss": 1.2489, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0010690023793923928, |
|
"grad_norm": 2.435168504714966, |
|
"learning_rate": 4.062909567496723e-06, |
|
"loss": 1.2146, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.001336252974240491, |
|
"grad_norm": 3.325174331665039, |
|
"learning_rate": 5.078636959370905e-06, |
|
"loss": 1.1937, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0016035035690885892, |
|
"grad_norm": 2.077951192855835, |
|
"learning_rate": 6.094364351245085e-06, |
|
"loss": 1.1737, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.0018707541639366874, |
|
"grad_norm": 3.0669870376586914, |
|
"learning_rate": 7.110091743119267e-06, |
|
"loss": 1.1484, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.0021380047587847856, |
|
"grad_norm": 2.14487886428833, |
|
"learning_rate": 8.125819134993446e-06, |
|
"loss": 1.1465, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.002405255353632884, |
|
"grad_norm": 2.303673267364502, |
|
"learning_rate": 9.141546526867629e-06, |
|
"loss": 1.1242, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.002672505948480982, |
|
"grad_norm": 2.725895404815674, |
|
"learning_rate": 1.015727391874181e-05, |
|
"loss": 1.1004, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.0029397565433290805, |
|
"grad_norm": 2.272921562194824, |
|
"learning_rate": 1.117300131061599e-05, |
|
"loss": 1.0987, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0032070071381771785, |
|
"grad_norm": 2.352524518966675, |
|
"learning_rate": 1.218872870249017e-05, |
|
"loss": 1.0898, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.003474257733025277, |
|
"grad_norm": 2.23698353767395, |
|
"learning_rate": 1.3204456094364351e-05, |
|
"loss": 1.0892, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.003741508327873375, |
|
"grad_norm": 2.56597900390625, |
|
"learning_rate": 1.4220183486238533e-05, |
|
"loss": 1.0709, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.004008758922721473, |
|
"grad_norm": 2.4281728267669678, |
|
"learning_rate": 1.5235910878112714e-05, |
|
"loss": 1.0597, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.004276009517569571, |
|
"grad_norm": 2.885925769805908, |
|
"learning_rate": 1.6251638269986893e-05, |
|
"loss": 1.0452, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.004543260112417669, |
|
"grad_norm": 2.3664019107818604, |
|
"learning_rate": 1.7267365661861077e-05, |
|
"loss": 1.0439, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.004810510707265768, |
|
"grad_norm": 2.420078992843628, |
|
"learning_rate": 1.8283093053735257e-05, |
|
"loss": 1.0515, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.005077761302113866, |
|
"grad_norm": 2.113743782043457, |
|
"learning_rate": 1.9298820445609438e-05, |
|
"loss": 1.0373, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.005345011896961964, |
|
"grad_norm": 2.6651926040649414, |
|
"learning_rate": 2.031454783748362e-05, |
|
"loss": 1.0249, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.005612262491810062, |
|
"grad_norm": 2.965786933898926, |
|
"learning_rate": 2.13302752293578e-05, |
|
"loss": 1.0144, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.005879513086658161, |
|
"grad_norm": 2.5247995853424072, |
|
"learning_rate": 2.234600262123198e-05, |
|
"loss": 1.0061, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.006146763681506259, |
|
"grad_norm": 2.1675336360931396, |
|
"learning_rate": 2.336173001310616e-05, |
|
"loss": 1.0084, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.006414014276354357, |
|
"grad_norm": 1.9945191144943237, |
|
"learning_rate": 2.437745740498034e-05, |
|
"loss": 0.9968, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.006681264871202455, |
|
"grad_norm": 2.9827675819396973, |
|
"learning_rate": 2.5393184796854525e-05, |
|
"loss": 0.9903, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.006948515466050554, |
|
"grad_norm": 1.8591395616531372, |
|
"learning_rate": 2.6408912188728702e-05, |
|
"loss": 0.9914, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.007215766060898652, |
|
"grad_norm": 2.4378135204315186, |
|
"learning_rate": 2.7424639580602886e-05, |
|
"loss": 0.9967, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.00748301665574675, |
|
"grad_norm": 2.258831739425659, |
|
"learning_rate": 2.8440366972477066e-05, |
|
"loss": 0.9829, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.007750267250594848, |
|
"grad_norm": 2.1327550411224365, |
|
"learning_rate": 2.9456094364351244e-05, |
|
"loss": 0.979, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.008017517845442947, |
|
"grad_norm": 1.9900795221328735, |
|
"learning_rate": 3.0471821756225428e-05, |
|
"loss": 0.9693, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.008284768440291045, |
|
"grad_norm": 2.1929852962493896, |
|
"learning_rate": 3.148754914809961e-05, |
|
"loss": 0.9679, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.008552019035139143, |
|
"grad_norm": 2.0846214294433594, |
|
"learning_rate": 3.2503276539973785e-05, |
|
"loss": 0.9675, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.00881926962998724, |
|
"grad_norm": 2.1106324195861816, |
|
"learning_rate": 3.351900393184797e-05, |
|
"loss": 0.9535, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.009086520224835339, |
|
"grad_norm": 2.5252115726470947, |
|
"learning_rate": 3.453473132372215e-05, |
|
"loss": 0.9547, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.009353770819683437, |
|
"grad_norm": 1.9128707647323608, |
|
"learning_rate": 3.555045871559633e-05, |
|
"loss": 0.9467, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.009621021414531536, |
|
"grad_norm": 1.7640856504440308, |
|
"learning_rate": 3.6566186107470514e-05, |
|
"loss": 0.9549, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.009888272009379634, |
|
"grad_norm": 2.087353467941284, |
|
"learning_rate": 3.7581913499344695e-05, |
|
"loss": 0.9451, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.010155522604227732, |
|
"grad_norm": 2.153779983520508, |
|
"learning_rate": 3.8597640891218876e-05, |
|
"loss": 0.9488, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.01042277319907583, |
|
"grad_norm": 1.6695175170898438, |
|
"learning_rate": 3.9613368283093056e-05, |
|
"loss": 0.9425, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.010690023793923928, |
|
"grad_norm": 2.2292797565460205, |
|
"learning_rate": 4.062909567496724e-05, |
|
"loss": 0.9228, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.010957274388772026, |
|
"grad_norm": 1.5429211854934692, |
|
"learning_rate": 4.164482306684142e-05, |
|
"loss": 0.932, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.011224524983620124, |
|
"grad_norm": 1.6612755060195923, |
|
"learning_rate": 4.26605504587156e-05, |
|
"loss": 0.926, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.011491775578468222, |
|
"grad_norm": 1.2971765995025635, |
|
"learning_rate": 4.367627785058978e-05, |
|
"loss": 0.9288, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.011759026173316322, |
|
"grad_norm": 1.834157943725586, |
|
"learning_rate": 4.469200524246396e-05, |
|
"loss": 0.9215, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.01202627676816442, |
|
"grad_norm": 1.5940046310424805, |
|
"learning_rate": 4.570773263433814e-05, |
|
"loss": 0.9164, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.012293527363012518, |
|
"grad_norm": 1.9722518920898438, |
|
"learning_rate": 4.672346002621232e-05, |
|
"loss": 0.9048, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.012560777957860616, |
|
"grad_norm": 1.5735468864440918, |
|
"learning_rate": 4.77391874180865e-05, |
|
"loss": 0.9159, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.012828028552708714, |
|
"grad_norm": 2.411482572555542, |
|
"learning_rate": 4.875491480996068e-05, |
|
"loss": 0.9039, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.013095279147556812, |
|
"grad_norm": 1.8868205547332764, |
|
"learning_rate": 4.977064220183487e-05, |
|
"loss": 0.9048, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.01336252974240491, |
|
"grad_norm": 1.232778787612915, |
|
"learning_rate": 4.9999915451558777e-05, |
|
"loss": 0.9027, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.01362978033725301, |
|
"grad_norm": 1.7322916984558105, |
|
"learning_rate": 4.999955597496219e-05, |
|
"loss": 0.9048, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.013897030932101108, |
|
"grad_norm": 1.9727288484573364, |
|
"learning_rate": 4.9998914381774255e-05, |
|
"loss": 0.8952, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.014164281526949206, |
|
"grad_norm": 1.446117639541626, |
|
"learning_rate": 4.999799067923527e-05, |
|
"loss": 0.8927, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.014431532121797304, |
|
"grad_norm": 1.4051318168640137, |
|
"learning_rate": 4.999678487776908e-05, |
|
"loss": 0.8908, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.014698782716645402, |
|
"grad_norm": 1.4036601781845093, |
|
"learning_rate": 4.9995296990983006e-05, |
|
"loss": 0.8962, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.0149660333114935, |
|
"grad_norm": 1.2325096130371094, |
|
"learning_rate": 4.999352703566763e-05, |
|
"loss": 0.8966, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.015233283906341597, |
|
"grad_norm": 1.4330248832702637, |
|
"learning_rate": 4.999147503179668e-05, |
|
"loss": 0.8756, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.015500534501189695, |
|
"grad_norm": 1.3245261907577515, |
|
"learning_rate": 4.998914100252672e-05, |
|
"loss": 0.8818, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.015767785096037795, |
|
"grad_norm": 1.2330083847045898, |
|
"learning_rate": 4.998652497419696e-05, |
|
"loss": 0.8749, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.016035035690885893, |
|
"grad_norm": 1.3080857992172241, |
|
"learning_rate": 4.9983626976328927e-05, |
|
"loss": 0.8793, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.01630228628573399, |
|
"grad_norm": 1.8834900856018066, |
|
"learning_rate": 4.998044704162613e-05, |
|
"loss": 0.8786, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.01656953688058209, |
|
"grad_norm": 1.9869036674499512, |
|
"learning_rate": 4.9976985205973705e-05, |
|
"loss": 0.865, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.016836787475430187, |
|
"grad_norm": 1.1913156509399414, |
|
"learning_rate": 4.997324150843799e-05, |
|
"loss": 0.8675, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.017104038070278285, |
|
"grad_norm": 1.733834147453308, |
|
"learning_rate": 4.99692159912661e-05, |
|
"loss": 0.8739, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.017371288665126383, |
|
"grad_norm": 1.2863543033599854, |
|
"learning_rate": 4.996490869988546e-05, |
|
"loss": 0.8692, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.01763853925997448, |
|
"grad_norm": 1.3839147090911865, |
|
"learning_rate": 4.996031968290326e-05, |
|
"loss": 0.8687, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.01790578985482258, |
|
"grad_norm": 1.3583861589431763, |
|
"learning_rate": 4.995544899210594e-05, |
|
"loss": 0.8659, |
|
"step": 2077 |
|
}, |
|
{ |
|
"epoch": 0.018173040449670677, |
|
"grad_norm": 1.3181118965148926, |
|
"learning_rate": 4.9950296682458583e-05, |
|
"loss": 0.8653, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 0.018440291044518775, |
|
"grad_norm": 1.8370361328125, |
|
"learning_rate": 4.994486281210429e-05, |
|
"loss": 0.8571, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.018707541639366873, |
|
"grad_norm": 1.4417659044265747, |
|
"learning_rate": 4.9939147442363566e-05, |
|
"loss": 0.8625, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.018974792234214975, |
|
"grad_norm": 1.4819358587265015, |
|
"learning_rate": 4.9933150637733574e-05, |
|
"loss": 0.8534, |
|
"step": 2201 |
|
}, |
|
{ |
|
"epoch": 0.019242042829063073, |
|
"grad_norm": 1.4205533266067505, |
|
"learning_rate": 4.992687246588743e-05, |
|
"loss": 0.8406, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.01950929342391117, |
|
"grad_norm": 1.3700474500656128, |
|
"learning_rate": 4.992031299767347e-05, |
|
"loss": 0.8482, |
|
"step": 2263 |
|
}, |
|
{ |
|
"epoch": 0.01977654401875927, |
|
"grad_norm": 1.134555697441101, |
|
"learning_rate": 4.9913472307114386e-05, |
|
"loss": 0.8506, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 0.020043794613607367, |
|
"grad_norm": 1.058451771736145, |
|
"learning_rate": 4.9906350471406446e-05, |
|
"loss": 0.8529, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.020311045208455464, |
|
"grad_norm": 1.0300796031951904, |
|
"learning_rate": 4.989894757091861e-05, |
|
"loss": 0.852, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 0.020578295803303562, |
|
"grad_norm": 1.0600379705429077, |
|
"learning_rate": 4.989126368919158e-05, |
|
"loss": 0.8499, |
|
"step": 2387 |
|
}, |
|
{ |
|
"epoch": 0.02084554639815166, |
|
"grad_norm": 1.2507109642028809, |
|
"learning_rate": 4.988329891293693e-05, |
|
"loss": 0.847, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.02111279699299976, |
|
"grad_norm": 1.272971510887146, |
|
"learning_rate": 4.987505333203608e-05, |
|
"loss": 0.8458, |
|
"step": 2449 |
|
}, |
|
{ |
|
"epoch": 0.021380047587847856, |
|
"grad_norm": 1.320924162864685, |
|
"learning_rate": 4.9866527039539276e-05, |
|
"loss": 0.833, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.021647298182695954, |
|
"grad_norm": 1.0759563446044922, |
|
"learning_rate": 4.9857720131664594e-05, |
|
"loss": 0.8422, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.021914548777544052, |
|
"grad_norm": 1.1410887241363525, |
|
"learning_rate": 4.9848632707796773e-05, |
|
"loss": 0.8458, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 0.02218179937239215, |
|
"grad_norm": 1.337589144706726, |
|
"learning_rate": 4.9839264870486155e-05, |
|
"loss": 0.835, |
|
"step": 2573 |
|
}, |
|
{ |
|
"epoch": 0.02244904996724025, |
|
"grad_norm": 1.2136834859848022, |
|
"learning_rate": 4.9829616725447526e-05, |
|
"loss": 0.8309, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.022716300562088346, |
|
"grad_norm": 1.08687162399292, |
|
"learning_rate": 4.981968838155888e-05, |
|
"loss": 0.8418, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.022983551156936444, |
|
"grad_norm": 1.2663565874099731, |
|
"learning_rate": 4.980947995086024e-05, |
|
"loss": 0.8292, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 0.023250801751784546, |
|
"grad_norm": 1.6643290519714355, |
|
"learning_rate": 4.979899154855234e-05, |
|
"loss": 0.8353, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.023518052346632644, |
|
"grad_norm": 1.275803565979004, |
|
"learning_rate": 4.9788223292995386e-05, |
|
"loss": 0.8209, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 0.023785302941480742, |
|
"grad_norm": 1.0680255889892578, |
|
"learning_rate": 4.977717530570768e-05, |
|
"loss": 0.8139, |
|
"step": 2759 |
|
}, |
|
{ |
|
"epoch": 0.02405255353632884, |
|
"grad_norm": 1.2904725074768066, |
|
"learning_rate": 4.976584771136425e-05, |
|
"loss": 0.832, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.024319804131176938, |
|
"grad_norm": 1.1689643859863281, |
|
"learning_rate": 4.975424063779547e-05, |
|
"loss": 0.8218, |
|
"step": 2821 |
|
}, |
|
{ |
|
"epoch": 0.024587054726025036, |
|
"grad_norm": 1.1010081768035889, |
|
"learning_rate": 4.974235421598557e-05, |
|
"loss": 0.8341, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 0.024854305320873134, |
|
"grad_norm": 1.5075924396514893, |
|
"learning_rate": 4.973018858007122e-05, |
|
"loss": 0.8216, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.025121555915721232, |
|
"grad_norm": 1.0460745096206665, |
|
"learning_rate": 4.9717743867339963e-05, |
|
"loss": 0.8235, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 0.02538880651056933, |
|
"grad_norm": 1.0851374864578247, |
|
"learning_rate": 4.9705020218228695e-05, |
|
"loss": 0.8169, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.025656057105417428, |
|
"grad_norm": 1.0281602144241333, |
|
"learning_rate": 4.969201777632205e-05, |
|
"loss": 0.8359, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.025923307700265526, |
|
"grad_norm": 1.1618735790252686, |
|
"learning_rate": 4.9678736688350846e-05, |
|
"loss": 0.8221, |
|
"step": 3007 |
|
}, |
|
{ |
|
"epoch": 0.026190558295113624, |
|
"grad_norm": 0.9836195111274719, |
|
"learning_rate": 4.966517710419033e-05, |
|
"loss": 0.8203, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 0.02645780888996172, |
|
"grad_norm": 1.2163631916046143, |
|
"learning_rate": 4.965133917685858e-05, |
|
"loss": 0.8127, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.02672505948480982, |
|
"grad_norm": 0.9987328052520752, |
|
"learning_rate": 4.9637223062514714e-05, |
|
"loss": 0.8166, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.026992310079657918, |
|
"grad_norm": 0.9968119263648987, |
|
"learning_rate": 4.962282892045718e-05, |
|
"loss": 0.8152, |
|
"step": 3131 |
|
}, |
|
{ |
|
"epoch": 0.02725956067450602, |
|
"grad_norm": 1.3783190250396729, |
|
"learning_rate": 4.9608156913121904e-05, |
|
"loss": 0.814, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.027526811269354117, |
|
"grad_norm": 1.1062177419662476, |
|
"learning_rate": 4.959320720608049e-05, |
|
"loss": 0.8125, |
|
"step": 3193 |
|
}, |
|
{ |
|
"epoch": 0.027794061864202215, |
|
"grad_norm": 1.0210764408111572, |
|
"learning_rate": 4.9577979968038354e-05, |
|
"loss": 0.8033, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 0.028061312459050313, |
|
"grad_norm": 1.1704070568084717, |
|
"learning_rate": 4.956247537083282e-05, |
|
"loss": 0.8147, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.02832856305389841, |
|
"grad_norm": 1.3224440813064575, |
|
"learning_rate": 4.9546693589431145e-05, |
|
"loss": 0.8043, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 0.02859581364874651, |
|
"grad_norm": 1.142484426498413, |
|
"learning_rate": 4.9530634801928595e-05, |
|
"loss": 0.8076, |
|
"step": 3317 |
|
}, |
|
{ |
|
"epoch": 0.028863064243594607, |
|
"grad_norm": 1.014917254447937, |
|
"learning_rate": 4.9514299189546395e-05, |
|
"loss": 0.8055, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.029130314838442705, |
|
"grad_norm": 1.1869006156921387, |
|
"learning_rate": 4.949768693662973e-05, |
|
"loss": 0.8096, |
|
"step": 3379 |
|
}, |
|
{ |
|
"epoch": 0.029397565433290803, |
|
"grad_norm": 1.1795014142990112, |
|
"learning_rate": 4.948079823064559e-05, |
|
"loss": 0.8038, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.0296648160281389, |
|
"grad_norm": 1.0491461753845215, |
|
"learning_rate": 4.946363326218074e-05, |
|
"loss": 0.8007, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.029932066622987, |
|
"grad_norm": 1.2223421335220337, |
|
"learning_rate": 4.9446192224939525e-05, |
|
"loss": 0.8109, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 0.030199317217835097, |
|
"grad_norm": 1.392539381980896, |
|
"learning_rate": 4.942847531574167e-05, |
|
"loss": 0.8069, |
|
"step": 3503 |
|
}, |
|
{ |
|
"epoch": 0.030466567812683195, |
|
"grad_norm": 1.018568992614746, |
|
"learning_rate": 4.941048273452008e-05, |
|
"loss": 0.8026, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 0.030733818407531293, |
|
"grad_norm": 0.827509343624115, |
|
"learning_rate": 4.9392214684318605e-05, |
|
"loss": 0.8017, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.03100106900237939, |
|
"grad_norm": 1.6765421628952026, |
|
"learning_rate": 4.93736713712897e-05, |
|
"loss": 0.8017, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 0.03126831959722749, |
|
"grad_norm": 1.3162996768951416, |
|
"learning_rate": 4.9354853004692124e-05, |
|
"loss": 0.7935, |
|
"step": 3627 |
|
}, |
|
{ |
|
"epoch": 0.03153557019207559, |
|
"grad_norm": 0.8754608631134033, |
|
"learning_rate": 4.93357597968886e-05, |
|
"loss": 0.7944, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 0.03180282078692369, |
|
"grad_norm": 1.177233099937439, |
|
"learning_rate": 4.931639196334338e-05, |
|
"loss": 0.7874, |
|
"step": 3689 |
|
}, |
|
{ |
|
"epoch": 0.032070071381771786, |
|
"grad_norm": 1.1029152870178223, |
|
"learning_rate": 4.9296749722619826e-05, |
|
"loss": 0.8035, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.032337321976619884, |
|
"grad_norm": 1.0172922611236572, |
|
"learning_rate": 4.9276833296377966e-05, |
|
"loss": 0.8059, |
|
"step": 3751 |
|
}, |
|
{ |
|
"epoch": 0.03260457257146798, |
|
"grad_norm": 1.1427065134048462, |
|
"learning_rate": 4.925664290937196e-05, |
|
"loss": 0.7988, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 0.03287182316631608, |
|
"grad_norm": 0.9142988920211792, |
|
"learning_rate": 4.9236178789447576e-05, |
|
"loss": 0.7827, |
|
"step": 3813 |
|
}, |
|
{ |
|
"epoch": 0.03313907376116418, |
|
"grad_norm": 0.6876903176307678, |
|
"learning_rate": 4.921544116753962e-05, |
|
"loss": 0.7902, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 0.033406324356012276, |
|
"grad_norm": 0.9945843815803528, |
|
"learning_rate": 4.919443027766935e-05, |
|
"loss": 0.7877, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.033673574950860374, |
|
"grad_norm": 0.8367254734039307, |
|
"learning_rate": 4.91731463569418e-05, |
|
"loss": 0.793, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 0.03394082554570847, |
|
"grad_norm": 1.2604331970214844, |
|
"learning_rate": 4.915158964554312e-05, |
|
"loss": 0.7988, |
|
"step": 3937 |
|
}, |
|
{ |
|
"epoch": 0.03420807614055657, |
|
"grad_norm": 1.0501065254211426, |
|
"learning_rate": 4.912976038673786e-05, |
|
"loss": 0.7831, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 0.03447532673540467, |
|
"grad_norm": 1.0765125751495361, |
|
"learning_rate": 4.9107658826866254e-05, |
|
"loss": 0.7959, |
|
"step": 3999 |
|
}, |
|
{ |
|
"epoch": 0.034742577330252766, |
|
"grad_norm": 1.0146560668945312, |
|
"learning_rate": 4.908528521534139e-05, |
|
"loss": 0.7954, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.035009827925100864, |
|
"grad_norm": 0.7888779640197754, |
|
"learning_rate": 4.906263980464644e-05, |
|
"loss": 0.7864, |
|
"step": 4061 |
|
}, |
|
{ |
|
"epoch": 0.03527707851994896, |
|
"grad_norm": 0.9910663962364197, |
|
"learning_rate": 4.903972285033178e-05, |
|
"loss": 0.7891, |
|
"step": 4092 |
|
}, |
|
{ |
|
"epoch": 0.03554432911479706, |
|
"grad_norm": 1.0636518001556396, |
|
"learning_rate": 4.901653461101213e-05, |
|
"loss": 0.7971, |
|
"step": 4123 |
|
}, |
|
{ |
|
"epoch": 0.03581157970964516, |
|
"grad_norm": 0.9061274528503418, |
|
"learning_rate": 4.8993075348363626e-05, |
|
"loss": 0.7866, |
|
"step": 4154 |
|
}, |
|
{ |
|
"epoch": 0.036078830304493256, |
|
"grad_norm": 0.8862403035163879, |
|
"learning_rate": 4.896934532712084e-05, |
|
"loss": 0.7895, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.036346080899341354, |
|
"grad_norm": 1.1269464492797852, |
|
"learning_rate": 4.8945344815073846e-05, |
|
"loss": 0.7933, |
|
"step": 4216 |
|
}, |
|
{ |
|
"epoch": 0.03661333149418945, |
|
"grad_norm": 0.8442766666412354, |
|
"learning_rate": 4.892107408306516e-05, |
|
"loss": 0.7826, |
|
"step": 4247 |
|
}, |
|
{ |
|
"epoch": 0.03688058208903755, |
|
"grad_norm": 1.176395297050476, |
|
"learning_rate": 4.889653340498669e-05, |
|
"loss": 0.7812, |
|
"step": 4278 |
|
}, |
|
{ |
|
"epoch": 0.03714783268388565, |
|
"grad_norm": 0.9889622330665588, |
|
"learning_rate": 4.8871723057776664e-05, |
|
"loss": 0.7796, |
|
"step": 4309 |
|
}, |
|
{ |
|
"epoch": 0.037415083278733746, |
|
"grad_norm": 1.0450257062911987, |
|
"learning_rate": 4.8846643321416476e-05, |
|
"loss": 0.7808, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.03768233387358185, |
|
"grad_norm": 0.9454023838043213, |
|
"learning_rate": 4.882129447892753e-05, |
|
"loss": 0.7762, |
|
"step": 4371 |
|
}, |
|
{ |
|
"epoch": 0.03794958446842995, |
|
"grad_norm": 0.8115898966789246, |
|
"learning_rate": 4.8795676816368076e-05, |
|
"loss": 0.7897, |
|
"step": 4402 |
|
}, |
|
{ |
|
"epoch": 0.03821683506327805, |
|
"grad_norm": 1.3698389530181885, |
|
"learning_rate": 4.876979062282995e-05, |
|
"loss": 0.7829, |
|
"step": 4433 |
|
}, |
|
{ |
|
"epoch": 0.038484085658126145, |
|
"grad_norm": 0.8531683087348938, |
|
"learning_rate": 4.8743636190435325e-05, |
|
"loss": 0.7785, |
|
"step": 4464 |
|
}, |
|
{ |
|
"epoch": 0.03875133625297424, |
|
"grad_norm": 1.0102500915527344, |
|
"learning_rate": 4.871721381433344e-05, |
|
"loss": 0.7782, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.03901858684782234, |
|
"grad_norm": 0.8592963218688965, |
|
"learning_rate": 4.869052379269719e-05, |
|
"loss": 0.7822, |
|
"step": 4526 |
|
}, |
|
{ |
|
"epoch": 0.03928583744267044, |
|
"grad_norm": 0.8510753512382507, |
|
"learning_rate": 4.866356642671985e-05, |
|
"loss": 0.7747, |
|
"step": 4557 |
|
}, |
|
{ |
|
"epoch": 0.03955308803751854, |
|
"grad_norm": 0.8656049370765686, |
|
"learning_rate": 4.8636342020611634e-05, |
|
"loss": 0.7796, |
|
"step": 4588 |
|
}, |
|
{ |
|
"epoch": 0.039820338632366635, |
|
"grad_norm": 0.7938835620880127, |
|
"learning_rate": 4.860885088159626e-05, |
|
"loss": 0.7813, |
|
"step": 4619 |
|
}, |
|
{ |
|
"epoch": 0.04008758922721473, |
|
"grad_norm": 1.1025077104568481, |
|
"learning_rate": 4.858109331990751e-05, |
|
"loss": 0.7781, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.04035483982206283, |
|
"grad_norm": 0.9807741045951843, |
|
"learning_rate": 4.855306964878567e-05, |
|
"loss": 0.7835, |
|
"step": 4681 |
|
}, |
|
{ |
|
"epoch": 0.04062209041691093, |
|
"grad_norm": 0.8083140850067139, |
|
"learning_rate": 4.8524780184474084e-05, |
|
"loss": 0.7801, |
|
"step": 4712 |
|
}, |
|
{ |
|
"epoch": 0.04088934101175903, |
|
"grad_norm": 22.445158004760742, |
|
"learning_rate": 4.8496225246215496e-05, |
|
"loss": 0.7982, |
|
"step": 4743 |
|
}, |
|
{ |
|
"epoch": 0.041156591606607125, |
|
"grad_norm": 0.9455680847167969, |
|
"learning_rate": 4.8467405156248505e-05, |
|
"loss": 0.8094, |
|
"step": 4774 |
|
}, |
|
{ |
|
"epoch": 0.04142384220145522, |
|
"grad_norm": 1.2236374616622925, |
|
"learning_rate": 4.843832023980392e-05, |
|
"loss": 0.7868, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.04169109279630332, |
|
"grad_norm": 0.9210876226425171, |
|
"learning_rate": 4.840897082510106e-05, |
|
"loss": 0.7782, |
|
"step": 4836 |
|
}, |
|
{ |
|
"epoch": 0.04195834339115142, |
|
"grad_norm": 1.6687206029891968, |
|
"learning_rate": 4.8379357243344084e-05, |
|
"loss": 0.7898, |
|
"step": 4867 |
|
}, |
|
{ |
|
"epoch": 0.04222559398599952, |
|
"grad_norm": 1.0837191343307495, |
|
"learning_rate": 4.8349479828718236e-05, |
|
"loss": 0.7671, |
|
"step": 4898 |
|
}, |
|
{ |
|
"epoch": 0.042492844580847615, |
|
"grad_norm": 1.0459831953048706, |
|
"learning_rate": 4.8319338918386075e-05, |
|
"loss": 0.7741, |
|
"step": 4929 |
|
}, |
|
{ |
|
"epoch": 0.04276009517569571, |
|
"grad_norm": 0.8402792811393738, |
|
"learning_rate": 4.828893485248369e-05, |
|
"loss": 0.7788, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.04302734577054381, |
|
"grad_norm": 0.8613904714584351, |
|
"learning_rate": 4.825826797411682e-05, |
|
"loss": 0.7745, |
|
"step": 4991 |
|
}, |
|
{ |
|
"epoch": 0.04329459636539191, |
|
"grad_norm": 0.9229130744934082, |
|
"learning_rate": 4.822733862935702e-05, |
|
"loss": 0.7735, |
|
"step": 5022 |
|
}, |
|
{ |
|
"epoch": 0.04356184696024001, |
|
"grad_norm": 1.2563060522079468, |
|
"learning_rate": 4.819614716723775e-05, |
|
"loss": 0.7765, |
|
"step": 5053 |
|
}, |
|
{ |
|
"epoch": 0.043829097555088105, |
|
"grad_norm": 1.149911642074585, |
|
"learning_rate": 4.8164693939750425e-05, |
|
"loss": 0.7952, |
|
"step": 5084 |
|
}, |
|
{ |
|
"epoch": 0.0440963481499362, |
|
"grad_norm": 0.7990419864654541, |
|
"learning_rate": 4.813297930184042e-05, |
|
"loss": 0.776, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.0443635987447843, |
|
"grad_norm": 0.9344177842140198, |
|
"learning_rate": 4.810100361140314e-05, |
|
"loss": 0.7738, |
|
"step": 5146 |
|
}, |
|
{ |
|
"epoch": 0.0446308493396324, |
|
"grad_norm": 0.9060741662979126, |
|
"learning_rate": 4.8068767229279885e-05, |
|
"loss": 0.7722, |
|
"step": 5177 |
|
}, |
|
{ |
|
"epoch": 0.0448980999344805, |
|
"grad_norm": 1.309382438659668, |
|
"learning_rate": 4.8036270519253854e-05, |
|
"loss": 0.7842, |
|
"step": 5208 |
|
}, |
|
{ |
|
"epoch": 0.045165350529328595, |
|
"grad_norm": 1.1653950214385986, |
|
"learning_rate": 4.8003513848046e-05, |
|
"loss": 0.7607, |
|
"step": 5239 |
|
}, |
|
{ |
|
"epoch": 0.04543260112417669, |
|
"grad_norm": 0.8792166709899902, |
|
"learning_rate": 4.79704975853109e-05, |
|
"loss": 0.7698, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.04569985171902479, |
|
"grad_norm": 1.0823981761932373, |
|
"learning_rate": 4.793722210363262e-05, |
|
"loss": 0.771, |
|
"step": 5301 |
|
}, |
|
{ |
|
"epoch": 0.04596710231387289, |
|
"grad_norm": 1.2965304851531982, |
|
"learning_rate": 4.7903687778520414e-05, |
|
"loss": 0.7728, |
|
"step": 5332 |
|
}, |
|
{ |
|
"epoch": 0.046234352908720994, |
|
"grad_norm": 0.8686290979385376, |
|
"learning_rate": 4.7869894988404593e-05, |
|
"loss": 0.7672, |
|
"step": 5363 |
|
}, |
|
{ |
|
"epoch": 0.04650160350356909, |
|
"grad_norm": 0.8105490207672119, |
|
"learning_rate": 4.783584411463221e-05, |
|
"loss": 0.7734, |
|
"step": 5394 |
|
}, |
|
{ |
|
"epoch": 0.04676885409841719, |
|
"grad_norm": 1.1494736671447754, |
|
"learning_rate": 4.780153554146274e-05, |
|
"loss": 0.7757, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.04703610469326529, |
|
"grad_norm": 0.9842113852500916, |
|
"learning_rate": 4.7766969656063766e-05, |
|
"loss": 0.7716, |
|
"step": 5456 |
|
}, |
|
{ |
|
"epoch": 0.047303355288113386, |
|
"grad_norm": 0.8069283962249756, |
|
"learning_rate": 4.773214684850662e-05, |
|
"loss": 0.7718, |
|
"step": 5487 |
|
}, |
|
{ |
|
"epoch": 0.047570605882961484, |
|
"grad_norm": 0.7985478043556213, |
|
"learning_rate": 4.769706751176193e-05, |
|
"loss": 0.7671, |
|
"step": 5518 |
|
}, |
|
{ |
|
"epoch": 0.04783785647780958, |
|
"grad_norm": 1.0442899465560913, |
|
"learning_rate": 4.7661732041695264e-05, |
|
"loss": 0.7663, |
|
"step": 5549 |
|
}, |
|
{ |
|
"epoch": 0.04810510707265768, |
|
"grad_norm": 0.9672103524208069, |
|
"learning_rate": 4.762614083706258e-05, |
|
"loss": 0.7647, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.04837235766750578, |
|
"grad_norm": 1.1078449487686157, |
|
"learning_rate": 4.759029429950581e-05, |
|
"loss": 0.7685, |
|
"step": 5611 |
|
}, |
|
{ |
|
"epoch": 0.048639608262353876, |
|
"grad_norm": 1.2695120573043823, |
|
"learning_rate": 4.7554192833548235e-05, |
|
"loss": 0.7632, |
|
"step": 5642 |
|
}, |
|
{ |
|
"epoch": 0.048906858857201974, |
|
"grad_norm": 0.8276777863502502, |
|
"learning_rate": 4.751783684659e-05, |
|
"loss": 0.7701, |
|
"step": 5673 |
|
}, |
|
{ |
|
"epoch": 0.04917410945205007, |
|
"grad_norm": 0.8197301626205444, |
|
"learning_rate": 4.748122674890348e-05, |
|
"loss": 0.7751, |
|
"step": 5704 |
|
}, |
|
{ |
|
"epoch": 0.04944136004689817, |
|
"grad_norm": 0.8977752923965454, |
|
"learning_rate": 4.7444362953628654e-05, |
|
"loss": 0.7663, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.04970861064174627, |
|
"grad_norm": 1.0451029539108276, |
|
"learning_rate": 4.7407245876768424e-05, |
|
"loss": 0.763, |
|
"step": 5766 |
|
}, |
|
{ |
|
"epoch": 0.049975861236594366, |
|
"grad_norm": 0.7946732044219971, |
|
"learning_rate": 4.736987593718397e-05, |
|
"loss": 0.7587, |
|
"step": 5797 |
|
}, |
|
{ |
|
"epoch": 0.050243111831442464, |
|
"grad_norm": 0.7361197471618652, |
|
"learning_rate": 4.733225355658999e-05, |
|
"loss": 0.7632, |
|
"step": 5828 |
|
}, |
|
{ |
|
"epoch": 0.05051036242629056, |
|
"grad_norm": 0.764507532119751, |
|
"learning_rate": 4.7294379159549926e-05, |
|
"loss": 0.7698, |
|
"step": 5859 |
|
}, |
|
{ |
|
"epoch": 0.05077761302113866, |
|
"grad_norm": 0.8598806262016296, |
|
"learning_rate": 4.725625317347119e-05, |
|
"loss": 0.7627, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.05104486361598676, |
|
"grad_norm": 1.0427391529083252, |
|
"learning_rate": 4.7217876028600374e-05, |
|
"loss": 0.7585, |
|
"step": 5921 |
|
}, |
|
{ |
|
"epoch": 0.051312114210834855, |
|
"grad_norm": 1.2377961874008179, |
|
"learning_rate": 4.717924815801832e-05, |
|
"loss": 0.7572, |
|
"step": 5952 |
|
}, |
|
{ |
|
"epoch": 0.05157936480568295, |
|
"grad_norm": 0.8498066067695618, |
|
"learning_rate": 4.714036999763532e-05, |
|
"loss": 0.7663, |
|
"step": 5983 |
|
}, |
|
{ |
|
"epoch": 0.05184661540053105, |
|
"grad_norm": 0.8852441310882568, |
|
"learning_rate": 4.7101241986186116e-05, |
|
"loss": 0.755, |
|
"step": 6014 |
|
}, |
|
{ |
|
"epoch": 0.05211386599537915, |
|
"grad_norm": 0.9701170325279236, |
|
"learning_rate": 4.7061864565225e-05, |
|
"loss": 0.7656, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 0.05238111659022725, |
|
"grad_norm": 0.7707578539848328, |
|
"learning_rate": 4.702223817912081e-05, |
|
"loss": 0.7455, |
|
"step": 6076 |
|
}, |
|
{ |
|
"epoch": 0.052648367185075345, |
|
"grad_norm": 0.7524960041046143, |
|
"learning_rate": 4.698236327505195e-05, |
|
"loss": 0.7592, |
|
"step": 6107 |
|
}, |
|
{ |
|
"epoch": 0.05291561777992344, |
|
"grad_norm": 0.8374608755111694, |
|
"learning_rate": 4.694224030300127e-05, |
|
"loss": 0.7489, |
|
"step": 6138 |
|
}, |
|
{ |
|
"epoch": 0.05318286837477154, |
|
"grad_norm": 0.8925785422325134, |
|
"learning_rate": 4.690186971575107e-05, |
|
"loss": 0.7525, |
|
"step": 6169 |
|
}, |
|
{ |
|
"epoch": 0.05345011896961964, |
|
"grad_norm": 0.8820602297782898, |
|
"learning_rate": 4.6861251968877916e-05, |
|
"loss": 0.7588, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.05371736956446774, |
|
"grad_norm": 0.894283652305603, |
|
"learning_rate": 4.68203875207476e-05, |
|
"loss": 0.7596, |
|
"step": 6231 |
|
}, |
|
{ |
|
"epoch": 0.053984620159315835, |
|
"grad_norm": 0.9042718410491943, |
|
"learning_rate": 4.677927683250983e-05, |
|
"loss": 0.7585, |
|
"step": 6262 |
|
}, |
|
{ |
|
"epoch": 0.05425187075416393, |
|
"grad_norm": 0.8079699873924255, |
|
"learning_rate": 4.6737920368093156e-05, |
|
"loss": 0.7494, |
|
"step": 6293 |
|
}, |
|
{ |
|
"epoch": 0.05451912134901204, |
|
"grad_norm": 0.930780827999115, |
|
"learning_rate": 4.669631859419965e-05, |
|
"loss": 0.7534, |
|
"step": 6324 |
|
}, |
|
{ |
|
"epoch": 0.054786371943860136, |
|
"grad_norm": 0.8054472804069519, |
|
"learning_rate": 4.6654471980299676e-05, |
|
"loss": 0.7537, |
|
"step": 6355 |
|
}, |
|
{ |
|
"epoch": 0.055053622538708234, |
|
"grad_norm": 0.9037302732467651, |
|
"learning_rate": 4.661238099862658e-05, |
|
"loss": 0.7541, |
|
"step": 6386 |
|
}, |
|
{ |
|
"epoch": 0.05532087313355633, |
|
"grad_norm": 0.92222660779953, |
|
"learning_rate": 4.657004612417138e-05, |
|
"loss": 0.7513, |
|
"step": 6417 |
|
}, |
|
{ |
|
"epoch": 0.05558812372840443, |
|
"grad_norm": 0.7747927904129028, |
|
"learning_rate": 4.6527467834677374e-05, |
|
"loss": 0.7626, |
|
"step": 6448 |
|
}, |
|
{ |
|
"epoch": 0.05585537432325253, |
|
"grad_norm": 0.8381842374801636, |
|
"learning_rate": 4.648464661063478e-05, |
|
"loss": 0.7543, |
|
"step": 6479 |
|
}, |
|
{ |
|
"epoch": 0.056122624918100626, |
|
"grad_norm": 0.9815627336502075, |
|
"learning_rate": 4.6441582935275264e-05, |
|
"loss": 0.7502, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.056389875512948724, |
|
"grad_norm": 0.80378657579422, |
|
"learning_rate": 4.6398277294566586e-05, |
|
"loss": 0.7434, |
|
"step": 6541 |
|
}, |
|
{ |
|
"epoch": 0.05665712610779682, |
|
"grad_norm": 0.8066803812980652, |
|
"learning_rate": 4.6354730177207e-05, |
|
"loss": 0.7606, |
|
"step": 6572 |
|
}, |
|
{ |
|
"epoch": 0.05692437670264492, |
|
"grad_norm": 0.8799330592155457, |
|
"learning_rate": 4.6310942074619787e-05, |
|
"loss": 0.7597, |
|
"step": 6603 |
|
}, |
|
{ |
|
"epoch": 0.05719162729749302, |
|
"grad_norm": 0.9310518503189087, |
|
"learning_rate": 4.626691348094777e-05, |
|
"loss": 0.7544, |
|
"step": 6634 |
|
}, |
|
{ |
|
"epoch": 0.057458877892341116, |
|
"grad_norm": 0.9824729561805725, |
|
"learning_rate": 4.622264489304762e-05, |
|
"loss": 0.752, |
|
"step": 6665 |
|
}, |
|
{ |
|
"epoch": 0.057726128487189214, |
|
"grad_norm": 1.0709872245788574, |
|
"learning_rate": 4.617813681048434e-05, |
|
"loss": 0.7561, |
|
"step": 6696 |
|
}, |
|
{ |
|
"epoch": 0.05799337908203731, |
|
"grad_norm": 0.8781105279922485, |
|
"learning_rate": 4.61333897355256e-05, |
|
"loss": 0.7562, |
|
"step": 6727 |
|
}, |
|
{ |
|
"epoch": 0.05826062967688541, |
|
"grad_norm": 0.8140677213668823, |
|
"learning_rate": 4.608840417313604e-05, |
|
"loss": 0.7502, |
|
"step": 6758 |
|
}, |
|
{ |
|
"epoch": 0.05852788027173351, |
|
"grad_norm": 0.7568345069885254, |
|
"learning_rate": 4.6043180630971646e-05, |
|
"loss": 0.7518, |
|
"step": 6789 |
|
}, |
|
{ |
|
"epoch": 0.058795130866581606, |
|
"grad_norm": 0.8876240253448486, |
|
"learning_rate": 4.599771961937391e-05, |
|
"loss": 0.7566, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.059062381461429704, |
|
"grad_norm": 1.036300539970398, |
|
"learning_rate": 4.5952021651364204e-05, |
|
"loss": 0.7455, |
|
"step": 6851 |
|
}, |
|
{ |
|
"epoch": 0.0593296320562778, |
|
"grad_norm": 0.8825740814208984, |
|
"learning_rate": 4.590608724263786e-05, |
|
"loss": 0.7448, |
|
"step": 6882 |
|
}, |
|
{ |
|
"epoch": 0.0595968826511259, |
|
"grad_norm": 1.0014013051986694, |
|
"learning_rate": 4.585991691155845e-05, |
|
"loss": 0.7434, |
|
"step": 6913 |
|
}, |
|
{ |
|
"epoch": 0.059864133245974, |
|
"grad_norm": 0.7801791429519653, |
|
"learning_rate": 4.581351117915188e-05, |
|
"loss": 0.7507, |
|
"step": 6944 |
|
}, |
|
{ |
|
"epoch": 0.060131383840822096, |
|
"grad_norm": 1.215936541557312, |
|
"learning_rate": 4.5766870569100534e-05, |
|
"loss": 0.7385, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 0.060398634435670194, |
|
"grad_norm": 1.0760201215744019, |
|
"learning_rate": 4.571999560773736e-05, |
|
"loss": 0.7414, |
|
"step": 7006 |
|
}, |
|
{ |
|
"epoch": 0.06066588503051829, |
|
"grad_norm": 1.3316197395324707, |
|
"learning_rate": 4.5672886824039915e-05, |
|
"loss": 0.7538, |
|
"step": 7037 |
|
}, |
|
{ |
|
"epoch": 0.06093313562536639, |
|
"grad_norm": 0.8019669651985168, |
|
"learning_rate": 4.5625544749624435e-05, |
|
"loss": 0.7496, |
|
"step": 7068 |
|
}, |
|
{ |
|
"epoch": 0.06120038622021449, |
|
"grad_norm": 0.7621326446533203, |
|
"learning_rate": 4.5577969918739794e-05, |
|
"loss": 0.746, |
|
"step": 7099 |
|
}, |
|
{ |
|
"epoch": 0.061467636815062586, |
|
"grad_norm": 0.8234092593193054, |
|
"learning_rate": 4.5530162868261486e-05, |
|
"loss": 0.7472, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.061734887409910684, |
|
"grad_norm": 0.6942526698112488, |
|
"learning_rate": 4.548212413768558e-05, |
|
"loss": 0.7463, |
|
"step": 7161 |
|
}, |
|
{ |
|
"epoch": 0.06200213800475878, |
|
"grad_norm": 0.892749547958374, |
|
"learning_rate": 4.543385426912261e-05, |
|
"loss": 0.74, |
|
"step": 7192 |
|
}, |
|
{ |
|
"epoch": 0.06226938859960688, |
|
"grad_norm": 0.8521167635917664, |
|
"learning_rate": 4.53853538072915e-05, |
|
"loss": 0.738, |
|
"step": 7223 |
|
}, |
|
{ |
|
"epoch": 0.06253663919445498, |
|
"grad_norm": 0.9233390688896179, |
|
"learning_rate": 4.533662329951336e-05, |
|
"loss": 0.7401, |
|
"step": 7254 |
|
}, |
|
{ |
|
"epoch": 0.06280388978930308, |
|
"grad_norm": 0.7096614241600037, |
|
"learning_rate": 4.528766329570536e-05, |
|
"loss": 0.7496, |
|
"step": 7285 |
|
}, |
|
{ |
|
"epoch": 0.06307114038415118, |
|
"grad_norm": 0.7589650750160217, |
|
"learning_rate": 4.523847434837447e-05, |
|
"loss": 0.7403, |
|
"step": 7316 |
|
}, |
|
{ |
|
"epoch": 0.06333839097899928, |
|
"grad_norm": 0.8393285274505615, |
|
"learning_rate": 4.518905701261128e-05, |
|
"loss": 0.7507, |
|
"step": 7347 |
|
}, |
|
{ |
|
"epoch": 0.06360564157384738, |
|
"grad_norm": 0.855707585811615, |
|
"learning_rate": 4.5139411846083715e-05, |
|
"loss": 0.7389, |
|
"step": 7378 |
|
}, |
|
{ |
|
"epoch": 0.06387289216869547, |
|
"grad_norm": 0.8630636930465698, |
|
"learning_rate": 4.508953940903073e-05, |
|
"loss": 0.7378, |
|
"step": 7409 |
|
}, |
|
{ |
|
"epoch": 0.06414014276354357, |
|
"grad_norm": 0.784131646156311, |
|
"learning_rate": 4.5039440264255994e-05, |
|
"loss": 0.7561, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.06440739335839167, |
|
"grad_norm": 0.8171682953834534, |
|
"learning_rate": 4.498911497712155e-05, |
|
"loss": 0.7538, |
|
"step": 7471 |
|
}, |
|
{ |
|
"epoch": 0.06467464395323977, |
|
"grad_norm": 0.8260509967803955, |
|
"learning_rate": 4.493856411554142e-05, |
|
"loss": 0.7564, |
|
"step": 7502 |
|
}, |
|
{ |
|
"epoch": 0.06494189454808787, |
|
"grad_norm": 0.6839385628700256, |
|
"learning_rate": 4.4887788249975206e-05, |
|
"loss": 0.7371, |
|
"step": 7533 |
|
}, |
|
{ |
|
"epoch": 0.06520914514293596, |
|
"grad_norm": 0.831058144569397, |
|
"learning_rate": 4.4836787953421656e-05, |
|
"loss": 0.7429, |
|
"step": 7564 |
|
}, |
|
{ |
|
"epoch": 0.06547639573778406, |
|
"grad_norm": 0.7439693808555603, |
|
"learning_rate": 4.478556380141218e-05, |
|
"loss": 0.7383, |
|
"step": 7595 |
|
}, |
|
{ |
|
"epoch": 0.06574364633263216, |
|
"grad_norm": 0.6511676907539368, |
|
"learning_rate": 4.4734116372004375e-05, |
|
"loss": 0.7385, |
|
"step": 7626 |
|
} |
|
], |
|
"logging_steps": 31, |
|
"max_steps": 30517, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 7630, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.303385195250975e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |