{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1795, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0027868738242876052, |
|
"grad_norm": 1.3125083446502686, |
|
"learning_rate": 2.2222222222222225e-06, |
|
"loss": 2.2127, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0055737476485752105, |
|
"grad_norm": 1.2902835607528687, |
|
"learning_rate": 5e-06, |
|
"loss": 2.1685, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.008360621472862817, |
|
"grad_norm": 1.260772466659546, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 2.1734, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.011147495297150421, |
|
"grad_norm": 1.2492824792861938, |
|
"learning_rate": 1.0555555555555555e-05, |
|
"loss": 2.1881, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.013934369121438027, |
|
"grad_norm": 1.8405438661575317, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 2.1732, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.016721242945725634, |
|
"grad_norm": 2.053272247314453, |
|
"learning_rate": 1.6111111111111115e-05, |
|
"loss": 2.1535, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.019508116770013236, |
|
"grad_norm": 1.6805298328399658, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 1.9838, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.022294990594300842, |
|
"grad_norm": 1.5838550329208374, |
|
"learning_rate": 2.1666666666666667e-05, |
|
"loss": 1.7728, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.025081864418588447, |
|
"grad_norm": 1.1088097095489502, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 1.7897, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.027868738242876053, |
|
"grad_norm": 1.060654878616333, |
|
"learning_rate": 2.7222222222222223e-05, |
|
"loss": 1.6669, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03065561206716366, |
|
"grad_norm": 0.9146080017089844, |
|
"learning_rate": 3e-05, |
|
"loss": 1.5346, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03344248589145127, |
|
"grad_norm": 0.811827540397644, |
|
"learning_rate": 3.277777777777778e-05, |
|
"loss": 1.6357, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03622935971573887, |
|
"grad_norm": 1.025479793548584, |
|
"learning_rate": 3.555555555555556e-05, |
|
"loss": 1.5798, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03901623354002647, |
|
"grad_norm": 0.8861313462257385, |
|
"learning_rate": 3.8333333333333334e-05, |
|
"loss": 1.4825, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04180310736431408, |
|
"grad_norm": 1.0417464971542358, |
|
"learning_rate": 4.111111111111111e-05, |
|
"loss": 1.5584, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.044589981188601684, |
|
"grad_norm": 0.9840788245201111, |
|
"learning_rate": 4.388888888888889e-05, |
|
"loss": 1.4355, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04737685501288929, |
|
"grad_norm": 0.9761689901351929, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 1.5785, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.050163728837176895, |
|
"grad_norm": 1.0217355489730835, |
|
"learning_rate": 4.9444444444444446e-05, |
|
"loss": 1.477, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0529506026614645, |
|
"grad_norm": 1.0723602771759033, |
|
"learning_rate": 5.222222222222223e-05, |
|
"loss": 1.4874, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.055737476485752106, |
|
"grad_norm": 1.0415840148925781, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 1.3867, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05852435031003971, |
|
"grad_norm": 0.9482999444007874, |
|
"learning_rate": 5.7777777777777776e-05, |
|
"loss": 1.4529, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06131122413432732, |
|
"grad_norm": 1.2104490995407104, |
|
"learning_rate": 6.055555555555555e-05, |
|
"loss": 1.5199, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06409809795861493, |
|
"grad_norm": 1.095499873161316, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 1.4173, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06688497178290254, |
|
"grad_norm": 1.0751917362213135, |
|
"learning_rate": 6.611111111111111e-05, |
|
"loss": 1.4632, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06967184560719013, |
|
"grad_norm": 1.1421129703521729, |
|
"learning_rate": 6.88888888888889e-05, |
|
"loss": 1.4691, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07245871943147773, |
|
"grad_norm": 1.0927807092666626, |
|
"learning_rate": 7.166666666666667e-05, |
|
"loss": 1.4509, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07524559325576534, |
|
"grad_norm": 1.067568302154541, |
|
"learning_rate": 7.444444444444444e-05, |
|
"loss": 1.4235, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07803246708005294, |
|
"grad_norm": 1.159364104270935, |
|
"learning_rate": 7.722222222222223e-05, |
|
"loss": 1.4362, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.08081934090434055, |
|
"grad_norm": 1.0486923456192017, |
|
"learning_rate": 8e-05, |
|
"loss": 1.503, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08360621472862816, |
|
"grad_norm": 1.1524556875228882, |
|
"learning_rate": 8.277777777777778e-05, |
|
"loss": 1.4556, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08639308855291576, |
|
"grad_norm": 1.0299068689346313, |
|
"learning_rate": 8.555555555555556e-05, |
|
"loss": 1.3988, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08917996237720337, |
|
"grad_norm": 1.0863871574401855, |
|
"learning_rate": 8.833333333333333e-05, |
|
"loss": 1.37, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.09196683620149097, |
|
"grad_norm": 1.1330057382583618, |
|
"learning_rate": 9.111111111111112e-05, |
|
"loss": 1.3879, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09475371002577858, |
|
"grad_norm": 1.0953528881072998, |
|
"learning_rate": 9.388888888888889e-05, |
|
"loss": 1.415, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09754058385006618, |
|
"grad_norm": 1.0204545259475708, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 1.4199, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.10032745767435379, |
|
"grad_norm": 1.0416138172149658, |
|
"learning_rate": 9.944444444444446e-05, |
|
"loss": 1.3911, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1031143314986414, |
|
"grad_norm": 1.0255354642868042, |
|
"learning_rate": 9.999848639521432e-05, |
|
"loss": 1.382, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.105901205322929, |
|
"grad_norm": 1.0507235527038574, |
|
"learning_rate": 9.999233753283091e-05, |
|
"loss": 1.3531, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1086880791472166, |
|
"grad_norm": 1.1468594074249268, |
|
"learning_rate": 9.998145939378577e-05, |
|
"loss": 1.3873, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.11147495297150421, |
|
"grad_norm": 1.1430379152297974, |
|
"learning_rate": 9.996585300715116e-05, |
|
"loss": 1.4215, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11426182679579182, |
|
"grad_norm": 1.029249668121338, |
|
"learning_rate": 9.994551984929175e-05, |
|
"loss": 1.3678, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11704870062007942, |
|
"grad_norm": 0.9233781099319458, |
|
"learning_rate": 9.992046184372492e-05, |
|
"loss": 1.403, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11983557444436703, |
|
"grad_norm": 0.9915686845779419, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 1.3518, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.12262244826865464, |
|
"grad_norm": 0.9875094294548035, |
|
"learning_rate": 9.985618121816779e-05, |
|
"loss": 1.4272, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12540932209294225, |
|
"grad_norm": 0.9711189866065979, |
|
"learning_rate": 9.981696467912664e-05, |
|
"loss": 1.3453, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12819619591722986, |
|
"grad_norm": 1.0352520942687988, |
|
"learning_rate": 9.97730354537011e-05, |
|
"loss": 1.3668, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.13098306974151747, |
|
"grad_norm": 0.957478940486908, |
|
"learning_rate": 9.972439769759722e-05, |
|
"loss": 1.3027, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.13376994356580507, |
|
"grad_norm": 1.0086779594421387, |
|
"learning_rate": 9.967105601194823e-05, |
|
"loss": 1.4556, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.13655681739009268, |
|
"grad_norm": 0.9436930418014526, |
|
"learning_rate": 9.961301544287922e-05, |
|
"loss": 1.3322, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.13934369121438026, |
|
"grad_norm": 1.000581979751587, |
|
"learning_rate": 9.955028148102979e-05, |
|
"loss": 1.303, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.14213056503866786, |
|
"grad_norm": 0.9896969795227051, |
|
"learning_rate": 9.948286006103466e-05, |
|
"loss": 1.4694, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.14491743886295547, |
|
"grad_norm": 1.0205310583114624, |
|
"learning_rate": 9.941075756096226e-05, |
|
"loss": 1.36, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14770431268724307, |
|
"grad_norm": 1.0231329202651978, |
|
"learning_rate": 9.933398080171123e-05, |
|
"loss": 1.3808, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.15049118651153068, |
|
"grad_norm": 0.9146262407302856, |
|
"learning_rate": 9.925253704636543e-05, |
|
"loss": 1.3177, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.15327806033581828, |
|
"grad_norm": 0.9757639765739441, |
|
"learning_rate": 9.916643399950656e-05, |
|
"loss": 1.3462, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.1560649341601059, |
|
"grad_norm": 0.9955347180366516, |
|
"learning_rate": 9.907567980648549e-05, |
|
"loss": 1.3365, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1588518079843935, |
|
"grad_norm": 0.9526458978652954, |
|
"learning_rate": 9.898028305265169e-05, |
|
"loss": 1.4223, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.1616386818086811, |
|
"grad_norm": 0.8748807907104492, |
|
"learning_rate": 9.888025276254096e-05, |
|
"loss": 1.3659, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1644255556329687, |
|
"grad_norm": 0.9113283157348633, |
|
"learning_rate": 9.877559839902184e-05, |
|
"loss": 1.3677, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.1672124294572563, |
|
"grad_norm": 0.9735001921653748, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 1.3083, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16999930328154392, |
|
"grad_norm": 0.9326124787330627, |
|
"learning_rate": 9.855245748948326e-05, |
|
"loss": 1.3757, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.17278617710583152, |
|
"grad_norm": 0.8787973523139954, |
|
"learning_rate": 9.843399205260068e-05, |
|
"loss": 1.36, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.17557305093011913, |
|
"grad_norm": 1.017393708229065, |
|
"learning_rate": 9.831094475858652e-05, |
|
"loss": 1.3754, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.17835992475440673, |
|
"grad_norm": 0.8852056264877319, |
|
"learning_rate": 9.818332724771857e-05, |
|
"loss": 1.4008, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.18114679857869434, |
|
"grad_norm": 0.9435309171676636, |
|
"learning_rate": 9.805115159261726e-05, |
|
"loss": 1.3145, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.18393367240298195, |
|
"grad_norm": 0.9178583025932312, |
|
"learning_rate": 9.791443029710361e-05, |
|
"loss": 1.3052, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.18672054622726955, |
|
"grad_norm": 0.9663990139961243, |
|
"learning_rate": 9.777317629501636e-05, |
|
"loss": 1.3237, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.18950742005155716, |
|
"grad_norm": 0.8486679792404175, |
|
"learning_rate": 9.762740294898846e-05, |
|
"loss": 1.2913, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.19229429387584476, |
|
"grad_norm": 0.960027277469635, |
|
"learning_rate": 9.747712404918286e-05, |
|
"loss": 1.2583, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.19508116770013237, |
|
"grad_norm": 1.0241199731826782, |
|
"learning_rate": 9.732235381198813e-05, |
|
"loss": 1.3535, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.19786804152441997, |
|
"grad_norm": 0.8933882713317871, |
|
"learning_rate": 9.716310687867342e-05, |
|
"loss": 1.2532, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.20065491534870758, |
|
"grad_norm": 0.917148232460022, |
|
"learning_rate": 9.699939831400351e-05, |
|
"loss": 1.4276, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.20344178917299519, |
|
"grad_norm": 0.9412631392478943, |
|
"learning_rate": 9.683124360481364e-05, |
|
"loss": 1.3184, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.2062286629972828, |
|
"grad_norm": 0.9783531427383423, |
|
"learning_rate": 9.665865865854445e-05, |
|
"loss": 1.3137, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.2090155368215704, |
|
"grad_norm": 0.933350682258606, |
|
"learning_rate": 9.648165980173712e-05, |
|
"loss": 1.3405, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.211802410645858, |
|
"grad_norm": 0.9554056525230408, |
|
"learning_rate": 9.630026377848892e-05, |
|
"loss": 1.3246, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.2145892844701456, |
|
"grad_norm": 0.8970703482627869, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 1.325, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2173761582944332, |
|
"grad_norm": 0.9755475521087646, |
|
"learning_rate": 9.592434928729616e-05, |
|
"loss": 1.3922, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.22016303211872082, |
|
"grad_norm": 0.9652612805366516, |
|
"learning_rate": 9.572986638087396e-05, |
|
"loss": 1.3513, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.22294990594300843, |
|
"grad_norm": 0.9211638569831848, |
|
"learning_rate": 9.553105742769154e-05, |
|
"loss": 1.3104, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.22573677976729603, |
|
"grad_norm": 0.9684194326400757, |
|
"learning_rate": 9.532794123508197e-05, |
|
"loss": 1.23, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.22852365359158364, |
|
"grad_norm": 0.9077541828155518, |
|
"learning_rate": 9.512053701784329e-05, |
|
"loss": 1.3236, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.23131052741587124, |
|
"grad_norm": 0.9119937419891357, |
|
"learning_rate": 9.490886439642081e-05, |
|
"loss": 1.3159, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.23409740124015885, |
|
"grad_norm": 0.9030479192733765, |
|
"learning_rate": 9.469294339505098e-05, |
|
"loss": 1.3789, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.23688427506444645, |
|
"grad_norm": 0.9020025134086609, |
|
"learning_rate": 9.447279443986716e-05, |
|
"loss": 1.2921, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.23967114888873406, |
|
"grad_norm": 0.980591356754303, |
|
"learning_rate": 9.424843835696724e-05, |
|
"loss": 1.3361, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.24245802271302166, |
|
"grad_norm": 1.0082863569259644, |
|
"learning_rate": 9.401989637044355e-05, |
|
"loss": 1.2721, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.24524489653730927, |
|
"grad_norm": 0.9218441843986511, |
|
"learning_rate": 9.3787190100375e-05, |
|
"loss": 1.3214, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.24803177036159688, |
|
"grad_norm": 0.8562553524971008, |
|
"learning_rate": 9.355034156078188e-05, |
|
"loss": 1.2835, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.2508186441858845, |
|
"grad_norm": 0.8887034058570862, |
|
"learning_rate": 9.330937315754329e-05, |
|
"loss": 1.2917, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2536055180101721, |
|
"grad_norm": 0.944468080997467, |
|
"learning_rate": 9.306430768627753e-05, |
|
"loss": 1.2844, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2563923918344597, |
|
"grad_norm": 0.9446823000907898, |
|
"learning_rate": 9.281516833018571e-05, |
|
"loss": 1.2999, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.2591792656587473, |
|
"grad_norm": 0.9115864634513855, |
|
"learning_rate": 9.256197865785854e-05, |
|
"loss": 1.3846, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.26196613948303493, |
|
"grad_norm": 0.8051035404205322, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.3087, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.26475301330732254, |
|
"grad_norm": 0.9236623048782349, |
|
"learning_rate": 9.204354455239539e-05, |
|
"loss": 1.337, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.26753988713161014, |
|
"grad_norm": 0.9063863754272461, |
|
"learning_rate": 9.177834916314165e-05, |
|
"loss": 1.327, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.27032676095589775, |
|
"grad_norm": 0.8260522484779358, |
|
"learning_rate": 9.150920154077754e-05, |
|
"loss": 1.2957, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.27311363478018535, |
|
"grad_norm": 0.903109610080719, |
|
"learning_rate": 9.123612714667634e-05, |
|
"loss": 1.3345, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.2759005086044729, |
|
"grad_norm": 0.8636733293533325, |
|
"learning_rate": 9.095915181368412e-05, |
|
"loss": 1.3918, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2786873824287605, |
|
"grad_norm": 0.9395173788070679, |
|
"learning_rate": 9.067830174367586e-05, |
|
"loss": 1.3573, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2814742562530481, |
|
"grad_norm": 0.8924953937530518, |
|
"learning_rate": 9.039360350507679e-05, |
|
"loss": 1.3344, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.2842611300773357, |
|
"grad_norm": 0.8831427097320557, |
|
"learning_rate": 9.010508403034898e-05, |
|
"loss": 1.3086, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.2870480039016233, |
|
"grad_norm": 0.8781440854072571, |
|
"learning_rate": 8.98127706134436e-05, |
|
"loss": 1.3186, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.28983487772591093, |
|
"grad_norm": 0.9398754835128784, |
|
"learning_rate": 8.951669090721881e-05, |
|
"loss": 1.308, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.29262175155019854, |
|
"grad_norm": 0.9422699213027954, |
|
"learning_rate": 8.921687292082393e-05, |
|
"loss": 1.2642, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.29540862537448614, |
|
"grad_norm": 0.8490906357765198, |
|
"learning_rate": 8.891334501704962e-05, |
|
"loss": 1.3098, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.29819549919877375, |
|
"grad_norm": 0.9390639662742615, |
|
"learning_rate": 8.86061359096449e-05, |
|
"loss": 1.2832, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.30098237302306136, |
|
"grad_norm": 0.9434688687324524, |
|
"learning_rate": 8.829527466060072e-05, |
|
"loss": 1.1985, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.30376924684734896, |
|
"grad_norm": 0.8724851608276367, |
|
"learning_rate": 8.798079067740077e-05, |
|
"loss": 1.3901, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.30655612067163657, |
|
"grad_norm": 0.9250208139419556, |
|
"learning_rate": 8.766271371023948e-05, |
|
"loss": 1.2924, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3093429944959242, |
|
"grad_norm": 0.9672542810440063, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 1.2702, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.3121298683202118, |
|
"grad_norm": 0.90570467710495, |
|
"learning_rate": 8.701590152144612e-05, |
|
"loss": 1.2631, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.3149167421444994, |
|
"grad_norm": 0.8750013113021851, |
|
"learning_rate": 8.668722748826693e-05, |
|
"loss": 1.2645, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.317703615968787, |
|
"grad_norm": 0.903086245059967, |
|
"learning_rate": 8.635508284224371e-05, |
|
"loss": 1.38, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3204904897930746, |
|
"grad_norm": 0.8661080598831177, |
|
"learning_rate": 8.601949900427016e-05, |
|
"loss": 1.2704, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3232773636173622, |
|
"grad_norm": 0.9066647887229919, |
|
"learning_rate": 8.568050772058762e-05, |
|
"loss": 1.2938, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3260642374416498, |
|
"grad_norm": 0.8687067627906799, |
|
"learning_rate": 8.533814105978191e-05, |
|
"loss": 1.2757, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3288511112659374, |
|
"grad_norm": 0.8991963863372803, |
|
"learning_rate": 8.499243140974966e-05, |
|
"loss": 1.2832, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.331637985090225, |
|
"grad_norm": 0.8930802345275879, |
|
"learning_rate": 8.464341147463431e-05, |
|
"loss": 1.2754, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3344248589145126, |
|
"grad_norm": 0.9218065738677979, |
|
"learning_rate": 8.429111427173241e-05, |
|
"loss": 1.2417, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.33721173273880023, |
|
"grad_norm": 0.851839542388916, |
|
"learning_rate": 8.393557312837018e-05, |
|
"loss": 1.2424, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.33999860656308784, |
|
"grad_norm": 0.8438836336135864, |
|
"learning_rate": 8.357682167875062e-05, |
|
"loss": 1.3203, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.34278548038737544, |
|
"grad_norm": 0.926641047000885, |
|
"learning_rate": 8.321489386077192e-05, |
|
"loss": 1.2793, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.34557235421166305, |
|
"grad_norm": 0.8907529711723328, |
|
"learning_rate": 8.28498239128167e-05, |
|
"loss": 1.2911, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.34835922803595065, |
|
"grad_norm": 0.9093756675720215, |
|
"learning_rate": 8.248164637051321e-05, |
|
"loss": 1.2544, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.35114610186023826, |
|
"grad_norm": 0.8902727961540222, |
|
"learning_rate": 8.211039606346826e-05, |
|
"loss": 1.3249, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.35393297568452586, |
|
"grad_norm": 0.8554810285568237, |
|
"learning_rate": 8.173610811197226e-05, |
|
"loss": 1.2805, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.35671984950881347, |
|
"grad_norm": 0.864427387714386, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 1.3051, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3595067233331011, |
|
"grad_norm": 0.925584077835083, |
|
"learning_rate": 8.097856119024545e-05, |
|
"loss": 1.3623, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.3622935971573887, |
|
"grad_norm": 0.8400620222091675, |
|
"learning_rate": 8.059537388397665e-05, |
|
"loss": 1.2708, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.3650804709816763, |
|
"grad_norm": 0.9252070784568787, |
|
"learning_rate": 8.020929225440137e-05, |
|
"loss": 1.3025, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.3678673448059639, |
|
"grad_norm": 0.8931984305381775, |
|
"learning_rate": 7.98203528248536e-05, |
|
"loss": 1.2877, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.3706542186302515, |
|
"grad_norm": 0.888062059879303, |
|
"learning_rate": 7.942859238901528e-05, |
|
"loss": 1.3231, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3734410924545391, |
|
"grad_norm": 1.0426568984985352, |
|
"learning_rate": 7.903404800743564e-05, |
|
"loss": 1.3333, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.3762279662788267, |
|
"grad_norm": 0.994853138923645, |
|
"learning_rate": 7.863675700402526e-05, |
|
"loss": 1.2642, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.3790148401031143, |
|
"grad_norm": 0.9480583667755127, |
|
"learning_rate": 7.823675696252524e-05, |
|
"loss": 1.3738, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.3818017139274019, |
|
"grad_norm": 0.8688267469406128, |
|
"learning_rate": 7.783408572295174e-05, |
|
"loss": 1.3444, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.3845885877516895, |
|
"grad_norm": 0.8878870010375977, |
|
"learning_rate": 7.742878137801639e-05, |
|
"loss": 1.2826, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.38737546157597713, |
|
"grad_norm": 0.9165365099906921, |
|
"learning_rate": 7.702088226952258e-05, |
|
"loss": 1.282, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.39016233540026474, |
|
"grad_norm": 0.9026569724082947, |
|
"learning_rate": 7.661042698473853e-05, |
|
"loss": 1.2294, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.39294920922455234, |
|
"grad_norm": 0.8386527299880981, |
|
"learning_rate": 7.619745435274667e-05, |
|
"loss": 1.2249, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.39573608304883995, |
|
"grad_norm": 0.8767593502998352, |
|
"learning_rate": 7.578200344077073e-05, |
|
"loss": 1.2863, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.39852295687312755, |
|
"grad_norm": 0.8485695719718933, |
|
"learning_rate": 7.536411355047964e-05, |
|
"loss": 1.2873, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.40130983069741516, |
|
"grad_norm": 0.8822951912879944, |
|
"learning_rate": 7.494382421426984e-05, |
|
"loss": 1.3214, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.40409670452170277, |
|
"grad_norm": 0.8770014643669128, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 1.2389, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.40688357834599037, |
|
"grad_norm": 0.9898686408996582, |
|
"learning_rate": 7.409620646485685e-05, |
|
"loss": 1.2573, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.409670452170278, |
|
"grad_norm": 0.8657426834106445, |
|
"learning_rate": 7.36689582363187e-05, |
|
"loss": 1.2907, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.4124573259945656, |
|
"grad_norm": 0.8856871128082275, |
|
"learning_rate": 7.323947092360649e-05, |
|
"loss": 1.2291, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.4152441998188532, |
|
"grad_norm": 0.9170464277267456, |
|
"learning_rate": 7.280778515623314e-05, |
|
"loss": 1.3413, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4180310736431408, |
|
"grad_norm": 1.0060070753097534, |
|
"learning_rate": 7.237394177168548e-05, |
|
"loss": 1.3131, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.4208179474674284, |
|
"grad_norm": 0.9314805269241333, |
|
"learning_rate": 7.193798181156095e-05, |
|
"loss": 1.3345, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.423604821291716, |
|
"grad_norm": 0.8973556160926819, |
|
"learning_rate": 7.149994651768514e-05, |
|
"loss": 1.165, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4263916951160036, |
|
"grad_norm": 0.961757481098175, |
|
"learning_rate": 7.10598773282103e-05, |
|
"loss": 1.2835, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4291785689402912, |
|
"grad_norm": 0.9814241528511047, |
|
"learning_rate": 7.061781587369519e-05, |
|
"loss": 1.3459, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4319654427645788, |
|
"grad_norm": 0.906623363494873, |
|
"learning_rate": 7.017380397316695e-05, |
|
"loss": 1.2327, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4347523165888664, |
|
"grad_norm": 0.855728268623352, |
|
"learning_rate": 6.972788363016497e-05, |
|
"loss": 1.1951, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.43753919041315403, |
|
"grad_norm": 0.8998392820358276, |
|
"learning_rate": 6.92800970287674e-05, |
|
"loss": 1.2595, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.44032606423744164, |
|
"grad_norm": 0.8634393215179443, |
|
"learning_rate": 6.883048652960038e-05, |
|
"loss": 1.261, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44311293806172924, |
|
"grad_norm": 0.9386719465255737, |
|
"learning_rate": 6.837909466583095e-05, |
|
"loss": 1.3039, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.44589981188601685, |
|
"grad_norm": 0.8203379511833191, |
|
"learning_rate": 6.792596413914324e-05, |
|
"loss": 1.1734, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.44868668571030446, |
|
"grad_norm": 0.9148008823394775, |
|
"learning_rate": 6.747113781569892e-05, |
|
"loss": 1.3215, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.45147355953459206, |
|
"grad_norm": 0.9338198304176331, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 1.3128, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.45426043335887967, |
|
"grad_norm": 0.9510387778282166, |
|
"learning_rate": 6.655657004122916e-05, |
|
"loss": 1.3056, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.4570473071831673, |
|
"grad_norm": 0.9619747996330261, |
|
"learning_rate": 6.60969151083432e-05, |
|
"loss": 1.2678, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.4598341810074549, |
|
"grad_norm": 0.8645920753479004, |
|
"learning_rate": 6.563573740679496e-05, |
|
"loss": 1.2585, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.4626210548317425, |
|
"grad_norm": 0.9401421546936035, |
|
"learning_rate": 6.517308056400917e-05, |
|
"loss": 1.2844, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4654079286560301, |
|
"grad_norm": 0.8988810181617737, |
|
"learning_rate": 6.470898834733731e-05, |
|
"loss": 1.2556, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.4681948024803177, |
|
"grad_norm": 0.9045888781547546, |
|
"learning_rate": 6.42435046599173e-05, |
|
"loss": 1.2669, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.4709816763046053, |
|
"grad_norm": 0.9363210797309875, |
|
"learning_rate": 6.377667353652022e-05, |
|
"loss": 1.3025, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.4737685501288929, |
|
"grad_norm": 0.8949057459831238, |
|
"learning_rate": 6.330853913938466e-05, |
|
"loss": 1.2632, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.4765554239531805, |
|
"grad_norm": 0.8873914480209351, |
|
"learning_rate": 6.283914575403888e-05, |
|
"loss": 1.322, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.4793422977774681, |
|
"grad_norm": 0.9296184778213501, |
|
"learning_rate": 6.236853778511156e-05, |
|
"loss": 1.2484, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.4821291716017557, |
|
"grad_norm": 0.9600350260734558, |
|
"learning_rate": 6.189675975213094e-05, |
|
"loss": 1.2925, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.48491604542604333, |
|
"grad_norm": 0.9283687472343445, |
|
"learning_rate": 6.142385628531342e-05, |
|
"loss": 1.2948, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.48770291925033094, |
|
"grad_norm": 0.9505272507667542, |
|
"learning_rate": 6.09498721213414e-05, |
|
"loss": 1.2185, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.49048979307461854, |
|
"grad_norm": 0.9199963808059692, |
|
"learning_rate": 6.047485209913137e-05, |
|
"loss": 1.3212, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.49327666689890615, |
|
"grad_norm": 0.8689345717430115, |
|
"learning_rate": 5.999884115559192e-05, |
|
"loss": 1.2593, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.49606354072319375, |
|
"grad_norm": 0.8260021805763245, |
|
"learning_rate": 5.952188432137293e-05, |
|
"loss": 1.2825, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.49885041454748136, |
|
"grad_norm": 0.7934656143188477, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 1.2512, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.501637288371769, |
|
"grad_norm": 0.9623500108718872, |
|
"learning_rate": 5.8565313546633684e-05, |
|
"loss": 1.2891, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5044241621960566, |
|
"grad_norm": 0.8901398777961731, |
|
"learning_rate": 5.8085790097738025e-05, |
|
"loss": 1.273, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.5072110360203442, |
|
"grad_norm": 0.9480902552604675, |
|
"learning_rate": 5.7605501732851475e-05, |
|
"loss": 1.3143, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5099979098446318, |
|
"grad_norm": 0.8739180564880371, |
|
"learning_rate": 5.712449388726807e-05, |
|
"loss": 1.3233, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.5127847836689194, |
|
"grad_norm": 0.8921142220497131, |
|
"learning_rate": 5.664281206434472e-05, |
|
"loss": 1.2178, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.515571657493207, |
|
"grad_norm": 0.8952714204788208, |
|
"learning_rate": 5.616050183119663e-05, |
|
"loss": 1.3035, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.5183585313174947, |
|
"grad_norm": 0.8778610825538635, |
|
"learning_rate": 5.5677608814386616e-05, |
|
"loss": 1.1698, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5211454051417822, |
|
"grad_norm": 0.9074441194534302, |
|
"learning_rate": 5.519417869560889e-05, |
|
"loss": 1.2659, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.5239322789660699, |
|
"grad_norm": 1.0030386447906494, |
|
"learning_rate": 5.471025720736747e-05, |
|
"loss": 1.3052, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.5267191527903574, |
|
"grad_norm": 0.8883451223373413, |
|
"learning_rate": 5.422589012864996e-05, |
|
"loss": 1.2955, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5295060266146451, |
|
"grad_norm": 0.9527599215507507, |
|
"learning_rate": 5.3741123280596864e-05, |
|
"loss": 1.2489, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5322929004389326, |
|
"grad_norm": 0.9727275967597961, |
|
"learning_rate": 5.325600252216685e-05, |
|
"loss": 1.2669, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.5350797742632203, |
|
"grad_norm": 0.9047435522079468, |
|
"learning_rate": 5.27705737457985e-05, |
|
"loss": 1.1822, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5378666480875078, |
|
"grad_norm": 0.929094135761261, |
|
"learning_rate": 5.228488287306896e-05, |
|
"loss": 1.2304, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5406535219117955, |
|
"grad_norm": 0.8297614455223083, |
|
"learning_rate": 5.179897585034963e-05, |
|
"loss": 1.2458, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.543440395736083, |
|
"grad_norm": 0.8769505023956299, |
|
"learning_rate": 5.1312898644459776e-05, |
|
"loss": 1.1869, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5462272695603707, |
|
"grad_norm": 0.9683988690376282, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 1.2764, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.5490141433846583, |
|
"grad_norm": 0.886499285697937, |
|
"learning_rate": 5.0340417626592016e-05, |
|
"loss": 1.2708, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.5518010172089458, |
|
"grad_norm": 0.9555862545967102, |
|
"learning_rate": 4.9854105811348216e-05, |
|
"loss": 1.2959, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5545878910332335, |
|
"grad_norm": 0.9976694583892822, |
|
"learning_rate": 4.936780779769913e-05, |
|
"loss": 1.2704, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.557374764857521, |
|
"grad_norm": 0.9540396928787231, |
|
"learning_rate": 4.888156958945174e-05, |
|
"loss": 1.2399, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5601616386818087, |
|
"grad_norm": 0.9087413549423218, |
|
"learning_rate": 4.839543718475543e-05, |
|
"loss": 1.2465, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.5629485125060962, |
|
"grad_norm": 0.8988016247749329, |
|
"learning_rate": 4.790945657175061e-05, |
|
"loss": 1.2452, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.5657353863303839, |
|
"grad_norm": 0.9223970174789429, |
|
"learning_rate": 4.742367372421811e-05, |
|
"loss": 1.2782, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.5685222601546714, |
|
"grad_norm": 0.9696489572525024, |
|
"learning_rate": 4.69381345972302e-05, |
|
"loss": 1.217, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5713091339789591, |
|
"grad_norm": 0.8725862503051758, |
|
"learning_rate": 4.6452885122803205e-05, |
|
"loss": 1.2164, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.5740960078032467, |
|
"grad_norm": 0.9017091989517212, |
|
"learning_rate": 4.5967971205552194e-05, |
|
"loss": 1.262, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5768828816275343, |
|
"grad_norm": 0.8934043049812317, |
|
"learning_rate": 4.548343871834864e-05, |
|
"loss": 1.2592, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.5796697554518219, |
|
"grad_norm": 0.962250292301178, |
|
"learning_rate": 4.499933349798067e-05, |
|
"loss": 1.2987, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.5824566292761095, |
|
"grad_norm": 0.9082201719284058, |
|
"learning_rate": 4.451570134081694e-05, |
|
"loss": 1.3104, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.5852435031003971, |
|
"grad_norm": 0.8380867838859558, |
|
"learning_rate": 4.403258799847433e-05, |
|
"loss": 1.2397, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.5880303769246847, |
|
"grad_norm": 1.0069491863250732, |
|
"learning_rate": 4.3550039173489845e-05, |
|
"loss": 1.2416, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5908172507489723, |
|
"grad_norm": 0.901740550994873, |
|
"learning_rate": 4.306810051499708e-05, |
|
"loss": 1.2247, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.59360412457326, |
|
"grad_norm": 1.0371346473693848, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 1.2432, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5963909983975475, |
|
"grad_norm": 0.8401350975036621, |
|
"learning_rate": 4.210623600109946e-05, |
|
"loss": 1.1691, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.5991778722218352, |
|
"grad_norm": 0.924321711063385, |
|
"learning_rate": 4.162640113810706e-05, |
|
"loss": 1.2647, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6019647460461227, |
|
"grad_norm": 0.9289098381996155, |
|
"learning_rate": 4.114735841782347e-05, |
|
"loss": 1.2958, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6047516198704104, |
|
"grad_norm": 0.9739760756492615, |
|
"learning_rate": 4.06691531577047e-05, |
|
"loss": 1.2703, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.6075384936946979, |
|
"grad_norm": 0.8968196511268616, |
|
"learning_rate": 4.019183059598296e-05, |
|
"loss": 1.2312, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6103253675189856, |
|
"grad_norm": 0.8930853605270386, |
|
"learning_rate": 3.971543588738724e-05, |
|
"loss": 1.2612, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.6131122413432731, |
|
"grad_norm": 0.8859248757362366, |
|
"learning_rate": 3.924001409887158e-05, |
|
"loss": 1.1742, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.6158991151675608, |
|
"grad_norm": 0.9286195635795593, |
|
"learning_rate": 3.87656102053517e-05, |
|
"loss": 1.2483, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.6186859889918483, |
|
"grad_norm": 0.895728349685669, |
|
"learning_rate": 3.8292269085450474e-05, |
|
"loss": 1.2157, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.621472862816136, |
|
"grad_norm": 0.879304826259613, |
|
"learning_rate": 3.782003551725236e-05, |
|
"loss": 1.3032, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.6242597366404236, |
|
"grad_norm": 0.893455982208252, |
|
"learning_rate": 3.734895417406734e-05, |
|
"loss": 1.2443, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.6270466104647112, |
|
"grad_norm": 0.924315869808197, |
|
"learning_rate": 3.687906962020491e-05, |
|
"loss": 1.2759, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.6298334842889988, |
|
"grad_norm": 1.0192773342132568, |
|
"learning_rate": 3.641042630675829e-05, |
|
"loss": 1.2816, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.6326203581132864, |
|
"grad_norm": 0.9503198266029358, |
|
"learning_rate": 3.594306856739924e-05, |
|
"loss": 1.2173, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.635407231937574, |
|
"grad_norm": 0.9149984121322632, |
|
"learning_rate": 3.547704061418424e-05, |
|
"loss": 1.2245, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6381941057618616, |
|
"grad_norm": 0.9525075554847717, |
|
"learning_rate": 3.501238653337194e-05, |
|
"loss": 1.2298, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.6409809795861492, |
|
"grad_norm": 0.9972552061080933, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 1.3021, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6437678534104369, |
|
"grad_norm": 0.8908212184906006, |
|
"learning_rate": 3.408737567998993e-05, |
|
"loss": 1.191, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.6465547272347244, |
|
"grad_norm": 0.9422298669815063, |
|
"learning_rate": 3.362710641347524e-05, |
|
"loss": 1.2812, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6493416010590121, |
|
"grad_norm": 0.8892982006072998, |
|
"learning_rate": 3.316838602319532e-05, |
|
"loss": 1.2787, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6521284748832996, |
|
"grad_norm": 0.9730682373046875, |
|
"learning_rate": 3.271125790411309e-05, |
|
"loss": 1.1704, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6549153487075873, |
|
"grad_norm": 1.0745271444320679, |
|
"learning_rate": 3.225576530056264e-05, |
|
"loss": 1.3028, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.6577022225318748, |
|
"grad_norm": 0.9123522043228149, |
|
"learning_rate": 3.180195130215824e-05, |
|
"loss": 1.262, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6604890963561625, |
|
"grad_norm": 0.9651986360549927, |
|
"learning_rate": 3.1349858839717986e-05, |
|
"loss": 1.1867, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.66327597018045, |
|
"grad_norm": 0.9499524831771851, |
|
"learning_rate": 3.089953068120271e-05, |
|
"loss": 1.1766, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6660628440047377, |
|
"grad_norm": 0.9542708396911621, |
|
"learning_rate": 3.0451009427669986e-05, |
|
"loss": 1.2212, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.6688497178290252, |
|
"grad_norm": 0.9473170042037964, |
|
"learning_rate": 3.000433750924414e-05, |
|
"loss": 1.2332, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.6716365916533129, |
|
"grad_norm": 0.8507165312767029, |
|
"learning_rate": 2.9559557181102315e-05, |
|
"loss": 1.245, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.6744234654776005, |
|
"grad_norm": 0.9855820536613464, |
|
"learning_rate": 2.911671051947722e-05, |
|
"loss": 1.1962, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6772103393018881, |
|
"grad_norm": 0.970454752445221, |
|
"learning_rate": 2.867583941767657e-05, |
|
"loss": 1.1937, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.6799972131261757, |
|
"grad_norm": 0.9287285804748535, |
|
"learning_rate": 2.823698558212009e-05, |
|
"loss": 1.2562, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.6827840869504633, |
|
"grad_norm": 0.9838190078735352, |
|
"learning_rate": 2.7800190528394122e-05, |
|
"loss": 1.2125, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.6855709607747509, |
|
"grad_norm": 0.8512199521064758, |
|
"learning_rate": 2.736549557732405e-05, |
|
"loss": 1.1991, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.6883578345990385, |
|
"grad_norm": 0.8986166715621948, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 1.3633, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.6911447084233261, |
|
"grad_norm": 0.9696632027626038, |
|
"learning_rate": 2.650257026921455e-05, |
|
"loss": 1.2675, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.6939315822476138, |
|
"grad_norm": 1.0738840103149414, |
|
"learning_rate": 2.607442154493568e-05, |
|
"loss": 1.2917, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.6967184560719013, |
|
"grad_norm": 0.9573596715927124, |
|
"learning_rate": 2.5648536181111438e-05, |
|
"loss": 1.2427, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.699505329896189, |
|
"grad_norm": 0.9653842449188232, |
|
"learning_rate": 2.5224954466510274e-05, |
|
"loss": 1.2188, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7022922037204765, |
|
"grad_norm": 0.9326026439666748, |
|
"learning_rate": 2.480371647197538e-05, |
|
"loss": 1.2812, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.7050790775447642, |
|
"grad_norm": 0.9213048219680786, |
|
"learning_rate": 2.438486204663391e-05, |
|
"loss": 1.2512, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.7078659513690517, |
|
"grad_norm": 0.9547055959701538, |
|
"learning_rate": 2.3968430814127385e-05, |
|
"loss": 1.2657, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.7106528251933394, |
|
"grad_norm": 0.9416990280151367, |
|
"learning_rate": 2.3554462168863085e-05, |
|
"loss": 1.2631, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.7134396990176269, |
|
"grad_norm": 0.9471155405044556, |
|
"learning_rate": 2.314299527228759e-05, |
|
"loss": 1.2631, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.7162265728419146, |
|
"grad_norm": 0.9220243096351624, |
|
"learning_rate": 2.2734069049181882e-05, |
|
"loss": 1.2288, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.7190134466662021, |
|
"grad_norm": 0.9854679107666016, |
|
"learning_rate": 2.2327722183979212e-05, |
|
"loss": 1.261, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.7218003204904898, |
|
"grad_norm": 0.898456335067749, |
|
"learning_rate": 2.1923993117105462e-05, |
|
"loss": 1.2037, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.7245871943147774, |
|
"grad_norm": 1.0650768280029297, |
|
"learning_rate": 2.1522920041342704e-05, |
|
"loss": 1.214, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.727374068139065, |
|
"grad_norm": 1.02187979221344, |
|
"learning_rate": 2.1124540898216248e-05, |
|
"loss": 1.2634, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.7301609419633526, |
|
"grad_norm": 0.9087027311325073, |
|
"learning_rate": 2.0728893374405166e-05, |
|
"loss": 1.1596, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.7329478157876402, |
|
"grad_norm": 1.0457086563110352, |
|
"learning_rate": 2.033601489817738e-05, |
|
"loss": 1.3245, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.7357346896119278, |
|
"grad_norm": 1.012440800666809, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 1.2658, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.7385215634362154, |
|
"grad_norm": 0.9544854164123535, |
|
"learning_rate": 1.9558713488267238e-05, |
|
"loss": 1.2584, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.741308437260503, |
|
"grad_norm": 0.9268321394920349, |
|
"learning_rate": 1.917436408732208e-05, |
|
"loss": 1.2337, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7440953110847907, |
|
"grad_norm": 1.108223557472229, |
|
"learning_rate": 1.8792930792478357e-05, |
|
"loss": 1.2228, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.7468821849090782, |
|
"grad_norm": 1.0709432363510132, |
|
"learning_rate": 1.8414449687337464e-05, |
|
"loss": 1.183, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7496690587333659, |
|
"grad_norm": 0.9613754153251648, |
|
"learning_rate": 1.8038956576223504e-05, |
|
"loss": 1.1697, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.7524559325576534, |
|
"grad_norm": 0.865163266658783, |
|
"learning_rate": 1.766648698079635e-05, |
|
"loss": 1.2347, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7552428063819411, |
|
"grad_norm": 0.9915691614151001, |
|
"learning_rate": 1.7297076136691072e-05, |
|
"loss": 1.3237, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.7580296802062286, |
|
"grad_norm": 1.0180468559265137, |
|
"learning_rate": 1.6930758990184875e-05, |
|
"loss": 1.2416, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7608165540305163, |
|
"grad_norm": 1.0427292585372925, |
|
"learning_rate": 1.6567570194891024e-05, |
|
"loss": 1.225, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7636034278548038, |
|
"grad_norm": 1.0692229270935059, |
|
"learning_rate": 1.620754410848069e-05, |
|
"loss": 1.2287, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.7663903016790915, |
|
"grad_norm": 0.9599437713623047, |
|
"learning_rate": 1.5850714789432663e-05, |
|
"loss": 1.2652, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.769177175503379, |
|
"grad_norm": 0.9924296140670776, |
|
"learning_rate": 1.549711599381145e-05, |
|
"loss": 1.2543, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7719640493276667, |
|
"grad_norm": 0.8805568218231201, |
|
"learning_rate": 1.5146781172073959e-05, |
|
"loss": 1.2086, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.7747509231519543, |
|
"grad_norm": 0.9607245922088623, |
|
"learning_rate": 1.479974346590503e-05, |
|
"loss": 1.2543, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7775377969762419, |
|
"grad_norm": 0.9395803213119507, |
|
"learning_rate": 1.4456035705082349e-05, |
|
"loss": 1.2146, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.7803246708005295, |
|
"grad_norm": 0.9911707043647766, |
|
"learning_rate": 1.4115690404370551e-05, |
|
"loss": 1.2256, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.7831115446248171, |
|
"grad_norm": 0.9199152588844299, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 1.2138, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.7858984184491047, |
|
"grad_norm": 0.9677468538284302, |
|
"learning_rate": 1.344521564884858e-05, |
|
"loss": 1.2739, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.7886852922733923, |
|
"grad_norm": 0.9051083326339722, |
|
"learning_rate": 1.3115149620970795e-05, |
|
"loss": 1.278, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.7914721660976799, |
|
"grad_norm": 1.0139137506484985, |
|
"learning_rate": 1.2788572901068552e-05, |
|
"loss": 1.2309, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.7942590399219676, |
|
"grad_norm": 1.022859811782837, |
|
"learning_rate": 1.2465516383309551e-05, |
|
"loss": 1.2423, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.7970459137462551, |
|
"grad_norm": 0.9269377589225769, |
|
"learning_rate": 1.2146010628850268e-05, |
|
"loss": 1.2685, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.7998327875705428, |
|
"grad_norm": 0.9552863836288452, |
|
"learning_rate": 1.183008586294485e-05, |
|
"loss": 1.2021, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8026196613948303, |
|
"grad_norm": 0.9597859382629395, |
|
"learning_rate": 1.151777197208585e-05, |
|
"loss": 1.215, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.805406535219118, |
|
"grad_norm": 0.9569510817527771, |
|
"learning_rate": 1.1209098501176896e-05, |
|
"loss": 1.2153, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.8081934090434055, |
|
"grad_norm": 0.9744769334793091, |
|
"learning_rate": 1.0904094650737795e-05, |
|
"loss": 1.2153, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.8109802828676932, |
|
"grad_norm": 1.118589997291565, |
|
"learning_rate": 1.0602789274142133e-05, |
|
"loss": 1.2763, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.8137671566919807, |
|
"grad_norm": 0.9778153300285339, |
|
"learning_rate": 1.0305210874887766e-05, |
|
"loss": 1.1713, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.8165540305162684, |
|
"grad_norm": 0.9907205700874329, |
|
"learning_rate": 1.0011387603900385e-05, |
|
"loss": 1.2282, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.819340904340556, |
|
"grad_norm": 0.9918599128723145, |
|
"learning_rate": 9.7213472568704e-06, |
|
"loss": 1.2396, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.8221277781648436, |
|
"grad_norm": 0.9913582801818848, |
|
"learning_rate": 9.435117271623566e-06, |
|
"loss": 1.1384, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.8249146519891312, |
|
"grad_norm": 0.9848992228507996, |
|
"learning_rate": 9.152724725525202e-06, |
|
"loss": 1.2261, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.8277015258134188, |
|
"grad_norm": 0.8803195953369141, |
|
"learning_rate": 8.87419633291886e-06, |
|
"loss": 1.265, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.8304883996377064, |
|
"grad_norm": 1.0013386011123657, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 1.1752, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.833275273461994, |
|
"grad_norm": 1.0229575634002686, |
|
"learning_rate": 8.328837035318448e-06, |
|
"loss": 1.2616, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.8360621472862816, |
|
"grad_norm": 0.93488609790802, |
|
"learning_rate": 8.06205772133063e-06, |
|
"loss": 1.1683, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.8388490211105692, |
|
"grad_norm": 1.0417516231536865, |
|
"learning_rate": 7.799245737966821e-06, |
|
"loss": 1.218, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.8416358949348568, |
|
"grad_norm": 0.9976839423179626, |
|
"learning_rate": 7.540425947248697e-06, |
|
"loss": 1.2297, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.8444227687591445, |
|
"grad_norm": 0.9360842108726501, |
|
"learning_rate": 7.28562283353637e-06, |
|
"loss": 1.2272, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.847209642583432, |
|
"grad_norm": 0.9308948516845703, |
|
"learning_rate": 7.034860501212243e-06, |
|
"loss": 1.228, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8499965164077197, |
|
"grad_norm": 0.9917842149734497, |
|
"learning_rate": 6.788162672400583e-06, |
|
"loss": 1.307, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.8527833902320072, |
|
"grad_norm": 0.9004817605018616, |
|
"learning_rate": 6.5455526847235825e-06, |
|
"loss": 1.236, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.8555702640562949, |
|
"grad_norm": 0.9853699803352356, |
|
"learning_rate": 6.307053489093506e-06, |
|
"loss": 1.2217, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.8583571378805824, |
|
"grad_norm": 0.9549526572227478, |
|
"learning_rate": 6.072687647541553e-06, |
|
"loss": 1.2435, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.8611440117048701, |
|
"grad_norm": 0.9663627743721008, |
|
"learning_rate": 5.842477331083518e-06, |
|
"loss": 1.2514, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.8639308855291576, |
|
"grad_norm": 0.9174144864082336, |
|
"learning_rate": 5.616444317622388e-06, |
|
"loss": 1.1749, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.8667177593534453, |
|
"grad_norm": 0.9564938545227051, |
|
"learning_rate": 5.394609989888161e-06, |
|
"loss": 1.2476, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.8695046331777329, |
|
"grad_norm": 1.1256656646728516, |
|
"learning_rate": 5.176995333415019e-06, |
|
"loss": 1.2506, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8722915070020205, |
|
"grad_norm": 1.0491883754730225, |
|
"learning_rate": 4.963620934556168e-06, |
|
"loss": 1.2388, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.8750783808263081, |
|
"grad_norm": 0.9458408951759338, |
|
"learning_rate": 4.754506978536227e-06, |
|
"loss": 1.2388, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.8778652546505957, |
|
"grad_norm": 0.932964563369751, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 1.2333, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.8806521284748833, |
|
"grad_norm": 0.8364896178245544, |
|
"learning_rate": 4.3491391188503264e-06, |
|
"loss": 1.2139, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.8834390022991709, |
|
"grad_norm": 1.0495798587799072, |
|
"learning_rate": 4.152923562996297e-06, |
|
"loss": 1.1667, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.8862258761234585, |
|
"grad_norm": 0.9140335321426392, |
|
"learning_rate": 3.961045141977376e-06, |
|
"loss": 1.2402, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.8890127499477462, |
|
"grad_norm": 0.9842175245285034, |
|
"learning_rate": 3.773522007498065e-06, |
|
"loss": 1.2425, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.8917996237720337, |
|
"grad_norm": 0.9036964178085327, |
|
"learning_rate": 3.590371899252659e-06, |
|
"loss": 1.2087, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.8945864975963214, |
|
"grad_norm": 0.9229257106781006, |
|
"learning_rate": 3.4116121432469615e-06, |
|
"loss": 1.2167, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.8973733714206089, |
|
"grad_norm": 0.9462814331054688, |
|
"learning_rate": 3.237259650159402e-06, |
|
"loss": 1.2211, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9001602452448966, |
|
"grad_norm": 0.9471161961555481, |
|
"learning_rate": 3.0673309137411564e-06, |
|
"loss": 1.2463, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.9029471190691841, |
|
"grad_norm": 0.9873669147491455, |
|
"learning_rate": 2.9018420092558786e-06, |
|
"loss": 1.207, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.9057339928934718, |
|
"grad_norm": 1.0254420042037964, |
|
"learning_rate": 2.7408085919590264e-06, |
|
"loss": 1.2891, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.9085208667177593, |
|
"grad_norm": 0.9538567662239075, |
|
"learning_rate": 2.584245895616788e-06, |
|
"loss": 1.2533, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.911307740542047, |
|
"grad_norm": 1.055637240409851, |
|
"learning_rate": 2.4321687310650487e-06, |
|
"loss": 1.2598, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.9140946143663345, |
|
"grad_norm": 0.9872303009033203, |
|
"learning_rate": 2.2845914848082127e-06, |
|
"loss": 1.2765, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.9168814881906222, |
|
"grad_norm": 1.0598726272583008, |
|
"learning_rate": 2.1415281176583203e-06, |
|
"loss": 1.2305, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.9196683620149098, |
|
"grad_norm": 0.9400553703308105, |
|
"learning_rate": 2.0029921634142632e-06, |
|
"loss": 1.3019, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.9224552358391974, |
|
"grad_norm": 0.9695894122123718, |
|
"learning_rate": 1.8689967275815679e-06, |
|
"loss": 1.2685, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.925242109663485, |
|
"grad_norm": 0.9889479875564575, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 1.2702, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.9280289834877726, |
|
"grad_norm": 0.9385488033294678, |
|
"learning_rate": 1.614677684307264e-06, |
|
"loss": 1.2255, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.9308158573120602, |
|
"grad_norm": 0.9816272258758545, |
|
"learning_rate": 1.494378135454938e-06, |
|
"loss": 1.2531, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.9336027311363478, |
|
"grad_norm": 0.9538016319274902, |
|
"learning_rate": 1.3786672199165962e-06, |
|
"loss": 1.2487, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.9363896049606354, |
|
"grad_norm": 0.95050448179245, |
|
"learning_rate": 1.2675558839483848e-06, |
|
"loss": 1.1962, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.939176478784923, |
|
"grad_norm": 0.98070228099823, |
|
"learning_rate": 1.1610546386860988e-06, |
|
"loss": 1.2306, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.9419633526092106, |
|
"grad_norm": 1.0162360668182373, |
|
"learning_rate": 1.0591735591507946e-06, |
|
"loss": 1.2259, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.9447502264334983, |
|
"grad_norm": 0.997144341468811, |
|
"learning_rate": 9.619222832957243e-07, |
|
"loss": 1.2142, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.9475371002577858, |
|
"grad_norm": 0.9727258086204529, |
|
"learning_rate": 8.693100110945484e-07, |
|
"loss": 1.2123, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.9503239740820735, |
|
"grad_norm": 0.8900772929191589, |
|
"learning_rate": 7.813455036710715e-07, |
|
"loss": 1.1853, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.953110847906361, |
|
"grad_norm": 0.9536607265472412, |
|
"learning_rate": 6.980370824703763e-07, |
|
"loss": 1.2617, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9558977217306487, |
|
"grad_norm": 1.0575697422027588, |
|
"learning_rate": 6.193926284716711e-07, |
|
"loss": 1.1969, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.9586845955549362, |
|
"grad_norm": 0.9426103234291077, |
|
"learning_rate": 5.454195814427021e-07, |
|
"loss": 1.3009, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9614714693792239, |
|
"grad_norm": 1.020992636680603, |
|
"learning_rate": 4.76124939235989e-07, |
|
"loss": 1.2011, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.9642583432035114, |
|
"grad_norm": 0.9522945284843445, |
|
"learning_rate": 4.1151525712680996e-07, |
|
"loss": 1.1584, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.9670452170277991, |
|
"grad_norm": 1.105689525604248, |
|
"learning_rate": 3.515966471930643e-07, |
|
"loss": 1.2898, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.9698320908520867, |
|
"grad_norm": 1.0255939960479736, |
|
"learning_rate": 2.963747777370907e-07, |
|
"loss": 1.2634, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.9726189646763743, |
|
"grad_norm": 1.0123835802078247, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 1.1778, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.9754058385006619, |
|
"grad_norm": 0.9710216522216797, |
|
"learning_rate": 2.0004171141464467e-07, |
|
"loss": 1.2408, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.9781927123249495, |
|
"grad_norm": 1.0030288696289062, |
|
"learning_rate": 1.589396276591937e-07, |
|
"loss": 1.2336, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.9809795861492371, |
|
"grad_norm": 0.9503644108772278, |
|
"learning_rate": 1.225525097414637e-07, |
|
"loss": 1.1797, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.9837664599735247, |
|
"grad_norm": 1.007887840270996, |
|
"learning_rate": 9.088379988392848e-08, |
|
"loss": 1.2185, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.9865533337978123, |
|
"grad_norm": 1.062198281288147, |
|
"learning_rate": 6.393649394749734e-08, |
|
"loss": 1.2139, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.9893402076221, |
|
"grad_norm": 1.0820635557174683, |
|
"learning_rate": 4.171314114815306e-08, |
|
"loss": 1.2726, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.9921270814463875, |
|
"grad_norm": 0.97090744972229, |
|
"learning_rate": 2.4215843815733607e-08, |
|
"loss": 1.1734, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.9949139552706752, |
|
"grad_norm": 0.9105931520462036, |
|
"learning_rate": 1.1446257195119048e-08, |
|
"loss": 1.2729, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.9977008290949627, |
|
"grad_norm": 0.9542015790939331, |
|
"learning_rate": 3.4055892895901167e-09, |
|
"loss": 1.2552, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.9945850372314453, |
|
"learning_rate": 9.460074656963969e-11, |
|
"loss": 1.3228, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1795, |
|
"total_flos": 1.3861725429517517e+17, |
|
"train_loss": 0.0, |
|
"train_runtime": 0.8982, |
|
"train_samples_per_second": 15978.88, |
|
"train_steps_per_second": 1998.334 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1795, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.3861725429517517e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|