{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1795,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0027868738242876052,
      "grad_norm": 3.341921806335449,
      "learning_rate": 2.2222222222222225e-06,
      "loss": 3.1415,
      "step": 5
    },
    {
      "epoch": 0.0055737476485752105,
      "grad_norm": 3.4862101078033447,
      "learning_rate": 5e-06,
      "loss": 3.04,
      "step": 10
    },
    {
      "epoch": 0.008360621472862817,
      "grad_norm": 3.363572597503662,
      "learning_rate": 7.777777777777777e-06,
      "loss": 3.019,
      "step": 15
    },
    {
      "epoch": 0.011147495297150421,
      "grad_norm": 3.5916693210601807,
      "learning_rate": 1.0555555555555555e-05,
      "loss": 3.073,
      "step": 20
    },
    {
      "epoch": 0.013934369121438027,
      "grad_norm": 3.8997910022735596,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 3.0359,
      "step": 25
    },
    {
      "epoch": 0.016721242945725634,
      "grad_norm": 3.789405345916748,
      "learning_rate": 1.6111111111111115e-05,
      "loss": 2.9568,
      "step": 30
    },
    {
      "epoch": 0.019508116770013236,
      "grad_norm": 2.785924196243286,
      "learning_rate": 1.888888888888889e-05,
      "loss": 2.6633,
      "step": 35
    },
    {
      "epoch": 0.022294990594300842,
      "grad_norm": 2.715925693511963,
      "learning_rate": 2.1666666666666667e-05,
      "loss": 2.3416,
      "step": 40
    },
    {
      "epoch": 0.025081864418588447,
      "grad_norm": 1.851042628288269,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 2.304,
      "step": 45
    },
    {
      "epoch": 0.027868738242876053,
      "grad_norm": 1.1627306938171387,
      "learning_rate": 2.7222222222222223e-05,
      "loss": 2.0913,
      "step": 50
    },
    {
      "epoch": 0.03065561206716366,
      "grad_norm": 0.8980898857116699,
      "learning_rate": 3e-05,
      "loss": 1.9108,
      "step": 55
    },
    {
      "epoch": 0.03344248589145127,
      "grad_norm": 0.741324245929718,
      "learning_rate": 3.277777777777778e-05,
      "loss": 1.989,
      "step": 60
    },
    {
      "epoch": 0.03622935971573887,
      "grad_norm": 0.6890819668769836,
      "learning_rate": 3.555555555555556e-05,
      "loss": 1.8792,
      "step": 65
    },
    {
      "epoch": 0.03901623354002647,
      "grad_norm": 0.6898017525672913,
      "learning_rate": 3.8333333333333334e-05,
      "loss": 1.7895,
      "step": 70
    },
    {
      "epoch": 0.04180310736431408,
      "grad_norm": 0.6752148866653442,
      "learning_rate": 4.111111111111111e-05,
      "loss": 1.8742,
      "step": 75
    },
    {
      "epoch": 0.044589981188601684,
      "grad_norm": 0.6477967500686646,
      "learning_rate": 4.388888888888889e-05,
      "loss": 1.7473,
      "step": 80
    },
    {
      "epoch": 0.04737685501288929,
      "grad_norm": 0.6459152698516846,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.8558,
      "step": 85
    },
    {
      "epoch": 0.050163728837176895,
      "grad_norm": 0.6237108707427979,
      "learning_rate": 4.9444444444444446e-05,
      "loss": 1.7931,
      "step": 90
    },
    {
      "epoch": 0.0529506026614645,
      "grad_norm": 0.6388494968414307,
      "learning_rate": 5.222222222222223e-05,
      "loss": 1.7712,
      "step": 95
    },
    {
      "epoch": 0.055737476485752106,
      "grad_norm": 0.686677098274231,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.7316,
      "step": 100
    },
    {
      "epoch": 0.05852435031003971,
      "grad_norm": 0.612424910068512,
      "learning_rate": 5.7777777777777776e-05,
      "loss": 1.7602,
      "step": 105
    },
    {
      "epoch": 0.06131122413432732,
      "grad_norm": 0.8961934447288513,
      "learning_rate": 6.055555555555555e-05,
      "loss": 1.8706,
      "step": 110
    },
    {
      "epoch": 0.06409809795861493,
      "grad_norm": 0.7244030833244324,
      "learning_rate": 6.333333333333333e-05,
      "loss": 1.7701,
      "step": 115
    },
    {
      "epoch": 0.06688497178290254,
      "grad_norm": 0.6804441809654236,
      "learning_rate": 6.611111111111111e-05,
      "loss": 1.7833,
      "step": 120
    },
    {
      "epoch": 0.06967184560719013,
      "grad_norm": 0.6717159152030945,
      "learning_rate": 6.88888888888889e-05,
      "loss": 1.8445,
      "step": 125
    },
    {
      "epoch": 0.07245871943147773,
      "grad_norm": 0.6639799475669861,
      "learning_rate": 7.166666666666667e-05,
      "loss": 1.7847,
      "step": 130
    },
    {
      "epoch": 0.07524559325576534,
      "grad_norm": 0.7095969915390015,
      "learning_rate": 7.444444444444444e-05,
      "loss": 1.7281,
      "step": 135
    },
    {
      "epoch": 0.07803246708005294,
      "grad_norm": 0.8189035654067993,
      "learning_rate": 7.722222222222223e-05,
      "loss": 1.7678,
      "step": 140
    },
    {
      "epoch": 0.08081934090434055,
      "grad_norm": 0.6709702610969543,
      "learning_rate": 8e-05,
      "loss": 1.8645,
      "step": 145
    },
    {
      "epoch": 0.08360621472862816,
      "grad_norm": 0.743190348148346,
      "learning_rate": 8.277777777777778e-05,
      "loss": 1.8086,
      "step": 150
    },
    {
      "epoch": 0.08639308855291576,
      "grad_norm": 0.669023871421814,
      "learning_rate": 8.555555555555556e-05,
      "loss": 1.7844,
      "step": 155
    },
    {
      "epoch": 0.08917996237720337,
      "grad_norm": 0.8071898221969604,
      "learning_rate": 8.833333333333333e-05,
      "loss": 1.7379,
      "step": 160
    },
    {
      "epoch": 0.09196683620149097,
      "grad_norm": 0.7679445743560791,
      "learning_rate": 9.111111111111112e-05,
      "loss": 1.706,
      "step": 165
    },
    {
      "epoch": 0.09475371002577858,
      "grad_norm": 0.7896930575370789,
      "learning_rate": 9.388888888888889e-05,
      "loss": 1.7752,
      "step": 170
    },
    {
      "epoch": 0.09754058385006618,
      "grad_norm": 0.7258396744728088,
      "learning_rate": 9.666666666666667e-05,
      "loss": 1.7635,
      "step": 175
    },
    {
      "epoch": 0.10032745767435379,
      "grad_norm": 0.8085169196128845,
      "learning_rate": 9.944444444444446e-05,
      "loss": 1.7449,
      "step": 180
    },
    {
      "epoch": 0.1031143314986414,
      "grad_norm": 0.7673636674880981,
      "learning_rate": 9.999848639521432e-05,
      "loss": 1.7286,
      "step": 185
    },
    {
      "epoch": 0.105901205322929,
      "grad_norm": 0.7478857040405273,
      "learning_rate": 9.999233753283091e-05,
      "loss": 1.7203,
      "step": 190
    },
    {
      "epoch": 0.1086880791472166,
      "grad_norm": 0.8994764685630798,
      "learning_rate": 9.998145939378577e-05,
      "loss": 1.6964,
      "step": 195
    },
    {
      "epoch": 0.11147495297150421,
      "grad_norm": 0.7851970791816711,
      "learning_rate": 9.996585300715116e-05,
      "loss": 1.7513,
      "step": 200
    },
    {
      "epoch": 0.11426182679579182,
      "grad_norm": 0.7743813395500183,
      "learning_rate": 9.994551984929175e-05,
      "loss": 1.7099,
      "step": 205
    },
    {
      "epoch": 0.11704870062007942,
      "grad_norm": 0.7080249786376953,
      "learning_rate": 9.992046184372492e-05,
      "loss": 1.7431,
      "step": 210
    },
    {
      "epoch": 0.11983557444436703,
      "grad_norm": 0.744627058506012,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.7159,
      "step": 215
    },
    {
      "epoch": 0.12262244826865464,
      "grad_norm": 0.7715389132499695,
      "learning_rate": 9.985618121816779e-05,
      "loss": 1.7696,
      "step": 220
    },
    {
      "epoch": 0.12540932209294225,
      "grad_norm": 0.7663665413856506,
      "learning_rate": 9.981696467912664e-05,
      "loss": 1.7205,
      "step": 225
    },
    {
      "epoch": 0.12819619591722986,
      "grad_norm": 0.7923425436019897,
      "learning_rate": 9.97730354537011e-05,
      "loss": 1.7136,
      "step": 230
    },
    {
      "epoch": 0.13098306974151747,
      "grad_norm": 0.7929289937019348,
      "learning_rate": 9.972439769759722e-05,
      "loss": 1.6462,
      "step": 235
    },
    {
      "epoch": 0.13376994356580507,
      "grad_norm": 0.845177948474884,
      "learning_rate": 9.967105601194823e-05,
      "loss": 1.8031,
      "step": 240
    },
    {
      "epoch": 0.13655681739009268,
      "grad_norm": 0.7540026307106018,
      "learning_rate": 9.961301544287922e-05,
      "loss": 1.6899,
      "step": 245
    },
    {
      "epoch": 0.13934369121438026,
      "grad_norm": 0.8806189894676208,
      "learning_rate": 9.955028148102979e-05,
      "loss": 1.6526,
      "step": 250
    },
    {
      "epoch": 0.14213056503866786,
      "grad_norm": 0.8206574320793152,
      "learning_rate": 9.948286006103466e-05,
      "loss": 1.8233,
      "step": 255
    },
    {
      "epoch": 0.14491743886295547,
      "grad_norm": 0.8984567523002625,
      "learning_rate": 9.941075756096226e-05,
      "loss": 1.7252,
      "step": 260
    },
    {
      "epoch": 0.14770431268724307,
      "grad_norm": 0.7411919832229614,
      "learning_rate": 9.933398080171123e-05,
      "loss": 1.7339,
      "step": 265
    },
    {
      "epoch": 0.15049118651153068,
      "grad_norm": 0.8030174970626831,
      "learning_rate": 9.925253704636543e-05,
      "loss": 1.6934,
      "step": 270
    },
    {
      "epoch": 0.15327806033581828,
      "grad_norm": 0.7879084944725037,
      "learning_rate": 9.916643399950656e-05,
      "loss": 1.7197,
      "step": 275
    },
    {
      "epoch": 0.1560649341601059,
      "grad_norm": 0.8534114956855774,
      "learning_rate": 9.907567980648549e-05,
      "loss": 1.6941,
      "step": 280
    },
    {
      "epoch": 0.1588518079843935,
      "grad_norm": 0.74381023645401,
      "learning_rate": 9.898028305265169e-05,
      "loss": 1.7919,
      "step": 285
    },
    {
      "epoch": 0.1616386818086811,
      "grad_norm": 0.8476956486701965,
      "learning_rate": 9.888025276254096e-05,
      "loss": 1.7417,
      "step": 290
    },
    {
      "epoch": 0.1644255556329687,
      "grad_norm": 0.8212200403213501,
      "learning_rate": 9.877559839902184e-05,
      "loss": 1.7452,
      "step": 295
    },
    {
      "epoch": 0.1672124294572563,
      "grad_norm": 0.8340197801589966,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.6687,
      "step": 300
    },
    {
      "epoch": 0.16999930328154392,
      "grad_norm": 0.858363151550293,
      "learning_rate": 9.855245748948326e-05,
      "loss": 1.7669,
      "step": 305
    },
    {
      "epoch": 0.17278617710583152,
      "grad_norm": 0.8392812013626099,
      "learning_rate": 9.843399205260068e-05,
      "loss": 1.736,
      "step": 310
    },
    {
      "epoch": 0.17557305093011913,
      "grad_norm": 0.8345137238502502,
      "learning_rate": 9.831094475858652e-05,
      "loss": 1.7264,
      "step": 315
    },
    {
      "epoch": 0.17835992475440673,
      "grad_norm": 0.8243482708930969,
      "learning_rate": 9.818332724771857e-05,
      "loss": 1.7748,
      "step": 320
    },
    {
      "epoch": 0.18114679857869434,
      "grad_norm": 0.8255311846733093,
      "learning_rate": 9.805115159261726e-05,
      "loss": 1.6798,
      "step": 325
    },
    {
      "epoch": 0.18393367240298195,
      "grad_norm": 0.8170781135559082,
      "learning_rate": 9.791443029710361e-05,
      "loss": 1.6542,
      "step": 330
    },
    {
      "epoch": 0.18672054622726955,
      "grad_norm": 0.8476420044898987,
      "learning_rate": 9.777317629501636e-05,
      "loss": 1.6875,
      "step": 335
    },
    {
      "epoch": 0.18950742005155716,
      "grad_norm": 0.7547033429145813,
      "learning_rate": 9.762740294898846e-05,
      "loss": 1.646,
      "step": 340
    },
    {
      "epoch": 0.19229429387584476,
      "grad_norm": 0.826404869556427,
      "learning_rate": 9.747712404918286e-05,
      "loss": 1.6187,
      "step": 345
    },
    {
      "epoch": 0.19508116770013237,
      "grad_norm": 0.964885950088501,
      "learning_rate": 9.732235381198813e-05,
      "loss": 1.7507,
      "step": 350
    },
    {
      "epoch": 0.19786804152441997,
      "grad_norm": 0.8207951188087463,
      "learning_rate": 9.716310687867342e-05,
      "loss": 1.5745,
      "step": 355
    },
    {
      "epoch": 0.20065491534870758,
      "grad_norm": 0.8143700361251831,
      "learning_rate": 9.699939831400351e-05,
      "loss": 1.7754,
      "step": 360
    },
    {
      "epoch": 0.20344178917299519,
      "grad_norm": 0.811844527721405,
      "learning_rate": 9.683124360481364e-05,
      "loss": 1.6993,
      "step": 365
    },
    {
      "epoch": 0.2062286629972828,
      "grad_norm": 0.8975111842155457,
      "learning_rate": 9.665865865854445e-05,
      "loss": 1.6901,
      "step": 370
    },
    {
      "epoch": 0.2090155368215704,
      "grad_norm": 0.7903837561607361,
      "learning_rate": 9.648165980173712e-05,
      "loss": 1.6895,
      "step": 375
    },
    {
      "epoch": 0.211802410645858,
      "grad_norm": 0.8718327879905701,
      "learning_rate": 9.630026377848892e-05,
      "loss": 1.6725,
      "step": 380
    },
    {
      "epoch": 0.2145892844701456,
      "grad_norm": 0.8131148219108582,
      "learning_rate": 9.611448774886924e-05,
      "loss": 1.6874,
      "step": 385
    },
    {
      "epoch": 0.2173761582944332,
      "grad_norm": 0.8331175446510315,
      "learning_rate": 9.592434928729616e-05,
      "loss": 1.7542,
      "step": 390
    },
    {
      "epoch": 0.22016303211872082,
      "grad_norm": 0.792988121509552,
      "learning_rate": 9.572986638087396e-05,
      "loss": 1.7311,
      "step": 395
    },
    {
      "epoch": 0.22294990594300843,
      "grad_norm": 0.7799636721611023,
      "learning_rate": 9.553105742769154e-05,
      "loss": 1.6567,
      "step": 400
    },
    {
      "epoch": 0.22573677976729603,
      "grad_norm": 0.7864108085632324,
      "learning_rate": 9.532794123508197e-05,
      "loss": 1.5611,
      "step": 405
    },
    {
      "epoch": 0.22852365359158364,
      "grad_norm": 0.8886444568634033,
      "learning_rate": 9.512053701784329e-05,
      "loss": 1.6995,
      "step": 410
    },
    {
      "epoch": 0.23131052741587124,
      "grad_norm": 0.7999454736709595,
      "learning_rate": 9.490886439642081e-05,
      "loss": 1.6898,
      "step": 415
    },
    {
      "epoch": 0.23409740124015885,
      "grad_norm": 0.7729182243347168,
      "learning_rate": 9.469294339505098e-05,
      "loss": 1.7204,
      "step": 420
    },
    {
      "epoch": 0.23688427506444645,
      "grad_norm": 0.8300912380218506,
      "learning_rate": 9.447279443986716e-05,
      "loss": 1.6668,
      "step": 425
    },
    {
      "epoch": 0.23967114888873406,
      "grad_norm": 0.840481698513031,
      "learning_rate": 9.424843835696724e-05,
      "loss": 1.704,
      "step": 430
    },
    {
      "epoch": 0.24245802271302166,
      "grad_norm": 0.9396219253540039,
      "learning_rate": 9.401989637044355e-05,
      "loss": 1.6713,
      "step": 435
    },
    {
      "epoch": 0.24524489653730927,
      "grad_norm": 0.8775554299354553,
      "learning_rate": 9.3787190100375e-05,
      "loss": 1.6883,
      "step": 440
    },
    {
      "epoch": 0.24803177036159688,
      "grad_norm": 0.8623490333557129,
      "learning_rate": 9.355034156078188e-05,
      "loss": 1.6783,
      "step": 445
    },
    {
      "epoch": 0.2508186441858845,
      "grad_norm": 0.806533694267273,
      "learning_rate": 9.330937315754329e-05,
      "loss": 1.6853,
      "step": 450
    },
    {
      "epoch": 0.2536055180101721,
      "grad_norm": 0.8504590392112732,
      "learning_rate": 9.306430768627753e-05,
      "loss": 1.6594,
      "step": 455
    },
    {
      "epoch": 0.2563923918344597,
      "grad_norm": 0.9382562637329102,
      "learning_rate": 9.281516833018571e-05,
      "loss": 1.6827,
      "step": 460
    },
    {
      "epoch": 0.2591792656587473,
      "grad_norm": 0.821978747844696,
      "learning_rate": 9.256197865785854e-05,
      "loss": 1.752,
      "step": 465
    },
    {
      "epoch": 0.26196613948303493,
      "grad_norm": 0.8061692714691162,
      "learning_rate": 9.230476262104677e-05,
      "loss": 1.6773,
      "step": 470
    },
    {
      "epoch": 0.26475301330732254,
      "grad_norm": 0.8759399652481079,
      "learning_rate": 9.204354455239539e-05,
      "loss": 1.7003,
      "step": 475
    },
    {
      "epoch": 0.26753988713161014,
      "grad_norm": 0.8721809387207031,
      "learning_rate": 9.177834916314165e-05,
      "loss": 1.6898,
      "step": 480
    },
    {
      "epoch": 0.27032676095589775,
      "grad_norm": 0.7904046773910522,
      "learning_rate": 9.150920154077754e-05,
      "loss": 1.6636,
      "step": 485
    },
    {
      "epoch": 0.27311363478018535,
      "grad_norm": 0.8828450441360474,
      "learning_rate": 9.123612714667634e-05,
      "loss": 1.6945,
      "step": 490
    },
    {
      "epoch": 0.2759005086044729,
      "grad_norm": 0.7389103174209595,
      "learning_rate": 9.095915181368412e-05,
      "loss": 1.7459,
      "step": 495
    },
    {
      "epoch": 0.2786873824287605,
      "grad_norm": 0.975307285785675,
      "learning_rate": 9.067830174367586e-05,
      "loss": 1.7296,
      "step": 500
    },
    {
      "epoch": 0.2814742562530481,
      "grad_norm": 0.812856137752533,
      "learning_rate": 9.039360350507679e-05,
      "loss": 1.696,
      "step": 505
    },
    {
      "epoch": 0.2842611300773357,
      "grad_norm": 0.8609644770622253,
      "learning_rate": 9.010508403034898e-05,
      "loss": 1.6464,
      "step": 510
    },
    {
      "epoch": 0.2870480039016233,
      "grad_norm": 0.8457721471786499,
      "learning_rate": 8.98127706134436e-05,
      "loss": 1.682,
      "step": 515
    },
    {
      "epoch": 0.28983487772591093,
      "grad_norm": 0.8576518297195435,
      "learning_rate": 8.951669090721881e-05,
      "loss": 1.6784,
      "step": 520
    },
    {
      "epoch": 0.29262175155019854,
      "grad_norm": 0.892368495464325,
      "learning_rate": 8.921687292082393e-05,
      "loss": 1.6283,
      "step": 525
    },
    {
      "epoch": 0.29540862537448614,
      "grad_norm": 0.7966616749763489,
      "learning_rate": 8.891334501704962e-05,
      "loss": 1.6898,
      "step": 530
    },
    {
      "epoch": 0.29819549919877375,
      "grad_norm": 0.9272663593292236,
      "learning_rate": 8.86061359096449e-05,
      "loss": 1.6205,
      "step": 535
    },
    {
      "epoch": 0.30098237302306136,
      "grad_norm": 0.8790031671524048,
      "learning_rate": 8.829527466060072e-05,
      "loss": 1.5345,
      "step": 540
    },
    {
      "epoch": 0.30376924684734896,
      "grad_norm": 0.8462222814559937,
      "learning_rate": 8.798079067740077e-05,
      "loss": 1.7664,
      "step": 545
    },
    {
      "epoch": 0.30655612067163657,
      "grad_norm": 0.8935257792472839,
      "learning_rate": 8.766271371023948e-05,
      "loss": 1.6742,
      "step": 550
    },
    {
      "epoch": 0.3093429944959242,
      "grad_norm": 0.9315011501312256,
      "learning_rate": 8.73410738492077e-05,
      "loss": 1.6154,
      "step": 555
    },
    {
      "epoch": 0.3121298683202118,
      "grad_norm": 0.8667712807655334,
      "learning_rate": 8.701590152144612e-05,
      "loss": 1.6356,
      "step": 560
    },
    {
      "epoch": 0.3149167421444994,
      "grad_norm": 0.9088530540466309,
      "learning_rate": 8.668722748826693e-05,
      "loss": 1.6645,
      "step": 565
    },
    {
      "epoch": 0.317703615968787,
      "grad_norm": 0.8751830458641052,
      "learning_rate": 8.635508284224371e-05,
      "loss": 1.7629,
      "step": 570
    },
    {
      "epoch": 0.3204904897930746,
      "grad_norm": 0.8768523335456848,
      "learning_rate": 8.601949900427016e-05,
      "loss": 1.631,
      "step": 575
    },
    {
      "epoch": 0.3232773636173622,
      "grad_norm": 0.9074881672859192,
      "learning_rate": 8.568050772058762e-05,
      "loss": 1.6924,
      "step": 580
    },
    {
      "epoch": 0.3260642374416498,
      "grad_norm": 0.9533477425575256,
      "learning_rate": 8.533814105978191e-05,
      "loss": 1.644,
      "step": 585
    },
    {
      "epoch": 0.3288511112659374,
      "grad_norm": 0.8163946866989136,
      "learning_rate": 8.499243140974966e-05,
      "loss": 1.6587,
      "step": 590
    },
    {
      "epoch": 0.331637985090225,
      "grad_norm": 0.8785070180892944,
      "learning_rate": 8.464341147463431e-05,
      "loss": 1.6666,
      "step": 595
    },
    {
      "epoch": 0.3344248589145126,
      "grad_norm": 0.8283839821815491,
      "learning_rate": 8.429111427173241e-05,
      "loss": 1.5772,
      "step": 600
    },
    {
      "epoch": 0.33721173273880023,
      "grad_norm": 0.8584945797920227,
      "learning_rate": 8.393557312837018e-05,
      "loss": 1.6317,
      "step": 605
    },
    {
      "epoch": 0.33999860656308784,
      "grad_norm": 0.880062997341156,
      "learning_rate": 8.357682167875062e-05,
      "loss": 1.7025,
      "step": 610
    },
    {
      "epoch": 0.34278548038737544,
      "grad_norm": 0.9203423261642456,
      "learning_rate": 8.321489386077192e-05,
      "loss": 1.6741,
      "step": 615
    },
    {
      "epoch": 0.34557235421166305,
      "grad_norm": 0.917999804019928,
      "learning_rate": 8.28498239128167e-05,
      "loss": 1.6954,
      "step": 620
    },
    {
      "epoch": 0.34835922803595065,
      "grad_norm": 0.8408088088035583,
      "learning_rate": 8.248164637051321e-05,
      "loss": 1.6443,
      "step": 625
    },
    {
      "epoch": 0.35114610186023826,
      "grad_norm": 0.908991277217865,
      "learning_rate": 8.211039606346826e-05,
      "loss": 1.6907,
      "step": 630
    },
    {
      "epoch": 0.35393297568452586,
      "grad_norm": 0.9045994877815247,
      "learning_rate": 8.173610811197226e-05,
      "loss": 1.66,
      "step": 635
    },
    {
      "epoch": 0.35671984950881347,
      "grad_norm": 0.8032850623130798,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.6538,
      "step": 640
    },
    {
      "epoch": 0.3595067233331011,
      "grad_norm": 0.9243664741516113,
      "learning_rate": 8.097856119024545e-05,
      "loss": 1.7321,
      "step": 645
    },
    {
      "epoch": 0.3622935971573887,
      "grad_norm": 0.9106388092041016,
      "learning_rate": 8.059537388397665e-05,
      "loss": 1.6566,
      "step": 650
    },
    {
      "epoch": 0.3650804709816763,
      "grad_norm": 0.9164809584617615,
      "learning_rate": 8.020929225440137e-05,
      "loss": 1.6663,
      "step": 655
    },
    {
      "epoch": 0.3678673448059639,
      "grad_norm": 0.9171935319900513,
      "learning_rate": 7.98203528248536e-05,
      "loss": 1.6781,
      "step": 660
    },
    {
      "epoch": 0.3706542186302515,
      "grad_norm": 0.8433538675308228,
      "learning_rate": 7.942859238901528e-05,
      "loss": 1.7255,
      "step": 665
    },
    {
      "epoch": 0.3734410924545391,
      "grad_norm": 0.9780874252319336,
      "learning_rate": 7.903404800743564e-05,
      "loss": 1.722,
      "step": 670
    },
    {
      "epoch": 0.3762279662788267,
      "grad_norm": 0.9980689287185669,
      "learning_rate": 7.863675700402526e-05,
      "loss": 1.6403,
      "step": 675
    },
    {
      "epoch": 0.3790148401031143,
      "grad_norm": 0.843333899974823,
      "learning_rate": 7.823675696252524e-05,
      "loss": 1.7583,
      "step": 680
    },
    {
      "epoch": 0.3818017139274019,
      "grad_norm": 0.8417442440986633,
      "learning_rate": 7.783408572295174e-05,
      "loss": 1.6938,
      "step": 685
    },
    {
      "epoch": 0.3845885877516895,
      "grad_norm": 0.8940498232841492,
      "learning_rate": 7.742878137801639e-05,
      "loss": 1.6737,
      "step": 690
    },
    {
      "epoch": 0.38737546157597713,
      "grad_norm": 0.8713932633399963,
      "learning_rate": 7.702088226952258e-05,
      "loss": 1.6435,
      "step": 695
    },
    {
      "epoch": 0.39016233540026474,
      "grad_norm": 0.9215396046638489,
      "learning_rate": 7.661042698473853e-05,
      "loss": 1.5994,
      "step": 700
    },
    {
      "epoch": 0.39294920922455234,
      "grad_norm": 0.8360981941223145,
      "learning_rate": 7.619745435274667e-05,
      "loss": 1.6511,
      "step": 705
    },
    {
      "epoch": 0.39573608304883995,
      "grad_norm": 0.9177049398422241,
      "learning_rate": 7.578200344077073e-05,
      "loss": 1.6655,
      "step": 710
    },
    {
      "epoch": 0.39852295687312755,
      "grad_norm": 0.8501256108283997,
      "learning_rate": 7.536411355047964e-05,
      "loss": 1.659,
      "step": 715
    },
    {
      "epoch": 0.40130983069741516,
      "grad_norm": 0.8666147589683533,
      "learning_rate": 7.494382421426984e-05,
      "loss": 1.7046,
      "step": 720
    },
    {
      "epoch": 0.40409670452170277,
      "grad_norm": 0.8096281290054321,
      "learning_rate": 7.452117519152542e-05,
      "loss": 1.6108,
      "step": 725
    },
    {
      "epoch": 0.40688357834599037,
      "grad_norm": 1.0028629302978516,
      "learning_rate": 7.409620646485685e-05,
      "loss": 1.6302,
      "step": 730
    },
    {
      "epoch": 0.409670452170278,
      "grad_norm": 0.8731248378753662,
      "learning_rate": 7.36689582363187e-05,
      "loss": 1.6784,
      "step": 735
    },
    {
      "epoch": 0.4124573259945656,
      "grad_norm": 0.9034931659698486,
      "learning_rate": 7.323947092360649e-05,
      "loss": 1.5845,
      "step": 740
    },
    {
      "epoch": 0.4152441998188532,
      "grad_norm": 0.9129267334938049,
      "learning_rate": 7.280778515623314e-05,
      "loss": 1.7366,
      "step": 745
    },
    {
      "epoch": 0.4180310736431408,
      "grad_norm": 0.9088048338890076,
      "learning_rate": 7.237394177168548e-05,
      "loss": 1.7065,
      "step": 750
    },
    {
      "epoch": 0.4208179474674284,
      "grad_norm": 0.9397730827331543,
      "learning_rate": 7.193798181156095e-05,
      "loss": 1.7012,
      "step": 755
    },
    {
      "epoch": 0.423604821291716,
      "grad_norm": 0.9324731230735779,
      "learning_rate": 7.149994651768514e-05,
      "loss": 1.5474,
      "step": 760
    },
    {
      "epoch": 0.4263916951160036,
      "grad_norm": 0.9360097646713257,
      "learning_rate": 7.10598773282103e-05,
      "loss": 1.6602,
      "step": 765
    },
    {
      "epoch": 0.4291785689402912,
      "grad_norm": 0.9703853130340576,
      "learning_rate": 7.061781587369519e-05,
      "loss": 1.7305,
      "step": 770
    },
    {
      "epoch": 0.4319654427645788,
      "grad_norm": 0.9043810367584229,
      "learning_rate": 7.017380397316695e-05,
      "loss": 1.6131,
      "step": 775
    },
    {
      "epoch": 0.4347523165888664,
      "grad_norm": 0.8870768547058105,
      "learning_rate": 6.972788363016497e-05,
      "loss": 1.5668,
      "step": 780
    },
    {
      "epoch": 0.43753919041315403,
      "grad_norm": 0.9383454918861389,
      "learning_rate": 6.92800970287674e-05,
      "loss": 1.6773,
      "step": 785
    },
    {
      "epoch": 0.44032606423744164,
      "grad_norm": 0.8532437682151794,
      "learning_rate": 6.883048652960038e-05,
      "loss": 1.6602,
      "step": 790
    },
    {
      "epoch": 0.44311293806172924,
      "grad_norm": 0.8915623426437378,
      "learning_rate": 6.837909466583095e-05,
      "loss": 1.6778,
      "step": 795
    },
    {
      "epoch": 0.44589981188601685,
      "grad_norm": 0.840151846408844,
      "learning_rate": 6.792596413914324e-05,
      "loss": 1.5566,
      "step": 800
    },
    {
      "epoch": 0.44868668571030446,
      "grad_norm": 0.8808228969573975,
      "learning_rate": 6.747113781569892e-05,
      "loss": 1.6819,
      "step": 805
    },
    {
      "epoch": 0.45147355953459206,
      "grad_norm": 0.9224216938018799,
      "learning_rate": 6.701465872208216e-05,
      "loss": 1.7057,
      "step": 810
    },
    {
      "epoch": 0.45426043335887967,
      "grad_norm": 1.0062270164489746,
      "learning_rate": 6.655657004122916e-05,
      "loss": 1.719,
      "step": 815
    },
    {
      "epoch": 0.4570473071831673,
      "grad_norm": 0.9544162154197693,
      "learning_rate": 6.60969151083432e-05,
      "loss": 1.657,
      "step": 820
    },
    {
      "epoch": 0.4598341810074549,
      "grad_norm": 0.8740416169166565,
      "learning_rate": 6.563573740679496e-05,
      "loss": 1.645,
      "step": 825
    },
    {
      "epoch": 0.4626210548317425,
      "grad_norm": 0.8866944313049316,
      "learning_rate": 6.517308056400917e-05,
      "loss": 1.6446,
      "step": 830
    },
    {
      "epoch": 0.4654079286560301,
      "grad_norm": 0.9224971532821655,
      "learning_rate": 6.470898834733731e-05,
      "loss": 1.6384,
      "step": 835
    },
    {
      "epoch": 0.4681948024803177,
      "grad_norm": 0.8925870656967163,
      "learning_rate": 6.42435046599173e-05,
      "loss": 1.6455,
      "step": 840
    },
    {
      "epoch": 0.4709816763046053,
      "grad_norm": 0.9226738810539246,
      "learning_rate": 6.377667353652022e-05,
      "loss": 1.6799,
      "step": 845
    },
    {
      "epoch": 0.4737685501288929,
      "grad_norm": 0.9104128479957581,
      "learning_rate": 6.330853913938466e-05,
      "loss": 1.6267,
      "step": 850
    },
    {
      "epoch": 0.4765554239531805,
      "grad_norm": 0.912969708442688,
      "learning_rate": 6.283914575403888e-05,
      "loss": 1.716,
      "step": 855
    },
    {
      "epoch": 0.4793422977774681,
      "grad_norm": 0.9142662882804871,
      "learning_rate": 6.236853778511156e-05,
      "loss": 1.6619,
      "step": 860
    },
    {
      "epoch": 0.4821291716017557,
      "grad_norm": 0.9380782842636108,
      "learning_rate": 6.189675975213094e-05,
      "loss": 1.6578,
      "step": 865
    },
    {
      "epoch": 0.48491604542604333,
      "grad_norm": 0.9225218296051025,
      "learning_rate": 6.142385628531342e-05,
      "loss": 1.6891,
      "step": 870
    },
    {
      "epoch": 0.48770291925033094,
      "grad_norm": 0.9732077121734619,
      "learning_rate": 6.09498721213414e-05,
      "loss": 1.6008,
      "step": 875
    },
    {
      "epoch": 0.49048979307461854,
      "grad_norm": 0.9870405197143555,
      "learning_rate": 6.047485209913137e-05,
      "loss": 1.7372,
      "step": 880
    },
    {
      "epoch": 0.49327666689890615,
      "grad_norm": 0.8708714246749878,
      "learning_rate": 5.999884115559192e-05,
      "loss": 1.6317,
      "step": 885
    },
    {
      "epoch": 0.49606354072319375,
      "grad_norm": 0.8490287065505981,
      "learning_rate": 5.952188432137293e-05,
      "loss": 1.6514,
      "step": 890
    },
    {
      "epoch": 0.49885041454748136,
      "grad_norm": 0.7740074992179871,
      "learning_rate": 5.90440267166055e-05,
      "loss": 1.6091,
      "step": 895
    },
    {
      "epoch": 0.501637288371769,
      "grad_norm": 0.9218555092811584,
      "learning_rate": 5.8565313546633684e-05,
      "loss": 1.674,
      "step": 900
    },
    {
      "epoch": 0.5044241621960566,
      "grad_norm": 0.8074113726615906,
      "learning_rate": 5.8085790097738025e-05,
      "loss": 1.6567,
      "step": 905
    },
    {
      "epoch": 0.5072110360203442,
      "grad_norm": 0.9320290684700012,
      "learning_rate": 5.7605501732851475e-05,
      "loss": 1.6925,
      "step": 910
    },
    {
      "epoch": 0.5099979098446318,
      "grad_norm": 0.8326326012611389,
      "learning_rate": 5.712449388726807e-05,
      "loss": 1.6909,
      "step": 915
    },
    {
      "epoch": 0.5127847836689194,
      "grad_norm": 0.9022158980369568,
      "learning_rate": 5.664281206434472e-05,
      "loss": 1.6075,
      "step": 920
    },
    {
      "epoch": 0.515571657493207,
      "grad_norm": 0.8791519999504089,
      "learning_rate": 5.616050183119663e-05,
      "loss": 1.7011,
      "step": 925
    },
    {
      "epoch": 0.5183585313174947,
      "grad_norm": 0.876043438911438,
      "learning_rate": 5.5677608814386616e-05,
      "loss": 1.5478,
      "step": 930
    },
    {
      "epoch": 0.5211454051417822,
      "grad_norm": 0.9437665939331055,
      "learning_rate": 5.519417869560889e-05,
      "loss": 1.6518,
      "step": 935
    },
    {
      "epoch": 0.5239322789660699,
      "grad_norm": 0.9788090586662292,
      "learning_rate": 5.471025720736747e-05,
      "loss": 1.7146,
      "step": 940
    },
    {
      "epoch": 0.5267191527903574,
      "grad_norm": 0.861488938331604,
      "learning_rate": 5.422589012864996e-05,
      "loss": 1.6805,
      "step": 945
    },
    {
      "epoch": 0.5295060266146451,
      "grad_norm": 1.008576512336731,
      "learning_rate": 5.3741123280596864e-05,
      "loss": 1.6012,
      "step": 950
    },
    {
      "epoch": 0.5322929004389326,
      "grad_norm": 0.9095168113708496,
      "learning_rate": 5.325600252216685e-05,
      "loss": 1.6477,
      "step": 955
    },
    {
      "epoch": 0.5350797742632203,
      "grad_norm": 0.8760788440704346,
      "learning_rate": 5.27705737457985e-05,
      "loss": 1.5747,
      "step": 960
    },
    {
      "epoch": 0.5378666480875078,
      "grad_norm": 0.9696391224861145,
      "learning_rate": 5.228488287306896e-05,
      "loss": 1.6249,
      "step": 965
    },
    {
      "epoch": 0.5406535219117955,
      "grad_norm": 0.8726052641868591,
      "learning_rate": 5.179897585034963e-05,
      "loss": 1.6701,
      "step": 970
    },
    {
      "epoch": 0.543440395736083,
      "grad_norm": 0.8699939846992493,
      "learning_rate": 5.1312898644459776e-05,
      "loss": 1.5395,
      "step": 975
    },
    {
      "epoch": 0.5462272695603707,
      "grad_norm": 0.9301031827926636,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 1.6724,
      "step": 980
    },
    {
      "epoch": 0.5490141433846583,
      "grad_norm": 0.8767033815383911,
      "learning_rate": 5.0340417626592016e-05,
      "loss": 1.6492,
      "step": 985
    },
    {
      "epoch": 0.5518010172089458,
      "grad_norm": 0.9239912629127502,
      "learning_rate": 4.9854105811348216e-05,
      "loss": 1.6705,
      "step": 990
    },
    {
      "epoch": 0.5545878910332335,
      "grad_norm": 1.0233875513076782,
      "learning_rate": 4.936780779769913e-05,
      "loss": 1.6758,
      "step": 995
    },
    {
      "epoch": 0.557374764857521,
      "grad_norm": 0.9696400165557861,
      "learning_rate": 4.888156958945174e-05,
      "loss": 1.6276,
      "step": 1000
    },
    {
      "epoch": 0.5601616386818087,
      "grad_norm": 0.9741013646125793,
      "learning_rate": 4.839543718475543e-05,
      "loss": 1.6241,
      "step": 1005
    },
    {
      "epoch": 0.5629485125060962,
      "grad_norm": 0.9259172081947327,
      "learning_rate": 4.790945657175061e-05,
      "loss": 1.6596,
      "step": 1010
    },
    {
      "epoch": 0.5657353863303839,
      "grad_norm": 0.9725984930992126,
      "learning_rate": 4.742367372421811e-05,
      "loss": 1.6663,
      "step": 1015
    },
    {
      "epoch": 0.5685222601546714,
      "grad_norm": 1.0007803440093994,
      "learning_rate": 4.69381345972302e-05,
      "loss": 1.6159,
      "step": 1020
    },
    {
      "epoch": 0.5713091339789591,
      "grad_norm": 0.9005650877952576,
      "learning_rate": 4.6452885122803205e-05,
      "loss": 1.5989,
      "step": 1025
    },
    {
      "epoch": 0.5740960078032467,
      "grad_norm": 0.8858962655067444,
      "learning_rate": 4.5967971205552194e-05,
      "loss": 1.6609,
      "step": 1030
    },
    {
      "epoch": 0.5768828816275343,
      "grad_norm": 0.945851743221283,
      "learning_rate": 4.548343871834864e-05,
      "loss": 1.6605,
      "step": 1035
    },
    {
      "epoch": 0.5796697554518219,
      "grad_norm": 0.9151830077171326,
      "learning_rate": 4.499933349798067e-05,
      "loss": 1.678,
      "step": 1040
    },
    {
      "epoch": 0.5824566292761095,
      "grad_norm": 0.9208435416221619,
      "learning_rate": 4.451570134081694e-05,
      "loss": 1.7427,
      "step": 1045
    },
    {
      "epoch": 0.5852435031003971,
      "grad_norm": 0.9141213893890381,
      "learning_rate": 4.403258799847433e-05,
      "loss": 1.62,
      "step": 1050
    },
    {
      "epoch": 0.5880303769246847,
      "grad_norm": 1.0249472856521606,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 1.634,
      "step": 1055
    },
    {
      "epoch": 0.5908172507489723,
      "grad_norm": 0.8938164114952087,
      "learning_rate": 4.306810051499708e-05,
      "loss": 1.562,
      "step": 1060
    },
    {
      "epoch": 0.59360412457326,
      "grad_norm": 1.0070486068725586,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 1.6713,
      "step": 1065
    },
    {
      "epoch": 0.5963909983975475,
      "grad_norm": 0.8382595777511597,
      "learning_rate": 4.210623600109946e-05,
      "loss": 1.5343,
      "step": 1070
    },
    {
      "epoch": 0.5991778722218352,
      "grad_norm": 0.9747705459594727,
      "learning_rate": 4.162640113810706e-05,
      "loss": 1.67,
      "step": 1075
    },
    {
      "epoch": 0.6019647460461227,
      "grad_norm": 0.9397943019866943,
      "learning_rate": 4.114735841782347e-05,
      "loss": 1.705,
      "step": 1080
    },
    {
      "epoch": 0.6047516198704104,
      "grad_norm": 0.9868884086608887,
      "learning_rate": 4.06691531577047e-05,
      "loss": 1.6343,
      "step": 1085
    },
    {
      "epoch": 0.6075384936946979,
      "grad_norm": 0.9283191561698914,
      "learning_rate": 4.019183059598296e-05,
      "loss": 1.6226,
      "step": 1090
    },
    {
      "epoch": 0.6103253675189856,
      "grad_norm": 0.8564023971557617,
      "learning_rate": 3.971543588738724e-05,
      "loss": 1.6206,
      "step": 1095
    },
    {
      "epoch": 0.6131122413432731,
      "grad_norm": 0.9272263050079346,
      "learning_rate": 3.924001409887158e-05,
      "loss": 1.5952,
      "step": 1100
    },
    {
      "epoch": 0.6158991151675608,
      "grad_norm": 0.9421799778938293,
      "learning_rate": 3.87656102053517e-05,
      "loss": 1.6356,
      "step": 1105
    },
    {
      "epoch": 0.6186859889918483,
      "grad_norm": 0.918296217918396,
      "learning_rate": 3.8292269085450474e-05,
      "loss": 1.6143,
      "step": 1110
    },
    {
      "epoch": 0.621472862816136,
      "grad_norm": 0.9440540671348572,
      "learning_rate": 3.782003551725236e-05,
      "loss": 1.6928,
      "step": 1115
    },
    {
      "epoch": 0.6242597366404236,
      "grad_norm": 0.8987861275672913,
      "learning_rate": 3.734895417406734e-05,
      "loss": 1.6571,
      "step": 1120
    },
    {
      "epoch": 0.6270466104647112,
      "grad_norm": 0.9039310216903687,
      "learning_rate": 3.687906962020491e-05,
      "loss": 1.6862,
      "step": 1125
    },
    {
      "epoch": 0.6298334842889988,
      "grad_norm": 0.9794312119483948,
      "learning_rate": 3.641042630675829e-05,
      "loss": 1.6843,
      "step": 1130
    },
    {
      "epoch": 0.6326203581132864,
      "grad_norm": 0.9151976704597473,
      "learning_rate": 3.594306856739924e-05,
      "loss": 1.602,
      "step": 1135
    },
    {
      "epoch": 0.635407231937574,
      "grad_norm": 0.9147221446037292,
      "learning_rate": 3.547704061418424e-05,
      "loss": 1.6073,
      "step": 1140
    },
    {
      "epoch": 0.6381941057618616,
      "grad_norm": 0.9955829977989197,
      "learning_rate": 3.501238653337194e-05,
      "loss": 1.6535,
      "step": 1145
    },
    {
      "epoch": 0.6409809795861492,
      "grad_norm": 1.0493595600128174,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.6979,
      "step": 1150
    },
    {
      "epoch": 0.6437678534104369,
      "grad_norm": 0.9534896016120911,
      "learning_rate": 3.408737567998993e-05,
      "loss": 1.5449,
      "step": 1155
    },
    {
      "epoch": 0.6465547272347244,
      "grad_norm": 0.9850942492485046,
      "learning_rate": 3.362710641347524e-05,
      "loss": 1.6989,
      "step": 1160
    },
    {
      "epoch": 0.6493416010590121,
      "grad_norm": 0.9439795017242432,
      "learning_rate": 3.316838602319532e-05,
      "loss": 1.6826,
      "step": 1165
    },
    {
      "epoch": 0.6521284748832996,
      "grad_norm": 1.0012317895889282,
      "learning_rate": 3.271125790411309e-05,
      "loss": 1.524,
      "step": 1170
    },
    {
      "epoch": 0.6549153487075873,
      "grad_norm": 0.9746808409690857,
      "learning_rate": 3.225576530056264e-05,
      "loss": 1.6796,
      "step": 1175
    },
    {
      "epoch": 0.6577022225318748,
      "grad_norm": 0.8482115864753723,
      "learning_rate": 3.180195130215824e-05,
      "loss": 1.6443,
      "step": 1180
    },
    {
      "epoch": 0.6604890963561625,
      "grad_norm": 1.0496842861175537,
      "learning_rate": 3.1349858839717986e-05,
      "loss": 1.586,
      "step": 1185
    },
    {
      "epoch": 0.66327597018045,
      "grad_norm": 0.8761202692985535,
      "learning_rate": 3.089953068120271e-05,
      "loss": 1.5516,
      "step": 1190
    },
    {
      "epoch": 0.6660628440047377,
      "grad_norm": 1.058709740638733,
      "learning_rate": 3.0451009427669986e-05,
      "loss": 1.611,
      "step": 1195
    },
    {
      "epoch": 0.6688497178290252,
      "grad_norm": 0.9337294101715088,
      "learning_rate": 3.000433750924414e-05,
      "loss": 1.6324,
      "step": 1200
    },
    {
      "epoch": 0.6716365916533129,
      "grad_norm": 0.9224420785903931,
      "learning_rate": 2.9559557181102315e-05,
      "loss": 1.6494,
      "step": 1205
    },
    {
      "epoch": 0.6744234654776005,
      "grad_norm": 1.1299585103988647,
      "learning_rate": 2.911671051947722e-05,
      "loss": 1.5891,
      "step": 1210
    },
    {
      "epoch": 0.6772103393018881,
      "grad_norm": 0.9998091459274292,
      "learning_rate": 2.867583941767657e-05,
      "loss": 1.5622,
      "step": 1215
    },
    {
      "epoch": 0.6799972131261757,
      "grad_norm": 0.9196699261665344,
      "learning_rate": 2.823698558212009e-05,
      "loss": 1.6298,
      "step": 1220
    },
    {
      "epoch": 0.6827840869504633,
      "grad_norm": 0.9296634793281555,
      "learning_rate": 2.7800190528394122e-05,
      "loss": 1.6035,
      "step": 1225
    },
    {
      "epoch": 0.6855709607747509,
      "grad_norm": 0.8991268873214722,
      "learning_rate": 2.736549557732405e-05,
      "loss": 1.5498,
      "step": 1230
    },
    {
      "epoch": 0.6883578345990385,
      "grad_norm": 0.9545588493347168,
      "learning_rate": 2.693294185106562e-05,
      "loss": 1.7968,
      "step": 1235
    },
    {
      "epoch": 0.6911447084233261,
      "grad_norm": 0.9947872757911682,
      "learning_rate": 2.650257026921455e-05,
      "loss": 1.673,
      "step": 1240
    },
    {
      "epoch": 0.6939315822476138,
      "grad_norm": 1.013684630393982,
      "learning_rate": 2.607442154493568e-05,
      "loss": 1.6995,
      "step": 1245
    },
    {
      "epoch": 0.6967184560719013,
      "grad_norm": 0.9432601928710938,
      "learning_rate": 2.5648536181111438e-05,
      "loss": 1.6238,
      "step": 1250
    },
    {
      "epoch": 0.699505329896189,
      "grad_norm": 0.9840952157974243,
      "learning_rate": 2.5224954466510274e-05,
      "loss": 1.6116,
      "step": 1255
    },
    {
      "epoch": 0.7022922037204765,
      "grad_norm": 0.9281061291694641,
      "learning_rate": 2.480371647197538e-05,
      "loss": 1.6705,
      "step": 1260
    },
    {
      "epoch": 0.7050790775447642,
      "grad_norm": 0.9922870397567749,
      "learning_rate": 2.438486204663391e-05,
      "loss": 1.6893,
      "step": 1265
    },
    {
      "epoch": 0.7078659513690517,
      "grad_norm": 0.9433642029762268,
      "learning_rate": 2.3968430814127385e-05,
      "loss": 1.674,
      "step": 1270
    },
    {
      "epoch": 0.7106528251933394,
      "grad_norm": 0.9409830570220947,
      "learning_rate": 2.3554462168863085e-05,
      "loss": 1.6519,
      "step": 1275
    },
    {
      "epoch": 0.7134396990176269,
      "grad_norm": 0.9217763543128967,
      "learning_rate": 2.314299527228759e-05,
      "loss": 1.6384,
      "step": 1280
    },
    {
      "epoch": 0.7162265728419146,
      "grad_norm": 0.8798460364341736,
      "learning_rate": 2.2734069049181882e-05,
      "loss": 1.6243,
      "step": 1285
    },
    {
      "epoch": 0.7190134466662021,
      "grad_norm": 0.9369103908538818,
      "learning_rate": 2.2327722183979212e-05,
      "loss": 1.6331,
      "step": 1290
    },
    {
      "epoch": 0.7218003204904898,
      "grad_norm": 0.9402239918708801,
      "learning_rate": 2.1923993117105462e-05,
      "loss": 1.6124,
      "step": 1295
    },
    {
      "epoch": 0.7245871943147774,
      "grad_norm": 1.0770180225372314,
      "learning_rate": 2.1522920041342704e-05,
      "loss": 1.6276,
      "step": 1300
    },
    {
      "epoch": 0.727374068139065,
      "grad_norm": 0.9942741990089417,
      "learning_rate": 2.1124540898216248e-05,
      "loss": 1.6543,
      "step": 1305
    },
    {
      "epoch": 0.7301609419633526,
      "grad_norm": 0.9097809195518494,
      "learning_rate": 2.0728893374405166e-05,
      "loss": 1.5341,
      "step": 1310
    },
    {
      "epoch": 0.7329478157876402,
      "grad_norm": 0.9943116307258606,
      "learning_rate": 2.033601489817738e-05,
      "loss": 1.7465,
      "step": 1315
    },
    {
      "epoch": 0.7357346896119278,
      "grad_norm": 0.9960694313049316,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 1.6835,
      "step": 1320
    },
    {
      "epoch": 0.7385215634362154,
      "grad_norm": 0.9358818531036377,
      "learning_rate": 1.9558713488267238e-05,
      "loss": 1.6585,
      "step": 1325
    },
    {
      "epoch": 0.741308437260503,
      "grad_norm": 0.9540205597877502,
      "learning_rate": 1.917436408732208e-05,
      "loss": 1.6298,
      "step": 1330
    },
    {
      "epoch": 0.7440953110847907,
      "grad_norm": 1.0423612594604492,
      "learning_rate": 1.8792930792478357e-05,
      "loss": 1.6291,
      "step": 1335
    },
    {
      "epoch": 0.7468821849090782,
      "grad_norm": 1.0178053379058838,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 1.5844,
      "step": 1340
    },
    {
      "epoch": 0.7496690587333659,
      "grad_norm": 0.9405874013900757,
      "learning_rate": 1.8038956576223504e-05,
      "loss": 1.5531,
      "step": 1345
    },
    {
      "epoch": 0.7524559325576534,
      "grad_norm": 0.9561493396759033,
      "learning_rate": 1.766648698079635e-05,
      "loss": 1.6504,
      "step": 1350
    },
    {
      "epoch": 0.7552428063819411,
      "grad_norm": 0.9773407578468323,
      "learning_rate": 1.7297076136691072e-05,
      "loss": 1.7289,
      "step": 1355
    },
    {
      "epoch": 0.7580296802062286,
      "grad_norm": 1.024316430091858,
      "learning_rate": 1.6930758990184875e-05,
      "loss": 1.6495,
      "step": 1360
    },
    {
      "epoch": 0.7608165540305163,
      "grad_norm": 1.1461398601531982,
      "learning_rate": 1.6567570194891024e-05,
      "loss": 1.63,
      "step": 1365
    },
    {
      "epoch": 0.7636034278548038,
      "grad_norm": 1.1386655569076538,
      "learning_rate": 1.620754410848069e-05,
      "loss": 1.647,
      "step": 1370
    },
    {
      "epoch": 0.7663903016790915,
      "grad_norm": 0.9157811999320984,
      "learning_rate": 1.5850714789432663e-05,
      "loss": 1.659,
      "step": 1375
    },
    {
      "epoch": 0.769177175503379,
      "grad_norm": 1.0108404159545898,
      "learning_rate": 1.549711599381145e-05,
      "loss": 1.6671,
      "step": 1380
    },
    {
      "epoch": 0.7719640493276667,
      "grad_norm": 0.8621100783348083,
      "learning_rate": 1.5146781172073959e-05,
      "loss": 1.6478,
      "step": 1385
    },
    {
      "epoch": 0.7747509231519543,
      "grad_norm": 0.957285463809967,
      "learning_rate": 1.479974346590503e-05,
      "loss": 1.6313,
      "step": 1390
    },
    {
      "epoch": 0.7775377969762419,
      "grad_norm": 0.9681037068367004,
      "learning_rate": 1.4456035705082349e-05,
      "loss": 1.5881,
      "step": 1395
    },
    {
      "epoch": 0.7803246708005295,
      "grad_norm": 0.9414888620376587,
      "learning_rate": 1.4115690404370551e-05,
      "loss": 1.6011,
      "step": 1400
    },
    {
      "epoch": 0.7831115446248171,
      "grad_norm": 0.9640589952468872,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 1.623,
      "step": 1405
    },
    {
      "epoch": 0.7858984184491047,
      "grad_norm": 0.9019273519515991,
      "learning_rate": 1.344521564884858e-05,
      "loss": 1.6779,
      "step": 1410
    },
    {
      "epoch": 0.7886852922733923,
      "grad_norm": 0.9059531688690186,
      "learning_rate": 1.3115149620970795e-05,
      "loss": 1.6783,
      "step": 1415
    },
    {
      "epoch": 0.7914721660976799,
      "grad_norm": 0.9945409297943115,
      "learning_rate": 1.2788572901068552e-05,
      "loss": 1.629,
      "step": 1420
    },
    {
      "epoch": 0.7942590399219676,
      "grad_norm": 1.0185166597366333,
      "learning_rate": 1.2465516383309551e-05,
      "loss": 1.6547,
      "step": 1425
    },
    {
      "epoch": 0.7970459137462551,
      "grad_norm": 1.0051592588424683,
      "learning_rate": 1.2146010628850268e-05,
      "loss": 1.7016,
      "step": 1430
    },
    {
      "epoch": 0.7998327875705428,
      "grad_norm": 0.9881749749183655,
      "learning_rate": 1.183008586294485e-05,
      "loss": 1.5857,
      "step": 1435
    },
    {
      "epoch": 0.8026196613948303,
      "grad_norm": 0.9611209630966187,
      "learning_rate": 1.151777197208585e-05,
      "loss": 1.6027,
      "step": 1440
    },
    {
      "epoch": 0.805406535219118,
      "grad_norm": 1.0034438371658325,
      "learning_rate": 1.1209098501176896e-05,
      "loss": 1.6083,
      "step": 1445
    },
    {
      "epoch": 0.8081934090434055,
      "grad_norm": 0.9777964949607849,
      "learning_rate": 1.0904094650737795e-05,
      "loss": 1.6016,
      "step": 1450
    },
    {
      "epoch": 0.8109802828676932,
      "grad_norm": 1.0084513425827026,
      "learning_rate": 1.0602789274142133e-05,
      "loss": 1.6998,
      "step": 1455
    },
    {
      "epoch": 0.8137671566919807,
      "grad_norm": 0.9777223467826843,
      "learning_rate": 1.0305210874887766e-05,
      "loss": 1.5668,
      "step": 1460
    },
    {
      "epoch": 0.8165540305162684,
      "grad_norm": 0.9857009649276733,
      "learning_rate": 1.0011387603900385e-05,
      "loss": 1.5977,
      "step": 1465
    },
    {
      "epoch": 0.819340904340556,
      "grad_norm": 1.0366628170013428,
      "learning_rate": 9.7213472568704e-06,
      "loss": 1.6536,
      "step": 1470
    },
    {
      "epoch": 0.8221277781648436,
      "grad_norm": 0.9983012080192566,
      "learning_rate": 9.435117271623566e-06,
      "loss": 1.4905,
      "step": 1475
    },
    {
      "epoch": 0.8249146519891312,
      "grad_norm": 1.042453646659851,
      "learning_rate": 9.152724725525202e-06,
      "loss": 1.636,
      "step": 1480
    },
    {
      "epoch": 0.8277015258134188,
      "grad_norm": 0.945472776889801,
      "learning_rate": 8.87419633291886e-06,
      "loss": 1.651,
      "step": 1485
    },
    {
      "epoch": 0.8304883996377064,
      "grad_norm": 1.0160605907440186,
      "learning_rate": 8.599558442598998e-06,
      "loss": 1.5852,
      "step": 1490
    },
    {
      "epoch": 0.833275273461994,
      "grad_norm": 1.047103762626648,
      "learning_rate": 8.328837035318448e-06,
      "loss": 1.6926,
      "step": 1495
    },
    {
      "epoch": 0.8360621472862816,
      "grad_norm": 0.9703435897827148,
      "learning_rate": 8.06205772133063e-06,
      "loss": 1.5367,
      "step": 1500
    },
    {
      "epoch": 0.8388490211105692,
      "grad_norm": 1.0638253688812256,
      "learning_rate": 7.799245737966821e-06,
      "loss": 1.5853,
      "step": 1505
    },
    {
      "epoch": 0.8416358949348568,
      "grad_norm": 1.0305513143539429,
      "learning_rate": 7.540425947248697e-06,
      "loss": 1.6552,
      "step": 1510
    },
    {
      "epoch": 0.8444227687591445,
      "grad_norm": 0.8863044381141663,
      "learning_rate": 7.28562283353637e-06,
      "loss": 1.627,
      "step": 1515
    },
    {
      "epoch": 0.847209642583432,
      "grad_norm": 0.9131430387496948,
      "learning_rate": 7.034860501212243e-06,
      "loss": 1.6254,
      "step": 1520
    },
    {
      "epoch": 0.8499965164077197,
      "grad_norm": 1.0161089897155762,
      "learning_rate": 6.788162672400583e-06,
      "loss": 1.7087,
      "step": 1525
    },
    {
      "epoch": 0.8527833902320072,
      "grad_norm": 0.9403210878372192,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 1.6491,
      "step": 1530
    },
    {
      "epoch": 0.8555702640562949,
      "grad_norm": 0.9508659243583679,
      "learning_rate": 6.307053489093506e-06,
      "loss": 1.6222,
      "step": 1535
    },
    {
      "epoch": 0.8583571378805824,
      "grad_norm": 0.9709882140159607,
      "learning_rate": 6.072687647541553e-06,
      "loss": 1.6431,
      "step": 1540
    },
    {
      "epoch": 0.8611440117048701,
      "grad_norm": 0.9428046941757202,
      "learning_rate": 5.842477331083518e-06,
      "loss": 1.6559,
      "step": 1545
    },
    {
      "epoch": 0.8639308855291576,
      "grad_norm": 0.9151241779327393,
      "learning_rate": 5.616444317622388e-06,
      "loss": 1.5556,
      "step": 1550
    },
    {
      "epoch": 0.8667177593534453,
      "grad_norm": 0.9186306595802307,
      "learning_rate": 5.394609989888161e-06,
      "loss": 1.6637,
      "step": 1555
    },
    {
      "epoch": 0.8695046331777329,
      "grad_norm": 1.1897034645080566,
      "learning_rate": 5.176995333415019e-06,
      "loss": 1.6557,
      "step": 1560
    },
    {
      "epoch": 0.8722915070020205,
      "grad_norm": 1.0049179792404175,
      "learning_rate": 4.963620934556168e-06,
      "loss": 1.6551,
      "step": 1565
    },
    {
      "epoch": 0.8750783808263081,
      "grad_norm": 0.9597494602203369,
      "learning_rate": 4.754506978536227e-06,
      "loss": 1.6064,
      "step": 1570
    },
    {
      "epoch": 0.8778652546505957,
      "grad_norm": 0.8873302936553955,
      "learning_rate": 4.549673247541875e-06,
      "loss": 1.6245,
      "step": 1575
    },
    {
      "epoch": 0.8806521284748833,
      "grad_norm": 0.8784957528114319,
      "learning_rate": 4.3491391188503264e-06,
      "loss": 1.6031,
      "step": 1580
    },
    {
      "epoch": 0.8834390022991709,
      "grad_norm": 1.0219107866287231,
      "learning_rate": 4.152923562996297e-06,
      "loss": 1.5441,
      "step": 1585
    },
    {
      "epoch": 0.8862258761234585,
      "grad_norm": 0.9388402700424194,
      "learning_rate": 3.961045141977376e-06,
      "loss": 1.6366,
      "step": 1590
    },
    {
      "epoch": 0.8890127499477462,
      "grad_norm": 0.9844707250595093,
      "learning_rate": 3.773522007498065e-06,
      "loss": 1.6457,
      "step": 1595
    },
    {
      "epoch": 0.8917996237720337,
      "grad_norm": 0.941916286945343,
      "learning_rate": 3.590371899252659e-06,
      "loss": 1.6043,
      "step": 1600
    },
    {
      "epoch": 0.8945864975963214,
      "grad_norm": 0.9494262337684631,
      "learning_rate": 3.4116121432469615e-06,
      "loss": 1.588,
      "step": 1605
    },
    {
      "epoch": 0.8973733714206089,
      "grad_norm": 0.9501243233680725,
      "learning_rate": 3.237259650159402e-06,
      "loss": 1.6057,
      "step": 1610
    },
    {
      "epoch": 0.9001602452448966,
      "grad_norm": 1.0099127292633057,
      "learning_rate": 3.0673309137411564e-06,
      "loss": 1.6564,
      "step": 1615
    },
    {
      "epoch": 0.9029471190691841,
      "grad_norm": 1.011672854423523,
      "learning_rate": 2.9018420092558786e-06,
      "loss": 1.588,
      "step": 1620
    },
    {
      "epoch": 0.9057339928934718,
      "grad_norm": 0.9527240991592407,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 1.6918,
      "step": 1625
    },
    {
      "epoch": 0.9085208667177593,
      "grad_norm": 0.9475979804992676,
      "learning_rate": 2.584245895616788e-06,
      "loss": 1.6399,
      "step": 1630
    },
    {
      "epoch": 0.911307740542047,
      "grad_norm": 1.0325227975845337,
      "learning_rate": 2.4321687310650487e-06,
      "loss": 1.677,
      "step": 1635
    },
    {
      "epoch": 0.9140946143663345,
      "grad_norm": 0.9847766757011414,
      "learning_rate": 2.2845914848082127e-06,
      "loss": 1.6693,
      "step": 1640
    },
    {
      "epoch": 0.9168814881906222,
      "grad_norm": 1.0612833499908447,
      "learning_rate": 2.1415281176583203e-06,
      "loss": 1.6191,
      "step": 1645
    },
    {
      "epoch": 0.9196683620149098,
      "grad_norm": 1.0091543197631836,
      "learning_rate": 2.0029921634142632e-06,
      "loss": 1.7255,
      "step": 1650
    },
    {
      "epoch": 0.9224552358391974,
      "grad_norm": 1.0188473463058472,
      "learning_rate": 1.8689967275815679e-06,
      "loss": 1.6559,
      "step": 1655
    },
    {
      "epoch": 0.925242109663485,
      "grad_norm": 0.9920323491096497,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 1.6751,
      "step": 1660
    },
    {
      "epoch": 0.9280289834877726,
      "grad_norm": 0.9998114705085754,
      "learning_rate": 1.614677684307264e-06,
      "loss": 1.6378,
      "step": 1665
    },
    {
      "epoch": 0.9308158573120602,
      "grad_norm": 0.9813428521156311,
      "learning_rate": 1.494378135454938e-06,
      "loss": 1.633,
      "step": 1670
    },
    {
      "epoch": 0.9336027311363478,
      "grad_norm": 0.9274668097496033,
      "learning_rate": 1.3786672199165962e-06,
      "loss": 1.6576,
      "step": 1675
    },
    {
      "epoch": 0.9363896049606354,
      "grad_norm": 0.9314688444137573,
      "learning_rate": 1.2675558839483848e-06,
      "loss": 1.5988,
      "step": 1680
    },
    {
      "epoch": 0.939176478784923,
      "grad_norm": 0.9858189225196838,
      "learning_rate": 1.1610546386860988e-06,
      "loss": 1.6311,
      "step": 1685
    },
    {
      "epoch": 0.9419633526092106,
      "grad_norm": 1.005400538444519,
      "learning_rate": 1.0591735591507946e-06,
      "loss": 1.6264,
      "step": 1690
    },
    {
      "epoch": 0.9447502264334983,
      "grad_norm": 1.0077372789382935,
      "learning_rate": 9.619222832957243e-07,
      "loss": 1.6273,
      "step": 1695
    },
    {
      "epoch": 0.9475371002577858,
      "grad_norm": 0.9848856925964355,
      "learning_rate": 8.693100110945484e-07,
      "loss": 1.6123,
      "step": 1700
    },
    {
      "epoch": 0.9503239740820735,
      "grad_norm": 0.9205525517463684,
      "learning_rate": 7.813455036710715e-07,
      "loss": 1.6065,
      "step": 1705
    },
    {
      "epoch": 0.953110847906361,
      "grad_norm": 0.9617009162902832,
      "learning_rate": 6.980370824703763e-07,
      "loss": 1.67,
      "step": 1710
    },
    {
      "epoch": 0.9558977217306487,
      "grad_norm": 1.0937501192092896,
      "learning_rate": 6.193926284716711e-07,
      "loss": 1.5918,
      "step": 1715
    },
    {
      "epoch": 0.9586845955549362,
      "grad_norm": 0.9189473986625671,
      "learning_rate": 5.454195814427021e-07,
      "loss": 1.6965,
      "step": 1720
    },
    {
      "epoch": 0.9614714693792239,
      "grad_norm": 0.9764816761016846,
      "learning_rate": 4.76124939235989e-07,
      "loss": 1.5873,
      "step": 1725
    },
    {
      "epoch": 0.9642583432035114,
      "grad_norm": 0.9174162149429321,
      "learning_rate": 4.1151525712680996e-07,
      "loss": 1.5511,
      "step": 1730
    },
    {
      "epoch": 0.9670452170277991,
      "grad_norm": 1.0921601057052612,
      "learning_rate": 3.515966471930643e-07,
      "loss": 1.7006,
      "step": 1735
    },
    {
      "epoch": 0.9698320908520867,
      "grad_norm": 0.9906566143035889,
      "learning_rate": 2.963747777370907e-07,
      "loss": 1.6587,
      "step": 1740
    },
    {
      "epoch": 0.9726189646763743,
      "grad_norm": 1.0743408203125,
      "learning_rate": 2.458548727494292e-07,
      "loss": 1.5564,
      "step": 1745
    },
    {
      "epoch": 0.9754058385006619,
      "grad_norm": 0.9558684229850769,
      "learning_rate": 2.0004171141464467e-07,
      "loss": 1.6312,
      "step": 1750
    },
    {
      "epoch": 0.9781927123249495,
      "grad_norm": 1.00477135181427,
      "learning_rate": 1.589396276591937e-07,
      "loss": 1.613,
      "step": 1755
    },
    {
      "epoch": 0.9809795861492371,
      "grad_norm": 1.034407615661621,
      "learning_rate": 1.225525097414637e-07,
      "loss": 1.5766,
      "step": 1760
    },
    {
      "epoch": 0.9837664599735247,
      "grad_norm": 1.0314751863479614,
      "learning_rate": 9.088379988392848e-08,
      "loss": 1.6248,
      "step": 1765
    },
    {
      "epoch": 0.9865533337978123,
      "grad_norm": 0.9959791302680969,
      "learning_rate": 6.393649394749734e-08,
      "loss": 1.6185,
      "step": 1770
    },
    {
      "epoch": 0.9893402076221,
      "grad_norm": 1.0470571517944336,
      "learning_rate": 4.171314114815306e-08,
      "loss": 1.6926,
      "step": 1775
    },
    {
      "epoch": 0.9921270814463875,
      "grad_norm": 1.0142252445220947,
      "learning_rate": 2.4215843815733607e-08,
      "loss": 1.5745,
      "step": 1780
    },
    {
      "epoch": 0.9949139552706752,
      "grad_norm": 0.8373370170593262,
      "learning_rate": 1.1446257195119048e-08,
      "loss": 1.6419,
      "step": 1785
    },
    {
      "epoch": 0.9977008290949627,
      "grad_norm": 1.0657517910003662,
      "learning_rate": 3.4055892895901167e-09,
      "loss": 1.6516,
      "step": 1790
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.7746706008911133,
      "learning_rate": 9.460074656963969e-11,
      "loss": 1.7167,
      "step": 1795
    },
    {
      "epoch": 1.0,
      "step": 1795,
      "total_flos": 2.029161804649267e+16,
      "train_loss": 1.6974632034726793,
      "train_runtime": 3260.2981,
      "train_samples_per_second": 4.402,
      "train_steps_per_second": 0.551
    }
  ],
  "logging_steps": 5,
  "max_steps": 1795,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.029161804649267e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}