{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 2146,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.004659832246039142, |
|
"grad_norm": 14.767192461253016, |
|
"learning_rate": 4.651162790697675e-07, |
|
"loss": 0.8454, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009319664492078284, |
|
"grad_norm": 10.691157784855342, |
|
"learning_rate": 9.30232558139535e-07, |
|
"loss": 0.8295, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013979496738117428, |
|
"grad_norm": 4.753711347195471, |
|
"learning_rate": 1.3953488372093025e-06, |
|
"loss": 0.789, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01863932898415657, |
|
"grad_norm": 4.574744947370259, |
|
"learning_rate": 1.86046511627907e-06, |
|
"loss": 0.7631, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.023299161230195712, |
|
"grad_norm": 2.938678417992091, |
|
"learning_rate": 2.3255813953488376e-06, |
|
"loss": 0.6551, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.027958993476234855, |
|
"grad_norm": 0.6490882548563339, |
|
"learning_rate": 2.790697674418605e-06, |
|
"loss": 0.5661, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.032618825722273995, |
|
"grad_norm": 0.6582256367580213, |
|
"learning_rate": 3.2558139534883724e-06, |
|
"loss": 0.5106, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03727865796831314, |
|
"grad_norm": 0.5913603077065485, |
|
"learning_rate": 3.72093023255814e-06, |
|
"loss": 0.4935, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04193849021435228, |
|
"grad_norm": 0.3873817321001381, |
|
"learning_rate": 4.186046511627907e-06, |
|
"loss": 0.4726, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.046598322460391424, |
|
"grad_norm": 0.28334846470708785, |
|
"learning_rate": 4.651162790697675e-06, |
|
"loss": 0.4644, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05125815470643057, |
|
"grad_norm": 0.35730577672386, |
|
"learning_rate": 5.116279069767442e-06, |
|
"loss": 0.4477, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05591798695246971, |
|
"grad_norm": 0.22651422858035802, |
|
"learning_rate": 5.58139534883721e-06, |
|
"loss": 0.432, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06057781919850885, |
|
"grad_norm": 0.20330529851492612, |
|
"learning_rate": 6.046511627906977e-06, |
|
"loss": 0.4219, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06523765144454799, |
|
"grad_norm": 0.21798246079136097, |
|
"learning_rate": 6.511627906976745e-06, |
|
"loss": 0.4257, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06989748369058714, |
|
"grad_norm": 0.19806053096479337, |
|
"learning_rate": 6.976744186046513e-06, |
|
"loss": 0.4136, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07455731593662628, |
|
"grad_norm": 0.21322493838033024, |
|
"learning_rate": 7.44186046511628e-06, |
|
"loss": 0.4081, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07921714818266543, |
|
"grad_norm": 0.1988425295831395, |
|
"learning_rate": 7.906976744186048e-06, |
|
"loss": 0.4059, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08387698042870456, |
|
"grad_norm": 0.17732217907185543, |
|
"learning_rate": 8.372093023255815e-06, |
|
"loss": 0.4139, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08853681267474371, |
|
"grad_norm": 0.18197721641419914, |
|
"learning_rate": 8.837209302325582e-06, |
|
"loss": 0.3999, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09319664492078285, |
|
"grad_norm": 0.18805703800459986, |
|
"learning_rate": 9.30232558139535e-06, |
|
"loss": 0.3929, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.097856477166822, |
|
"grad_norm": 0.17447861554875335, |
|
"learning_rate": 9.767441860465117e-06, |
|
"loss": 0.3899, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10251630941286113, |
|
"grad_norm": 0.17823985753713623, |
|
"learning_rate": 1.0232558139534884e-05, |
|
"loss": 0.3917, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.10717614165890028, |
|
"grad_norm": 0.1843804114434655, |
|
"learning_rate": 1.0697674418604651e-05, |
|
"loss": 0.3908, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11183597390493942, |
|
"grad_norm": 0.1813727816186044, |
|
"learning_rate": 1.116279069767442e-05, |
|
"loss": 0.3847, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11649580615097857, |
|
"grad_norm": 0.18362844363862849, |
|
"learning_rate": 1.1627906976744187e-05, |
|
"loss": 0.3798, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1211556383970177, |
|
"grad_norm": 0.1976501054723186, |
|
"learning_rate": 1.2093023255813954e-05, |
|
"loss": 0.3869, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.12581547064305684, |
|
"grad_norm": 0.18549227048975853, |
|
"learning_rate": 1.2558139534883723e-05, |
|
"loss": 0.3837, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.13047530288909598, |
|
"grad_norm": 0.20270496221730072, |
|
"learning_rate": 1.302325581395349e-05, |
|
"loss": 0.3763, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13513513513513514, |
|
"grad_norm": 0.20776510982034277, |
|
"learning_rate": 1.3488372093023257e-05, |
|
"loss": 0.3964, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13979496738117428, |
|
"grad_norm": 0.2239648466821673, |
|
"learning_rate": 1.3953488372093025e-05, |
|
"loss": 0.3779, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14445479962721341, |
|
"grad_norm": 0.2159680681931303, |
|
"learning_rate": 1.441860465116279e-05, |
|
"loss": 0.3895, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14911463187325255, |
|
"grad_norm": 0.2230512293592664, |
|
"learning_rate": 1.488372093023256e-05, |
|
"loss": 0.3748, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.15377446411929171, |
|
"grad_norm": 0.19857008911014462, |
|
"learning_rate": 1.5348837209302328e-05, |
|
"loss": 0.379, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.15843429636533085, |
|
"grad_norm": 0.1876370992998556, |
|
"learning_rate": 1.5813953488372095e-05, |
|
"loss": 0.3599, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.16309412861137, |
|
"grad_norm": 0.19594094926576977, |
|
"learning_rate": 1.6279069767441862e-05, |
|
"loss": 0.3755, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16775396085740912, |
|
"grad_norm": 0.19588958100246756, |
|
"learning_rate": 1.674418604651163e-05, |
|
"loss": 0.3714, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1724137931034483, |
|
"grad_norm": 0.200501028483886, |
|
"learning_rate": 1.7209302325581396e-05, |
|
"loss": 0.3714, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.17707362534948742, |
|
"grad_norm": 0.21928747309088611, |
|
"learning_rate": 1.7674418604651163e-05, |
|
"loss": 0.3787, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.18173345759552656, |
|
"grad_norm": 0.24229931775136032, |
|
"learning_rate": 1.813953488372093e-05, |
|
"loss": 0.3725, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1863932898415657, |
|
"grad_norm": 0.23749429090981541, |
|
"learning_rate": 1.86046511627907e-05, |
|
"loss": 0.375, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.19105312208760486, |
|
"grad_norm": 0.20750507399981963, |
|
"learning_rate": 1.9069767441860468e-05, |
|
"loss": 0.3682, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.195712954333644, |
|
"grad_norm": 0.247680918714745, |
|
"learning_rate": 1.9534883720930235e-05, |
|
"loss": 0.3766, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.20037278657968313, |
|
"grad_norm": 0.20635992002934314, |
|
"learning_rate": 2e-05, |
|
"loss": 0.3656, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.20503261882572227, |
|
"grad_norm": 0.21391797130718435, |
|
"learning_rate": 1.9948213360952874e-05, |
|
"loss": 0.371, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2096924510717614, |
|
"grad_norm": 0.21826086561242994, |
|
"learning_rate": 1.989642672190575e-05, |
|
"loss": 0.3575, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.21435228331780057, |
|
"grad_norm": 0.2058918265850917, |
|
"learning_rate": 1.9844640082858625e-05, |
|
"loss": 0.3678, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2190121155638397, |
|
"grad_norm": 0.21256383645525231, |
|
"learning_rate": 1.9792853443811498e-05, |
|
"loss": 0.3652, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.22367194780987884, |
|
"grad_norm": 0.2091365846820082, |
|
"learning_rate": 1.9741066804764374e-05, |
|
"loss": 0.3664, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.22833178005591798, |
|
"grad_norm": 0.2379635390767414, |
|
"learning_rate": 1.9689280165717246e-05, |
|
"loss": 0.369, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.23299161230195714, |
|
"grad_norm": 0.21145510152697863, |
|
"learning_rate": 1.963749352667012e-05, |
|
"loss": 0.3639, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.23765144454799628, |
|
"grad_norm": 0.23770392971510992, |
|
"learning_rate": 1.9585706887622994e-05, |
|
"loss": 0.3679, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2423112767940354, |
|
"grad_norm": 0.2432075399312, |
|
"learning_rate": 1.953392024857587e-05, |
|
"loss": 0.3692, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.24697110904007455, |
|
"grad_norm": 0.2625016034254708, |
|
"learning_rate": 1.9482133609528746e-05, |
|
"loss": 0.3672, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2516309412861137, |
|
"grad_norm": 0.2145060352354748, |
|
"learning_rate": 1.9430346970481618e-05, |
|
"loss": 0.3641, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.25629077353215285, |
|
"grad_norm": 0.2473831855865487, |
|
"learning_rate": 1.937856033143449e-05, |
|
"loss": 0.3664, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.26095060577819196, |
|
"grad_norm": 0.22052729908882035, |
|
"learning_rate": 1.9326773692387366e-05, |
|
"loss": 0.3623, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2656104380242311, |
|
"grad_norm": 0.23934016164519892, |
|
"learning_rate": 1.927498705334024e-05, |
|
"loss": 0.3655, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2702702702702703, |
|
"grad_norm": 0.23940932697090694, |
|
"learning_rate": 1.9223200414293114e-05, |
|
"loss": 0.3701, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2749301025163094, |
|
"grad_norm": 0.28885349201279675, |
|
"learning_rate": 1.917141377524599e-05, |
|
"loss": 0.3787, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.27958993476234856, |
|
"grad_norm": 0.2732454315192837, |
|
"learning_rate": 1.9119627136198862e-05, |
|
"loss": 0.3748, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2842497670083877, |
|
"grad_norm": 0.287512960553777, |
|
"learning_rate": 1.9067840497151738e-05, |
|
"loss": 0.3648, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.28890959925442683, |
|
"grad_norm": 0.25791283197780085, |
|
"learning_rate": 1.901605385810461e-05, |
|
"loss": 0.3605, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.293569431500466, |
|
"grad_norm": 0.22578196901427042, |
|
"learning_rate": 1.8964267219057483e-05, |
|
"loss": 0.3673, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2982292637465051, |
|
"grad_norm": 0.26398442111942083, |
|
"learning_rate": 1.891248058001036e-05, |
|
"loss": 0.3526, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.30288909599254427, |
|
"grad_norm": 0.2266594032196434, |
|
"learning_rate": 1.8860693940963234e-05, |
|
"loss": 0.3533, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.30754892823858343, |
|
"grad_norm": 0.2034558825684523, |
|
"learning_rate": 1.8808907301916107e-05, |
|
"loss": 0.3625, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.31220876048462254, |
|
"grad_norm": 0.21230490114636388, |
|
"learning_rate": 1.8757120662868982e-05, |
|
"loss": 0.3612, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.3168685927306617, |
|
"grad_norm": 0.21940627029273344, |
|
"learning_rate": 1.8705334023821855e-05, |
|
"loss": 0.3616, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.32152842497670087, |
|
"grad_norm": 0.24083673925107846, |
|
"learning_rate": 1.865354738477473e-05, |
|
"loss": 0.3602, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.32618825722274, |
|
"grad_norm": 0.2522525259887139, |
|
"learning_rate": 1.8601760745727603e-05, |
|
"loss": 0.3723, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.33084808946877914, |
|
"grad_norm": 0.2573006621319903, |
|
"learning_rate": 1.854997410668048e-05, |
|
"loss": 0.3655, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.33550792171481825, |
|
"grad_norm": 0.20987075999325422, |
|
"learning_rate": 1.8498187467633354e-05, |
|
"loss": 0.3583, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3401677539608574, |
|
"grad_norm": 0.23358976426813405, |
|
"learning_rate": 1.8446400828586227e-05, |
|
"loss": 0.3566, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3448275862068966, |
|
"grad_norm": 0.2399874778334468, |
|
"learning_rate": 1.83946141895391e-05, |
|
"loss": 0.3657, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3494874184529357, |
|
"grad_norm": 0.2320299659051788, |
|
"learning_rate": 1.8342827550491975e-05, |
|
"loss": 0.3572, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.35414725069897485, |
|
"grad_norm": 0.2141358345167075, |
|
"learning_rate": 1.8291040911444847e-05, |
|
"loss": 0.3685, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.35880708294501396, |
|
"grad_norm": 0.2394313640207702, |
|
"learning_rate": 1.8239254272397723e-05, |
|
"loss": 0.3667, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3634669151910531, |
|
"grad_norm": 0.2535784041705651, |
|
"learning_rate": 1.81874676333506e-05, |
|
"loss": 0.3595, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.3681267474370923, |
|
"grad_norm": 0.267215419394161, |
|
"learning_rate": 1.813568099430347e-05, |
|
"loss": 0.3627, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3727865796831314, |
|
"grad_norm": 0.22563546555156724, |
|
"learning_rate": 1.8083894355256347e-05, |
|
"loss": 0.3514, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37744641192917056, |
|
"grad_norm": 0.26175999603352973, |
|
"learning_rate": 1.803210771620922e-05, |
|
"loss": 0.3713, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3821062441752097, |
|
"grad_norm": 0.2557512981709262, |
|
"learning_rate": 1.798032107716209e-05, |
|
"loss": 0.3641, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.38676607642124883, |
|
"grad_norm": 0.2618244822212724, |
|
"learning_rate": 1.7928534438114967e-05, |
|
"loss": 0.3546, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.391425908667288, |
|
"grad_norm": 0.24363536974797073, |
|
"learning_rate": 1.7876747799067843e-05, |
|
"loss": 0.3685, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3960857409133271, |
|
"grad_norm": 0.23592834779032065, |
|
"learning_rate": 1.7824961160020715e-05, |
|
"loss": 0.37, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.40074557315936626, |
|
"grad_norm": 0.22757209094122816, |
|
"learning_rate": 1.777317452097359e-05, |
|
"loss": 0.3618, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.40540540540540543, |
|
"grad_norm": 0.2247986656928316, |
|
"learning_rate": 1.7721387881926463e-05, |
|
"loss": 0.3703, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.41006523765144454, |
|
"grad_norm": 0.22949520459408446, |
|
"learning_rate": 1.766960124287934e-05, |
|
"loss": 0.3622, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.4147250698974837, |
|
"grad_norm": 0.2429836044789581, |
|
"learning_rate": 1.761781460383221e-05, |
|
"loss": 0.3597, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4193849021435228, |
|
"grad_norm": 0.2220744501031802, |
|
"learning_rate": 1.7566027964785087e-05, |
|
"loss": 0.3517, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.424044734389562, |
|
"grad_norm": 0.24960146421273202, |
|
"learning_rate": 1.7514241325737963e-05, |
|
"loss": 0.3594, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.42870456663560114, |
|
"grad_norm": 0.23282637573412696, |
|
"learning_rate": 1.7462454686690835e-05, |
|
"loss": 0.3572, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.43336439888164024, |
|
"grad_norm": 0.2583784107904954, |
|
"learning_rate": 1.7410668047643708e-05, |
|
"loss": 0.3536, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.4380242311276794, |
|
"grad_norm": 0.24668787220594343, |
|
"learning_rate": 1.7358881408596583e-05, |
|
"loss": 0.3651, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4426840633737186, |
|
"grad_norm": 0.23401991763098598, |
|
"learning_rate": 1.7307094769549456e-05, |
|
"loss": 0.3556, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4473438956197577, |
|
"grad_norm": 0.2568440187662543, |
|
"learning_rate": 1.725530813050233e-05, |
|
"loss": 0.3702, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.45200372786579684, |
|
"grad_norm": 0.24082990346217126, |
|
"learning_rate": 1.7203521491455207e-05, |
|
"loss": 0.3612, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.45666356011183595, |
|
"grad_norm": 0.23359205783172027, |
|
"learning_rate": 1.715173485240808e-05, |
|
"loss": 0.3557, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.4613233923578751, |
|
"grad_norm": 0.2759487778473591, |
|
"learning_rate": 1.7099948213360955e-05, |
|
"loss": 0.3592, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4659832246039143, |
|
"grad_norm": 0.24487068910640383, |
|
"learning_rate": 1.7048161574313828e-05, |
|
"loss": 0.3676, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4706430568499534, |
|
"grad_norm": 0.23375869795982646, |
|
"learning_rate": 1.6996374935266703e-05, |
|
"loss": 0.3563, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.47530288909599255, |
|
"grad_norm": 0.23138038119764745, |
|
"learning_rate": 1.6944588296219576e-05, |
|
"loss": 0.3593, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.47996272134203166, |
|
"grad_norm": 0.2317039737462331, |
|
"learning_rate": 1.689280165717245e-05, |
|
"loss": 0.3585, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.4846225535880708, |
|
"grad_norm": 0.24700148883977308, |
|
"learning_rate": 1.6841015018125324e-05, |
|
"loss": 0.3631, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.48928238583411, |
|
"grad_norm": 0.24885633026286735, |
|
"learning_rate": 1.67892283790782e-05, |
|
"loss": 0.3629, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4939422180801491, |
|
"grad_norm": 0.24542063920879958, |
|
"learning_rate": 1.6737441740031072e-05, |
|
"loss": 0.3579, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.49860205032618826, |
|
"grad_norm": 0.23117849321213937, |
|
"learning_rate": 1.6685655100983948e-05, |
|
"loss": 0.362, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5032618825722274, |
|
"grad_norm": 0.22596894744470808, |
|
"learning_rate": 1.6633868461936823e-05, |
|
"loss": 0.3492, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5079217148182665, |
|
"grad_norm": 0.25472474761261105, |
|
"learning_rate": 1.6582081822889696e-05, |
|
"loss": 0.3561, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5125815470643057, |
|
"grad_norm": 0.24056074693134755, |
|
"learning_rate": 1.653029518384257e-05, |
|
"loss": 0.3604, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5172413793103449, |
|
"grad_norm": 0.24945961942773268, |
|
"learning_rate": 1.6478508544795444e-05, |
|
"loss": 0.3569, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5219012115563839, |
|
"grad_norm": 0.23366003314070544, |
|
"learning_rate": 1.6426721905748316e-05, |
|
"loss": 0.3708, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5265610438024231, |
|
"grad_norm": 0.20558481716658702, |
|
"learning_rate": 1.6374935266701192e-05, |
|
"loss": 0.3478, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5312208760484622, |
|
"grad_norm": 0.2439084307260332, |
|
"learning_rate": 1.6323148627654068e-05, |
|
"loss": 0.3514, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5358807082945014, |
|
"grad_norm": 0.24465930880345313, |
|
"learning_rate": 1.6271361988606944e-05, |
|
"loss": 0.3554, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5405405405405406, |
|
"grad_norm": 0.2612036108887042, |
|
"learning_rate": 1.6219575349559816e-05, |
|
"loss": 0.3553, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5452003727865797, |
|
"grad_norm": 0.22670651805729272, |
|
"learning_rate": 1.6167788710512688e-05, |
|
"loss": 0.3575, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5498602050326188, |
|
"grad_norm": 0.20453871875485685, |
|
"learning_rate": 1.6116002071465564e-05, |
|
"loss": 0.3507, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.554520037278658, |
|
"grad_norm": 0.23906772066616808, |
|
"learning_rate": 1.6064215432418436e-05, |
|
"loss": 0.3545, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5591798695246971, |
|
"grad_norm": 0.2251010058012801, |
|
"learning_rate": 1.6012428793371312e-05, |
|
"loss": 0.3548, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5638397017707363, |
|
"grad_norm": 0.2042911139628111, |
|
"learning_rate": 1.5960642154324188e-05, |
|
"loss": 0.3545, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5684995340167754, |
|
"grad_norm": 0.3084593401305696, |
|
"learning_rate": 1.590885551527706e-05, |
|
"loss": 0.3634, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5731593662628145, |
|
"grad_norm": 0.24668947749199188, |
|
"learning_rate": 1.5857068876229933e-05, |
|
"loss": 0.3534, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5778191985088537, |
|
"grad_norm": 0.23815820060623935, |
|
"learning_rate": 1.5805282237182808e-05, |
|
"loss": 0.3473, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5824790307548928, |
|
"grad_norm": 0.2581921333300729, |
|
"learning_rate": 1.575349559813568e-05, |
|
"loss": 0.365, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.587138863000932, |
|
"grad_norm": 0.22365105248877565, |
|
"learning_rate": 1.5701708959088556e-05, |
|
"loss": 0.3605, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5917986952469712, |
|
"grad_norm": 0.2497896525527162, |
|
"learning_rate": 1.5649922320041432e-05, |
|
"loss": 0.3621, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5964585274930102, |
|
"grad_norm": 0.2076670286228493, |
|
"learning_rate": 1.5598135680994304e-05, |
|
"loss": 0.3589, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.6011183597390494, |
|
"grad_norm": 0.21352599605542535, |
|
"learning_rate": 1.554634904194718e-05, |
|
"loss": 0.3571, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6057781919850885, |
|
"grad_norm": 0.23960611526431302, |
|
"learning_rate": 1.5494562402900053e-05, |
|
"loss": 0.3613, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6104380242311277, |
|
"grad_norm": 0.232800207968001, |
|
"learning_rate": 1.5442775763852925e-05, |
|
"loss": 0.3627, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.6150978564771669, |
|
"grad_norm": 0.2258791731716894, |
|
"learning_rate": 1.53909891248058e-05, |
|
"loss": 0.3459, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6197576887232059, |
|
"grad_norm": 0.2658353866044793, |
|
"learning_rate": 1.5339202485758676e-05, |
|
"loss": 0.3483, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.6244175209692451, |
|
"grad_norm": 0.23305204875594826, |
|
"learning_rate": 1.5287415846711552e-05, |
|
"loss": 0.3479, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.6290773532152842, |
|
"grad_norm": 0.2384841430708915, |
|
"learning_rate": 1.5235629207664425e-05, |
|
"loss": 0.3518, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.6337371854613234, |
|
"grad_norm": 0.2316602358596732, |
|
"learning_rate": 1.5183842568617297e-05, |
|
"loss": 0.3576, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6383970177073626, |
|
"grad_norm": 0.26925303516021515, |
|
"learning_rate": 1.5132055929570173e-05, |
|
"loss": 0.3626, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6430568499534017, |
|
"grad_norm": 0.273521638688401, |
|
"learning_rate": 1.5080269290523047e-05, |
|
"loss": 0.3512, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6477166821994408, |
|
"grad_norm": 0.2060802961004555, |
|
"learning_rate": 1.5028482651475919e-05, |
|
"loss": 0.3572, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.65237651444548, |
|
"grad_norm": 0.2288622125880128, |
|
"learning_rate": 1.4976696012428795e-05, |
|
"loss": 0.3634, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6570363466915191, |
|
"grad_norm": 0.20752277736678076, |
|
"learning_rate": 1.4924909373381669e-05, |
|
"loss": 0.3533, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6616961789375583, |
|
"grad_norm": 0.2501395274655127, |
|
"learning_rate": 1.4873122734334541e-05, |
|
"loss": 0.3546, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6663560111835974, |
|
"grad_norm": 0.22880446120118658, |
|
"learning_rate": 1.4821336095287417e-05, |
|
"loss": 0.3472, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6710158434296365, |
|
"grad_norm": 0.2594277925537233, |
|
"learning_rate": 1.4769549456240291e-05, |
|
"loss": 0.3534, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6756756756756757, |
|
"grad_norm": 0.27706152102194753, |
|
"learning_rate": 1.4717762817193167e-05, |
|
"loss": 0.3564, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6803355079217148, |
|
"grad_norm": 0.23322150825276064, |
|
"learning_rate": 1.4665976178146039e-05, |
|
"loss": 0.352, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.684995340167754, |
|
"grad_norm": 0.2301436336123601, |
|
"learning_rate": 1.4614189539098913e-05, |
|
"loss": 0.3535, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6896551724137931, |
|
"grad_norm": 0.21931396895361877, |
|
"learning_rate": 1.4562402900051789e-05, |
|
"loss": 0.3486, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6943150046598322, |
|
"grad_norm": 0.24163388236718913, |
|
"learning_rate": 1.4510616261004661e-05, |
|
"loss": 0.3477, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6989748369058714, |
|
"grad_norm": 0.20365681061091856, |
|
"learning_rate": 1.4458829621957535e-05, |
|
"loss": 0.3488, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.7036346691519105, |
|
"grad_norm": 0.2337817758782465, |
|
"learning_rate": 1.4407042982910411e-05, |
|
"loss": 0.3527, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.7082945013979497, |
|
"grad_norm": 0.2405868217410285, |
|
"learning_rate": 1.4355256343863283e-05, |
|
"loss": 0.3542, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.7129543336439889, |
|
"grad_norm": 0.22716685518132887, |
|
"learning_rate": 1.430346970481616e-05, |
|
"loss": 0.3433, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.7176141658900279, |
|
"grad_norm": 0.2296954748413165, |
|
"learning_rate": 1.4251683065769033e-05, |
|
"loss": 0.351, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7222739981360671, |
|
"grad_norm": 0.2527911740889576, |
|
"learning_rate": 1.4199896426721906e-05, |
|
"loss": 0.3599, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7269338303821062, |
|
"grad_norm": 0.237870162257353, |
|
"learning_rate": 1.4148109787674781e-05, |
|
"loss": 0.3467, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7315936626281454, |
|
"grad_norm": 0.23987588369726734, |
|
"learning_rate": 1.4096323148627655e-05, |
|
"loss": 0.3517, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7362534948741846, |
|
"grad_norm": 0.27125345543342416, |
|
"learning_rate": 1.404453650958053e-05, |
|
"loss": 0.3565, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7409133271202236, |
|
"grad_norm": 0.22038142310417874, |
|
"learning_rate": 1.3992749870533403e-05, |
|
"loss": 0.3532, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7455731593662628, |
|
"grad_norm": 0.21707991862427248, |
|
"learning_rate": 1.3940963231486278e-05, |
|
"loss": 0.3516, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.750232991612302, |
|
"grad_norm": 0.22006541459822285, |
|
"learning_rate": 1.3889176592439152e-05, |
|
"loss": 0.3514, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7548928238583411, |
|
"grad_norm": 0.2532121724442404, |
|
"learning_rate": 1.3837389953392026e-05, |
|
"loss": 0.3636, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.7595526561043803, |
|
"grad_norm": 0.23171770126485144, |
|
"learning_rate": 1.37856033143449e-05, |
|
"loss": 0.3475, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7642124883504194, |
|
"grad_norm": 0.27888374187325565, |
|
"learning_rate": 1.3733816675297775e-05, |
|
"loss": 0.3577, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7688723205964585, |
|
"grad_norm": 0.2880404787911838, |
|
"learning_rate": 1.3682030036250648e-05, |
|
"loss": 0.3499, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7735321528424977, |
|
"grad_norm": 0.24672798067284002, |
|
"learning_rate": 1.3630243397203522e-05, |
|
"loss": 0.3474, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7781919850885368, |
|
"grad_norm": 0.22126054050968327, |
|
"learning_rate": 1.3578456758156398e-05, |
|
"loss": 0.3499, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.782851817334576, |
|
"grad_norm": 0.25020155421404316, |
|
"learning_rate": 1.3526670119109272e-05, |
|
"loss": 0.3563, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7875116495806151, |
|
"grad_norm": 0.2327271527985915, |
|
"learning_rate": 1.3474883480062144e-05, |
|
"loss": 0.3639, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7921714818266542, |
|
"grad_norm": 0.2518085892560592, |
|
"learning_rate": 1.342309684101502e-05, |
|
"loss": 0.3506, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7968313140726934, |
|
"grad_norm": 0.24951300339031132, |
|
"learning_rate": 1.3371310201967894e-05, |
|
"loss": 0.3563, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.8014911463187325, |
|
"grad_norm": 0.19924194513914378, |
|
"learning_rate": 1.3319523562920766e-05, |
|
"loss": 0.3629, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.8061509785647717, |
|
"grad_norm": 0.25003552030143006, |
|
"learning_rate": 1.3267736923873642e-05, |
|
"loss": 0.348, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.8108108108108109, |
|
"grad_norm": 0.24307899627814475, |
|
"learning_rate": 1.3215950284826516e-05, |
|
"loss": 0.3571, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.8154706430568499, |
|
"grad_norm": 0.21078480288845156, |
|
"learning_rate": 1.3164163645779392e-05, |
|
"loss": 0.3545, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.8201304753028891, |
|
"grad_norm": 0.23496877363911495, |
|
"learning_rate": 1.3112377006732264e-05, |
|
"loss": 0.35, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.8247903075489282, |
|
"grad_norm": 0.23754438146142418, |
|
"learning_rate": 1.3060590367685138e-05, |
|
"loss": 0.3572, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8294501397949674, |
|
"grad_norm": 0.26052536196367965, |
|
"learning_rate": 1.3008803728638014e-05, |
|
"loss": 0.3492, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.8341099720410066, |
|
"grad_norm": 0.20695762420053648, |
|
"learning_rate": 1.2957017089590886e-05, |
|
"loss": 0.3662, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8387698042870456, |
|
"grad_norm": 0.2144227087561431, |
|
"learning_rate": 1.290523045054376e-05, |
|
"loss": 0.3515, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8434296365330848, |
|
"grad_norm": 0.19660702935819654, |
|
"learning_rate": 1.2853443811496636e-05, |
|
"loss": 0.3505, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.848089468779124, |
|
"grad_norm": 0.24655418170934199, |
|
"learning_rate": 1.2801657172449508e-05, |
|
"loss": 0.3549, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8527493010251631, |
|
"grad_norm": 0.24959870558662023, |
|
"learning_rate": 1.2749870533402384e-05, |
|
"loss": 0.3542, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8574091332712023, |
|
"grad_norm": 0.24939202340018593, |
|
"learning_rate": 1.2698083894355258e-05, |
|
"loss": 0.3529, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8620689655172413, |
|
"grad_norm": 0.2516391276986144, |
|
"learning_rate": 1.264629725530813e-05, |
|
"loss": 0.3446, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8667287977632805, |
|
"grad_norm": 0.21057329816992565, |
|
"learning_rate": 1.2594510616261006e-05, |
|
"loss": 0.3436, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8713886300093197, |
|
"grad_norm": 0.252888553135217, |
|
"learning_rate": 1.254272397721388e-05, |
|
"loss": 0.3516, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8760484622553588, |
|
"grad_norm": 0.24242068069037045, |
|
"learning_rate": 1.2490937338166753e-05, |
|
"loss": 0.352, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.880708294501398, |
|
"grad_norm": 0.24295740023564055, |
|
"learning_rate": 1.2439150699119628e-05, |
|
"loss": 0.3506, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8853681267474371, |
|
"grad_norm": 0.22863631282733143, |
|
"learning_rate": 1.2387364060072502e-05, |
|
"loss": 0.3613, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8900279589934762, |
|
"grad_norm": 0.2191581333955033, |
|
"learning_rate": 1.2335577421025375e-05, |
|
"loss": 0.3559, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8946877912395154, |
|
"grad_norm": 0.19128532949024207, |
|
"learning_rate": 1.228379078197825e-05, |
|
"loss": 0.3573, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8993476234855545, |
|
"grad_norm": 0.2750308888146951, |
|
"learning_rate": 1.2232004142931125e-05, |
|
"loss": 0.352, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.9040074557315937, |
|
"grad_norm": 0.2379831965995701, |
|
"learning_rate": 1.2180217503884e-05, |
|
"loss": 0.3447, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.9086672879776329, |
|
"grad_norm": 0.20411721240775887, |
|
"learning_rate": 1.2128430864836873e-05, |
|
"loss": 0.3606, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.9133271202236719, |
|
"grad_norm": 0.24726535607310818, |
|
"learning_rate": 1.2076644225789747e-05, |
|
"loss": 0.3509, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.9179869524697111, |
|
"grad_norm": 0.25484526322525997, |
|
"learning_rate": 1.2024857586742623e-05, |
|
"loss": 0.3505, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.9226467847157502, |
|
"grad_norm": 0.2313975203490289, |
|
"learning_rate": 1.1973070947695495e-05, |
|
"loss": 0.3485, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9273066169617894, |
|
"grad_norm": 0.21915958189471746, |
|
"learning_rate": 1.1921284308648369e-05, |
|
"loss": 0.3493, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9319664492078286, |
|
"grad_norm": 0.21919124866455636, |
|
"learning_rate": 1.1869497669601245e-05, |
|
"loss": 0.3497, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.9366262814538676, |
|
"grad_norm": 0.2028359200370513, |
|
"learning_rate": 1.1817711030554117e-05, |
|
"loss": 0.3537, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.9412861136999068, |
|
"grad_norm": 0.21945782585098136, |
|
"learning_rate": 1.1765924391506993e-05, |
|
"loss": 0.3508, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.9459459459459459, |
|
"grad_norm": 0.2523778581926582, |
|
"learning_rate": 1.1714137752459867e-05, |
|
"loss": 0.3557, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.9506057781919851, |
|
"grad_norm": 0.1984715311531445, |
|
"learning_rate": 1.166235111341274e-05, |
|
"loss": 0.3499, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9552656104380243, |
|
"grad_norm": 0.20598435799914297, |
|
"learning_rate": 1.1610564474365615e-05, |
|
"loss": 0.3573, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.9599254426840633, |
|
"grad_norm": 0.19822137487156943, |
|
"learning_rate": 1.1558777835318489e-05, |
|
"loss": 0.3537, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.9645852749301025, |
|
"grad_norm": 0.2115159681810218, |
|
"learning_rate": 1.1506991196271361e-05, |
|
"loss": 0.3529, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.9692451071761417, |
|
"grad_norm": 0.22399685831955474, |
|
"learning_rate": 1.1455204557224237e-05, |
|
"loss": 0.3501, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.9739049394221808, |
|
"grad_norm": 0.21539710641856932, |
|
"learning_rate": 1.1403417918177111e-05, |
|
"loss": 0.3438, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.97856477166822, |
|
"grad_norm": 0.2525250092102095, |
|
"learning_rate": 1.1351631279129983e-05, |
|
"loss": 0.3669, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.983224603914259, |
|
"grad_norm": 0.22688066232449872, |
|
"learning_rate": 1.129984464008286e-05, |
|
"loss": 0.3474, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9878844361602982, |
|
"grad_norm": 0.22444671482154194, |
|
"learning_rate": 1.1248058001035733e-05, |
|
"loss": 0.3492, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9925442684063374, |
|
"grad_norm": 0.2494658035397173, |
|
"learning_rate": 1.1196271361988609e-05, |
|
"loss": 0.3521, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9972041006523765, |
|
"grad_norm": 0.22375408565725688, |
|
"learning_rate": 1.1144484722941481e-05, |
|
"loss": 0.3504, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.0018639328984156, |
|
"grad_norm": 0.23743123795247612, |
|
"learning_rate": 1.1092698083894355e-05, |
|
"loss": 0.3269, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.0065237651444547, |
|
"grad_norm": 0.23313660102890352, |
|
"learning_rate": 1.1040911444847231e-05, |
|
"loss": 0.3147, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.011183597390494, |
|
"grad_norm": 0.2222831311931088, |
|
"learning_rate": 1.0989124805800104e-05, |
|
"loss": 0.3195, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.015843429636533, |
|
"grad_norm": 0.22590081130460746, |
|
"learning_rate": 1.0937338166752978e-05, |
|
"loss": 0.3263, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.0205032618825722, |
|
"grad_norm": 0.23070778966886485, |
|
"learning_rate": 1.0885551527705853e-05, |
|
"loss": 0.3143, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.0251630941286114, |
|
"grad_norm": 0.2037603982755685, |
|
"learning_rate": 1.0833764888658726e-05, |
|
"loss": 0.3053, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.0298229263746506, |
|
"grad_norm": 0.24097367185952903, |
|
"learning_rate": 1.0781978249611601e-05, |
|
"loss": 0.313, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.0344827586206897, |
|
"grad_norm": 0.20433112740141812, |
|
"learning_rate": 1.0730191610564476e-05, |
|
"loss": 0.3114, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.0391425908667289, |
|
"grad_norm": 0.21622277499075848, |
|
"learning_rate": 1.067840497151735e-05, |
|
"loss": 0.3145, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.0438024231127678, |
|
"grad_norm": 0.23195997985450958, |
|
"learning_rate": 1.0626618332470224e-05, |
|
"loss": 0.3101, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.048462255358807, |
|
"grad_norm": 0.19687727843381017, |
|
"learning_rate": 1.0574831693423098e-05, |
|
"loss": 0.3144, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.0531220876048462, |
|
"grad_norm": 0.19398268395045887, |
|
"learning_rate": 1.0523045054375972e-05, |
|
"loss": 0.3143, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.0577819198508853, |
|
"grad_norm": 0.21618958840342323, |
|
"learning_rate": 1.0471258415328846e-05, |
|
"loss": 0.3244, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.0624417520969245, |
|
"grad_norm": 0.21854767532013517, |
|
"learning_rate": 1.041947177628172e-05, |
|
"loss": 0.3196, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.0671015843429636, |
|
"grad_norm": 0.21752939753907818, |
|
"learning_rate": 1.0367685137234594e-05, |
|
"loss": 0.3128, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.0717614165890028, |
|
"grad_norm": 0.2024866876281859, |
|
"learning_rate": 1.031589849818747e-05, |
|
"loss": 0.3066, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.076421248835042, |
|
"grad_norm": 0.23303951985635973, |
|
"learning_rate": 1.0264111859140342e-05, |
|
"loss": 0.3126, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.0810810810810811, |
|
"grad_norm": 0.18836388691330083, |
|
"learning_rate": 1.0212325220093218e-05, |
|
"loss": 0.312, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.0857409133271203, |
|
"grad_norm": 0.2569075921016222, |
|
"learning_rate": 1.0160538581046092e-05, |
|
"loss": 0.3164, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.0904007455731595, |
|
"grad_norm": 0.20845453486936005, |
|
"learning_rate": 1.0108751941998964e-05, |
|
"loss": 0.3225, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.0950605778191984, |
|
"grad_norm": 0.19885909880508598, |
|
"learning_rate": 1.005696530295184e-05, |
|
"loss": 0.3125, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.0997204100652376, |
|
"grad_norm": 0.2669275870745111, |
|
"learning_rate": 1.0005178663904714e-05, |
|
"loss": 0.3167, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.1043802423112767, |
|
"grad_norm": 0.2232374749321823, |
|
"learning_rate": 9.953392024857588e-06, |
|
"loss": 0.3129, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.109040074557316, |
|
"grad_norm": 0.19527295898259664, |
|
"learning_rate": 9.901605385810462e-06, |
|
"loss": 0.317, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.113699906803355, |
|
"grad_norm": 0.22072636669010462, |
|
"learning_rate": 9.849818746763336e-06, |
|
"loss": 0.3228, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.1183597390493942, |
|
"grad_norm": 0.1949253986918304, |
|
"learning_rate": 9.79803210771621e-06, |
|
"loss": 0.3162, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.1230195712954334, |
|
"grad_norm": 0.2168403291673588, |
|
"learning_rate": 9.746245468669084e-06, |
|
"loss": 0.3164, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.1276794035414726, |
|
"grad_norm": 0.20528695943678604, |
|
"learning_rate": 9.694458829621958e-06, |
|
"loss": 0.3158, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.1323392357875117, |
|
"grad_norm": 0.20323454642905758, |
|
"learning_rate": 9.642672190574832e-06, |
|
"loss": 0.3166, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.1369990680335509, |
|
"grad_norm": 0.21352753091729954, |
|
"learning_rate": 9.590885551527706e-06, |
|
"loss": 0.3166, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.14165890027959, |
|
"grad_norm": 0.2270069255362695, |
|
"learning_rate": 9.53909891248058e-06, |
|
"loss": 0.3152, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.146318732525629, |
|
"grad_norm": 0.1933121328903308, |
|
"learning_rate": 9.487312273433454e-06, |
|
"loss": 0.317, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.1509785647716682, |
|
"grad_norm": 0.20309447114485268, |
|
"learning_rate": 9.435525634386328e-06, |
|
"loss": 0.3193, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.1556383970177073, |
|
"grad_norm": 0.21202399279436465, |
|
"learning_rate": 9.383738995339203e-06, |
|
"loss": 0.3131, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.1602982292637465, |
|
"grad_norm": 0.20262516559107904, |
|
"learning_rate": 9.331952356292078e-06, |
|
"loss": 0.3205, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.1649580615097856, |
|
"grad_norm": 0.1988930651334672, |
|
"learning_rate": 9.28016571724495e-06, |
|
"loss": 0.3145, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.1696178937558248, |
|
"grad_norm": 0.1976164571878271, |
|
"learning_rate": 9.228379078197825e-06, |
|
"loss": 0.2993, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.174277726001864, |
|
"grad_norm": 0.20859209970330467, |
|
"learning_rate": 9.1765924391507e-06, |
|
"loss": 0.3165, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.1789375582479031, |
|
"grad_norm": 0.21856326174939153, |
|
"learning_rate": 9.124805800103574e-06, |
|
"loss": 0.3112, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.1835973904939423, |
|
"grad_norm": 0.2139917663911859, |
|
"learning_rate": 9.073019161056449e-06, |
|
"loss": 0.3222, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.1882572227399812, |
|
"grad_norm": 0.20444481893780622, |
|
"learning_rate": 9.021232522009323e-06, |
|
"loss": 0.3156, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.1929170549860204, |
|
"grad_norm": 0.21896418458298256, |
|
"learning_rate": 8.969445882962197e-06, |
|
"loss": 0.3192, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.1975768872320596, |
|
"grad_norm": 0.20828809222865366, |
|
"learning_rate": 8.91765924391507e-06, |
|
"loss": 0.3225, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.2022367194780987, |
|
"grad_norm": 0.19945387268340256, |
|
"learning_rate": 8.865872604867945e-06, |
|
"loss": 0.3113, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.206896551724138, |
|
"grad_norm": 0.19091098433364817, |
|
"learning_rate": 8.814085965820819e-06, |
|
"loss": 0.3143, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.211556383970177, |
|
"grad_norm": 0.2444756340798008, |
|
"learning_rate": 8.762299326773693e-06, |
|
"loss": 0.3224, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.2162162162162162, |
|
"grad_norm": 0.21335472533568636, |
|
"learning_rate": 8.710512687726569e-06, |
|
"loss": 0.3197, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.2208760484622554, |
|
"grad_norm": 0.2011901037726579, |
|
"learning_rate": 8.658726048679441e-06, |
|
"loss": 0.3099, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.2255358807082946, |
|
"grad_norm": 0.19939445022217508, |
|
"learning_rate": 8.606939409632315e-06, |
|
"loss": 0.3271, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.2301957129543337, |
|
"grad_norm": 0.20616420365814805, |
|
"learning_rate": 8.55515277058519e-06, |
|
"loss": 0.3162, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.2348555452003729, |
|
"grad_norm": 0.19727962222530154, |
|
"learning_rate": 8.503366131538063e-06, |
|
"loss": 0.3216, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.2395153774464118, |
|
"grad_norm": 0.19280265908630698, |
|
"learning_rate": 8.451579492490937e-06, |
|
"loss": 0.3148, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.244175209692451, |
|
"grad_norm": 0.20437690863854796, |
|
"learning_rate": 8.399792853443813e-06, |
|
"loss": 0.3137, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.2488350419384902, |
|
"grad_norm": 0.21131034344431252, |
|
"learning_rate": 8.348006214396687e-06, |
|
"loss": 0.3187, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.2534948741845293, |
|
"grad_norm": 0.19470646825002752, |
|
"learning_rate": 8.29621957534956e-06, |
|
"loss": 0.3064, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.2581547064305685, |
|
"grad_norm": 0.2093800867383876, |
|
"learning_rate": 8.244432936302435e-06, |
|
"loss": 0.3139, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.2628145386766076, |
|
"grad_norm": 0.19557894552720384, |
|
"learning_rate": 8.192646297255309e-06, |
|
"loss": 0.3258, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.2674743709226468, |
|
"grad_norm": 0.22037610848319983, |
|
"learning_rate": 8.140859658208183e-06, |
|
"loss": 0.3167, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.272134203168686, |
|
"grad_norm": 0.19614411951293328, |
|
"learning_rate": 8.089073019161057e-06, |
|
"loss": 0.315, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.2767940354147251, |
|
"grad_norm": 0.2184836880400932, |
|
"learning_rate": 8.037286380113931e-06, |
|
"loss": 0.3156, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.281453867660764, |
|
"grad_norm": 0.21389038369245703, |
|
"learning_rate": 7.985499741066805e-06, |
|
"loss": 0.308, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.2861136999068035, |
|
"grad_norm": 0.20539585029211851, |
|
"learning_rate": 7.93371310201968e-06, |
|
"loss": 0.3192, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.2907735321528424, |
|
"grad_norm": 0.19934518228998827, |
|
"learning_rate": 7.881926462972553e-06, |
|
"loss": 0.3215, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.2954333643988816, |
|
"grad_norm": 0.20741086597019026, |
|
"learning_rate": 7.830139823925427e-06, |
|
"loss": 0.314, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.3000931966449207, |
|
"grad_norm": 0.22065169448140684, |
|
"learning_rate": 7.778353184878302e-06, |
|
"loss": 0.3199, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.30475302889096, |
|
"grad_norm": 0.19477745041465316, |
|
"learning_rate": 7.726566545831177e-06, |
|
"loss": 0.3105, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.309412861136999, |
|
"grad_norm": 0.19600332577375437, |
|
"learning_rate": 7.67477990678405e-06, |
|
"loss": 0.3201, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.3140726933830382, |
|
"grad_norm": 0.20148776251738024, |
|
"learning_rate": 7.6229932677369245e-06, |
|
"loss": 0.314, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.3187325256290774, |
|
"grad_norm": 0.2064755512233211, |
|
"learning_rate": 7.571206628689799e-06, |
|
"loss": 0.3209, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.3233923578751166, |
|
"grad_norm": 0.2031518775941825, |
|
"learning_rate": 7.519419989642673e-06, |
|
"loss": 0.3049, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.3280521901211557, |
|
"grad_norm": 0.19891962178529504, |
|
"learning_rate": 7.467633350595547e-06, |
|
"loss": 0.316, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.3327120223671947, |
|
"grad_norm": 0.20917352538781994, |
|
"learning_rate": 7.415846711548422e-06, |
|
"loss": 0.3197, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.337371854613234, |
|
"grad_norm": 0.18876283067477193, |
|
"learning_rate": 7.364060072501296e-06, |
|
"loss": 0.3231, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.342031686859273, |
|
"grad_norm": 0.19676783428533887, |
|
"learning_rate": 7.312273433454169e-06, |
|
"loss": 0.3177, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.3466915191053122, |
|
"grad_norm": 0.18572514729909312, |
|
"learning_rate": 7.260486794407044e-06, |
|
"loss": 0.3211, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.3513513513513513, |
|
"grad_norm": 0.19146579294733382, |
|
"learning_rate": 7.208700155359918e-06, |
|
"loss": 0.3118, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.3560111835973905, |
|
"grad_norm": 0.18852660444469807, |
|
"learning_rate": 7.156913516312793e-06, |
|
"loss": 0.3162, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.3606710158434296, |
|
"grad_norm": 0.19324215253779436, |
|
"learning_rate": 7.105126877265666e-06, |
|
"loss": 0.3233, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.3653308480894688, |
|
"grad_norm": 0.21358938430110477, |
|
"learning_rate": 7.05334023821854e-06, |
|
"loss": 0.3187, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.369990680335508, |
|
"grad_norm": 0.19555774419476726, |
|
"learning_rate": 7.001553599171415e-06, |
|
"loss": 0.3148, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.3746505125815471, |
|
"grad_norm": 0.17959759954123644, |
|
"learning_rate": 6.949766960124289e-06, |
|
"loss": 0.3139, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.3793103448275863, |
|
"grad_norm": 0.18964982625602786, |
|
"learning_rate": 6.897980321077162e-06, |
|
"loss": 0.302, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.3839701770736252, |
|
"grad_norm": 0.18930534523885367, |
|
"learning_rate": 6.846193682030037e-06, |
|
"loss": 0.3149, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.3886300093196646, |
|
"grad_norm": 0.19639297256742036, |
|
"learning_rate": 6.794407042982911e-06, |
|
"loss": 0.3236, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.3932898415657036, |
|
"grad_norm": 0.21467988374746413, |
|
"learning_rate": 6.742620403935786e-06, |
|
"loss": 0.3284, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.3979496738117427, |
|
"grad_norm": 0.22648269852701486, |
|
"learning_rate": 6.690833764888659e-06, |
|
"loss": 0.3248, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.402609506057782, |
|
"grad_norm": 0.2122629193310997, |
|
"learning_rate": 6.639047125841533e-06, |
|
"loss": 0.3181, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.407269338303821, |
|
"grad_norm": 0.21439399946575624, |
|
"learning_rate": 6.587260486794408e-06, |
|
"loss": 0.3111, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.4119291705498602, |
|
"grad_norm": 0.21805459861989804, |
|
"learning_rate": 6.535473847747281e-06, |
|
"loss": 0.3194, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.4165890027958994, |
|
"grad_norm": 0.18437558883318525, |
|
"learning_rate": 6.483687208700155e-06, |
|
"loss": 0.3186, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.4212488350419386, |
|
"grad_norm": 0.19383647738248216, |
|
"learning_rate": 6.43190056965303e-06, |
|
"loss": 0.316, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.4259086672879777, |
|
"grad_norm": 0.19238369432331215, |
|
"learning_rate": 6.380113930605904e-06, |
|
"loss": 0.3119, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.4305684995340169, |
|
"grad_norm": 0.20161511281163794, |
|
"learning_rate": 6.3283272915587775e-06, |
|
"loss": 0.3173, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.4352283317800558, |
|
"grad_norm": 0.18321974524601511, |
|
"learning_rate": 6.276540652511652e-06, |
|
"loss": 0.3138, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.439888164026095, |
|
"grad_norm": 0.21285415491504464, |
|
"learning_rate": 6.2247540134645265e-06, |
|
"loss": 0.3152, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.4445479962721341, |
|
"grad_norm": 0.19238925372724244, |
|
"learning_rate": 6.172967374417401e-06, |
|
"loss": 0.3148, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.4492078285181733, |
|
"grad_norm": 0.21136751157123412, |
|
"learning_rate": 6.1211807353702746e-06, |
|
"loss": 0.3111, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.4538676607642125, |
|
"grad_norm": 0.1979462412561175, |
|
"learning_rate": 6.069394096323149e-06, |
|
"loss": 0.3127, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.4585274930102516, |
|
"grad_norm": 0.18992603381080705, |
|
"learning_rate": 6.0176074572760235e-06, |
|
"loss": 0.3154, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.4631873252562908, |
|
"grad_norm": 0.18356342906034864, |
|
"learning_rate": 5.9658208182288976e-06, |
|
"loss": 0.3169, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.46784715750233, |
|
"grad_norm": 0.18698833501026985, |
|
"learning_rate": 5.914034179181772e-06, |
|
"loss": 0.3192, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.4725069897483691, |
|
"grad_norm": 0.18659658551949185, |
|
"learning_rate": 5.862247540134646e-06, |
|
"loss": 0.3202, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.477166821994408, |
|
"grad_norm": 0.20607394729321243, |
|
"learning_rate": 5.81046090108752e-06, |
|
"loss": 0.3179, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.4818266542404475, |
|
"grad_norm": 0.18871598623396943, |
|
"learning_rate": 5.758674262040395e-06, |
|
"loss": 0.32, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.4864864864864864, |
|
"grad_norm": 0.19724974014550137, |
|
"learning_rate": 5.706887622993268e-06, |
|
"loss": 0.3143, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.4911463187325256, |
|
"grad_norm": 0.19442309714878647, |
|
"learning_rate": 5.655100983946143e-06, |
|
"loss": 0.3155, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.4958061509785647, |
|
"grad_norm": 0.2290940185613949, |
|
"learning_rate": 5.603314344899017e-06, |
|
"loss": 0.3241, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.500465983224604, |
|
"grad_norm": 0.20552300203050755, |
|
"learning_rate": 5.55152770585189e-06, |
|
"loss": 0.3197, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.505125815470643, |
|
"grad_norm": 0.2138342700269858, |
|
"learning_rate": 5.499741066804765e-06, |
|
"loss": 0.3149, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.5097856477166822, |
|
"grad_norm": 0.1773024842853527, |
|
"learning_rate": 5.447954427757639e-06, |
|
"loss": 0.3143, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.5144454799627214, |
|
"grad_norm": 0.1782828676659382, |
|
"learning_rate": 5.396167788710514e-06, |
|
"loss": 0.3085, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.5191053122087603, |
|
"grad_norm": 0.20151066828643008, |
|
"learning_rate": 5.344381149663387e-06, |
|
"loss": 0.3092, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.5237651444547997, |
|
"grad_norm": 0.1936112629982006, |
|
"learning_rate": 5.292594510616261e-06, |
|
"loss": 0.3092, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.5284249767008387, |
|
"grad_norm": 0.20031650907945875, |
|
"learning_rate": 5.240807871569136e-06, |
|
"loss": 0.3079, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.533084808946878, |
|
"grad_norm": 0.19466786072761738, |
|
"learning_rate": 5.18902123252201e-06, |
|
"loss": 0.325, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.537744641192917, |
|
"grad_norm": 0.2014645297149083, |
|
"learning_rate": 5.137234593474883e-06, |
|
"loss": 0.3193, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.5424044734389561, |
|
"grad_norm": 0.18800764754924237, |
|
"learning_rate": 5.085447954427758e-06, |
|
"loss": 0.3223, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.5470643056849953, |
|
"grad_norm": 0.22145725970374222, |
|
"learning_rate": 5.033661315380632e-06, |
|
"loss": 0.3226, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.5517241379310345, |
|
"grad_norm": 0.2264737586413433, |
|
"learning_rate": 4.981874676333506e-06, |
|
"loss": 0.3098, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.5563839701770736, |
|
"grad_norm": 0.19843349190924084, |
|
"learning_rate": 4.930088037286381e-06, |
|
"loss": 0.3173, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.5610438024231128, |
|
"grad_norm": 0.21010850815608753, |
|
"learning_rate": 4.878301398239254e-06, |
|
"loss": 0.3242, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.565703634669152, |
|
"grad_norm": 0.2064935277967238, |
|
"learning_rate": 4.826514759192129e-06, |
|
"loss": 0.3184, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.570363466915191, |
|
"grad_norm": 0.19388277530401346, |
|
"learning_rate": 4.774728120145003e-06, |
|
"loss": 0.3096, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.5750232991612303, |
|
"grad_norm": 0.2009122264808897, |
|
"learning_rate": 4.7229414810978765e-06, |
|
"loss": 0.3233, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.5796831314072692, |
|
"grad_norm": 0.1847210364496899, |
|
"learning_rate": 4.671154842050751e-06, |
|
"loss": 0.3229, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.5843429636533086, |
|
"grad_norm": 0.19999440492528722, |
|
"learning_rate": 4.6193682030036255e-06, |
|
"loss": 0.3126, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.5890027958993476, |
|
"grad_norm": 0.1967714404409244, |
|
"learning_rate": 4.5675815639564995e-06, |
|
"loss": 0.3161, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.5936626281453867, |
|
"grad_norm": 0.19056228994472138, |
|
"learning_rate": 4.5157949249093736e-06, |
|
"loss": 0.3179, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.598322460391426, |
|
"grad_norm": 0.19780527100077158, |
|
"learning_rate": 4.464008285862248e-06, |
|
"loss": 0.3107, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.602982292637465, |
|
"grad_norm": 0.20087968051198601, |
|
"learning_rate": 4.412221646815122e-06, |
|
"loss": 0.3194, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.6076421248835042, |
|
"grad_norm": 0.18138579556889722, |
|
"learning_rate": 4.3604350077679966e-06, |
|
"loss": 0.3167, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.6123019571295434, |
|
"grad_norm": 0.19116121249902454, |
|
"learning_rate": 4.308648368720871e-06, |
|
"loss": 0.3119, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.6169617893755825, |
|
"grad_norm": 0.20253537385722165, |
|
"learning_rate": 4.256861729673745e-06, |
|
"loss": 0.3186, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.6216216216216215, |
|
"grad_norm": 0.19508207479386244, |
|
"learning_rate": 4.205075090626619e-06, |
|
"loss": 0.3175, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.6262814538676609, |
|
"grad_norm": 0.18521359809563662, |
|
"learning_rate": 4.153288451579493e-06, |
|
"loss": 0.3168, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.6309412861136998, |
|
"grad_norm": 0.21769599978818913, |
|
"learning_rate": 4.101501812532367e-06, |
|
"loss": 0.3192, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.6356011183597392, |
|
"grad_norm": 0.1951979997638157, |
|
"learning_rate": 4.049715173485242e-06, |
|
"loss": 0.3151, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.6402609506057781, |
|
"grad_norm": 0.20115642532305716, |
|
"learning_rate": 3.997928534438115e-06, |
|
"loss": 0.3221, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.6449207828518173, |
|
"grad_norm": 0.18954273661107576, |
|
"learning_rate": 3.94614189539099e-06, |
|
"loss": 0.319, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.6495806150978565, |
|
"grad_norm": 0.1898263373664628, |
|
"learning_rate": 3.894355256343864e-06, |
|
"loss": 0.3126, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.6542404473438956, |
|
"grad_norm": 0.18770630530008564, |
|
"learning_rate": 3.842568617296738e-06, |
|
"loss": 0.3077, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.6589002795899348, |
|
"grad_norm": 0.20920463370343462, |
|
"learning_rate": 3.790781978249612e-06, |
|
"loss": 0.319, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.6635601118359737, |
|
"grad_norm": 0.2035848113749002, |
|
"learning_rate": 3.7389953392024856e-06, |
|
"loss": 0.3179, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.6682199440820131, |
|
"grad_norm": 0.1838396556826729, |
|
"learning_rate": 3.68720870015536e-06, |
|
"loss": 0.316, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.672879776328052, |
|
"grad_norm": 0.21088430204050254, |
|
"learning_rate": 3.635422061108234e-06, |
|
"loss": 0.3086, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.6775396085740915, |
|
"grad_norm": 0.185482590257671, |
|
"learning_rate": 3.5836354220611086e-06, |
|
"loss": 0.3063, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.6821994408201304, |
|
"grad_norm": 0.19312227185115607, |
|
"learning_rate": 3.5318487830139827e-06, |
|
"loss": 0.3083, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.6868592730661698, |
|
"grad_norm": 0.1960317573244206, |
|
"learning_rate": 3.480062143966857e-06, |
|
"loss": 0.3122, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.6915191053122087, |
|
"grad_norm": 0.19789243390438088, |
|
"learning_rate": 3.4282755049197308e-06, |
|
"loss": 0.3087, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.696178937558248, |
|
"grad_norm": 0.18112744143370826, |
|
"learning_rate": 3.3764888658726052e-06, |
|
"loss": 0.3106, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.700838769804287, |
|
"grad_norm": 0.21548231654348435, |
|
"learning_rate": 3.3247022268254793e-06, |
|
"loss": 0.3217, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.7054986020503262, |
|
"grad_norm": 0.1814574471570304, |
|
"learning_rate": 3.2729155877783538e-06, |
|
"loss": 0.3193, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.7101584342963654, |
|
"grad_norm": 0.22078602305323497, |
|
"learning_rate": 3.2211289487312274e-06, |
|
"loss": 0.3206, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.7148182665424043, |
|
"grad_norm": 0.18658908941872768, |
|
"learning_rate": 3.169342309684102e-06, |
|
"loss": 0.3141, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.7194780987884437, |
|
"grad_norm": 0.1782787618724745, |
|
"learning_rate": 3.117555670636976e-06, |
|
"loss": 0.3149, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7241379310344827, |
|
"grad_norm": 0.17932135518844652, |
|
"learning_rate": 3.0657690315898504e-06, |
|
"loss": 0.3017, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.728797763280522, |
|
"grad_norm": 0.19474093174359786, |
|
"learning_rate": 3.013982392542724e-06, |
|
"loss": 0.3101, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.733457595526561, |
|
"grad_norm": 0.19278349416418183, |
|
"learning_rate": 2.9621957534955985e-06, |
|
"loss": 0.311, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.7381174277726001, |
|
"grad_norm": 0.18373595675432944, |
|
"learning_rate": 2.9104091144484725e-06, |
|
"loss": 0.314, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.7427772600186393, |
|
"grad_norm": 0.1832866163272324, |
|
"learning_rate": 2.858622475401347e-06, |
|
"loss": 0.3099, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.7474370922646785, |
|
"grad_norm": 0.1700878976485856, |
|
"learning_rate": 2.8068358363542206e-06, |
|
"loss": 0.3177, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.7520969245107176, |
|
"grad_norm": 0.19630790934750134, |
|
"learning_rate": 2.7550491973070947e-06, |
|
"loss": 0.3147, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7567567567567568, |
|
"grad_norm": 0.1935390479382422, |
|
"learning_rate": 2.703262558259969e-06, |
|
"loss": 0.3191, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.761416589002796, |
|
"grad_norm": 0.18024055440625963, |
|
"learning_rate": 2.6514759192128432e-06, |
|
"loss": 0.3125, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.766076421248835, |
|
"grad_norm": 0.1851607633561796, |
|
"learning_rate": 2.5996892801657177e-06, |
|
"loss": 0.3135, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.7707362534948743, |
|
"grad_norm": 0.18693937618463963, |
|
"learning_rate": 2.5479026411185913e-06, |
|
"loss": 0.3079, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.7753960857409132, |
|
"grad_norm": 0.18306356346123398, |
|
"learning_rate": 2.496116002071466e-06, |
|
"loss": 0.3087, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7800559179869526, |
|
"grad_norm": 0.1750201835993522, |
|
"learning_rate": 2.44432936302434e-06, |
|
"loss": 0.316, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.7847157502329916, |
|
"grad_norm": 0.18396826364198557, |
|
"learning_rate": 2.3925427239772143e-06, |
|
"loss": 0.303, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7893755824790307, |
|
"grad_norm": 0.18494502790887468, |
|
"learning_rate": 2.340756084930088e-06, |
|
"loss": 0.3195, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.7940354147250699, |
|
"grad_norm": 0.17779334157355717, |
|
"learning_rate": 2.2889694458829624e-06, |
|
"loss": 0.3119, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.798695246971109, |
|
"grad_norm": 0.18374129599917147, |
|
"learning_rate": 2.2371828068358365e-06, |
|
"loss": 0.3099, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8033550792171482, |
|
"grad_norm": 0.18305924460244258, |
|
"learning_rate": 2.1853961677887105e-06, |
|
"loss": 0.3156, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.8080149114631874, |
|
"grad_norm": 0.17348252725460908, |
|
"learning_rate": 2.1336095287415846e-06, |
|
"loss": 0.3077, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8126747437092265, |
|
"grad_norm": 0.18002736644951658, |
|
"learning_rate": 2.081822889694459e-06, |
|
"loss": 0.3217, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.8173345759552655, |
|
"grad_norm": 0.19097984942265778, |
|
"learning_rate": 2.030036250647333e-06, |
|
"loss": 0.3057, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8219944082013049, |
|
"grad_norm": 0.19397089209826424, |
|
"learning_rate": 1.978249611600207e-06, |
|
"loss": 0.312, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8266542404473438, |
|
"grad_norm": 0.1737258474947117, |
|
"learning_rate": 1.9264629725530816e-06, |
|
"loss": 0.3144, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.8313140726933832, |
|
"grad_norm": 0.17866826131161348, |
|
"learning_rate": 1.8746763335059557e-06, |
|
"loss": 0.3158, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.8359739049394221, |
|
"grad_norm": 0.1847963686612644, |
|
"learning_rate": 1.8228896944588297e-06, |
|
"loss": 0.3135, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.8406337371854613, |
|
"grad_norm": 0.17225040204469855, |
|
"learning_rate": 1.771103055411704e-06, |
|
"loss": 0.3201, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.8452935694315005, |
|
"grad_norm": 0.1784754273129112, |
|
"learning_rate": 1.719316416364578e-06, |
|
"loss": 0.3116, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.8499534016775396, |
|
"grad_norm": 0.1873529018355137, |
|
"learning_rate": 1.6675297773174523e-06, |
|
"loss": 0.3068, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.8546132339235788, |
|
"grad_norm": 0.18994212675755204, |
|
"learning_rate": 1.6157431382703264e-06, |
|
"loss": 0.3281, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.8592730661696177, |
|
"grad_norm": 0.17280992253062147, |
|
"learning_rate": 1.5639564992232006e-06, |
|
"loss": 0.3137, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8639328984156571, |
|
"grad_norm": 0.18082977178380857, |
|
"learning_rate": 1.512169860176075e-06, |
|
"loss": 0.311, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.868592730661696, |
|
"grad_norm": 0.1865537678103653, |
|
"learning_rate": 1.460383221128949e-06, |
|
"loss": 0.3136, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.8732525629077355, |
|
"grad_norm": 0.18177081525856895, |
|
"learning_rate": 1.4085965820818228e-06, |
|
"loss": 0.3179, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.8779123951537744, |
|
"grad_norm": 0.17820198629127978, |
|
"learning_rate": 1.356809943034697e-06, |
|
"loss": 0.308, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8825722273998136, |
|
"grad_norm": 0.185863894794329, |
|
"learning_rate": 1.305023303987571e-06, |
|
"loss": 0.3065, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8872320596458527, |
|
"grad_norm": 0.18770653417781014, |
|
"learning_rate": 1.2532366649404454e-06, |
|
"loss": 0.3101, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8918918918918919, |
|
"grad_norm": 0.17409257528881936, |
|
"learning_rate": 1.2014500258933196e-06, |
|
"loss": 0.3121, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.896551724137931, |
|
"grad_norm": 0.17868880226457717, |
|
"learning_rate": 1.1496633868461937e-06, |
|
"loss": 0.3127, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.9012115563839702, |
|
"grad_norm": 0.18133974763519492, |
|
"learning_rate": 1.097876747799068e-06, |
|
"loss": 0.3136, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.9058713886300094, |
|
"grad_norm": 0.18239370217368586, |
|
"learning_rate": 1.046090108751942e-06, |
|
"loss": 0.3241, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9105312208760483, |
|
"grad_norm": 0.17690424687928452, |
|
"learning_rate": 9.943034697048163e-07, |
|
"loss": 0.3186, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9151910531220877, |
|
"grad_norm": 0.17365540631574325, |
|
"learning_rate": 9.425168306576904e-07, |
|
"loss": 0.3092, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9198508853681266, |
|
"grad_norm": 0.1760791742900446, |
|
"learning_rate": 8.907301916105646e-07, |
|
"loss": 0.3182, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.924510717614166, |
|
"grad_norm": 0.19407129934862996, |
|
"learning_rate": 8.389435525634387e-07, |
|
"loss": 0.313, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.929170549860205, |
|
"grad_norm": 0.17237038499017496, |
|
"learning_rate": 7.871569135163129e-07, |
|
"loss": 0.316, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9338303821062441, |
|
"grad_norm": 0.18568643784585992, |
|
"learning_rate": 7.35370274469187e-07, |
|
"loss": 0.3232, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.9384902143522833, |
|
"grad_norm": 0.18031325235805645, |
|
"learning_rate": 6.835836354220611e-07, |
|
"loss": 0.311, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.9431500465983225, |
|
"grad_norm": 0.19850008316008122, |
|
"learning_rate": 6.317969963749353e-07, |
|
"loss": 0.313, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.9478098788443616, |
|
"grad_norm": 0.18169347646543055, |
|
"learning_rate": 5.800103573278094e-07, |
|
"loss": 0.3148, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.9524697110904008, |
|
"grad_norm": 0.18682984344018164, |
|
"learning_rate": 5.282237182806836e-07, |
|
"loss": 0.3016, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.95712954333644, |
|
"grad_norm": 0.19368082644307608, |
|
"learning_rate": 4.764370792335578e-07, |
|
"loss": 0.3175, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.961789375582479, |
|
"grad_norm": 0.17122020769064838, |
|
"learning_rate": 4.2465044018643194e-07, |
|
"loss": 0.3152, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.9664492078285183, |
|
"grad_norm": 0.17219123727385735, |
|
"learning_rate": 3.728638011393061e-07, |
|
"loss": 0.318, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9711090400745572, |
|
"grad_norm": 0.16986195254826772, |
|
"learning_rate": 3.210771620921802e-07, |
|
"loss": 0.3211, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9757688723205966, |
|
"grad_norm": 0.1676620379896325, |
|
"learning_rate": 2.692905230450544e-07, |
|
"loss": 0.3147, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.9804287045666356, |
|
"grad_norm": 0.1784700476744974, |
|
"learning_rate": 2.1750388399792857e-07, |
|
"loss": 0.3102, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.9850885368126747, |
|
"grad_norm": 0.17753084232759492, |
|
"learning_rate": 1.657172449508027e-07, |
|
"loss": 0.3154, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9897483690587139, |
|
"grad_norm": 0.16810901109199483, |
|
"learning_rate": 1.1393060590367686e-07, |
|
"loss": 0.3064, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.994408201304753, |
|
"grad_norm": 0.16989060528581737, |
|
"learning_rate": 6.214396685655102e-08, |
|
"loss": 0.3126, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9990680335507922, |
|
"grad_norm": 0.1916011537219613, |
|
"learning_rate": 1.0357327809425169e-08, |
|
"loss": 0.317, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 2146, |
|
"total_flos": 1.8367600516462019e+18, |
|
"train_loss": 0.34532702185789893, |
|
"train_runtime": 73160.0405, |
|
"train_samples_per_second": 0.469, |
|
"train_steps_per_second": 0.029 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2146, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.8367600516462019e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |