{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 2146,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.004659832246039142, "grad_norm": 2.9358379585521024, "learning_rate": 4.651162790697675e-07, "loss": 0.4889, "step": 5},
    {"epoch": 0.009319664492078284, "grad_norm": 3.0095855100891242, "learning_rate": 9.30232558139535e-07, "loss": 0.4777, "step": 10},
    {"epoch": 0.013979496738117428, "grad_norm": 2.3588796393229203, "learning_rate": 1.3953488372093025e-06, "loss": 0.4643, "step": 15},
    {"epoch": 0.01863932898415657, "grad_norm": 1.3763840278489534, "learning_rate": 1.86046511627907e-06, "loss": 0.4556, "step": 20},
    {"epoch": 0.023299161230195712, "grad_norm": 0.6192077367736554, "learning_rate": 2.3255813953488376e-06, "loss": 0.3961, "step": 25},
    {"epoch": 0.027958993476234855, "grad_norm": 0.617990002942175, "learning_rate": 2.790697674418605e-06, "loss": 0.3762, "step": 30},
    {"epoch": 0.032618825722273995, "grad_norm": 0.5925450142970052, "learning_rate": 3.2558139534883724e-06, "loss": 0.374, "step": 35},
    {"epoch": 0.03727865796831314, "grad_norm": 0.3561624832181387, "learning_rate": 3.72093023255814e-06, "loss": 0.3725, "step": 40},
    {"epoch": 0.04193849021435228, "grad_norm": 0.25081650759652796, "learning_rate": 4.186046511627907e-06, "loss": 0.3645, "step": 45},
    {"epoch": 0.046598322460391424, "grad_norm": 0.20558496554294722, "learning_rate": 4.651162790697675e-06, "loss": 0.3667, "step": 50},
    {"epoch": 0.05125815470643057, "grad_norm": 0.24486074932144072, "learning_rate": 5.116279069767442e-06, "loss": 0.3571, "step": 55},
    {"epoch": 0.05591798695246971, "grad_norm": 0.19060108583147642, "learning_rate": 5.58139534883721e-06, "loss": 0.3505, "step": 60},
    {"epoch": 0.06057781919850885, "grad_norm": 0.18154982231055428, "learning_rate": 6.046511627906977e-06, "loss": 0.3434, "step": 65},
    {"epoch": 0.06523765144454799, "grad_norm": 0.1779776294534931, "learning_rate": 6.511627906976745e-06, "loss": 0.3492, "step": 70},
    {"epoch": 0.06989748369058714, "grad_norm": 0.17606219612014878, "learning_rate": 6.976744186046513e-06, "loss": 0.3416, "step": 75},
    {"epoch": 0.07455731593662628, "grad_norm": 0.1814825848846307, "learning_rate": 7.44186046511628e-06, "loss": 0.3377, "step": 80},
    {"epoch": 0.07921714818266543, "grad_norm": 0.17880816384452355, "learning_rate": 7.906976744186048e-06, "loss": 0.3393, "step": 85},
    {"epoch": 0.08387698042870456, "grad_norm": 0.1568045157823065, "learning_rate": 8.372093023255815e-06, "loss": 0.3461, "step": 90},
    {"epoch": 0.08853681267474371, "grad_norm": 0.16443817784114392, "learning_rate": 8.837209302325582e-06, "loss": 0.336, "step": 95},
    {"epoch": 0.09319664492078285, "grad_norm": 0.16992057839542438, "learning_rate": 9.30232558139535e-06, "loss": 0.3316, "step": 100},
    {"epoch": 0.097856477166822, "grad_norm": 0.1605564738554441, "learning_rate": 9.767441860465117e-06, "loss": 0.3311, "step": 105},
    {"epoch": 0.10251630941286113, "grad_norm": 0.16236919155376553, "learning_rate": 1.0232558139534884e-05, "loss": 0.3316, "step": 110},
    {"epoch": 0.10717614165890028, "grad_norm": 0.1698286305625274, "learning_rate": 1.0697674418604651e-05, "loss": 0.3328, "step": 115},
    {"epoch": 0.11183597390493942, "grad_norm": 0.16840606723491117, "learning_rate": 1.116279069767442e-05, "loss": 0.3289, "step": 120},
    {"epoch": 0.11649580615097857, "grad_norm": 0.16484123295216122, "learning_rate": 1.1627906976744187e-05, "loss": 0.325, "step": 125},
    {"epoch": 0.1211556383970177, "grad_norm": 0.18951085550690486, "learning_rate": 1.2093023255813954e-05, "loss": 0.3324, "step": 130},
    {"epoch": 0.12581547064305684, "grad_norm": 0.16528197993781607, "learning_rate": 1.2558139534883723e-05, "loss": 0.3282, "step": 135},
    {"epoch": 0.13047530288909598, "grad_norm": 0.18137576101744035, "learning_rate": 1.302325581395349e-05, "loss": 0.3248, "step": 140},
    {"epoch": 0.13513513513513514, "grad_norm": 0.18673134625349136, "learning_rate": 1.3488372093023257e-05, "loss": 0.3413, "step": 145},
    {"epoch": 0.13979496738117428, "grad_norm": 0.19883071868742613, "learning_rate": 1.3953488372093025e-05, "loss": 0.3275, "step": 150},
    {"epoch": 0.14445479962721341, "grad_norm": 0.19063978229188172, "learning_rate": 1.441860465116279e-05, "loss": 0.3396, "step": 155},
    {"epoch": 0.14911463187325255, "grad_norm": 0.20126727074239933, "learning_rate": 1.488372093023256e-05, "loss": 0.3283, "step": 160},
    {"epoch": 0.15377446411929171, "grad_norm": 0.17583289349910128, "learning_rate": 1.5348837209302328e-05, "loss": 0.3295, "step": 165},
    {"epoch": 0.15843429636533085, "grad_norm": 0.17444177904401117, "learning_rate": 1.5813953488372095e-05, "loss": 0.3114, "step": 170},
    {"epoch": 0.16309412861137, "grad_norm": 0.18551430775271738, "learning_rate": 1.6279069767441862e-05, "loss": 0.3283, "step": 175},
    {"epoch": 0.16775396085740912, "grad_norm": 0.1858093588140544, "learning_rate": 1.674418604651163e-05, "loss": 0.3256, "step": 180},
    {"epoch": 0.1724137931034483, "grad_norm": 0.18567509691449366, "learning_rate": 1.7209302325581396e-05, "loss": 0.3237, "step": 185},
    {"epoch": 0.17707362534948742, "grad_norm": 0.20208132114348742, "learning_rate": 1.7674418604651163e-05, "loss": 0.3309, "step": 190},
    {"epoch": 0.18173345759552656, "grad_norm": 0.22338210289869026, "learning_rate": 1.813953488372093e-05, "loss": 0.3275, "step": 195},
    {"epoch": 0.1863932898415657, "grad_norm": 0.2287598111489109, "learning_rate": 1.86046511627907e-05, "loss": 0.3299, "step": 200},
    {"epoch": 0.19105312208760486, "grad_norm": 0.19421590377562648, "learning_rate": 1.9069767441860468e-05, "loss": 0.3249, "step": 205},
    {"epoch": 0.195712954333644, "grad_norm": 0.22779060574090246, "learning_rate": 1.9534883720930235e-05, "loss": 0.3307, "step": 210},
    {"epoch": 0.20037278657968313, "grad_norm": 0.19697826843895966, "learning_rate": 2e-05, "loss": 0.3229, "step": 215},
    {"epoch": 0.20503261882572227, "grad_norm": 0.1971661281754431, "learning_rate": 1.9948213360952874e-05, "loss": 0.3286, "step": 220},
    {"epoch": 0.2096924510717614, "grad_norm": 0.19606725204483122, "learning_rate": 1.989642672190575e-05, "loss": 0.3177, "step": 225},
    {"epoch": 0.21435228331780057, "grad_norm": 0.194751313295783, "learning_rate": 1.9844640082858625e-05, "loss": 0.3264, "step": 230},
    {"epoch": 0.2190121155638397, "grad_norm": 0.19401867383810653, "learning_rate": 1.9792853443811498e-05, "loss": 0.3236, "step": 235},
    {"epoch": 0.22367194780987884, "grad_norm": 0.2061487676182812, "learning_rate": 1.9741066804764374e-05, "loss": 0.3262, "step": 240},
    {"epoch": 0.22833178005591798, "grad_norm": 0.22633953405241564, "learning_rate": 1.9689280165717246e-05, "loss": 0.3278, "step": 245},
    {"epoch": 0.23299161230195714, "grad_norm": 0.19231837200784246, "learning_rate": 1.963749352667012e-05, "loss": 0.3239, "step": 250},
    {"epoch": 0.23765144454799628, "grad_norm": 0.20567774474325756, "learning_rate": 1.9585706887622994e-05, "loss": 0.3262, "step": 255},
    {"epoch": 0.2423112767940354, "grad_norm": 0.22721757172361462, "learning_rate": 1.953392024857587e-05, "loss": 0.3295, "step": 260},
    {"epoch": 0.24697110904007455, "grad_norm": 0.22305696098164374, "learning_rate": 1.9482133609528746e-05, "loss": 0.3279, "step": 265},
    {"epoch": 0.2516309412861137, "grad_norm": 0.1869588017000237, "learning_rate": 1.9430346970481618e-05, "loss": 0.3249, "step": 270},
    {"epoch": 0.25629077353215285, "grad_norm": 0.2172985514276064, "learning_rate": 1.937856033143449e-05, "loss": 0.3262, "step": 275},
    {"epoch": 0.26095060577819196, "grad_norm": 0.18457639562753428, "learning_rate": 1.9326773692387366e-05, "loss": 0.3229, "step": 280},
    {"epoch": 0.2656104380242311, "grad_norm": 0.2085704847556598, "learning_rate": 1.927498705334024e-05, "loss": 0.3254, "step": 285},
    {"epoch": 0.2702702702702703, "grad_norm": 0.2127987275321348, "learning_rate": 1.9223200414293114e-05, "loss": 0.3313, "step": 290},
    {"epoch": 0.2749301025163094, "grad_norm": 0.22872842226387602, "learning_rate": 1.917141377524599e-05, "loss": 0.341, "step": 295},
    {"epoch": 0.27958993476234856, "grad_norm": 0.20656421805333372, "learning_rate": 1.9119627136198862e-05, "loss": 0.3367, "step": 300},
    {"epoch": 0.2842497670083877, "grad_norm": 0.21983247009596246, "learning_rate": 1.9067840497151738e-05, "loss": 0.3267, "step": 305},
    {"epoch": 0.28890959925442683, "grad_norm": 0.23918660845251774, "learning_rate": 1.901605385810461e-05, "loss": 0.3236, "step": 310},
    {"epoch": 0.293569431500466, "grad_norm": 0.19505454122387392, "learning_rate": 1.8964267219057483e-05, "loss": 0.3296, "step": 315},
    {"epoch": 0.2982292637465051, "grad_norm": 0.2131522282361315, "learning_rate": 1.891248058001036e-05, "loss": 0.3154, "step": 320},
    {"epoch": 0.30288909599254427, "grad_norm": 0.20650244568080883, "learning_rate": 1.8860693940963234e-05, "loss": 0.3172, "step": 325},
    {"epoch": 0.30754892823858343, "grad_norm": 0.19264072341187666, "learning_rate": 1.8808907301916107e-05, "loss": 0.3263, "step": 330},
    {"epoch": 0.31220876048462254, "grad_norm": 0.19575618641019582, "learning_rate": 1.8757120662868982e-05, "loss": 0.3246, "step": 335},
    {"epoch": 0.3168685927306617, "grad_norm": 0.19258229616398992, "learning_rate": 1.8705334023821855e-05, "loss": 0.3238, "step": 340},
    {"epoch": 0.32152842497670087, "grad_norm": 0.2051192074004652, "learning_rate": 1.865354738477473e-05, "loss": 0.3243, "step": 345},
    {"epoch": 0.32618825722274, "grad_norm": 0.21124173698060286, "learning_rate": 1.8601760745727603e-05, "loss": 0.3351, "step": 350},
    {"epoch": 0.33084808946877914, "grad_norm": 0.22780074322712987, "learning_rate": 1.854997410668048e-05, "loss": 0.3307, "step": 355},
    {"epoch": 0.33550792171481825, "grad_norm": 0.20050843718250042, "learning_rate": 1.8498187467633354e-05, "loss": 0.3231, "step": 360},
    {"epoch": 0.3401677539608574, "grad_norm": 0.20951533429996613, "learning_rate": 1.8446400828586227e-05, "loss": 0.3212, "step": 365},
    {"epoch": 0.3448275862068966, "grad_norm": 0.21171260508207002, "learning_rate": 1.83946141895391e-05, "loss": 0.3292, "step": 370},
    {"epoch": 0.3494874184529357, "grad_norm": 0.21633188290889002, "learning_rate": 1.8342827550491975e-05, "loss": 0.3224, "step": 375},
    {"epoch": 0.35414725069897485, "grad_norm": 0.20122697603683964, "learning_rate": 1.8291040911444847e-05, "loss": 0.3327, "step": 380},
    {"epoch": 0.35880708294501396, "grad_norm": 0.24727101450140662, "learning_rate": 1.8239254272397723e-05, "loss": 0.3294, "step": 385},
    {"epoch": 0.3634669151910531, "grad_norm": 0.20467658801471714, "learning_rate": 1.81874676333506e-05, "loss": 0.3251, "step": 390},
    {"epoch": 0.3681267474370923, "grad_norm": 0.21341519432060765, "learning_rate": 1.813568099430347e-05, "loss": 0.3273, "step": 395},
    {"epoch": 0.3727865796831314, "grad_norm": 0.19729258558856816, "learning_rate": 1.8083894355256347e-05, "loss": 0.3174, "step": 400},
    {"epoch": 0.37744641192917056, "grad_norm": 0.23364055194092742, "learning_rate": 1.803210771620922e-05, "loss": 0.3371, "step": 405},
    {"epoch": 0.3821062441752097, "grad_norm": 0.2188644833600693, "learning_rate": 1.798032107716209e-05, "loss": 0.3293, "step": 410},
    {"epoch": 0.38676607642124883, "grad_norm": 0.21936389966299402, "learning_rate": 1.7928534438114967e-05, "loss": 0.3216, "step": 415},
    {"epoch": 0.391425908667288, "grad_norm": 0.2192776499326504, "learning_rate": 1.7876747799067843e-05, "loss": 0.3329, "step": 420},
    {"epoch": 0.3960857409133271, "grad_norm": 0.22918816226113867, "learning_rate": 1.7824961160020715e-05, "loss": 0.3355, "step": 425},
    {"epoch": 0.40074557315936626, "grad_norm": 0.20292598837602438, "learning_rate": 1.777317452097359e-05, "loss": 0.3296, "step": 430},
    {"epoch": 0.40540540540540543, "grad_norm": 0.21448495438266224, "learning_rate": 1.7721387881926463e-05, "loss": 0.3361, "step": 435},
    {"epoch": 0.41006523765144454, "grad_norm": 0.22652364723732418, "learning_rate": 1.766960124287934e-05, "loss": 0.3276, "step": 440},
    {"epoch": 0.4147250698974837, "grad_norm": 0.23415606339172687, "learning_rate": 1.761781460383221e-05, "loss": 0.3259, "step": 445},
    {"epoch": 0.4193849021435228, "grad_norm": 0.20093495271086492, "learning_rate": 1.7566027964785087e-05, "loss": 0.3187, "step": 450},
    {"epoch": 0.424044734389562, "grad_norm": 0.2158827654398668, "learning_rate": 1.7514241325737963e-05, "loss": 0.3262, "step": 455},
    {"epoch": 0.42870456663560114, "grad_norm": 0.20249378577929067, "learning_rate": 1.7462454686690835e-05, "loss": 0.3242, "step": 460},
    {"epoch": 0.43336439888164024, "grad_norm": 0.2478822044298927, "learning_rate": 1.7410668047643708e-05, "loss": 0.3211, "step": 465},
    {"epoch": 0.4380242311276794, "grad_norm": 0.21466364366586702, "learning_rate": 1.7358881408596583e-05, "loss": 0.3315, "step": 470},
    {"epoch": 0.4426840633737186, "grad_norm": 0.21583782535840262, "learning_rate": 1.7307094769549456e-05, "loss": 0.3225, "step": 475},
    {"epoch": 0.4473438956197577, "grad_norm": 0.2277116814012742, "learning_rate": 1.725530813050233e-05, "loss": 0.3373, "step": 480},
    {"epoch": 0.45200372786579684, "grad_norm": 0.2200145220713097, "learning_rate": 1.7203521491455207e-05, "loss": 0.328, "step": 485},
    {"epoch": 0.45666356011183595, "grad_norm": 0.20597194480299022, "learning_rate": 1.715173485240808e-05, "loss": 0.3227, "step": 490},
    {"epoch": 0.4613233923578751, "grad_norm": 0.24111938352420786, "learning_rate": 1.7099948213360955e-05, "loss": 0.3262, "step": 495},
    {"epoch": 0.4659832246039143, "grad_norm": 0.21117173274751344, "learning_rate": 1.7048161574313828e-05, "loss": 0.336, "step": 500},
    {"epoch": 0.4706430568499534, "grad_norm": 0.2109477726961095, "learning_rate": 1.6996374935266703e-05, "loss": 0.3249, "step": 505},
    {"epoch": 0.47530288909599255, "grad_norm": 0.19834985237292366, "learning_rate": 1.6944588296219576e-05, "loss": 0.3265, "step": 510},
    {"epoch": 0.47996272134203166, "grad_norm": 0.19919064505532363, "learning_rate": 1.689280165717245e-05, "loss": 0.3258, "step": 515},
    {"epoch": 0.4846225535880708, "grad_norm": 0.21189208212338323, "learning_rate": 1.6841015018125324e-05, "loss": 0.3305, "step": 520},
    {"epoch": 0.48928238583411, "grad_norm": 0.22407539115980474, "learning_rate": 1.67892283790782e-05, "loss": 0.3305, "step": 525},
    {"epoch": 0.4939422180801491, "grad_norm": 0.20719461369420158, "learning_rate": 1.6737441740031072e-05, "loss": 0.3256, "step": 530},
    {"epoch": 0.49860205032618826, "grad_norm": 0.21338336824874424, "learning_rate": 1.6685655100983948e-05, "loss": 0.3301, "step": 535},
    {"epoch": 0.5032618825722274, "grad_norm": 0.21755987080911546, "learning_rate": 1.6633868461936823e-05, "loss": 0.3173, "step": 540},
    {"epoch": 0.5079217148182665, "grad_norm": 0.23805239729872102, "learning_rate": 1.6582081822889696e-05, "loss": 0.3242, "step": 545},
    {"epoch": 0.5125815470643057, "grad_norm": 0.2178591347578284, "learning_rate": 1.653029518384257e-05, "loss": 0.3294, "step": 550},
    {"epoch": 0.5172413793103449, "grad_norm": 0.22861501679158425, "learning_rate": 1.6478508544795444e-05, "loss": 0.3273, "step": 555},
    {"epoch": 0.5219012115563839, "grad_norm": 0.22391633980488124, "learning_rate": 1.6426721905748316e-05, "loss": 0.3382, "step": 560},
    {"epoch": 0.5265610438024231, "grad_norm": 0.19479709922612634, "learning_rate": 1.6374935266701192e-05, "loss": 0.3157, "step": 565},
    {"epoch": 0.5312208760484622, "grad_norm": 0.24570860862235, "learning_rate": 1.6323148627654068e-05, "loss": 0.3197, "step": 570},
    {"epoch": 0.5358807082945014, "grad_norm": 0.22193001265554263, "learning_rate": 1.6271361988606944e-05, "loss": 0.3238, "step": 575},
    {"epoch": 0.5405405405405406, "grad_norm": 0.23961884549409623, "learning_rate": 1.6219575349559816e-05, "loss": 0.3232, "step": 580},
    {"epoch": 0.5452003727865797, "grad_norm": 0.19137567045296802, "learning_rate": 1.6167788710512688e-05, "loss": 0.3265, "step": 585},
    {"epoch": 0.5498602050326188, "grad_norm": 0.20115755434956437, "learning_rate": 1.6116002071465564e-05, "loss": 0.3193, "step": 590},
    {"epoch": 0.554520037278658, "grad_norm": 0.2124470228159018, "learning_rate": 1.6064215432418436e-05, "loss": 0.3233, "step": 595},
    {"epoch": 0.5591798695246971, "grad_norm": 0.2180160408149773, "learning_rate": 1.6012428793371312e-05, "loss": 0.3244, "step": 600},
    {"epoch": 0.5638397017707363, "grad_norm": 0.2160242094251449, "learning_rate": 1.5960642154324188e-05, "loss": 0.3234, "step": 605},
    {"epoch": 0.5684995340167754, "grad_norm": 0.24806655464152233, "learning_rate": 1.590885551527706e-05, "loss": 0.332, "step": 610},
    {"epoch": 0.5731593662628145, "grad_norm": 0.21806755157780733, "learning_rate": 1.5857068876229933e-05, "loss": 0.3229, "step": 615},
    {"epoch": 0.5778191985088537, "grad_norm": 1.2870679394584568, "learning_rate": 1.5805282237182808e-05, "loss": 0.3168, "step": 620},
    {"epoch": 0.5824790307548928, "grad_norm": 0.2124373058232124, "learning_rate": 1.575349559813568e-05, "loss": 0.3322, "step": 625},
    {"epoch": 0.587138863000932, "grad_norm": 0.5434431989836286, "learning_rate": 1.5701708959088556e-05, "loss": 0.3294, "step": 630},
    {"epoch": 0.5917986952469712, "grad_norm": 0.22747818610994888, "learning_rate": 1.5649922320041432e-05, "loss": 0.3307, "step": 635},
    {"epoch": 0.5964585274930102, "grad_norm": 0.19699104995801411, "learning_rate": 1.5598135680994304e-05, "loss": 0.329, "step": 640},
    {"epoch": 0.6011183597390494, "grad_norm": 0.19502045494995346, "learning_rate": 1.554634904194718e-05, "loss": 0.3267, "step": 645},
    {"epoch": 0.6057781919850885, "grad_norm": 0.2192638389835302, "learning_rate": 1.5494562402900053e-05, "loss": 0.3294, "step": 650},
    {"epoch": 0.6104380242311277, "grad_norm": 0.21933984732022657, "learning_rate": 1.5442775763852925e-05, "loss": 0.332, "step": 655},
    {"epoch": 0.6150978564771669, "grad_norm": 0.21041459913026447, "learning_rate": 1.53909891248058e-05, "loss": 0.3156, "step": 660},
    {"epoch": 0.6197576887232059, "grad_norm": 0.23530573949874736, "learning_rate": 1.5339202485758676e-05, "loss": 0.3184, "step": 665},
    {"epoch": 0.6244175209692451, "grad_norm": 0.21850565712919703, "learning_rate": 1.5287415846711552e-05, "loss": 0.3188, "step": 670},
    {"epoch": 0.6290773532152842, "grad_norm": 0.2361213653169026, "learning_rate": 1.5235629207664425e-05, "loss": 0.32, "step": 675},
    {"epoch": 0.6337371854613234, "grad_norm": 0.20552105027007925, "learning_rate": 1.5183842568617297e-05, "loss": 0.3282, "step": 680},
    {"epoch": 0.6383970177073626, "grad_norm": 0.24722706453350193, "learning_rate": 1.5132055929570173e-05, "loss": 0.3312, "step": 685},
    {"epoch": 0.6430568499534017, "grad_norm": 0.267746674789141, "learning_rate": 1.5080269290523047e-05, "loss": 0.3215, "step": 690},
    {"epoch": 0.6477166821994408, "grad_norm": 0.20969336261995508, "learning_rate": 1.5028482651475919e-05, "loss": 0.3279, "step": 695},
    {"epoch": 0.65237651444548, "grad_norm": 0.2257138045484241, "learning_rate": 1.4976696012428795e-05, "loss": 0.3337, "step": 700},
    {"epoch": 0.6570363466915191, "grad_norm": 0.17961148295092744, "learning_rate": 1.4924909373381669e-05, "loss": 0.3224, "step": 705},
    {"epoch": 0.6616961789375583, "grad_norm": 0.23789960713870506, "learning_rate": 1.4873122734334541e-05, "loss": 0.3237, "step": 710},
    {"epoch": 0.6663560111835974, "grad_norm": 0.2208025857214297, "learning_rate": 1.4821336095287417e-05, "loss": 0.3175, "step": 715},
    {"epoch": 0.6710158434296365, "grad_norm": 0.19989756193207112, "learning_rate": 1.4769549456240291e-05, "loss": 0.3227, "step": 720},
    {"epoch": 0.6756756756756757, "grad_norm": 0.26463761665216684, "learning_rate": 1.4717762817193167e-05, "loss": 0.3266, "step": 725},
    {"epoch": 0.6803355079217148, "grad_norm": 0.21434087964970538, "learning_rate": 1.4665976178146039e-05, "loss": 0.3226, "step": 730},
    {"epoch": 0.684995340167754, "grad_norm": 0.2065276035839868, "learning_rate": 1.4614189539098913e-05, "loss": 0.3235, "step": 735},
    {"epoch": 0.6896551724137931, "grad_norm": 0.20649033808104794, "learning_rate": 1.4562402900051789e-05, "loss": 0.3187, "step": 740},
    {"epoch": 0.6943150046598322, "grad_norm": 0.20751516672171208, "learning_rate": 1.4510616261004661e-05, "loss": 0.319, "step": 745},
    {"epoch": 0.6989748369058714, "grad_norm": 0.19697106412326107, "learning_rate": 1.4458829621957535e-05, "loss": 0.3195, "step": 750},
    {"epoch": 0.7036346691519105, "grad_norm": 0.22267160213111145, "learning_rate": 1.4407042982910411e-05, "loss": 0.3234, "step": 755},
    {"epoch": 0.7082945013979497, "grad_norm": 0.20795208077111424, "learning_rate": 1.4355256343863283e-05, "loss": 0.3239, "step": 760},
    {"epoch": 0.7129543336439889, "grad_norm": 0.20931413370029262, "learning_rate": 1.430346970481616e-05, "loss": 0.3157, "step": 765},
    {"epoch": 0.7176141658900279, "grad_norm": 0.2279031584837272, "learning_rate": 1.4251683065769033e-05, "loss": 0.3229, "step": 770},
    {"epoch": 0.7222739981360671, "grad_norm": 0.28081041443039917, "learning_rate": 1.4199896426721906e-05, "loss": 0.33, "step": 775},
    {"epoch": 0.7269338303821062, "grad_norm": 0.21402684688996287, "learning_rate": 1.4148109787674781e-05, "loss": 0.3176, "step": 780},
    {"epoch": 0.7315936626281454, "grad_norm": 0.2170900802684808, "learning_rate": 1.4096323148627655e-05, "loss": 0.3217, "step": 785},
    {"epoch": 0.7362534948741846, "grad_norm": 0.24860030155760016, "learning_rate": 1.404453650958053e-05, "loss": 0.3265, "step": 790},
    {"epoch": 0.7409133271202236, "grad_norm": 0.19118776157108477, "learning_rate": 1.3992749870533403e-05, "loss": 0.3247, "step": 795},
    {"epoch": 0.7455731593662628, "grad_norm": 0.2260662140886731, "learning_rate": 1.3940963231486278e-05, "loss": 0.3231, "step": 800},
    {"epoch": 0.750232991612302, "grad_norm": 0.20631237485654458, "learning_rate": 1.3889176592439152e-05, "loss": 0.3232, "step": 805},
    {"epoch": 0.7548928238583411, "grad_norm": 0.2504046456069529, "learning_rate": 1.3837389953392026e-05, "loss": 0.3332, "step": 810},
    {"epoch": 0.7595526561043803, "grad_norm": 0.22716466517888942, "learning_rate": 1.37856033143449e-05, "loss": 0.3202, "step": 815},
    {"epoch": 0.7642124883504194, "grad_norm": 0.2609982780137847, "learning_rate": 1.3733816675297775e-05, "loss": 0.3285, "step": 820},
    {"epoch": 0.7688723205964585, "grad_norm": 0.269151275613793, "learning_rate": 1.3682030036250648e-05, "loss": 0.3206, "step": 825},
    {"epoch": 0.7735321528424977, "grad_norm": 0.21925839142695047, "learning_rate": 1.3630243397203522e-05, "loss": 0.3192, "step": 830},
    {"epoch": 0.7781919850885368, "grad_norm": 0.20386131918941994, "learning_rate": 1.3578456758156398e-05, "loss": 0.321, "step": 835},
    {"epoch": 0.782851817334576, "grad_norm": 0.2532163945055847, "learning_rate": 1.3526670119109272e-05, "loss": 0.3275, "step": 840},
    {"epoch": 0.7875116495806151, "grad_norm": 0.21472966630917703, "learning_rate": 1.3474883480062144e-05, "loss": 0.3349, "step": 845},
    {"epoch": 0.7921714818266542, "grad_norm": 0.24312133461634136, "learning_rate": 1.342309684101502e-05, "loss": 0.321, "step": 850},
    {"epoch": 0.7968313140726934, "grad_norm": 0.2229944694531994, "learning_rate": 1.3371310201967894e-05, "loss": 0.3278, "step": 855},
    {"epoch": 0.8014911463187325, "grad_norm": 0.19371137005852632, "learning_rate": 1.3319523562920766e-05, "loss": 0.3338, "step": 860},
    {"epoch": 0.8061509785647717, "grad_norm": 0.22950989752366102, "learning_rate": 1.3267736923873642e-05, "loss": 0.3208, "step": 865},
    {"epoch": 0.8108108108108109, "grad_norm": 0.22496120833511551, "learning_rate": 1.3215950284826516e-05, "loss": 0.328, "step": 870},
    {"epoch": 0.8154706430568499, "grad_norm": 0.20551584337966472, "learning_rate": 1.3164163645779392e-05, "loss": 0.3257, "step": 875},
    {"epoch": 0.8201304753028891, "grad_norm": 0.21692857406595228, "learning_rate": 1.3112377006732264e-05, "loss": 0.3226, "step": 880},
    {"epoch": 0.8247903075489282, "grad_norm": 0.2171027152401885, "learning_rate": 1.3060590367685138e-05, "loss": 0.3278, "step": 885},
    {"epoch": 0.8294501397949674, "grad_norm": 0.22927464515991447, "learning_rate": 1.3008803728638014e-05, "loss": 0.3212, "step": 890},
    {"epoch": 0.8341099720410066, "grad_norm": 0.19864068045743993, "learning_rate": 1.2957017089590886e-05, "loss": 0.337, "step": 895},
    {"epoch": 0.8387698042870456, "grad_norm": 0.19961778149739876, "learning_rate": 1.290523045054376e-05, "loss": 0.3241, "step": 900},
    {"epoch": 0.8434296365330848, "grad_norm": 0.18416032414845795, "learning_rate": 1.2853443811496636e-05, "loss": 0.3222, "step": 905},
    {"epoch": 0.848089468779124, "grad_norm": 0.2110298314090597, "learning_rate": 1.2801657172449508e-05, "loss": 0.3262, "step": 910},
    {"epoch": 0.8527493010251631, "grad_norm": 0.2281982369217672, "learning_rate": 1.2749870533402384e-05, "loss": 0.327, "step": 915},
    {"epoch": 0.8574091332712023, "grad_norm": 0.22872935927067808, "learning_rate": 1.2698083894355258e-05, "loss": 0.323, "step": 920},
    {"epoch": 0.8620689655172413, "grad_norm": 0.21165791707242024, "learning_rate": 1.264629725530813e-05, "loss": 0.3169, "step": 925},
    {"epoch": 0.8667287977632805, "grad_norm": 0.18628554545791506, "learning_rate": 1.2594510616261006e-05, "loss": 0.3164, "step": 930},
    {"epoch": 0.8713886300093197, "grad_norm": 0.23083205226589923, "learning_rate": 1.254272397721388e-05, "loss": 0.3222, "step": 935},
    {"epoch": 0.8760484622553588, "grad_norm": 0.2192954299935438, "learning_rate": 1.2490937338166753e-05, "loss": 0.3244, "step": 940},
    {"epoch": 0.880708294501398, "grad_norm": 0.2248357209056571, "learning_rate": 1.2439150699119628e-05, "loss": 0.3219, "step": 945},
    {"epoch": 0.8853681267474371, "grad_norm": 0.20471170755414897, "learning_rate": 1.2387364060072502e-05, "loss": 0.3333, "step": 950},
    {"epoch": 0.8900279589934762, "grad_norm": 0.2076140935278383, "learning_rate": 1.2335577421025375e-05, "loss": 0.3269, "step": 955},
    {"epoch": 0.8946877912395154, "grad_norm": 0.1767922723082159, "learning_rate": 1.228379078197825e-05, "loss": 0.3296, "step": 960},
    {"epoch": 0.8993476234855545, "grad_norm": 0.24187201186511662, "learning_rate": 1.2232004142931125e-05, "loss": 0.3251, "step": 965},
    {"epoch": 0.9040074557315937, "grad_norm": 0.2309707614632519, "learning_rate": 1.2180217503884e-05, "loss": 0.3166, "step": 970},
    {"epoch": 0.9086672879776329, "grad_norm": 0.20032479610884243, "learning_rate": 1.2128430864836873e-05, "loss": 0.332, "step": 975},
    {"epoch": 0.9133271202236719, "grad_norm": 0.20908939058088777, "learning_rate": 1.2076644225789747e-05, "loss": 0.3231, "step": 980},
    {"epoch": 0.9179869524697111, "grad_norm": 0.21029655462480856, "learning_rate": 1.2024857586742623e-05, "loss": 0.3219, "step": 985},
    {"epoch": 0.9226467847157502, "grad_norm": 0.21845134201809333, "learning_rate": 1.1973070947695495e-05, "loss": 0.3203, "step": 990},
    {"epoch": 0.9273066169617894, "grad_norm": 0.20388120839780344, "learning_rate": 1.1921284308648369e-05, "loss": 0.3224, "step": 995},
    {"epoch": 0.9319664492078286, "grad_norm": 0.19143754175819644, "learning_rate": 1.1869497669601245e-05, "loss": 0.3221, "step": 1000},
    {"epoch": 0.9366262814538676, "grad_norm": 0.19860377602780624, "learning_rate": 1.1817711030554117e-05, "loss": 0.326, "step": 1005},
    {"epoch": 0.9412861136999068, "grad_norm": 0.22191428976312808, "learning_rate": 1.1765924391506993e-05, "loss": 0.3241, "step": 1010},
    {"epoch": 0.9459459459459459, "grad_norm": 0.2264899602702503, "learning_rate": 1.1714137752459867e-05, "loss": 0.328, "step": 1015},
    {"epoch": 0.9506057781919851, "grad_norm": 0.19315587082731886, "learning_rate": 1.166235111341274e-05, "loss": 0.3229, "step": 1020},
    {"epoch": 0.9552656104380243, "grad_norm": 0.19951522522778153, "learning_rate": 1.1610564474365615e-05, "loss": 0.3311, "step": 1025},
    {"epoch": 0.9599254426840633, "grad_norm": 0.18858346428525566, "learning_rate": 1.1558777835318489e-05, "loss": 0.3249, "step": 1030},
    {"epoch": 0.9645852749301025, "grad_norm": 0.19792425530271554, "learning_rate": 1.1506991196271361e-05, "loss": 0.3259, "step": 1035},
    {"epoch": 0.9692451071761417, "grad_norm": 0.20030469631792833, "learning_rate": 1.1455204557224237e-05, "loss": 0.3226, "step": 1040},
    {"epoch": 0.9739049394221808, "grad_norm": 0.20770307462171755, "learning_rate": 1.1403417918177111e-05, "loss": 0.3171, "step": 1045},
    {"epoch": 0.97856477166822, "grad_norm": 0.28646869241469997, "learning_rate": 1.1351631279129983e-05, "loss": 0.3392, "step": 1050},
    {"epoch": 0.983224603914259, "grad_norm": 0.21788488981566065, "learning_rate": 1.129984464008286e-05, "loss": 0.3191, "step": 1055},
    {"epoch": 0.9878844361602982, "grad_norm": 0.20606943186110416, "learning_rate": 1.1248058001035733e-05, "loss": 0.3217, "step": 1060},
    {"epoch": 0.9925442684063374, "grad_norm": 0.21528787214750336, "learning_rate": 1.1196271361988609e-05, "loss": 0.325, "step": 1065},
    {"epoch": 0.9972041006523765, "grad_norm": 0.21376540320281065, "learning_rate": 1.1144484722941481e-05, "loss": 0.3225, "step": 1070},
    {"epoch": 1.0018639328984156, "grad_norm": 0.2259025613794045, "learning_rate": 1.1092698083894355e-05, "loss": 0.2983, "step": 1075},
    {"epoch": 1.0065237651444547, "grad_norm": 0.2195605783591442, "learning_rate": 1.1040911444847231e-05, "loss": 0.2827, "step": 1080},
    {"epoch": 1.011183597390494, "grad_norm": 0.23344991666250092, "learning_rate": 1.0989124805800104e-05, "loss": 0.2878, "step": 1085},
    {"epoch": 1.015843429636533, "grad_norm": 0.2134504165256278, "learning_rate": 1.0937338166752978e-05, "loss": 0.2924, "step": 1090},
    {"epoch": 1.0205032618825722, "grad_norm": 0.20631302270411553, "learning_rate": 1.0885551527705853e-05, "loss": 0.2823, "step": 1095},
    {"epoch": 1.0251630941286114, "grad_norm": 0.19643085882318784, "learning_rate": 1.0833764888658726e-05, "loss": 0.2736, "step": 1100},
    {"epoch": 1.0298229263746506, "grad_norm": 0.22146197945390989, "learning_rate": 1.0781978249611601e-05, "loss": 0.2815, "step": 1105},
    {"epoch": 1.0344827586206897, "grad_norm": 0.18822022387097448, "learning_rate": 1.0730191610564476e-05, "loss": 0.2798, "step": 1110},
    {"epoch": 1.0391425908667289, "grad_norm": 0.2048702940946389, "learning_rate": 1.067840497151735e-05, "loss": 0.2827, "step": 1115},
    {"epoch": 1.0438024231127678, "grad_norm": 0.21862482584858495, "learning_rate": 1.0626618332470224e-05, "loss": 0.2774, "step": 1120},
    {"epoch": 1.048462255358807, "grad_norm": 0.17556754940127575, "learning_rate": 1.0574831693423098e-05, "loss": 0.281, "step": 1125},
    {"epoch": 1.0531220876048462, "grad_norm": 0.17905762828840907, "learning_rate": 1.0523045054375972e-05, "loss": 0.2827, "step": 1130},
    {"epoch": 1.0577819198508853, "grad_norm": 0.19929098240833457, "learning_rate": 1.0471258415328846e-05, "loss": 0.2908, "step": 1135},
    {"epoch": 1.0624417520969245, "grad_norm": 0.1911168370013747, "learning_rate": 1.041947177628172e-05, "loss": 0.2865, "step": 1140},
    {"epoch": 1.0671015843429636, "grad_norm": 0.19469722744535697, "learning_rate": 1.0367685137234594e-05, "loss": 0.281, "step": 1145},
    {"epoch": 1.0717614165890028, "grad_norm": 0.1915246964885849, "learning_rate": 1.031589849818747e-05, "loss": 0.2738, "step": 1150},
    {"epoch": 1.076421248835042, "grad_norm": 0.20821547972324014, "learning_rate": 1.0264111859140342e-05, "loss": 0.2797, "step": 1155},
    {"epoch": 1.0810810810810811, "grad_norm": 0.1784617175285205, "learning_rate": 1.0212325220093218e-05, "loss": 0.2804, "step": 1160},
    {"epoch": 1.0857409133271203, "grad_norm": 0.21890203338152653, "learning_rate": 1.0160538581046092e-05, "loss": 0.2829, "step": 1165},
    {"epoch": 1.0904007455731595, "grad_norm": 0.19918388826900157, "learning_rate": 1.0108751941998964e-05, "loss": 0.289, "step": 1170},
    {"epoch": 1.0950605778191984, "grad_norm": 0.19026021585624292, "learning_rate": 1.005696530295184e-05, "loss": 0.2805, "step": 1175},
    {"epoch": 1.0997204100652376, "grad_norm": 0.21456689205757995, "learning_rate": 1.0005178663904714e-05, "loss": 0.2838, "step": 1180},
    {"epoch": 1.1043802423112767, "grad_norm": 0.20700020922949647, "learning_rate": 9.953392024857588e-06, "loss": 0.2802, "step": 1185},
    {"epoch": 1.109040074557316, "grad_norm": 0.18236027964263013, "learning_rate": 9.901605385810462e-06, "loss": 0.2852, "step": 1190},
    {"epoch": 1.113699906803355, "grad_norm": 0.21077637095731513, "learning_rate": 9.849818746763336e-06, "loss": 0.2896, "step": 1195},
    {"epoch": 1.1183597390493942, "grad_norm": 0.18470733903435116, "learning_rate": 9.79803210771621e-06, "loss": 0.2844, "step": 1200},
    {"epoch": 1.1230195712954334, "grad_norm": 0.20723918029516097, "learning_rate": 9.746245468669084e-06, "loss": 0.2851, "step": 1205},
    {"epoch": 1.1276794035414726, "grad_norm": 0.1898177888255008, "learning_rate": 9.694458829621958e-06, "loss": 0.2854, "step": 1210},
    {"epoch": 1.1323392357875117, "grad_norm": 0.18458308742885182, "learning_rate": 9.642672190574832e-06, "loss": 0.2833, "step": 1215},
    {"epoch": 1.1369990680335509, "grad_norm": 0.19195564599792803, "learning_rate": 9.590885551527706e-06, "loss": 0.2842, "step": 1220},
    {"epoch": 1.14165890027959, "grad_norm": 0.2042846743518091, "learning_rate": 9.53909891248058e-06, "loss": 0.2824, "step": 1225},
    {"epoch": 1.146318732525629, "grad_norm": 0.19117917471615137, "learning_rate": 9.487312273433454e-06, "loss": 0.2844, "step": 1230},
    {"epoch": 1.1509785647716682, "grad_norm": 0.1889691345807206, "learning_rate": 9.435525634386328e-06, "loss": 0.2881, "step": 1235},
    {"epoch": 1.1556383970177073, "grad_norm": 0.1859542475312062, "learning_rate": 9.383738995339203e-06, "loss": 0.2802, "step": 1240},
    {"epoch": 1.1602982292637465, "grad_norm": 0.18878674736359274, "learning_rate": 9.331952356292078e-06, "loss": 0.287, "step": 1245},
    {"epoch": 1.1649580615097856, "grad_norm": 0.18804677940623368, "learning_rate": 9.28016571724495e-06, "loss": 0.2828, "step": 1250},
    {"epoch": 1.1696178937558248, "grad_norm": 0.18920448811635315, "learning_rate": 9.228379078197825e-06, "loss": 0.2688, "step": 1255},
    {"epoch": 1.174277726001864, "grad_norm": 0.18953887671939224, "learning_rate": 9.1765924391507e-06, "loss": 0.2848, "step": 1260},
    {"epoch": 1.1789375582479031, "grad_norm": 0.20452563159049228, "learning_rate": 9.124805800103574e-06, "loss": 0.2807, "step": 1265},
    {"epoch": 1.1835973904939423, "grad_norm": 0.19366714572234173, "learning_rate": 9.073019161056449e-06, "loss": 0.2905, "step": 1270},
    {"epoch": 1.1882572227399812, "grad_norm": 0.18697810604500456, "learning_rate": 9.021232522009323e-06, "loss": 0.2843, "step": 1275},
    {"epoch": 1.1929170549860204, "grad_norm": 0.20123824295274137, "learning_rate": 8.969445882962197e-06, "loss": 0.2857, "step": 1280},
    {"epoch": 1.1975768872320596, "grad_norm": 0.1915692749101002, "learning_rate": 8.91765924391507e-06, "loss": 0.2898, "step": 1285},
    {"epoch": 1.2022367194780987, "grad_norm": 0.18916488913676208, "learning_rate": 8.865872604867945e-06, "loss": 0.2804, "step": 1290},
    {"epoch": 1.206896551724138, "grad_norm": 0.18617028635469698, "learning_rate": 8.814085965820819e-06, "loss": 0.2817, "step": 1295},
    {"epoch": 1.211556383970177, "grad_norm": 0.22609923703108467, "learning_rate": 8.762299326773693e-06, "loss": 0.2905, "step": 1300},
    {"epoch": 1.2162162162162162, "grad_norm": 0.2126857111729568, "learning_rate": 8.710512687726569e-06, "loss": 0.2879, "step": 1305},
    {"epoch": 1.2208760484622554, "grad_norm": 0.18126697001357067, "learning_rate": 8.658726048679441e-06, "loss": 0.2777, "step": 1310},
    {"epoch": 1.2255358807082946, "grad_norm": 0.19570909476945747, "learning_rate": 8.606939409632315e-06, "loss": 0.2941, "step": 1315},
    {"epoch": 1.2301957129543337, "grad_norm": 0.19971390800204433, "learning_rate": 8.55515277058519e-06, "loss": 0.285, "step": 1320},
    {"epoch": 1.2348555452003729, "grad_norm": 0.18248980133165246, "learning_rate": 8.503366131538063e-06, "loss": 0.2888, "step": 1325},
    {"epoch": 1.2395153774464118, "grad_norm": 0.18297875521481516, "learning_rate": 8.451579492490937e-06, "loss": 0.2829, "step": 1330},
    {"epoch": 1.244175209692451, "grad_norm": 0.19520992573003834, "learning_rate": 8.399792853443813e-06, "loss": 0.2826, "step": 1335},
    {"epoch": 1.2488350419384902, "grad_norm": 0.19384120322676446, "learning_rate": 8.348006214396687e-06, "loss": 0.2868, "step": 1340},
    {"epoch": 1.2534948741845293, "grad_norm": 0.1783920464815875, "learning_rate": 8.29621957534956e-06, "loss": 0.2751, "step": 1345},
    {"epoch": 1.2581547064305685, "grad_norm": 0.2021231519848707, "learning_rate": 8.244432936302435e-06, "loss": 0.2821, "step": 1350},
    {"epoch": 1.2628145386766076, "grad_norm": 0.19093463948979375, "learning_rate": 8.192646297255309e-06, "loss": 0.2938, "step": 1355},
    {"epoch": 1.2674743709226468, "grad_norm": 0.199915448813584, "learning_rate": 8.140859658208183e-06, "loss": 0.2858, "step": 1360},
    {"epoch": 1.272134203168686, "grad_norm": 0.17486575781096428, "learning_rate": 8.089073019161057e-06, "loss": 0.2837, "step": 1365},
    {"epoch": 1.2767940354147251, "grad_norm": 0.20131105656692982, "learning_rate": 8.037286380113931e-06, "loss": 0.2827, "step": 1370},
    {"epoch": 1.281453867660764, "grad_norm": 0.19114388849396574, "learning_rate": 7.985499741066805e-06, "loss": 0.2764, "step": 1375},
    {"epoch": 1.2861136999068035, "grad_norm": 0.18459191813016385, "learning_rate": 7.93371310201968e-06, "loss": 0.2869, "step": 1380},
    {"epoch": 1.2907735321528424, "grad_norm": 0.18749447304277989, "learning_rate": 7.881926462972553e-06, "loss": 0.2892, "step": 1385},
    {"epoch": 1.2954333643988816, "grad_norm": 0.19126389742101615, "learning_rate": 7.830139823925427e-06, "loss": 0.2821, "step": 1390},
    {"epoch": 1.3000931966449207, "grad_norm": 0.18893656425511854, "learning_rate": 7.778353184878302e-06, "loss": 0.288, "step": 1395},
    {"epoch": 1.30475302889096, "grad_norm": 0.1749441012261624, "learning_rate": 7.726566545831177e-06, "loss": 0.2782, "step": 1400},
    {"epoch": 1.309412861136999, "grad_norm": 0.17984937281141697, "learning_rate": 7.67477990678405e-06, "loss": 0.2877, "step": 1405},
    {"epoch": 1.3140726933830382, "grad_norm": 0.18676954296910422, "learning_rate": 7.6229932677369245e-06, "loss": 0.2828, "step": 1410},
    {"epoch": 1.3187325256290774, "grad_norm": 0.18513926663515104, "learning_rate": 7.571206628689799e-06, "loss": 0.2896, "step": 1415},
    {"epoch": 1.3233923578751166, "grad_norm": 0.18724138325257705, "learning_rate": 7.519419989642673e-06, "loss": 0.2734, "step": 1420},
    {"epoch": 1.3280521901211557, "grad_norm": 0.18319826116355747, "learning_rate": 7.467633350595547e-06, "loss": 0.2851, "step": 1425},
    {"epoch": 1.3327120223671947, "grad_norm": 0.18428323561659832, "learning_rate": 7.415846711548422e-06, "loss": 0.2874, "step": 1430},
    {"epoch": 1.337371854613234, "grad_norm": 0.17000821424536014, "learning_rate": 7.364060072501296e-06, "loss": 0.2909, "step": 1435},
    {"epoch": 1.342031686859273, "grad_norm": 0.1841048828984189, "learning_rate": 7.312273433454169e-06, "loss": 0.2861, "step": 1440},
    {"epoch": 1.3466915191053122, "grad_norm": 0.17345256964148933, "learning_rate": 7.260486794407044e-06, "loss": 0.2882, "step": 1445},
    {"epoch": 1.3513513513513513, "grad_norm": 0.18258453132832908, "learning_rate": 7.208700155359918e-06, "loss": 0.2806, "step": 1450},
    {"epoch": 1.3560111835973905, "grad_norm": 0.17378696156161202, "learning_rate": 7.156913516312793e-06, "loss": 0.2841, "step": 1455},
    {"epoch": 1.3606710158434296, "grad_norm": 0.17509727665850616, "learning_rate": 7.105126877265666e-06, "loss": 0.2921, "step": 1460},
    {"epoch": 1.3653308480894688, "grad_norm": 0.20267254775086155, "learning_rate": 7.05334023821854e-06, "loss": 0.2861, "step": 1465},
    {"epoch": 1.369990680335508, "grad_norm": 0.1826780882718024, "learning_rate": 7.001553599171415e-06, "loss": 0.2835, "step": 1470},
    {"epoch": 1.3746505125815471, "grad_norm": 0.17275106110892652, "learning_rate": 6.949766960124289e-06, "loss": 0.2822, "step": 1475},
    {"epoch": 1.3793103448275863, "grad_norm": 0.1743364966907928, "learning_rate": 6.897980321077162e-06, "loss": 0.2717, "step": 1480},
    {"epoch": 1.3839701770736252, "grad_norm": 0.17305807049397479, "learning_rate": 6.846193682030037e-06, "loss": 0.2828, "step": 1485},
    {"epoch": 1.3886300093196646, "grad_norm": 0.18234220662171363, "learning_rate": 6.794407042982911e-06, "loss": 0.2928, "step": 1490},
    {"epoch": 1.3932898415657036, "grad_norm": 0.19901527937775287, "learning_rate": 6.742620403935786e-06, "loss": 0.2963, "step": 1495},
    {"epoch": 1.3979496738117427, "grad_norm": 0.20010097090257092, "learning_rate": 6.690833764888659e-06, "loss": 0.2918, "step": 1500},
    {"epoch": 1.402609506057782, "grad_norm": 0.20029238519412934, "learning_rate": 6.639047125841533e-06, "loss": 0.2855, "step": 1505},
    {"epoch": 1.407269338303821, "grad_norm": 0.1936425086687196, "learning_rate": 6.587260486794408e-06, "loss": 0.2785, "step": 1510},
    {"epoch": 1.4119291705498602, "grad_norm": 0.1991532365434793, "learning_rate": 6.535473847747281e-06, "loss": 0.2881, "step": 1515},
    {"epoch": 1.4165890027958994, "grad_norm": 0.17075404785787288, "learning_rate": 6.483687208700155e-06, "loss": 0.2875, "step": 1520},
    {"epoch": 1.4212488350419386, "grad_norm": 0.1840441188498844, "learning_rate": 6.43190056965303e-06, "loss": 0.2846, "step": 1525},
    {"epoch": 1.4259086672879777, "grad_norm": 0.1812038546431852, "learning_rate": 6.380113930605904e-06, "loss": 0.2799, "step": 1530},
    {"epoch": 1.4305684995340169, "grad_norm": 0.19072904209724684, "learning_rate": 6.3283272915587775e-06, "loss": 0.2847, "step": 1535},
    {"epoch": 1.4352283317800558, "grad_norm": 0.1704872579719148, "learning_rate": 6.276540652511652e-06, "loss": 0.2825, "step": 1540},
    {"epoch": 1.439888164026095, "grad_norm": 0.19536704137537816, "learning_rate": 6.2247540134645265e-06, "loss": 0.2838, "step": 1545},
    {"epoch": 1.4445479962721341, "grad_norm": 0.17789121544016315, "learning_rate": 6.172967374417401e-06, "loss": 0.2847, "step": 1550},
    {"epoch": 1.4492078285181733, "grad_norm": 0.19130871528617927, "learning_rate": 6.1211807353702746e-06, "loss": 0.28, "step": 1555},
    {"epoch": 1.4538676607642125, "grad_norm": 0.17749331170718344, "learning_rate": 6.069394096323149e-06, "loss": 0.2817, "step": 1560},
    {"epoch": 1.4585274930102516, "grad_norm": 0.17125040626587648, "learning_rate": 6.0176074572760235e-06, "loss": 0.2842, "step": 1565},
    {"epoch": 1.4631873252562908, "grad_norm": 0.1717733923202594, "learning_rate": 5.9658208182288976e-06, "loss": 0.2852, "step": 1570},
    {"epoch": 1.46784715750233, "grad_norm": 0.18029835185575757, "learning_rate": 5.914034179181772e-06, "loss": 0.2868, "step": 1575},
    {"epoch": 1.4725069897483691, "grad_norm": 0.17321438940936698, "learning_rate": 5.862247540134646e-06, "loss": 0.289, "step": 1580},
    {"epoch": 1.477166821994408, "grad_norm": 0.18465932185370307, "learning_rate": 5.81046090108752e-06, "loss": 0.2868, "step": 1585},
    {"epoch": 1.4818266542404475, "grad_norm": 0.17130471704458686, "learning_rate": 5.758674262040395e-06, "loss": 0.2862, "step": 1590},
    {"epoch": 1.4864864864864864, "grad_norm": 0.18491783556891386, "learning_rate": 5.706887622993268e-06, "loss": 0.2823, "step": 1595},
    {"epoch": 1.4911463187325256, "grad_norm": 0.18312451894682794, "learning_rate": 5.655100983946143e-06, "loss": 0.2834, "step": 1600},
    {"epoch": 1.4958061509785647, "grad_norm": 0.20865879661440087, "learning_rate": 5.603314344899017e-06, "loss": 0.2924, "step": 1605},
    {"epoch": 1.500465983224604, "grad_norm": 0.19218279954004253, "learning_rate": 5.55152770585189e-06, "loss": 0.2881, "step": 1610},
    {"epoch": 1.505125815470643, "grad_norm": 0.20044041867998658, "learning_rate": 5.499741066804765e-06, "loss": 0.2833, "step": 1615},
    {"epoch": 1.5097856477166822, "grad_norm": 0.17165212019189693, "learning_rate": 5.447954427757639e-06, "loss": 0.2831, "step": 1620},
    {"epoch": 1.5144454799627214, "grad_norm": 0.16485996786783458, "learning_rate": 5.396167788710514e-06, "loss": 0.2785, "step": 1625},
    {"epoch": 1.5191053122087603, "grad_norm": 0.1869210240796828, "learning_rate": 5.344381149663387e-06, "loss": 0.2797, "step": 1630},
    {"epoch": 1.5237651444547997, "grad_norm": 0.19194565166760805, "learning_rate": 5.292594510616261e-06, "loss": 0.2784, "step": 1635},
    {"epoch": 1.5284249767008387, "grad_norm": 0.1863087676239078, "learning_rate": 5.240807871569136e-06, "loss": 0.2758, "step": 1640},
    {"epoch": 1.533084808946878, "grad_norm": 0.17516911308691216, "learning_rate": 5.18902123252201e-06, "loss": 0.2922, "step": 1645},
    {"epoch": 1.537744641192917, "grad_norm": 0.19150134236928595, "learning_rate": 5.137234593474883e-06, "loss": 0.288, "step": 1650},
    {"epoch": 1.5424044734389561, "grad_norm": 0.17534568446933935, "learning_rate": 5.085447954427758e-06, "loss": 0.2904, "step": 1655},
    {"epoch": 1.5470643056849953, "grad_norm": 0.18924120071576864, "learning_rate": 5.033661315380632e-06, "loss": 0.293, "step": 1660},
    {"epoch": 1.5517241379310345, "grad_norm": 0.19954078997223346, "learning_rate": 4.981874676333506e-06, "loss": 0.2797, "step": 1665},
    {"epoch": 1.5563839701770736, "grad_norm": 0.18388310486629303, "learning_rate": 4.930088037286381e-06, "loss": 0.2875, "step": 1670},
    {"epoch": 1.5610438024231128, "grad_norm": 0.19490866092310646, "learning_rate": 4.878301398239254e-06, "loss": 0.2931, "step": 1675},
    {"epoch": 1.565703634669152, "grad_norm": 0.19751354733303017, "learning_rate": 4.826514759192129e-06, "loss": 0.289, "step": 1680},
    {"epoch": 1.570363466915191, "grad_norm": 0.18733868260649666, "learning_rate": 4.774728120145003e-06, "loss": 0.2785, "step": 1685},
    {"epoch": 1.5750232991612303, "grad_norm": 0.19221116473809075, "learning_rate": 4.7229414810978765e-06, "loss": 0.2925, "step": 1690},
    {"epoch": 1.5796831314072692, "grad_norm": 0.18085900113347148, "learning_rate": 4.671154842050751e-06, "loss": 0.2911, "step": 1695},
    {"epoch": 1.5843429636533086, "grad_norm": 0.1973140421662704, "learning_rate": 4.6193682030036255e-06, "loss": 0.2803, "step": 1700},
    {"epoch": 1.5890027958993476, "grad_norm": 0.18709409447385142, "learning_rate": 4.5675815639564995e-06, "loss": 0.2845, "step": 1705},
    {"epoch": 1.5936626281453867, "grad_norm": 0.17665797548222434, "learning_rate": 4.5157949249093736e-06, "loss": 0.2869, "step": 1710},
    {"epoch": 1.598322460391426, "grad_norm": 0.17996538581217197, "learning_rate": 4.464008285862248e-06, "loss": 0.2795, "step": 1715},
    {"epoch": 1.602982292637465, "grad_norm": 0.19445653575092758, "learning_rate": 4.412221646815122e-06, "loss": 0.2885, "step": 1720},
    {"epoch": 1.6076421248835042, "grad_norm": 0.17825241400226646, "learning_rate": 4.3604350077679966e-06, "loss": 0.2855, "step": 1725},
    {"epoch": 1.6123019571295434, "grad_norm": 0.18474656142781384, "learning_rate": 4.308648368720871e-06, "loss": 0.282, "step": 1730},
    {"epoch": 1.6169617893755825, "grad_norm": 0.1950234628172142, "learning_rate": 4.256861729673745e-06, "loss": 0.2872, "step": 1735},
    {"epoch": 1.6216216216216215, "grad_norm": 0.18770953389353345, "learning_rate": 4.205075090626619e-06, "loss": 0.2867, "step": 1740},
    {"epoch": 1.6262814538676609, "grad_norm": 0.178276907246367, "learning_rate": 4.153288451579493e-06, "loss": 0.2837, "step": 1745},
    {"epoch": 1.6309412861136998, "grad_norm": 0.20173726657394422, "learning_rate": 4.101501812532367e-06, "loss": 0.2878, "step": 1750},
    {"epoch": 1.6356011183597392, "grad_norm": 0.18457623482800892, "learning_rate": 4.049715173485242e-06, "loss": 0.2849, "step": 1755},
    {"epoch": 1.6402609506057781, "grad_norm": 0.19451630359937264, "learning_rate": 3.997928534438115e-06, "loss": 0.2911, "step": 1760},
    {"epoch": 1.6449207828518173, "grad_norm": 0.182302636059145, "learning_rate": 3.94614189539099e-06, "loss": 0.2873, "step": 1765},
    {"epoch": 1.6495806150978565, "grad_norm": 0.17227161525874077, "learning_rate": 3.894355256343864e-06, "loss": 0.2813, "step": 1770},
    {"epoch": 1.6542404473438956, "grad_norm": 0.18388975389617138, "learning_rate": 3.842568617296738e-06, "loss": 0.2778, "step": 1775},
    {"epoch": 1.6589002795899348, "grad_norm": 0.18763197465021858, "learning_rate": 3.790781978249612e-06, "loss": 0.2874, "step": 1780},
    {"epoch": 1.6635601118359737, "grad_norm": 0.18648811108535399, "learning_rate": 3.7389953392024856e-06, "loss": 0.287, "step": 1785},
    {"epoch": 1.6682199440820131, "grad_norm": 0.16990216601005864, "learning_rate": 3.68720870015536e-06, "loss": 0.2844, "step": 1790},
    {"epoch": 1.672879776328052, "grad_norm": 0.19315242332028645, "learning_rate": 3.635422061108234e-06, "loss": 0.2778, "step": 1795},
    {"epoch": 1.6775396085740915, "grad_norm": 0.18107706144773053, "learning_rate": 3.5836354220611086e-06, "loss": 0.2746, "step": 1800},
    {"epoch": 1.6821994408201304, "grad_norm": 0.17626787448033834, "learning_rate": 3.5318487830139827e-06, "loss": 0.2759, "step": 1805},
    {"epoch": 1.6868592730661698, "grad_norm": 0.17699842425781145, "learning_rate": 3.480062143966857e-06, "loss": 0.2812, "step": 1810},
    {"epoch": 1.6915191053122087, "grad_norm": 0.1755900253752614, "learning_rate": 3.4282755049197308e-06, "loss": 0.2798, "step": 1815},
    {"epoch": 1.696178937558248, "grad_norm": 0.16931795318247234, "learning_rate": 3.3764888658726052e-06, "loss": 0.2795, "step": 1820},
    {"epoch": 1.700838769804287, "grad_norm": 0.20489778400137415, "learning_rate": 3.3247022268254793e-06, "loss": 0.2908, "step": 1825},
    {"epoch": 1.7054986020503262, "grad_norm": 0.17146060928109105, "learning_rate": 3.2729155877783538e-06, "loss": 0.2874, "step": 1830},
    {
"epoch": 1.7101584342963654, |
|
"grad_norm": 0.20743117222876825, |
|
"learning_rate": 3.2211289487312274e-06, |
|
"loss": 0.2899, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.7148182665424043, |
|
"grad_norm": 0.17532744243024526, |
|
"learning_rate": 3.169342309684102e-06, |
|
"loss": 0.2835, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.7194780987884437, |
|
"grad_norm": 0.17155149443847173, |
|
"learning_rate": 3.117555670636976e-06, |
|
"loss": 0.2843, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7241379310344827, |
|
"grad_norm": 0.17510709024655646, |
|
"learning_rate": 3.0657690315898504e-06, |
|
"loss": 0.2709, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.728797763280522, |
|
"grad_norm": 0.1936769403956301, |
|
"learning_rate": 3.013982392542724e-06, |
|
"loss": 0.2795, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.733457595526561, |
|
"grad_norm": 0.18362587824643453, |
|
"learning_rate": 2.9621957534955985e-06, |
|
"loss": 0.2793, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.7381174277726001, |
|
"grad_norm": 0.1733400456233926, |
|
"learning_rate": 2.9104091144484725e-06, |
|
"loss": 0.2833, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.7427772600186393, |
|
"grad_norm": 0.1707366786490628, |
|
"learning_rate": 2.858622475401347e-06, |
|
"loss": 0.2787, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.7474370922646785, |
|
"grad_norm": 0.1621088673034961, |
|
"learning_rate": 2.8068358363542206e-06, |
|
"loss": 0.2863, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.7520969245107176, |
|
"grad_norm": 0.18628674415468172, |
|
"learning_rate": 2.7550491973070947e-06, |
|
"loss": 0.2831, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7567567567567568, |
|
"grad_norm": 0.17521662090290133, |
|
"learning_rate": 2.703262558259969e-06, |
|
"loss": 0.2888, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.761416589002796, |
|
"grad_norm": 0.17377765263252967, |
|
"learning_rate": 2.6514759192128432e-06, |
|
"loss": 0.2818, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.766076421248835, |
|
"grad_norm": 0.17845993065674873, |
|
"learning_rate": 2.5996892801657177e-06, |
|
"loss": 0.2818, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.7707362534948743, |
|
"grad_norm": 0.17969442761482135, |
|
"learning_rate": 2.5479026411185913e-06, |
|
"loss": 0.2778, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.7753960857409132, |
|
"grad_norm": 0.1694996391077242, |
|
"learning_rate": 2.496116002071466e-06, |
|
"loss": 0.2781, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7800559179869526, |
|
"grad_norm": 0.1657595379031282, |
|
"learning_rate": 2.44432936302434e-06, |
|
"loss": 0.2838, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.7847157502329916, |
|
"grad_norm": 0.17428612808942404, |
|
"learning_rate": 2.3925427239772143e-06, |
|
"loss": 0.2724, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7893755824790307, |
|
"grad_norm": 0.17198875130716848, |
|
"learning_rate": 2.340756084930088e-06, |
|
"loss": 0.2878, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.7940354147250699, |
|
"grad_norm": 0.16955861871882308, |
|
"learning_rate": 2.2889694458829624e-06, |
|
"loss": 0.2807, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.798695246971109, |
|
"grad_norm": 0.17174832677033502, |
|
"learning_rate": 2.2371828068358365e-06, |
|
"loss": 0.2786, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8033550792171482, |
|
"grad_norm": 0.1751748502595037, |
|
"learning_rate": 2.1853961677887105e-06, |
|
"loss": 0.2843, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.8080149114631874, |
|
"grad_norm": 0.1600435428137458, |
|
"learning_rate": 2.1336095287415846e-06, |
|
"loss": 0.2782, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8126747437092265, |
|
"grad_norm": 0.16951263851585602, |
|
"learning_rate": 2.081822889694459e-06, |
|
"loss": 0.2897, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.8173345759552655, |
|
"grad_norm": 0.17043335019815078, |
|
"learning_rate": 2.030036250647333e-06, |
|
"loss": 0.276, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8219944082013049, |
|
"grad_norm": 0.18796171228162317, |
|
"learning_rate": 1.978249611600207e-06, |
|
"loss": 0.2821, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8266542404473438, |
|
"grad_norm": 0.16512159795603745, |
|
"learning_rate": 1.9264629725530816e-06, |
|
"loss": 0.284, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.8313140726933832, |
|
"grad_norm": 0.17527046387428025, |
|
"learning_rate": 1.8746763335059557e-06, |
|
"loss": 0.2845, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.8359739049394221, |
|
"grad_norm": 0.17075350188159297, |
|
"learning_rate": 1.8228896944588297e-06, |
|
"loss": 0.2811, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.8406337371854613, |
|
"grad_norm": 0.1611238780162702, |
|
"learning_rate": 1.771103055411704e-06, |
|
"loss": 0.2874, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.8452935694315005, |
|
"grad_norm": 0.16686965231642037, |
|
"learning_rate": 1.719316416364578e-06, |
|
"loss": 0.2808, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.8499534016775396, |
|
"grad_norm": 0.18363600453053663, |
|
"learning_rate": 1.6675297773174523e-06, |
|
"loss": 0.2757, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.8546132339235788, |
|
"grad_norm": 0.17346986089912622, |
|
"learning_rate": 1.6157431382703264e-06, |
|
"loss": 0.296, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.8592730661696177, |
|
"grad_norm": 0.16302686401969246, |
|
"learning_rate": 1.5639564992232006e-06, |
|
"loss": 0.2831, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8639328984156571, |
|
"grad_norm": 0.17714019680547557, |
|
"learning_rate": 1.512169860176075e-06, |
|
"loss": 0.2801, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.868592730661696, |
|
"grad_norm": 0.17573560819468692, |
|
"learning_rate": 1.460383221128949e-06, |
|
"loss": 0.2844, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.8732525629077355, |
|
"grad_norm": 0.16964855694079678, |
|
"learning_rate": 1.4085965820818228e-06, |
|
"loss": 0.2854, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.8779123951537744, |
|
"grad_norm": 0.1747943844382359, |
|
"learning_rate": 1.356809943034697e-06, |
|
"loss": 0.2772, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8825722273998136, |
|
"grad_norm": 0.17766856004277642, |
|
"learning_rate": 1.305023303987571e-06, |
|
"loss": 0.2765, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8872320596458527, |
|
"grad_norm": 0.18210315847664407, |
|
"learning_rate": 1.2532366649404454e-06, |
|
"loss": 0.2801, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8918918918918919, |
|
"grad_norm": 0.1702245037263823, |
|
"learning_rate": 1.2014500258933196e-06, |
|
"loss": 0.2804, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.896551724137931, |
|
"grad_norm": 0.16687796494139084, |
|
"learning_rate": 1.1496633868461937e-06, |
|
"loss": 0.2833, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.9012115563839702, |
|
"grad_norm": 0.16709036530571203, |
|
"learning_rate": 1.097876747799068e-06, |
|
"loss": 0.2828, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.9058713886300094, |
|
"grad_norm": 0.17632942060794365, |
|
"learning_rate": 1.046090108751942e-06, |
|
"loss": 0.2916, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9105312208760483, |
|
"grad_norm": 0.1659570265982279, |
|
"learning_rate": 9.943034697048163e-07, |
|
"loss": 0.2873, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9151910531220877, |
|
"grad_norm": 0.16259315421842638, |
|
"learning_rate": 9.425168306576904e-07, |
|
"loss": 0.2772, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9198508853681266, |
|
"grad_norm": 0.1704776417406957, |
|
"learning_rate": 8.907301916105646e-07, |
|
"loss": 0.2865, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.924510717614166, |
|
"grad_norm": 0.18010747503070362, |
|
"learning_rate": 8.389435525634387e-07, |
|
"loss": 0.2828, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.929170549860205, |
|
"grad_norm": 0.16347013947993091, |
|
"learning_rate": 7.871569135163129e-07, |
|
"loss": 0.2847, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9338303821062441, |
|
"grad_norm": 0.17518692368495511, |
|
"learning_rate": 7.35370274469187e-07, |
|
"loss": 0.2933, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.9384902143522833, |
|
"grad_norm": 0.16956394794756754, |
|
"learning_rate": 6.835836354220611e-07, |
|
"loss": 0.2788, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.9431500465983225, |
|
"grad_norm": 0.18735277361498398, |
|
"learning_rate": 6.317969963749353e-07, |
|
"loss": 0.2809, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.9478098788443616, |
|
"grad_norm": 0.1730004551971713, |
|
"learning_rate": 5.800103573278094e-07, |
|
"loss": 0.2824, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.9524697110904008, |
|
"grad_norm": 0.1739877181954761, |
|
"learning_rate": 5.282237182806836e-07, |
|
"loss": 0.2709, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.95712954333644, |
|
"grad_norm": 0.18343227110963495, |
|
"learning_rate": 4.764370792335578e-07, |
|
"loss": 0.2855, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.961789375582479, |
|
"grad_norm": 0.1618328590005831, |
|
"learning_rate": 4.2465044018643194e-07, |
|
"loss": 0.2852, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.9664492078285183, |
|
"grad_norm": 0.1627906424832234, |
|
"learning_rate": 3.728638011393061e-07, |
|
"loss": 0.2859, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9711090400745572, |
|
"grad_norm": 0.16365417745617075, |
|
"learning_rate": 3.210771620921802e-07, |
|
"loss": 0.2891, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9757688723205966, |
|
"grad_norm": 0.15612849284974395, |
|
"learning_rate": 2.692905230450544e-07, |
|
"loss": 0.2848, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.9804287045666356, |
|
"grad_norm": 0.163025042449839, |
|
"learning_rate": 2.1750388399792857e-07, |
|
"loss": 0.2789, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.9850885368126747, |
|
"grad_norm": 0.16808874057585674, |
|
"learning_rate": 1.657172449508027e-07, |
|
"loss": 0.2836, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9897483690587139, |
|
"grad_norm": 0.1605707022006799, |
|
"learning_rate": 1.1393060590367686e-07, |
|
"loss": 0.2766, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.994408201304753, |
|
"grad_norm": 0.16194115983227303, |
|
"learning_rate": 6.214396685655102e-08, |
|
"loss": 0.282, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9990680335507922, |
|
"grad_norm": 0.17713436476094685, |
|
"learning_rate": 1.0357327809425169e-08, |
|
"loss": 0.2862, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 2146, |
|
"total_flos": 1.8367600516462019e+18, |
|
"train_loss": 0.3072341699830195, |
|
"train_runtime": 72164.3066, |
|
"train_samples_per_second": 0.476, |
|
"train_steps_per_second": 0.03 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2146, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.8367600516462019e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|