{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 3219,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.004659832246039142, "grad_norm": 55.696042392600155, "learning_rate": 7.763975155279503e-07, "loss": 11.0716, "step": 5},
    {"epoch": 0.009319664492078284, "grad_norm": 57.069547747607224, "learning_rate": 1.5527950310559006e-06, "loss": 10.8863, "step": 10},
    {"epoch": 0.013979496738117428, "grad_norm": 98.03978219154963, "learning_rate": 2.329192546583851e-06, "loss": 9.4448, "step": 15},
    {"epoch": 0.01863932898415657, "grad_norm": 32.79466303265593, "learning_rate": 3.1055900621118013e-06, "loss": 3.218, "step": 20},
    {"epoch": 0.023299161230195712, "grad_norm": 3.2926359801565495, "learning_rate": 3.881987577639752e-06, "loss": 1.3481, "step": 25},
    {"epoch": 0.027958993476234855, "grad_norm": 1.3048818381083724, "learning_rate": 4.658385093167702e-06, "loss": 1.007, "step": 30},
    {"epoch": 0.032618825722273995, "grad_norm": 0.8636707292841374, "learning_rate": 5.4347826086956525e-06, "loss": 0.8418, "step": 35},
    {"epoch": 0.03727865796831314, "grad_norm": 0.6262675632712984, "learning_rate": 6.2111801242236025e-06, "loss": 0.776, "step": 40},
    {"epoch": 0.04193849021435228, "grad_norm": 0.46026483978747806, "learning_rate": 6.9875776397515525e-06, "loss": 0.714, "step": 45},
    {"epoch": 0.046598322460391424, "grad_norm": 0.4251504366202426, "learning_rate": 7.763975155279503e-06, "loss": 0.6832, "step": 50},
    {"epoch": 0.05125815470643057, "grad_norm": 0.4309649336155848, "learning_rate": 8.540372670807453e-06, "loss": 0.6461, "step": 55},
    {"epoch": 0.05591798695246971, "grad_norm": 0.30433991851646786, "learning_rate": 9.316770186335403e-06, "loss": 0.6088, "step": 60},
    {"epoch": 0.06057781919850885, "grad_norm": 0.2902319794257594, "learning_rate": 1.0093167701863353e-05, "loss": 0.5932, "step": 65},
    {"epoch": 0.06523765144454799, "grad_norm": 0.3087611830094821, "learning_rate": 1.0869565217391305e-05, "loss": 0.589, "step": 70},
    {"epoch": 0.06989748369058714, "grad_norm": 0.2781534357021035, "learning_rate": 1.1645962732919255e-05, "loss": 0.5712, "step": 75},
    {"epoch": 0.07455731593662628, "grad_norm": 0.28616630984231595, "learning_rate": 1.2422360248447205e-05, "loss": 0.5616, "step": 80},
    {"epoch": 0.07921714818266543, "grad_norm": 0.2830752857444657, "learning_rate": 1.3198757763975155e-05, "loss": 0.5522, "step": 85},
    {"epoch": 0.08387698042870456, "grad_norm": 0.26778425661307015, "learning_rate": 1.3975155279503105e-05, "loss": 0.5625, "step": 90},
    {"epoch": 0.08853681267474371, "grad_norm": 0.22807236906820558, "learning_rate": 1.4751552795031057e-05, "loss": 0.5438, "step": 95},
    {"epoch": 0.09319664492078285, "grad_norm": 0.2703913980044062, "learning_rate": 1.5527950310559007e-05, "loss": 0.5292, "step": 100},
    {"epoch": 0.097856477166822, "grad_norm": 0.2662902646826629, "learning_rate": 1.630434782608696e-05, "loss": 0.5239, "step": 105},
    {"epoch": 0.10251630941286113, "grad_norm": 0.26339793579646015, "learning_rate": 1.7080745341614907e-05, "loss": 0.5255, "step": 110},
    {"epoch": 0.10717614165890028, "grad_norm": 0.2684306075703773, "learning_rate": 1.785714285714286e-05, "loss": 0.5228, "step": 115},
    {"epoch": 0.11183597390493942, "grad_norm": 0.2999992447259078, "learning_rate": 1.8633540372670807e-05, "loss": 0.5093, "step": 120},
    {"epoch": 0.11649580615097857, "grad_norm": 0.26244191377637505, "learning_rate": 1.940993788819876e-05, "loss": 0.5052, "step": 125},
    {"epoch": 0.1211556383970177, "grad_norm": 0.2681460052083026, "learning_rate": 2.0186335403726707e-05, "loss": 0.5109, "step": 130},
    {"epoch": 0.12581547064305684, "grad_norm": 0.3144660221069919, "learning_rate": 2.096273291925466e-05, "loss": 0.5101, "step": 135},
    {"epoch": 0.13047530288909598, "grad_norm": 0.34447643300722325, "learning_rate": 2.173913043478261e-05, "loss": 0.4946, "step": 140},
    {"epoch": 0.13513513513513514, "grad_norm": 0.3211806931163774, "learning_rate": 2.2515527950310562e-05, "loss": 0.5214, "step": 145},
    {"epoch": 0.13979496738117428, "grad_norm": 0.31319947716089985, "learning_rate": 2.329192546583851e-05, "loss": 0.4953, "step": 150},
    {"epoch": 0.14445479962721341, "grad_norm": 0.30309003434106235, "learning_rate": 2.4068322981366462e-05, "loss": 0.5036, "step": 155},
    {"epoch": 0.14911463187325255, "grad_norm": 0.3275575421453575, "learning_rate": 2.484472049689441e-05, "loss": 0.4844, "step": 160},
    {"epoch": 0.15377446411929171, "grad_norm": 0.34622801968709477, "learning_rate": 2.5621118012422362e-05, "loss": 0.4956, "step": 165},
    {"epoch": 0.15843429636533085, "grad_norm": 0.32644950308733567, "learning_rate": 2.639751552795031e-05, "loss": 0.4714, "step": 170},
    {"epoch": 0.16309412861137, "grad_norm": 0.32849551769451285, "learning_rate": 2.7173913043478262e-05, "loss": 0.4846, "step": 175},
    {"epoch": 0.16775396085740912, "grad_norm": 0.36634895540901063, "learning_rate": 2.795031055900621e-05, "loss": 0.4775, "step": 180},
    {"epoch": 0.1724137931034483, "grad_norm": 0.3698999716032506, "learning_rate": 2.8726708074534165e-05, "loss": 0.4805, "step": 185},
    {"epoch": 0.17707362534948742, "grad_norm": 0.47850803595156094, "learning_rate": 2.9503105590062114e-05, "loss": 0.4903, "step": 190},
    {"epoch": 0.18173345759552656, "grad_norm": 0.3982014897681254, "learning_rate": 3.0279503105590062e-05, "loss": 0.4799, "step": 195},
    {"epoch": 0.1863932898415657, "grad_norm": 0.3280142896044268, "learning_rate": 3.1055900621118014e-05, "loss": 0.4815, "step": 200},
    {"epoch": 0.19105312208760486, "grad_norm": 0.4027201591824623, "learning_rate": 3.183229813664597e-05, "loss": 0.4704, "step": 205},
    {"epoch": 0.195712954333644, "grad_norm": 0.43156623830552576, "learning_rate": 3.260869565217392e-05, "loss": 0.4843, "step": 210},
    {"epoch": 0.20037278657968313, "grad_norm": 0.3321909019654293, "learning_rate": 3.3385093167701865e-05, "loss": 0.4665, "step": 215},
    {"epoch": 0.20503261882572227, "grad_norm": 0.36680291764363804, "learning_rate": 3.4161490683229814e-05, "loss": 0.4708, "step": 220},
    {"epoch": 0.2096924510717614, "grad_norm": 0.37585882676778043, "learning_rate": 3.493788819875777e-05, "loss": 0.4529, "step": 225},
    {"epoch": 0.21435228331780057, "grad_norm": 0.3745639496990671, "learning_rate": 3.571428571428572e-05, "loss": 0.4662, "step": 230},
    {"epoch": 0.2190121155638397, "grad_norm": 0.3663694253182676, "learning_rate": 3.6490683229813665e-05, "loss": 0.4638, "step": 235},
    {"epoch": 0.22367194780987884, "grad_norm": 0.29367377276224377, "learning_rate": 3.7267080745341614e-05, "loss": 0.4612, "step": 240},
    {"epoch": 0.22833178005591798, "grad_norm": 0.35649055983775696, "learning_rate": 3.804347826086957e-05, "loss": 0.4665, "step": 245},
    {"epoch": 0.23299161230195714, "grad_norm": 0.37018502702181333, "learning_rate": 3.881987577639752e-05, "loss": 0.4584, "step": 250},
    {"epoch": 0.23765144454799628, "grad_norm": 0.43146768706538224, "learning_rate": 3.9596273291925465e-05, "loss": 0.4671, "step": 255},
    {"epoch": 0.2423112767940354, "grad_norm": 0.3620216800658687, "learning_rate": 4.0372670807453414e-05, "loss": 0.4622, "step": 260},
    {"epoch": 0.24697110904007455, "grad_norm": 0.3767133493173054, "learning_rate": 4.114906832298137e-05, "loss": 0.4606, "step": 265},
    {"epoch": 0.2516309412861137, "grad_norm": 0.48122013364824345, "learning_rate": 4.192546583850932e-05, "loss": 0.4587, "step": 270},
    {"epoch": 0.25629077353215285, "grad_norm": 0.3714319756974697, "learning_rate": 4.270186335403727e-05, "loss": 0.4643, "step": 275},
    {"epoch": 0.26095060577819196, "grad_norm": 0.3163064677066791, "learning_rate": 4.347826086956522e-05, "loss": 0.4562, "step": 280},
    {"epoch": 0.2656104380242311, "grad_norm": 0.33985412053941894, "learning_rate": 4.425465838509317e-05, "loss": 0.4604, "step": 285},
    {"epoch": 0.2702702702702703, "grad_norm": 0.35067188973876534, "learning_rate": 4.5031055900621124e-05, "loss": 0.4622, "step": 290},
    {"epoch": 0.2749301025163094, "grad_norm": 0.3912374135541525, "learning_rate": 4.580745341614907e-05, "loss": 0.4663, "step": 295},
    {"epoch": 0.27958993476234856, "grad_norm": 0.6392148726451813, "learning_rate": 4.658385093167702e-05, "loss": 0.4685, "step": 300},
    {"epoch": 0.2842497670083877, "grad_norm": 0.3440578565248047, "learning_rate": 4.736024844720497e-05, "loss": 0.4565, "step": 305},
    {"epoch": 0.28890959925442683, "grad_norm": 0.3877681198742014, "learning_rate": 4.8136645962732924e-05, "loss": 0.4489, "step": 310},
    {"epoch": 0.293569431500466, "grad_norm": 0.42732090051868005, "learning_rate": 4.891304347826087e-05, "loss": 0.457, "step": 315},
    {"epoch": 0.2982292637465051, "grad_norm": 0.4755135668551679, "learning_rate": 4.968944099378882e-05, "loss": 0.4425, "step": 320},
    {"epoch": 0.30288909599254427, "grad_norm": 0.28693081819873917, "learning_rate": 4.994822229892993e-05, "loss": 0.4404, "step": 325},
    {"epoch": 0.30754892823858343, "grad_norm": 0.3549632706260845, "learning_rate": 4.986192613047981e-05, "loss": 0.4513, "step": 330},
    {"epoch": 0.31220876048462254, "grad_norm": 0.6350892029518953, "learning_rate": 4.977562996202969e-05, "loss": 0.4517, "step": 335},
    {"epoch": 0.3168685927306617, "grad_norm": 0.3469578249228007, "learning_rate": 4.968933379357957e-05, "loss": 0.4525, "step": 340},
    {"epoch": 0.32152842497670087, "grad_norm": 0.3366936748849045, "learning_rate": 4.9603037625129445e-05, "loss": 0.4475, "step": 345},
    {"epoch": 0.32618825722274, "grad_norm": 0.32981948976725134, "learning_rate": 4.951674145667933e-05, "loss": 0.4607, "step": 350},
    {"epoch": 0.33084808946877914, "grad_norm": 0.4147876243880106, "learning_rate": 4.94304452882292e-05, "loss": 0.4467, "step": 355},
    {"epoch": 0.33550792171481825, "grad_norm": 0.351538196647573, "learning_rate": 4.934414911977908e-05, "loss": 0.4444, "step": 360},
    {"epoch": 0.3401677539608574, "grad_norm": 0.3784261491476227, "learning_rate": 4.9257852951328965e-05, "loss": 0.4411, "step": 365},
    {"epoch": 0.3448275862068966, "grad_norm": 0.3107740796224645, "learning_rate": 4.917155678287884e-05, "loss": 0.454, "step": 370},
    {"epoch": 0.3494874184529357, "grad_norm": 0.29690233421404694, "learning_rate": 4.908526061442872e-05, "loss": 0.4401, "step": 375},
    {"epoch": 0.35414725069897485, "grad_norm": 0.342682229888169, "learning_rate": 4.89989644459786e-05, "loss": 0.4551, "step": 380},
    {"epoch": 0.35880708294501396, "grad_norm": 0.3230798705066476, "learning_rate": 4.891266827752848e-05, "loss": 0.4567, "step": 385},
    {"epoch": 0.3634669151910531, "grad_norm": 0.3158332591167263, "learning_rate": 4.882637210907836e-05, "loss": 0.4407, "step": 390},
    {"epoch": 0.3681267474370923, "grad_norm": 0.30861658892822386, "learning_rate": 4.874007594062824e-05, "loss": 0.4473, "step": 395},
    {"epoch": 0.3727865796831314, "grad_norm": 0.3752052294703885, "learning_rate": 4.865377977217811e-05, "loss": 0.432, "step": 400},
    {"epoch": 0.37744641192917056, "grad_norm": 0.28697938410769425, "learning_rate": 4.8567483603728e-05, "loss": 0.4502, "step": 405},
    {"epoch": 0.3821062441752097, "grad_norm": 0.3694124441270389, "learning_rate": 4.8481187435277875e-05, "loss": 0.4465, "step": 410},
    {"epoch": 0.38676607642124883, "grad_norm": 0.34347293888557967, "learning_rate": 4.839489126682776e-05, "loss": 0.4331, "step": 415},
    {"epoch": 0.391425908667288, "grad_norm": 0.43939089773365414, "learning_rate": 4.830859509837763e-05, "loss": 0.4514, "step": 420},
    {"epoch": 0.3960857409133271, "grad_norm": 0.31251544893589517, "learning_rate": 4.822229892992751e-05, "loss": 0.4488, "step": 425},
    {"epoch": 0.40074557315936626, "grad_norm": 0.3096051523193174, "learning_rate": 4.8136002761477395e-05, "loss": 0.4358, "step": 430},
    {"epoch": 0.40540540540540543, "grad_norm": 0.27936532600466346, "learning_rate": 4.804970659302727e-05, "loss": 0.4489, "step": 435},
    {"epoch": 0.41006523765144454, "grad_norm": 0.28043715820091236, "learning_rate": 4.796341042457715e-05, "loss": 0.4429, "step": 440},
    {"epoch": 0.4147250698974837, "grad_norm": 0.3080577188341033, "learning_rate": 4.787711425612703e-05, "loss": 0.4374, "step": 445},
    {"epoch": 0.4193849021435228, "grad_norm": 0.3032982482250815, "learning_rate": 4.779081808767691e-05, "loss": 0.4283, "step": 450},
    {"epoch": 0.424044734389562, "grad_norm": 0.30945122921173923, "learning_rate": 4.770452191922679e-05, "loss": 0.4351, "step": 455},
    {"epoch": 0.42870456663560114, "grad_norm": 0.3063809025140958, "learning_rate": 4.761822575077667e-05, "loss": 0.4321, "step": 460},
    {"epoch": 0.43336439888164024, "grad_norm": 0.32117718728392913, "learning_rate": 4.753192958232654e-05, "loss": 0.429, "step": 465},
    {"epoch": 0.4380242311276794, "grad_norm": 0.24371598277850154, "learning_rate": 4.744563341387643e-05, "loss": 0.4419, "step": 470},
    {"epoch": 0.4426840633737186, "grad_norm": 0.3002331907391714, "learning_rate": 4.7359337245426306e-05, "loss": 0.4311, "step": 475},
    {"epoch": 0.4473438956197577, "grad_norm": 0.31592297578196954, "learning_rate": 4.7273041076976184e-05, "loss": 0.4432, "step": 480},
    {"epoch": 0.45200372786579684, "grad_norm": 0.24782043830876224, "learning_rate": 4.718674490852606e-05, "loss": 0.4358, "step": 485},
    {"epoch": 0.45666356011183595, "grad_norm": 0.25847477520528434, "learning_rate": 4.710044874007594e-05, "loss": 0.4303, "step": 490},
    {"epoch": 0.4613233923578751, "grad_norm": 0.2975677802465225, "learning_rate": 4.7014152571625826e-05, "loss": 0.436, "step": 495},
    {"epoch": 0.4659832246039143, "grad_norm": 0.35408912536908277, "learning_rate": 4.6927856403175704e-05, "loss": 0.4391, "step": 500},
    {"epoch": 0.4706430568499534, "grad_norm": 0.2581776410982566, "learning_rate": 4.684156023472558e-05, "loss": 0.4256, "step": 505},
    {"epoch": 0.47530288909599255, "grad_norm": 0.3029052724079298, "learning_rate": 4.675526406627546e-05, "loss": 0.434, "step": 510},
    {"epoch": 0.47996272134203166, "grad_norm": 0.2662963885541786, "learning_rate": 4.666896789782534e-05, "loss": 0.432, "step": 515},
    {"epoch": 0.4846225535880708, "grad_norm": 0.2702185950827202, "learning_rate": 4.658267172937522e-05, "loss": 0.4357, "step": 520},
    {"epoch": 0.48928238583411, "grad_norm": 0.2741761210656215, "learning_rate": 4.64963755609251e-05, "loss": 0.4348, "step": 525},
    {"epoch": 0.4939422180801491, "grad_norm": 0.30542680481130674, "learning_rate": 4.641007939247497e-05, "loss": 0.4317, "step": 530},
    {"epoch": 0.49860205032618826, "grad_norm": 0.2747526523406713, "learning_rate": 4.632378322402486e-05, "loss": 0.4317, "step": 535},
    {"epoch": 0.5032618825722274, "grad_norm": 0.26833573706595093, "learning_rate": 4.6237487055574736e-05, "loss": 0.4217, "step": 540},
    {"epoch": 0.5079217148182665, "grad_norm": 0.37691805876616613, "learning_rate": 4.6151190887124615e-05, "loss": 0.428, "step": 545},
    {"epoch": 0.5125815470643057, "grad_norm": 0.3373322992441649, "learning_rate": 4.606489471867449e-05, "loss": 0.4285, "step": 550},
    {"epoch": 0.5172413793103449, "grad_norm": 0.2654706993312856, "learning_rate": 4.597859855022437e-05, "loss": 0.4227, "step": 555},
    {"epoch": 0.5219012115563839, "grad_norm": 0.26816544170609224, "learning_rate": 4.589230238177425e-05, "loss": 0.4435, "step": 560},
    {"epoch": 0.5265610438024231, "grad_norm": 0.2640000762064257, "learning_rate": 4.5806006213324134e-05, "loss": 0.4211, "step": 565},
    {"epoch": 0.5312208760484622, "grad_norm": 0.2409655283662583, "learning_rate": 4.5719710044874006e-05, "loss": 0.4209, "step": 570},
    {"epoch": 0.5358807082945014, "grad_norm": 0.2418344293167211, "learning_rate": 4.563341387642389e-05, "loss": 0.4271, "step": 575},
    {"epoch": 0.5405405405405406, "grad_norm": 0.2701745292583052, "learning_rate": 4.554711770797377e-05, "loss": 0.4271, "step": 580},
    {"epoch": 0.5452003727865797, "grad_norm": 0.25416001491087037, "learning_rate": 4.546082153952365e-05, "loss": 0.4239, "step": 585},
    {"epoch": 0.5498602050326188, "grad_norm": 0.26663111434738124, "learning_rate": 4.5374525371073526e-05, "loss": 0.4205, "step": 590},
    {"epoch": 0.554520037278658, "grad_norm": 0.3057448395330086, "learning_rate": 4.5288229202623404e-05, "loss": 0.4228, "step": 595},
    {"epoch": 0.5591798695246971, "grad_norm": 0.30060519300171135, "learning_rate": 4.520193303417328e-05, "loss": 0.4227, "step": 600},
    {"epoch": 0.5638397017707363, "grad_norm": 0.2623929780467248, "learning_rate": 4.511563686572317e-05, "loss": 0.4218, "step": 605},
    {"epoch": 0.5684995340167754, "grad_norm": 0.26871087479176037, "learning_rate": 4.5029340697273045e-05, "loss": 0.4329, "step": 610},
    {"epoch": 0.5731593662628145, "grad_norm": 0.2671626237098807, "learning_rate": 4.4943044528822923e-05, "loss": 0.4196, "step": 615},
    {"epoch": 0.5778191985088537, "grad_norm": 0.24939257425927314, "learning_rate": 4.48567483603728e-05, "loss": 0.4163, "step": 620},
    {"epoch": 0.5824790307548928, "grad_norm": 0.2829106297018075, "learning_rate": 4.477045219192268e-05, "loss": 0.4382, "step": 625},
    {"epoch": 0.587138863000932, "grad_norm": 0.2651167644955278, "learning_rate": 4.4684156023472565e-05, "loss": 0.4288, "step": 630},
    {"epoch": 0.5917986952469712, "grad_norm": 0.2819512787497873, "learning_rate": 4.4597859855022436e-05, "loss": 0.4331, "step": 635},
    {"epoch": 0.5964585274930102, "grad_norm": 0.25994396055573915, "learning_rate": 4.4511563686572315e-05, "loss": 0.4242, "step": 640},
    {"epoch": 0.6011183597390494, "grad_norm": 0.23497016140166985, "learning_rate": 4.44252675181222e-05, "loss": 0.4237, "step": 645},
    {"epoch": 0.6057781919850885, "grad_norm": 0.32764066625216914, "learning_rate": 4.433897134967208e-05, "loss": 0.4325, "step": 650},
    {"epoch": 0.6104380242311277, "grad_norm": 0.2584487696675628, "learning_rate": 4.4252675181221956e-05, "loss": 0.43, "step": 655},
    {"epoch": 0.6150978564771669, "grad_norm": 0.2449310711741172, "learning_rate": 4.4166379012771834e-05, "loss": 0.4118, "step": 660},
    {"epoch": 0.6197576887232059, "grad_norm": 0.25532178644095954, "learning_rate": 4.408008284432171e-05, "loss": 0.4125, "step": 665},
    {"epoch": 0.6244175209692451, "grad_norm": 0.27928336482430827, "learning_rate": 4.39937866758716e-05, "loss": 0.4111, "step": 670},
    {"epoch": 0.6290773532152842, "grad_norm": 0.24201602267182518, "learning_rate": 4.3907490507421476e-05, "loss": 0.4195, "step": 675},
    {"epoch": 0.6337371854613234, "grad_norm": 0.2503284749087097, "learning_rate": 4.382119433897135e-05, "loss": 0.4225, "step": 680},
    {"epoch": 0.6383970177073626, "grad_norm": 0.231126791429179, "learning_rate": 4.373489817052123e-05, "loss": 0.4321, "step": 685},
    {"epoch": 0.6430568499534017, "grad_norm": 0.3224125780424007, "learning_rate": 4.364860200207111e-05, "loss": 0.4144, "step": 690},
    {"epoch": 0.6477166821994408, "grad_norm": 0.28603576983039214, "learning_rate": 4.356230583362099e-05, "loss": 0.4198, "step": 695},
    {"epoch": 0.65237651444548, "grad_norm": 0.33070909188918085, "learning_rate": 4.347600966517087e-05, "loss": 0.4283, "step": 700},
    {"epoch": 0.6570363466915191, "grad_norm": 0.23734438786827539, "learning_rate": 4.3389713496720745e-05, "loss": 0.4219, "step": 705},
    {"epoch": 0.6616961789375583, "grad_norm": 0.2848012053392029, "learning_rate": 4.330341732827063e-05, "loss": 0.4224, "step": 710},
    {"epoch": 0.6663560111835974, "grad_norm": 0.2825508399036823, "learning_rate": 4.321712115982051e-05, "loss": 0.4107, "step": 715},
    {"epoch": 0.6710158434296365, "grad_norm": 0.26795629040248276, "learning_rate": 4.3130824991370387e-05, "loss": 0.421, "step": 720},
    {"epoch": 0.6756756756756757, "grad_norm": 0.22589566921076, "learning_rate": 4.3044528822920265e-05, "loss": 0.4197, "step": 725},
    {"epoch": 0.6803355079217148, "grad_norm": 0.26954069845621687, "learning_rate": 4.295823265447014e-05, "loss": 0.4147, "step": 730},
    {"epoch": 0.684995340167754, "grad_norm": 0.24884963957513911, "learning_rate": 4.287193648602002e-05, "loss": 0.4185, "step": 735},
    {"epoch": 0.6896551724137931, "grad_norm": 0.23522017006907175, "learning_rate": 4.27856403175699e-05, "loss": 0.4123, "step": 740},
    {"epoch": 0.6943150046598322, "grad_norm": 0.2461870833603244, "learning_rate": 4.269934414911978e-05, "loss": 0.408, "step": 745},
    {"epoch": 0.6989748369058714, "grad_norm": 0.23022051162943946, "learning_rate": 4.261304798066966e-05, "loss": 0.4129, "step": 750},
    {"epoch": 0.7036346691519105, "grad_norm": 0.26653798892341235, "learning_rate": 4.252675181221954e-05, "loss": 0.4152, "step": 755},
    {"epoch": 0.7082945013979497, "grad_norm": 0.2492957026363467, "learning_rate": 4.244045564376942e-05, "loss": 0.4183, "step": 760},
    {"epoch": 0.7129543336439889, "grad_norm": 0.2606316735727077, "learning_rate": 4.23541594753193e-05, "loss": 0.4023, "step": 765},
    {"epoch": 0.7176141658900279, "grad_norm": 0.2422042993702763, "learning_rate": 4.2267863306869176e-05, "loss": 0.4111, "step": 770},
    {"epoch": 0.7222739981360671, "grad_norm": 0.22332760965969195, "learning_rate": 4.2181567138419054e-05, "loss": 0.4242, "step": 775},
    {"epoch": 0.7269338303821062, "grad_norm": 0.252712448418313, "learning_rate": 4.209527096996894e-05, "loss": 0.4087, "step": 780},
    {"epoch": 0.7315936626281454, "grad_norm": 0.22468331263519467, "learning_rate": 4.200897480151881e-05, "loss": 0.4159, "step": 785},
    {"epoch": 0.7362534948741846, "grad_norm": 0.28086444722561676, "learning_rate": 4.1922678633068695e-05, "loss": 0.4192, "step": 790},
    {"epoch": 0.7409133271202236, "grad_norm": 0.23901556052441733, "learning_rate": 4.1836382464618573e-05, "loss": 0.4134, "step": 795},
    {"epoch": 0.7455731593662628, "grad_norm": 0.27451974099994725, "learning_rate": 4.175008629616845e-05, "loss": 0.4116, "step": 800},
    {"epoch": 0.750232991612302, "grad_norm": 0.28359733845235, "learning_rate": 4.166379012771833e-05, "loss": 0.4115, "step": 805},
    {"epoch": 0.7548928238583411, "grad_norm": 0.26622900459240295, "learning_rate": 4.157749395926821e-05, "loss": 0.4297, "step": 810},
    {"epoch": 0.7595526561043803, "grad_norm": 0.2657065350729101, "learning_rate": 4.1491197790818086e-05, "loss": 0.404, "step": 815},
    {"epoch": 0.7642124883504194, "grad_norm": 0.2378922286860667, "learning_rate": 4.140490162236797e-05, "loss": 0.4195, "step": 820},
    {"epoch": 0.7688723205964585, "grad_norm": 0.2412808789309105, "learning_rate": 4.131860545391785e-05, "loss": 0.4116, "step": 825},
    {"epoch": 0.7735321528424977, "grad_norm": 0.2875211305670113, "learning_rate": 4.123230928546773e-05, "loss": 0.4065, "step": 830},
    {"epoch": 0.7781919850885368, "grad_norm": 0.31905163368250633, "learning_rate": 4.1146013117017606e-05, "loss": 0.4136, "step": 835},
    {"epoch": 0.782851817334576, "grad_norm": 0.24498964877343535, "learning_rate": 4.1059716948567484e-05, "loss": 0.4166, "step": 840},
    {"epoch": 0.7875116495806151, "grad_norm": 0.2525316903241921, "learning_rate": 4.097342078011737e-05, "loss": 0.4246, "step": 845},
    {"epoch": 0.7921714818266542, "grad_norm": 0.2869191536262211, "learning_rate": 4.088712461166724e-05, "loss": 0.414, "step": 850},
    {"epoch": 0.7968313140726934, "grad_norm": 0.2582828428510849, "learning_rate": 4.080082844321712e-05, "loss": 0.4162, "step": 855},
    {"epoch": 0.8014911463187325, "grad_norm": 0.23438306007892892, "learning_rate": 4.0714532274767004e-05, "loss": 0.4231, "step": 860},
    {"epoch": 0.8061509785647717, "grad_norm": 0.21891513755451633, "learning_rate": 4.062823610631688e-05, "loss": 0.4041, "step": 865},
    {"epoch": 0.8108108108108109, "grad_norm": 0.23677589119502063, "learning_rate": 4.054193993786676e-05, "loss": 0.4188, "step": 870},
    {"epoch": 0.8154706430568499, "grad_norm": 0.27969610572550724, "learning_rate": 4.045564376941664e-05, "loss": 0.4142, "step": 875},
    {"epoch": 0.8201304753028891, "grad_norm": 0.2794758714461742, "learning_rate": 4.036934760096652e-05, "loss": 0.4054, "step": 880},
    {"epoch": 0.8247903075489282, "grad_norm": 0.22959555739758392, "learning_rate": 4.02830514325164e-05, "loss": 0.4193, "step": 885},
    {"epoch": 0.8294501397949674, "grad_norm": 0.24498039226520318, "learning_rate": 4.019675526406628e-05, "loss": 0.406, "step": 890},
    {"epoch": 0.8341099720410066, "grad_norm": 0.2404554867578491, "learning_rate": 4.011045909561615e-05, "loss": 0.4254, "step": 895},
    {"epoch": 0.8387698042870456, "grad_norm": 0.2551096755464756, "learning_rate": 4.0024162927166037e-05, "loss": 0.4082, "step": 900},
    {"epoch": 0.8434296365330848, "grad_norm": 0.2485971670607866, "learning_rate": 3.9937866758715915e-05, "loss": 0.4099, "step": 905},
    {"epoch": 0.848089468779124, "grad_norm": 0.2613339917640465, "learning_rate": 3.98515705902658e-05, "loss": 0.4157, "step": 910},
    {"epoch": 0.8527493010251631, "grad_norm": 0.2916975296981261, "learning_rate": 3.976527442181567e-05, "loss": 0.4086, "step": 915},
    {"epoch": 0.8574091332712023, "grad_norm": 0.25434046187223586, "learning_rate": 3.967897825336555e-05, "loss": 0.4153, "step": 920},
    {"epoch": 0.8620689655172413, "grad_norm": 0.24235752127845034, "learning_rate": 3.9592682084915434e-05, "loss": 0.4019, "step": 925},
    {"epoch": 0.8667287977632805, "grad_norm": 0.34060623817974894, "learning_rate": 3.950638591646531e-05, "loss": 0.4014, "step": 930},
    {"epoch": 0.8713886300093197, "grad_norm": 0.23003819568203804, "learning_rate": 3.942008974801519e-05, "loss": 0.4132, "step": 935},
    {"epoch": 0.8760484622553588, "grad_norm": 0.2634510576231483, "learning_rate": 3.933379357956507e-05, "loss": 0.4091, "step": 940},
    {"epoch": 0.880708294501398, "grad_norm": 0.24085373560230003, "learning_rate": 3.924749741111495e-05, "loss": 0.4112, "step": 945},
    {"epoch": 0.8853681267474371, "grad_norm": 0.24989234461211418, "learning_rate": 3.916120124266483e-05, "loss": 0.4189, "step": 950},
    {"epoch": 0.8900279589934762, "grad_norm": 0.23534351456323865, "learning_rate": 3.9074905074214704e-05, "loss": 0.4178, "step": 955},
    {"epoch": 0.8946877912395154, "grad_norm": 0.21732075058729955, "learning_rate": 3.898860890576458e-05, "loss": 0.413, "step": 960},
    {"epoch": 0.8993476234855545, "grad_norm": 0.24398131790553584, "learning_rate": 3.890231273731447e-05, "loss": 0.4047, "step": 965},
    {"epoch": 0.9040074557315937, "grad_norm": 0.229222377208068, "learning_rate": 3.8816016568864345e-05, "loss": 0.4025, "step": 970},
    {"epoch": 0.9086672879776329, "grad_norm": 0.2585485608107246, "learning_rate": 3.8729720400414224e-05, "loss": 0.4213, "step": 975},
    {"epoch": 0.9133271202236719, "grad_norm": 0.2522000428458628, "learning_rate": 3.86434242319641e-05, "loss": 0.4082, "step": 980},
    {"epoch": 0.9179869524697111, "grad_norm": 0.24258710274400277, "learning_rate": 3.855712806351398e-05, "loss": 0.4097, "step": 985},
    {"epoch": 0.9226467847157502, "grad_norm": 0.2233917563993876, "learning_rate": 3.8470831895063865e-05, "loss": 0.4071, "step": 990},
    {"epoch": 0.9273066169617894, "grad_norm": 0.22386956200270366, "learning_rate": 3.838453572661374e-05, "loss": 0.4029, "step": 995},
    {"epoch": 0.9319664492078286, "grad_norm": 0.24992821199240495, "learning_rate": 3.8298239558163615e-05, "loss": 0.4059, "step": 1000},
    {"epoch": 0.9366262814538676, "grad_norm": 0.23793550550750026, "learning_rate": 3.82119433897135e-05, "loss": 0.4108, "step": 1005},
    {"epoch": 0.9412861136999068, "grad_norm": 0.2752995843613739, "learning_rate": 3.812564722126338e-05, "loss": 0.4034, "step": 1010},
    {"epoch": 0.9459459459459459, "grad_norm": 0.2419222008626581, "learning_rate": 3.8039351052813256e-05, "loss": 0.4119, "step": 1015},
    {"epoch": 0.9506057781919851, "grad_norm": 0.22107649198917048, "learning_rate": 3.7953054884363134e-05, "loss": 0.4058, "step": 1020},
    {"epoch": 0.9552656104380243, "grad_norm": 0.262732169201134, "learning_rate": 3.786675871591301e-05, "loss": 0.4096, "step": 1025},
    {"epoch": 0.9599254426840633, "grad_norm": 0.2229734790957115, "learning_rate": 3.77804625474629e-05, "loss": 0.4145, "step": 1030},
    {"epoch": 0.9645852749301025, "grad_norm": 0.21889151515628189, "learning_rate": 3.7694166379012776e-05, "loss": 0.4067, "step": 1035},
    {"epoch": 0.9692451071761417, "grad_norm": 0.21839503578728642, "learning_rate": 3.7607870210562654e-05, "loss": 0.4044, "step": 1040},
    {"epoch": 0.9739049394221808, "grad_norm": 0.19326632489297702, "learning_rate": 3.752157404211253e-05, "loss": 0.3969, "step": 1045},
    {"epoch": 0.97856477166822, "grad_norm": 0.23697546322043278, "learning_rate": 3.743527787366241e-05, "loss": 0.4266, "step": 1050},
    {"epoch": 0.983224603914259, "grad_norm": 0.23277803927818685, "learning_rate": 3.734898170521229e-05, "loss": 0.4043, "step": 1055},
    {"epoch": 0.9878844361602982, "grad_norm": 0.25183012007611355, "learning_rate": 3.7262685536762174e-05, "loss": 0.4034, "step": 1060},
    {"epoch": 0.9925442684063374, "grad_norm": 0.2212405756520391, "learning_rate": 3.7176389368312045e-05, "loss": 0.4077, "step": 1065},
    {"epoch": 0.9972041006523765, "grad_norm": 0.22830382486281922, "learning_rate": 3.709009319986193e-05, "loss": 0.4082, "step": 1070},
    {"epoch": 1.0018639328984156, "grad_norm": 0.2568074161722236, "learning_rate": 3.700379703141181e-05, "loss": 0.3729, "step": 1075},
    {"epoch": 1.0065237651444547, "grad_norm": 0.23620780640891778, "learning_rate": 3.6917500862961687e-05, "loss": 0.3486, "step": 1080},
    {"epoch": 1.011183597390494, "grad_norm": 0.2826588628388306, "learning_rate": 3.6831204694511565e-05, "loss": 0.3553, "step": 1085},
    {"epoch": 1.015843429636533, "grad_norm": 0.22804995449021884, "learning_rate": 3.674490852606144e-05, "loss": 0.3624, "step": 1090},
    {"epoch": 1.0205032618825722, "grad_norm": 0.22963933509840984, "learning_rate": 3.665861235761132e-05, "loss": 0.3481, "step": 1095},
    {"epoch": 1.0251630941286114, "grad_norm": 0.24458581795121753, "learning_rate": 3.6572316189161206e-05, "loss": 0.339, "step": 1100},
    {"epoch": 1.0298229263746506, "grad_norm": 0.22177275414154707, "learning_rate": 3.6486020020711085e-05, "loss": 0.3455, "step": 1105},
    {"epoch": 1.0344827586206897, "grad_norm": 0.19488893256595907, "learning_rate": 3.639972385226096e-05, "loss": 0.346, "step": 1110},
    {"epoch": 1.0391425908667289, "grad_norm": 0.2337702629010098, "learning_rate": 3.631342768381084e-05, "loss": 0.346, "step": 1115},
    {"epoch": 1.0438024231127678, "grad_norm": 0.23786868391121022, "learning_rate": 3.622713151536072e-05, "loss": 0.3459, "step": 1120},
    {"epoch": 1.048462255358807, "grad_norm": 0.19877714515940853, "learning_rate": 3.6140835346910604e-05, "loss": 0.3496, "step": 1125},
    {"epoch": 1.0531220876048462, "grad_norm": 0.21050666679013205, "learning_rate": 3.6054539178460476e-05, "loss": 0.3465, "step": 1130},
    {"epoch": 1.0577819198508853, "grad_norm": 0.24532625022653806, "learning_rate": 3.5968243010010354e-05, "loss": 0.3625, "step": 1135},
    {"epoch": 1.0624417520969245, "grad_norm": 0.2187419477911204, "learning_rate": 3.588194684156024e-05, "loss": 0.3535, "step": 1140},
    {"epoch": 1.0671015843429636, "grad_norm": 0.2376408953324803, "learning_rate": 3.579565067311012e-05, "loss": 0.3448, "step": 1145},
    {"epoch": 1.0717614165890028, "grad_norm": 0.20815871209791884, "learning_rate": 3.5709354504659995e-05, "loss": 0.3415, "step": 1150},
    {"epoch": 1.076421248835042, "grad_norm": 0.22367630172490455, "learning_rate": 3.5623058336209874e-05, "loss": 0.3525, "step": 1155},
    {"epoch": 1.0810810810810811, "grad_norm": 0.19697701726697703, "learning_rate": 3.553676216775975e-05, "loss": 0.3436, "step": 1160},
    {"epoch": 1.0857409133271203, "grad_norm": 0.22789579169186222, "learning_rate": 3.545046599930964e-05, "loss": 0.3557, "step": 1165},
    {"epoch": 1.0904007455731595, "grad_norm": 0.23826363102589843, "learning_rate": 3.536416983085951e-05, "loss": 0.3572, "step": 1170},
    {"epoch": 1.0950605778191984, "grad_norm": 0.20330442109195937, "learning_rate": 3.5277873662409386e-05, "loss": 0.3438, "step": 1175},
    {"epoch": 1.0997204100652376, "grad_norm": 0.21893872884440935, "learning_rate": 3.519157749395927e-05, "loss": 0.3497, "step": 1180},
    {"epoch": 1.1043802423112767, "grad_norm": 0.25700531286616213, "learning_rate": 3.510528132550915e-05, "loss": 0.347, "step": 1185},
    {"epoch": 1.109040074557316, "grad_norm": 0.2531456811254025, "learning_rate": 3.501898515705903e-05, "loss": 0.3491, "step": 1190},
    {"epoch": 1.113699906803355, "grad_norm": 0.2543141190764336, "learning_rate": 3.4932688988608906e-05, "loss": 0.3603, "step": 1195},
    {"epoch": 1.1183597390493942, "grad_norm": 0.24225424786601063, "learning_rate": 3.4846392820158784e-05, "loss": 0.3487, "step": 1200},
    {"epoch": 1.1230195712954334, "grad_norm": 0.2634841848073112, "learning_rate": 3.476009665170867e-05, "loss": 0.3489, "step": 1205},
    {"epoch": 1.1276794035414726, "grad_norm": 0.23538882211622164, "learning_rate": 3.467380048325855e-05, "loss": 0.3473, "step": 1210},
    {"epoch": 1.1323392357875117, "grad_norm": 0.193878427991159, "learning_rate": 3.458750431480842e-05, "loss": 0.3543, "step": 1215},
    {"epoch": 1.1369990680335509, "grad_norm": 0.21532671326016686, "learning_rate": 3.4501208146358304e-05, "loss": 0.3531, "step": 1220},
    {"epoch": 1.14165890027959, "grad_norm": 0.22024593707468668, "learning_rate": 3.441491197790818e-05, "loss": 0.349, "step": 1225},
    {"epoch": 1.146318732525629, "grad_norm": 0.20284226706833988, "learning_rate": 3.432861580945806e-05, "loss": 0.3503, "step": 1230},
    {"epoch": 1.1509785647716682, "grad_norm": 0.24618408302246267, "learning_rate": 3.424231964100794e-05, "loss": 0.3495, "step": 1235},
    {"epoch": 1.1556383970177073, "grad_norm": 0.2735071177993681, "learning_rate": 3.415602347255782e-05, "loss": 0.3483, "step": 1240},
    {"epoch": 1.1602982292637465, "grad_norm": 0.2430213172405094, "learning_rate": 3.40697273041077e-05, "loss": 0.3552, "step": 1245},
    {"epoch": 1.1649580615097856, "grad_norm": 0.20260306778727305, "learning_rate": 3.398343113565758e-05, "loss": 0.3493, "step": 1250},
    {"epoch": 1.1696178937558248, "grad_norm": 0.2276481253282831, "learning_rate": 3.389713496720746e-05, "loss": 0.3282, "step": 1255},
    {"epoch": 1.174277726001864, "grad_norm": 0.2055659048056618, "learning_rate": 3.381083879875734e-05, "loss": 0.348, "step": 1260},
    {"epoch": 1.1789375582479031, "grad_norm": 0.2056356976494387, "learning_rate": 3.3724542630307215e-05, "loss": 0.3442, "step": 1265},
    {"epoch": 1.1835973904939423, "grad_norm": 0.1998058040934864, "learning_rate": 3.363824646185709e-05, "loss": 0.3535, "step": 1270},
    {"epoch": 1.1882572227399812, "grad_norm": 0.21411393389232136, "learning_rate": 3.355195029340698e-05, "loss": 0.3474, "step": 1275},
    {"epoch": 1.1929170549860204, "grad_norm": 0.2252832229748485, "learning_rate": 3.346565412495685e-05, "loss": 0.3582, "step": 1280},
    {"epoch": 1.1975768872320596, "grad_norm": 0.22192933304521822, "learning_rate": 3.3379357956506735e-05, "loss": 0.3594, "step": 1285},
    {"epoch": 1.2022367194780987, "grad_norm": 0.20918818324519411, "learning_rate": 3.329306178805661e-05, "loss": 0.3425, "step": 1290},
    {"epoch": 1.206896551724138, "grad_norm": 0.2337757441078744, "learning_rate": 3.320676561960649e-05, "loss": 0.3473, "step": 1295},
    {"epoch": 1.211556383970177, "grad_norm": 0.25638892532005586, "learning_rate": 3.312046945115637e-05, "loss": 0.3552, "step": 1300},
    {"epoch": 1.2162162162162162, "grad_norm": 0.235425793642404, "learning_rate": 3.303417328270625e-05, "loss": 0.3541, "step": 1305},
    {"epoch": 1.2208760484622554, "grad_norm": 0.1983603847561264, "learning_rate": 3.2947877114256126e-05, "loss": 0.3433, "step": 1310},
    {"epoch": 1.2255358807082946, "grad_norm": 0.24028460946684757, "learning_rate": 3.286158094580601e-05, "loss": 0.3633, "step": 1315},
    {"epoch": 1.2301957129543337, "grad_norm": 0.22570081899822933, "learning_rate": 3.277528477735589e-05, "loss": 0.3491, "step": 1320},
    {"epoch": 1.2348555452003729, "grad_norm": 0.2246527742116136, "learning_rate": 3.268898860890577e-05, "loss": 0.3563, "step": 1325},
    {"epoch": 1.2395153774464118, "grad_norm": 0.207606744344188, "learning_rate": 3.2602692440455645e-05, "loss": 0.3444, "step": 1330},
    {"epoch": 1.244175209692451, "grad_norm": 0.1968260873796408, "learning_rate": 3.2516396272005524e-05, "loss": 0.3468, "step": 1335},
    {"epoch": 1.2488350419384902, "grad_norm": 0.21603020993318928, "learning_rate": 3.243010010355541e-05, "loss": 0.3525, "step": 1340},
    {"epoch": 1.2534948741845293, "grad_norm": 0.21304311099290554, "learning_rate": 3.234380393510528e-05, "loss": 0.3399, "step": 1345},
    {"epoch": 1.2581547064305685, "grad_norm": 0.19391479743061224, "learning_rate": 3.225750776665516e-05, "loss": 0.3471, "step": 1350},
    {"epoch": 1.2628145386766076, "grad_norm": 0.2123125255250921, "learning_rate": 3.217121159820504e-05, "loss": 0.3575, "step": 1355},
    {"epoch": 1.2674743709226468, "grad_norm": 0.2020465037820743, "learning_rate": 3.208491542975492e-05, "loss": 0.3465, "step": 1360},
    {"epoch": 1.272134203168686, "grad_norm": 0.20157365586972834, "learning_rate": 3.19986192613048e-05, "loss": 0.3517, "step": 1365},
    {"epoch": 1.2767940354147251, "grad_norm": 0.203334422764835, "learning_rate": 3.191232309285468e-05, "loss": 0.3509, "step": 1370},
    {"epoch": 1.281453867660764, "grad_norm": 0.18816097878897212, "learning_rate": 3.1826026924404556e-05, "loss": 0.3405, "step": 1375},
    {"epoch": 1.2861136999068035, "grad_norm": 0.22556563555983292, "learning_rate": 3.173973075595444e-05, "loss": 0.3552, "step": 1380},
    {"epoch": 1.2907735321528424, "grad_norm": 0.21052241819882753, "learning_rate": 3.165343458750431e-05, "loss": 0.3545, "step": 1385},
    {"epoch": 1.2954333643988816, "grad_norm": 0.21509098775174043, "learning_rate": 3.156713841905419e-05, "loss": 0.347, "step": 1390},
    {"epoch": 1.3000931966449207, "grad_norm": 0.18708778542534285, "learning_rate": 3.1480842250604076e-05, "loss": 0.3512, "step": 1395},
    {"epoch": 1.30475302889096, "grad_norm": 0.23327276917136996, "learning_rate": 3.1394546082153954e-05, "loss": 0.3457, "step": 1400},
    {"epoch": 1.309412861136999, "grad_norm": 0.23155700732329837, "learning_rate": 3.130824991370383e-05, "loss": 0.3566, "step": 1405},
    {"epoch": 1.3140726933830382, "grad_norm": 0.24107613463383232, "learning_rate": 3.122195374525371e-05, "loss": 0.3442, "step": 1410},
    {"epoch": 1.3187325256290774, "grad_norm": 0.20414621684379755, "learning_rate": 3.113565757680359e-05, "loss": 0.3521, "step": 1415},
    {"epoch": 1.3233923578751166, "grad_norm": 0.209842742427075, "learning_rate": 3.1049361408353474e-05, "loss": 0.3393, "step": 1420},
    {"epoch": 1.3280521901211557, "grad_norm": 0.19108610767410747, "learning_rate": 3.096306523990335e-05, "loss": 0.3472, "step": 1425},
    {"epoch": 1.3327120223671947, "grad_norm": 0.20890219986518263, "learning_rate": 3.0876769071453223e-05, "loss": 0.3558, "step": 1430},
    {"epoch": 1.337371854613234, "grad_norm": 0.2021518280088857, "learning_rate": 3.079047290300311e-05, "loss": 0.3539, "step": 1435},
    {"epoch": 1.342031686859273, "grad_norm": 0.22161596741376463, "learning_rate": 3.070417673455299e-05, "loss": 0.352, "step": 1440},
    {"epoch": 1.3466915191053122, "grad_norm": 0.18611249829544393, "learning_rate": 3.061788056610287e-05, "loss": 0.3568, "step": 1445},
    {"epoch": 1.3513513513513513, "grad_norm": 0.22193423327390585, "learning_rate": 3.053158439765274e-05, "loss": 0.3425, "step": 1450},
    {"epoch": 1.3560111835973905, "grad_norm": 0.19575929240707, "learning_rate": 3.0445288229202625e-05, "loss": 0.3503, "step": 1455},
    {"epoch": 1.3606710158434296, "grad_norm": 0.22633413756342563, "learning_rate": 3.0358992060752506e-05, "loss": 0.3564, "step": 1460},
    {"epoch": 1.3653308480894688, "grad_norm": 0.21863059178933453, "learning_rate": 3.027269589230238e-05, "loss": 0.3503, "step": 1465},
    {"epoch": 1.369990680335508, "grad_norm": 0.20718983821408218, "learning_rate": 3.018639972385226e-05, "loss": 0.3475, "step": 1470},
    {"epoch": 1.3746505125815471, "grad_norm": 0.18400966779485417, "learning_rate": 3.010010355540214e-05, "loss": 0.3492, "step": 1475},
    {"epoch": 1.3793103448275863, "grad_norm": 0.18140746407997485, "learning_rate": 3.001380738695202e-05, "loss": 0.3331, "step": 1480},
    {"epoch": 1.3839701770736252, "grad_norm": 0.19061774117619995, "learning_rate": 2.99275112185019e-05, "loss": 0.3509, "step": 1485},
    {"epoch": 1.3886300093196646, "grad_norm": 0.2047028675116995, "learning_rate": 2.984121505005178e-05, "loss": 0.356, "step": 1490},
    {"epoch": 1.3932898415657036, "grad_norm": 0.20347191964506664, "learning_rate": 2.9754918881601657e-05, "loss": 0.3642, "step": 1495},
    {"epoch": 1.3979496738117427, "grad_norm": 0.2273398983431203, "learning_rate": 2.966862271315154e-05, "loss": 0.3603, "step": 1500},
    {"epoch": 1.402609506057782, "grad_norm": 0.2291683812869433, "learning_rate": 2.9582326544701417e-05, "loss": 0.3563, "step": 1505},
    {"epoch": 1.407269338303821, "grad_norm": 0.2077830016052137, "learning_rate": 2.9496030376251292e-05, "loss": 0.3487, "step": 1510},
    {"epoch": 1.4119291705498602, "grad_norm": 0.20420473540821882, "learning_rate": 2.9409734207801177e-05, "loss": 0.3524, "step": 1515},
    {"epoch": 1.4165890027958994, "grad_norm": 0.19235619261885795, "learning_rate": 2.9323438039351052e-05, "loss": 0.3516, "step": 1520},
    {"epoch": 1.4212488350419386, "grad_norm": 0.19890866298412063, "learning_rate": 2.9237141870900937e-05, "loss": 0.3476, "step": 1525},
    {"epoch": 1.4259086672879777, "grad_norm": 0.18546300249824202, "learning_rate": 2.9150845702450812e-05, "loss": 0.3466, "step": 1530},
    {"epoch": 1.4305684995340169, "grad_norm": 0.21145883136028973, "learning_rate": 2.906454953400069e-05, "loss": 0.3516, "step": 1535},
    {"epoch": 1.4352283317800558, "grad_norm": 0.22398000313134658, "learning_rate": 2.897825336555057e-05, "loss": 0.3476, "step": 1540},
    {"epoch": 1.439888164026095, "grad_norm": 0.22880975923853247, "learning_rate": 2.889195719710045e-05, "loss": 0.3481, "step": 1545},
    {"epoch": 1.4445479962721341, "grad_norm": 0.20330379195082973, "learning_rate": 2.8805661028650328e-05, "loss": 0.3443, "step": 1550},
    {"epoch": 1.4492078285181733, "grad_norm": 0.2573526272893972, "learning_rate": 2.871936486020021e-05, "loss": 0.3418, "step": 1555},
    {"epoch": 1.4538676607642125, "grad_norm": 0.20712038029776628, "learning_rate": 2.8633068691750088e-05, "loss": 0.3443, "step": 1560},
    {"epoch": 1.4585274930102516, "grad_norm": 0.2050277528241079, "learning_rate": 2.854677252329997e-05, "loss": 0.3468, "step": 1565},
    {"epoch": 1.4631873252562908, "grad_norm": 0.20801929662240118, "learning_rate": 2.8460476354849848e-05, "loss": 0.3538, "step": 1570},
    {"epoch": 1.46784715750233, "grad_norm": 0.1996807866134819, "learning_rate": 2.8374180186399723e-05, "loss": 0.3535, "step": 1575},
    {"epoch": 1.4725069897483691, "grad_norm": 0.2002754175750313, "learning_rate": 2.8287884017949608e-05, "loss": 0.3549, "step": 1580},
    {"epoch": 1.477166821994408, "grad_norm": 0.2129074931544869, "learning_rate": 2.8201587849499482e-05, "loss": 0.3479, "step": 1585},
    {"epoch": 1.4818266542404475, "grad_norm": 0.18415676001985198, "learning_rate": 2.811529168104936e-05, "loss": 0.3581, "step": 1590},
    {"epoch": 1.4864864864864864, "grad_norm": 0.196287680333158, "learning_rate": 2.8028995512599242e-05, "loss": 0.349, "step": 1595},
    {"epoch": 1.4911463187325256, "grad_norm": 0.2227322686085962, "learning_rate": 2.794269934414912e-05, "loss": 0.3505, "step": 1600},
    {"epoch": 1.4958061509785647, "grad_norm": 0.23733994939280273, "learning_rate": 2.7856403175699002e-05, "loss": 0.3545, "step": 1605},
    {"epoch": 1.500465983224604, "grad_norm": 0.2420129519934035, "learning_rate": 2.777010700724888e-05, "loss": 0.3498, "step": 1610},
    {"epoch": 1.505125815470643, "grad_norm": 0.1893500884929808, "learning_rate": 2.768381083879876e-05, "loss": 0.3447, "step": 1615},
    {"epoch": 1.5097856477166822, "grad_norm": 0.1998933253053623, "learning_rate": 2.759751467034864e-05, "loss": 0.3469, "step": 1620},
    {"epoch": 1.5144454799627214, "grad_norm": 0.17368180029888158, "learning_rate": 2.751121850189852e-05, "loss": 0.3411, "step": 1625},
    {"epoch": 1.5191053122087603, "grad_norm": 0.19716854883766005, "learning_rate": 2.7424922333448393e-05, "loss": 0.3347, "step": 1630},
    {"epoch": 1.5237651444547997, "grad_norm": 0.20126197960570164, "learning_rate": 2.7338626164998278e-05, "loss": 0.3419, "step": 1635},
    {"epoch": 1.5284249767008387, "grad_norm": 0.21143917201945417, "learning_rate": 2.7252329996548153e-05, "loss": 0.3449, "step": 1640},
    {"epoch": 1.533084808946878, "grad_norm": 0.2127385172605197, "learning_rate": 2.7166033828098038e-05, "loss": 0.3601, "step": 1645},
    {"epoch": 1.537744641192917, "grad_norm": 0.20830259790781955, "learning_rate": 2.7079737659647913e-05, "loss": 0.3491, "step": 1650},
    {"epoch": 1.5424044734389561, "grad_norm": 0.19278413019931034, "learning_rate": 2.699344149119779e-05, "loss": 0.3561, "step": 1655},
    {"epoch": 1.5470643056849953, "grad_norm": 0.2150348165615608, "learning_rate": 2.6907145322747673e-05, "loss": 0.3504, "step": 1660},
    {"epoch": 1.5517241379310345, "grad_norm": 0.20254104227445804, "learning_rate": 2.682084915429755e-05, "loss": 0.3412, "step": 1665},
    {"epoch": 1.5563839701770736, "grad_norm": 0.1874349180061177, "learning_rate": 2.673455298584743e-05, "loss": 0.3463, "step": 1670},
    {"epoch": 1.5610438024231128, "grad_norm": 0.2106688572781653, "learning_rate": 2.664825681739731e-05, "loss": 0.3539, "step": 1675},
    {"epoch": 1.565703634669152, "grad_norm": 0.2572758192140219, "learning_rate": 2.6561960648947186e-05, "loss": 0.3454, "step": 1680},
    {"epoch": 1.570363466915191, "grad_norm": 0.21254370372557896, "learning_rate": 2.647566448049707e-05, "loss": 0.3415, "step": 1685},
    {"epoch": 1.5750232991612303, "grad_norm": 0.20760927923885905, "learning_rate": 2.6389368312046945e-05, "loss": 0.3541, "step": 1690},
    {"epoch": 1.5796831314072692, "grad_norm": 0.21384787601272656, "learning_rate": 2.6303072143596824e-05, "loss": 0.3578, "step": 1695},
    {"epoch": 1.5843429636533086, "grad_norm": 0.2001928042747188, "learning_rate": 2.6216775975146705e-05, "loss": 0.3502, "step": 1700},
    {"epoch": 1.5890027958993476, "grad_norm": 0.23442221198058127, "learning_rate": 2.6130479806696584e-05, "loss": 0.3475, "step": 1705},
    {"epoch": 1.5936626281453867, "grad_norm": 0.21415740633727204, "learning_rate": 2.6044183638246462e-05, "loss": 0.3531, "step": 1710},
    {"epoch": 1.598322460391426, "grad_norm": 0.18098778418722639, "learning_rate": 2.5957887469796343e-05, "loss": 0.3427, "step": 1715},
    {"epoch": 1.602982292637465, "grad_norm": 0.22570425532204863, "learning_rate": 2.587159130134622e-05, "loss": 0.3471, "step": 1720},
    {"epoch": 1.6076421248835042, "grad_norm": 0.2028311769594307, "learning_rate": 2.5785295132896096e-05, "loss": 0.3468, "step": 1725},
    {"epoch": 1.6123019571295434, "grad_norm": 0.18562021138400556, "learning_rate": 2.569899896444598e-05, "loss": 0.3407, "step": 1730},
    {"epoch": 1.6169617893755825, "grad_norm": 0.18351231973317866, "learning_rate": 2.5612702795995856e-05, "loss": 0.3509, "step": 1735},
    {"epoch": 1.6216216216216215, "grad_norm": 0.18459550285917165, "learning_rate": 2.552640662754574e-05, "loss": 0.3458, "step": 1740},
    {"epoch": 1.6262814538676609, "grad_norm": 0.17811606197074187, "learning_rate": 2.5440110459095616e-05, "loss": 0.3527, "step": 1745},
    {"epoch": 1.6309412861136998, "grad_norm": 0.19610454144405196, "learning_rate": 2.5353814290645494e-05, "loss": 0.3514, "step": 1750},
    {"epoch": 1.6356011183597392, "grad_norm": 0.1858985924007689, "learning_rate": 2.5267518122195376e-05, "loss": 0.3433, "step": 1755},
    {"epoch": 1.6402609506057781, "grad_norm": 0.21353503747941435, "learning_rate": 2.5181221953745254e-05, "loss": 0.3517, "step": 1760},
    {"epoch": 1.6449207828518173, "grad_norm": 0.18584489898990506, "learning_rate": 2.5094925785295132e-05, "loss": 0.3534, "step": 1765},
    {"epoch": 1.6495806150978565, "grad_norm": 0.19365428128307033, "learning_rate": 2.5008629616845014e-05, "loss": 0.3466, "step": 1770},
    {"epoch": 1.6542404473438956, "grad_norm": 0.20982062271855179, "learning_rate": 2.4922333448394892e-05, "loss": 0.3392, "step": 1775},
    {"epoch": 1.6589002795899348, "grad_norm": 0.19146498945778959, "learning_rate": 2.483603727994477e-05, "loss": 0.349, "step": 1780},
    {"epoch": 1.6635601118359737, "grad_norm": 0.2001196203120995, "learning_rate": 2.4749741111494652e-05, "loss": 0.3464, "step": 1785},
    {"epoch": 1.6682199440820131, "grad_norm": 0.18697929256408521, "learning_rate": 2.466344494304453e-05, "loss": 0.346, "step": 1790},
    {"epoch": 1.672879776328052, "grad_norm": 0.20652278171635405, "learning_rate": 2.457714877459441e-05, "loss": 0.3387, "step": 1795},
    {"epoch": 1.6775396085740915, "grad_norm": 0.18065674843053092, "learning_rate": 2.4490852606144287e-05, "loss": 0.3385, "step": 1800},
    {"epoch": 1.6821994408201304, "grad_norm": 0.1877041880913077, "learning_rate": 2.440455643769417e-05, "loss": 0.3415, "step": 1805},
    {"epoch": 1.6868592730661698, "grad_norm": 0.18722414367304202, "learning_rate": 2.4318260269244047e-05, "loss": 0.3449, "step": 1810},
    {"epoch": 1.6915191053122087, "grad_norm": 0.18397076137907972, "learning_rate": 2.4231964100793925e-05, "loss": 0.3329, "step": 1815},
    {"epoch": 1.696178937558248, "grad_norm": 0.18357697044321036, "learning_rate": 2.4145667932343803e-05, "loss": 0.3441, "step": 1820},
    {"epoch": 1.700838769804287, "grad_norm": 0.18981914666104338, "learning_rate": 2.4059371763893685e-05, "loss": 0.3489, "step": 1825},
    {"epoch": 1.7054986020503262, "grad_norm": 0.19385625675708432, "learning_rate": 2.3973075595443563e-05, "loss": 0.3538, "step": 1830},
    {"epoch": 1.7101584342963654, "grad_norm": 0.2296602929109332, "learning_rate": 2.388677942699344e-05,
|
"loss": 0.3488, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.7148182665424043, |
|
"grad_norm": 0.20685404777763597, |
|
"learning_rate": 2.3800483258543323e-05, |
|
"loss": 0.345, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.7194780987884437, |
|
"grad_norm": 0.1745042574355075, |
|
"learning_rate": 2.37141870900932e-05, |
|
"loss": 0.3503, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7241379310344827, |
|
"grad_norm": 0.17821644874007986, |
|
"learning_rate": 2.3627890921643083e-05, |
|
"loss": 0.3324, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.728797763280522, |
|
"grad_norm": 0.21627845980156138, |
|
"learning_rate": 2.3541594753192957e-05, |
|
"loss": 0.3423, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.733457595526561, |
|
"grad_norm": 0.19879766175649433, |
|
"learning_rate": 2.345529858474284e-05, |
|
"loss": 0.3426, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.7381174277726001, |
|
"grad_norm": 0.19587052584197356, |
|
"learning_rate": 2.3369002416292717e-05, |
|
"loss": 0.3421, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.7427772600186393, |
|
"grad_norm": 0.19830936553373776, |
|
"learning_rate": 2.32827062478426e-05, |
|
"loss": 0.3412, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.7474370922646785, |
|
"grad_norm": 0.19125328489290028, |
|
"learning_rate": 2.3196410079392474e-05, |
|
"loss": 0.3498, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.7520969245107176, |
|
"grad_norm": 0.20816473594050777, |
|
"learning_rate": 2.3110113910942355e-05, |
|
"loss": 0.3444, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7567567567567568, |
|
"grad_norm": 0.20924944617545066, |
|
"learning_rate": 2.3023817742492234e-05, |
|
"loss": 0.3496, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.761416589002796, |
|
"grad_norm": 0.19540665347917044, |
|
"learning_rate": 2.2937521574042115e-05, |
|
"loss": 0.3475, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.766076421248835, |
|
"grad_norm": 0.19144533197109495, |
|
"learning_rate": 2.285122540559199e-05, |
|
"loss": 0.345, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.7707362534948743, |
|
"grad_norm": 0.4890453131646484, |
|
"learning_rate": 2.276492923714187e-05, |
|
"loss": 0.3378, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.7753960857409132, |
|
"grad_norm": 0.19016613238271587, |
|
"learning_rate": 2.267863306869175e-05, |
|
"loss": 0.3369, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.7800559179869526, |
|
"grad_norm": 0.2003225772191725, |
|
"learning_rate": 2.259233690024163e-05, |
|
"loss": 0.3497, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.7847157502329916, |
|
"grad_norm": 0.19217824101723838, |
|
"learning_rate": 2.250604073179151e-05, |
|
"loss": 0.3326, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.7893755824790307, |
|
"grad_norm": 0.21711588195305268, |
|
"learning_rate": 2.2419744563341388e-05, |
|
"loss": 0.3486, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.7940354147250699, |
|
"grad_norm": 0.1980847528143503, |
|
"learning_rate": 2.233344839489127e-05, |
|
"loss": 0.3453, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.798695246971109, |
|
"grad_norm": 0.18037885766449233, |
|
"learning_rate": 2.2247152226441148e-05, |
|
"loss": 0.3418, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8033550792171482, |
|
"grad_norm": 0.19144410933084757, |
|
"learning_rate": 2.2160856057991026e-05, |
|
"loss": 0.3465, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.8080149114631874, |
|
"grad_norm": 0.20702431177315195, |
|
"learning_rate": 2.2074559889540904e-05, |
|
"loss": 0.3347, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8126747437092265, |
|
"grad_norm": 0.19824842930709244, |
|
"learning_rate": 2.1988263721090786e-05, |
|
"loss": 0.3516, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.8173345759552655, |
|
"grad_norm": 0.1934304920163348, |
|
"learning_rate": 2.1901967552640664e-05, |
|
"loss": 0.3345, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8219944082013049, |
|
"grad_norm": 0.18178449756491494, |
|
"learning_rate": 2.1815671384190542e-05, |
|
"loss": 0.3426, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8266542404473438, |
|
"grad_norm": 0.19434526815037695, |
|
"learning_rate": 2.172937521574042e-05, |
|
"loss": 0.3461, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.8313140726933832, |
|
"grad_norm": 0.1954441752319809, |
|
"learning_rate": 2.1643079047290302e-05, |
|
"loss": 0.3476, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.8359739049394221, |
|
"grad_norm": 0.20445056518923269, |
|
"learning_rate": 2.155678287884018e-05, |
|
"loss": 0.3451, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.8406337371854613, |
|
"grad_norm": 0.1955522058620285, |
|
"learning_rate": 2.147048671039006e-05, |
|
"loss": 0.353, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.8452935694315005, |
|
"grad_norm": 0.21003814075691965, |
|
"learning_rate": 2.1384190541939937e-05, |
|
"loss": 0.3439, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.8499534016775396, |
|
"grad_norm": 0.1811476187665884, |
|
"learning_rate": 2.129789437348982e-05, |
|
"loss": 0.3394, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.8546132339235788, |
|
"grad_norm": 0.21172639167251692, |
|
"learning_rate": 2.1211598205039697e-05, |
|
"loss": 0.3583, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.8592730661696177, |
|
"grad_norm": 0.21470115708825954, |
|
"learning_rate": 2.1125302036589575e-05, |
|
"loss": 0.3439, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8639328984156571, |
|
"grad_norm": 0.19189644647389956, |
|
"learning_rate": 2.1039005868139457e-05, |
|
"loss": 0.3428, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.868592730661696, |
|
"grad_norm": 0.17594964341965214, |
|
"learning_rate": 2.0952709699689335e-05, |
|
"loss": 0.3364, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.8732525629077355, |
|
"grad_norm": 0.1870497194013576, |
|
"learning_rate": 2.0866413531239216e-05, |
|
"loss": 0.3514, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.8779123951537744, |
|
"grad_norm": 0.1936309273458679, |
|
"learning_rate": 2.078011736278909e-05, |
|
"loss": 0.3376, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.8825722273998136, |
|
"grad_norm": 0.19124711019040094, |
|
"learning_rate": 2.0693821194338973e-05, |
|
"loss": 0.3327, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.8872320596458527, |
|
"grad_norm": 0.22371202412410007, |
|
"learning_rate": 2.060752502588885e-05, |
|
"loss": 0.3388, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.8918918918918919, |
|
"grad_norm": 0.18296153754141045, |
|
"learning_rate": 2.0521228857438733e-05, |
|
"loss": 0.3457, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.896551724137931, |
|
"grad_norm": 0.1936137681825998, |
|
"learning_rate": 2.0434932688988608e-05, |
|
"loss": 0.3389, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.9012115563839702, |
|
"grad_norm": 0.18820228000949585, |
|
"learning_rate": 2.034863652053849e-05, |
|
"loss": 0.3418, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.9058713886300094, |
|
"grad_norm": 0.18032478138329905, |
|
"learning_rate": 2.0262340352088367e-05, |
|
"loss": 0.3575, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9105312208760483, |
|
"grad_norm": 0.22505035742405854, |
|
"learning_rate": 2.017604418363825e-05, |
|
"loss": 0.3479, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9151910531220877, |
|
"grad_norm": 0.17824239790661023, |
|
"learning_rate": 2.0089748015188127e-05, |
|
"loss": 0.3423, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9198508853681266, |
|
"grad_norm": 0.18220439445009537, |
|
"learning_rate": 2.0003451846738005e-05, |
|
"loss": 0.35, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.924510717614166, |
|
"grad_norm": 0.19213952413987173, |
|
"learning_rate": 1.9917155678287887e-05, |
|
"loss": 0.3393, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.929170549860205, |
|
"grad_norm": 0.18692994326833778, |
|
"learning_rate": 1.9830859509837765e-05, |
|
"loss": 0.347, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9338303821062441, |
|
"grad_norm": 0.19298291502499157, |
|
"learning_rate": 1.9744563341387643e-05, |
|
"loss": 0.3483, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.9384902143522833, |
|
"grad_norm": 0.19737523647019892, |
|
"learning_rate": 1.9658267172937522e-05, |
|
"loss": 0.3432, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.9431500465983225, |
|
"grad_norm": 0.22612317484540523, |
|
"learning_rate": 1.9571971004487403e-05, |
|
"loss": 0.343, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.9478098788443616, |
|
"grad_norm": 0.22325638761706249, |
|
"learning_rate": 1.948567483603728e-05, |
|
"loss": 0.3488, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.9524697110904008, |
|
"grad_norm": 0.18549622269199276, |
|
"learning_rate": 1.939937866758716e-05, |
|
"loss": 0.3314, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.95712954333644, |
|
"grad_norm": 0.1825370067767358, |
|
"learning_rate": 1.9313082499137038e-05, |
|
"loss": 0.3488, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.961789375582479, |
|
"grad_norm": 0.20021290070272088, |
|
"learning_rate": 1.922678633068692e-05, |
|
"loss": 0.3431, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.9664492078285183, |
|
"grad_norm": 0.18522521172334733, |
|
"learning_rate": 1.9140490162236798e-05, |
|
"loss": 0.3484, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9711090400745572, |
|
"grad_norm": 0.18562848737313675, |
|
"learning_rate": 1.9054193993786676e-05, |
|
"loss": 0.3557, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9757688723205966, |
|
"grad_norm": 0.17712497187154147, |
|
"learning_rate": 1.8967897825336554e-05, |
|
"loss": 0.3435, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.9804287045666356, |
|
"grad_norm": 0.16869022484076907, |
|
"learning_rate": 1.8881601656886436e-05, |
|
"loss": 0.3411, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.9850885368126747, |
|
"grad_norm": 0.20596894918457725, |
|
"learning_rate": 1.8795305488436314e-05, |
|
"loss": 0.3466, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.9897483690587139, |
|
"grad_norm": 0.20771624519351856, |
|
"learning_rate": 1.8709009319986192e-05, |
|
"loss": 0.3335, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.994408201304753, |
|
"grad_norm": 0.19099341855554514, |
|
"learning_rate": 1.8622713151536074e-05, |
|
"loss": 0.3408, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.9990680335507922, |
|
"grad_norm": 0.22439161052399093, |
|
"learning_rate": 1.8536416983085952e-05, |
|
"loss": 0.3455, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 2.003727865796831, |
|
"grad_norm": 0.24760892631090284, |
|
"learning_rate": 1.8450120814635834e-05, |
|
"loss": 0.2818, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.0083876980428705, |
|
"grad_norm": 0.2076658807325857, |
|
"learning_rate": 1.836382464618571e-05, |
|
"loss": 0.2841, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 2.0130475302889095, |
|
"grad_norm": 0.19901034230583584, |
|
"learning_rate": 1.827752847773559e-05, |
|
"loss": 0.2774, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.017707362534949, |
|
"grad_norm": 0.21563635145800314, |
|
"learning_rate": 1.819123230928547e-05, |
|
"loss": 0.2799, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 2.022367194780988, |
|
"grad_norm": 0.1779996404360597, |
|
"learning_rate": 1.810493614083535e-05, |
|
"loss": 0.2792, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.027027027027027, |
|
"grad_norm": 0.18536418701002, |
|
"learning_rate": 1.8018639972385225e-05, |
|
"loss": 0.2744, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.031686859273066, |
|
"grad_norm": 0.19596511998183597, |
|
"learning_rate": 1.7932343803935107e-05, |
|
"loss": 0.2787, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.0363466915191055, |
|
"grad_norm": 0.18131675716790735, |
|
"learning_rate": 1.7846047635484985e-05, |
|
"loss": 0.2752, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.0410065237651445, |
|
"grad_norm": 0.19450825690465262, |
|
"learning_rate": 1.7759751467034866e-05, |
|
"loss": 0.275, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.0456663560111834, |
|
"grad_norm": 0.22336781870241904, |
|
"learning_rate": 1.767345529858474e-05, |
|
"loss": 0.2847, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.050326188257223, |
|
"grad_norm": 0.19274211327699445, |
|
"learning_rate": 1.7587159130134623e-05, |
|
"loss": 0.2849, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.0549860205032617, |
|
"grad_norm": 0.20462843835129105, |
|
"learning_rate": 1.75008629616845e-05, |
|
"loss": 0.2726, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.059645852749301, |
|
"grad_norm": 0.18297222845059374, |
|
"learning_rate": 1.7414566793234383e-05, |
|
"loss": 0.2751, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.06430568499534, |
|
"grad_norm": 0.19794540302981642, |
|
"learning_rate": 1.732827062478426e-05, |
|
"loss": 0.2798, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.0689655172413794, |
|
"grad_norm": 0.1909965716338283, |
|
"learning_rate": 1.724197445633414e-05, |
|
"loss": 0.2722, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.0736253494874184, |
|
"grad_norm": 0.1847274908962051, |
|
"learning_rate": 1.715567828788402e-05, |
|
"loss": 0.2706, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.0782851817334578, |
|
"grad_norm": 0.17899970762263998, |
|
"learning_rate": 1.70693821194339e-05, |
|
"loss": 0.2705, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.0829450139794967, |
|
"grad_norm": 0.17069034615127132, |
|
"learning_rate": 1.6983085950983777e-05, |
|
"loss": 0.2709, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.0876048462255357, |
|
"grad_norm": 0.186730563588476, |
|
"learning_rate": 1.6896789782533655e-05, |
|
"loss": 0.2845, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.092264678471575, |
|
"grad_norm": 0.18230773107211734, |
|
"learning_rate": 1.6810493614083537e-05, |
|
"loss": 0.276, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.096924510717614, |
|
"grad_norm": 0.18333083631597802, |
|
"learning_rate": 1.6724197445633415e-05, |
|
"loss": 0.2794, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.1015843429636534, |
|
"grad_norm": 0.19291460989794298, |
|
"learning_rate": 1.6637901277183294e-05, |
|
"loss": 0.2828, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.1062441752096923, |
|
"grad_norm": 0.17862532188027724, |
|
"learning_rate": 1.6551605108733172e-05, |
|
"loss": 0.2766, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.1109040074557317, |
|
"grad_norm": 0.18279688650508544, |
|
"learning_rate": 1.6465308940283053e-05, |
|
"loss": 0.2787, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.1155638397017706, |
|
"grad_norm": 0.19151985516528375, |
|
"learning_rate": 1.637901277183293e-05, |
|
"loss": 0.2824, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.12022367194781, |
|
"grad_norm": 0.17796870989114708, |
|
"learning_rate": 1.629271660338281e-05, |
|
"loss": 0.2751, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.124883504193849, |
|
"grad_norm": 0.24488517093999138, |
|
"learning_rate": 1.620642043493269e-05, |
|
"loss": 0.2806, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.1295433364398884, |
|
"grad_norm": 0.18824900573056774, |
|
"learning_rate": 1.612012426648257e-05, |
|
"loss": 0.2737, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.1342031686859273, |
|
"grad_norm": 0.19706559186765177, |
|
"learning_rate": 1.6033828098032448e-05, |
|
"loss": 0.2757, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.1388630009319662, |
|
"grad_norm": 0.19705577941399888, |
|
"learning_rate": 1.5947531929582326e-05, |
|
"loss": 0.2793, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.1435228331780056, |
|
"grad_norm": 0.20257007438340918, |
|
"learning_rate": 1.5861235761132208e-05, |
|
"loss": 0.28, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.1481826654240446, |
|
"grad_norm": 0.19438305706846215, |
|
"learning_rate": 1.5774939592682086e-05, |
|
"loss": 0.2724, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.152842497670084, |
|
"grad_norm": 0.20957637522728298, |
|
"learning_rate": 1.5688643424231964e-05, |
|
"loss": 0.2776, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.157502329916123, |
|
"grad_norm": 0.18416950911463542, |
|
"learning_rate": 1.5602347255781842e-05, |
|
"loss": 0.2786, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.1621621621621623, |
|
"grad_norm": 0.18078468423930508, |
|
"learning_rate": 1.5516051087331724e-05, |
|
"loss": 0.2712, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.1668219944082012, |
|
"grad_norm": 0.19122885707834714, |
|
"learning_rate": 1.5429754918881602e-05, |
|
"loss": 0.2822, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.1714818266542406, |
|
"grad_norm": 0.19930106815094198, |
|
"learning_rate": 1.534345875043148e-05, |
|
"loss": 0.2755, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.1761416589002796, |
|
"grad_norm": 0.17733281685786634, |
|
"learning_rate": 1.525716258198136e-05, |
|
"loss": 0.2742, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.180801491146319, |
|
"grad_norm": 0.19576390835914426, |
|
"learning_rate": 1.517086641353124e-05, |
|
"loss": 0.2743, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.185461323392358, |
|
"grad_norm": 0.17917694650509258, |
|
"learning_rate": 1.508457024508112e-05, |
|
"loss": 0.2864, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.190121155638397, |
|
"grad_norm": 0.18248900116649447, |
|
"learning_rate": 1.4998274076630997e-05, |
|
"loss": 0.2664, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.194780987884436, |
|
"grad_norm": 0.18553621419299704, |
|
"learning_rate": 1.4911977908180877e-05, |
|
"loss": 0.2827, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.199440820130475, |
|
"grad_norm": 0.19092470763819386, |
|
"learning_rate": 1.4825681739730757e-05, |
|
"loss": 0.2813, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.2041006523765145, |
|
"grad_norm": 0.19927156923261682, |
|
"learning_rate": 1.4739385571280637e-05, |
|
"loss": 0.2731, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.2087604846225535, |
|
"grad_norm": 0.19203445028734814, |
|
"learning_rate": 1.4653089402830513e-05, |
|
"loss": 0.2661, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.213420316868593, |
|
"grad_norm": 0.18897078940664522, |
|
"learning_rate": 1.4566793234380393e-05, |
|
"loss": 0.2748, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.218080149114632, |
|
"grad_norm": 0.17107774863115274, |
|
"learning_rate": 1.4480497065930273e-05, |
|
"loss": 0.2805, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.222739981360671, |
|
"grad_norm": 0.1766012688920227, |
|
"learning_rate": 1.4394200897480153e-05, |
|
"loss": 0.2691, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.22739981360671, |
|
"grad_norm": 0.20605771299740377, |
|
"learning_rate": 1.4307904729030031e-05, |
|
"loss": 0.2767, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.2320596458527495, |
|
"grad_norm": 0.18604061923263876, |
|
"learning_rate": 1.4221608560579911e-05, |
|
"loss": 0.2794, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.2367194780987885, |
|
"grad_norm": 0.1851616114223708, |
|
"learning_rate": 1.4135312392129791e-05, |
|
"loss": 0.2757, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.2413793103448274, |
|
"grad_norm": 0.17443369190032176, |
|
"learning_rate": 1.404901622367967e-05, |
|
"loss": 0.264, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.246039142590867, |
|
"grad_norm": 0.19812274928789778, |
|
"learning_rate": 1.3962720055229547e-05, |
|
"loss": 0.2826, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.2506989748369057, |
|
"grad_norm": 0.17824647495356746, |
|
"learning_rate": 1.3876423886779427e-05, |
|
"loss": 0.2782, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.255358807082945, |
|
"grad_norm": 0.17013806237062562, |
|
"learning_rate": 1.3790127718329307e-05, |
|
"loss": 0.2762, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.260018639328984, |
|
"grad_norm": 0.1943168922827978, |
|
"learning_rate": 1.3703831549879187e-05, |
|
"loss": 0.2789, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.2646784715750234, |
|
"grad_norm": 0.18094877523221986, |
|
"learning_rate": 1.3617535381429064e-05, |
|
"loss": 0.2743, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.2693383038210624, |
|
"grad_norm": 0.19603810048617765, |
|
"learning_rate": 1.3531239212978944e-05, |
|
"loss": 0.2758, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.2739981360671018, |
|
"grad_norm": 0.18838097506191692, |
|
"learning_rate": 1.3444943044528824e-05, |
|
"loss": 0.2746, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.2786579683131407, |
|
"grad_norm": 0.1837992421106699, |
|
"learning_rate": 1.3358646876078703e-05, |
|
"loss": 0.2829, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.28331780055918, |
|
"grad_norm": 0.20613494690370499, |
|
"learning_rate": 1.327235070762858e-05, |
|
"loss": 0.2747, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.287977632805219, |
|
"grad_norm": 0.18601645140870776, |
|
"learning_rate": 1.318605453917846e-05, |
|
"loss": 0.2788, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.292637465051258, |
|
"grad_norm": 0.1968058372163702, |
|
"learning_rate": 1.309975837072834e-05, |
|
"loss": 0.2778, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.2972972972972974, |
|
"grad_norm": 0.2043925744041973, |
|
"learning_rate": 1.301346220227822e-05, |
|
"loss": 0.2739, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.3019571295433363, |
|
"grad_norm": 0.19738280319636547, |
|
"learning_rate": 1.2927166033828098e-05, |
|
"loss": 0.2758, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.3066169617893757, |
|
"grad_norm": 0.18662351252483492, |
|
"learning_rate": 1.2840869865377978e-05, |
|
"loss": 0.2785, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.3112767940354146, |
|
"grad_norm": 0.17709594640236917, |
|
"learning_rate": 1.2754573696927858e-05, |
|
"loss": 0.2703, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.315936626281454, |
|
"grad_norm": 0.181319324908815, |
|
"learning_rate": 1.2668277528477738e-05, |
|
"loss": 0.2792, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.320596458527493, |
|
"grad_norm": 0.17361145659665653, |
|
"learning_rate": 1.2581981360027614e-05, |
|
"loss": 0.2726, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.325256290773532, |
|
"grad_norm": 0.18262624402157168, |
|
"learning_rate": 1.2495685191577494e-05, |
|
"loss": 0.2866, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.3299161230195713, |
|
"grad_norm": 0.17614938750862233, |
|
"learning_rate": 1.2409389023127374e-05, |
|
"loss": 0.2742, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.3345759552656107, |
|
"grad_norm": 0.193153455970642, |
|
"learning_rate": 1.2323092854677252e-05, |
|
"loss": 0.275, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.3392357875116496, |
|
"grad_norm": 0.17560503623781729, |
|
"learning_rate": 1.2236796686227132e-05, |
|
"loss": 0.2836, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.3438956197576886, |
|
"grad_norm": 0.17753595861207647, |
|
"learning_rate": 1.215050051777701e-05, |
|
"loss": 0.2804, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.348555452003728, |
|
"grad_norm": 0.16655795528741021, |
|
"learning_rate": 1.206420434932689e-05, |
|
"loss": 0.2795, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.353215284249767, |
|
"grad_norm": 0.18056164054302745, |
|
"learning_rate": 1.1977908180876769e-05, |
|
"loss": 0.2786, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.3578751164958063, |
|
"grad_norm": 0.17262069282740267, |
|
"learning_rate": 1.1891612012426649e-05, |
|
"loss": 0.2823, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.362534948741845, |
|
"grad_norm": 0.17648621798075145, |
|
"learning_rate": 1.1805315843976528e-05, |
|
"loss": 0.2814, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.3671947809878846, |
|
"grad_norm": 0.17180348643047774, |
|
"learning_rate": 1.1719019675526408e-05, |
|
"loss": 0.2679, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.3718546132339235, |
|
"grad_norm": 0.17163873087066184, |
|
"learning_rate": 1.1632723507076287e-05, |
|
"loss": 0.2809, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.3765144454799625, |
|
"grad_norm": 0.19443285061807042, |
|
"learning_rate": 1.1546427338626167e-05, |
|
"loss": 0.2709, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.381174277726002, |
|
"grad_norm": 0.1763276308757094, |
|
"learning_rate": 1.1460131170176045e-05, |
|
"loss": 0.2709, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.385834109972041, |
|
"grad_norm": 0.17298151925794328, |
|
"learning_rate": 1.1373835001725925e-05, |
|
"loss": 0.2742, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.39049394221808, |
|
"grad_norm": 0.18411058579613976, |
|
"learning_rate": 1.1287538833275803e-05, |
|
"loss": 0.2868, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.395153774464119, |
|
"grad_norm": 0.18158697728207573, |
|
"learning_rate": 1.1201242664825683e-05, |
|
"loss": 0.2793, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.3998136067101585, |
|
"grad_norm": 0.1785216933857131, |
|
"learning_rate": 1.1114946496375561e-05, |
|
"loss": 0.2736, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.4044734389561975, |
|
"grad_norm": 0.16270161978268782, |
|
"learning_rate": 1.1028650327925441e-05, |
|
"loss": 0.2788, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.409133271202237, |
|
"grad_norm": 0.1800761025929857, |
|
"learning_rate": 1.094235415947532e-05, |
|
"loss": 0.2906, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.413793103448276, |
|
"grad_norm": 0.1791936044618672, |
|
"learning_rate": 1.0856057991025199e-05, |
|
"loss": 0.2716, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.418452935694315, |
|
"grad_norm": 0.17530937117414502, |
|
"learning_rate": 1.0769761822575077e-05, |
|
"loss": 0.2773, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.423112767940354, |
|
"grad_norm": 0.17166473081666947, |
|
"learning_rate": 1.0683465654124957e-05, |
|
"loss": 0.2757, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.427772600186393, |
|
"grad_norm": 0.1706879157323149, |
|
"learning_rate": 1.0597169485674835e-05, |
|
"loss": 0.2803, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.4324324324324325, |
|
"grad_norm": 0.1738131864689864, |
|
"learning_rate": 1.0510873317224715e-05, |
|
"loss": 0.2749, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.4370922646784714, |
|
"grad_norm": 0.1843475170258911, |
|
"learning_rate": 1.0424577148774595e-05, |
|
"loss": 0.272, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.441752096924511, |
|
"grad_norm": 0.19903377588549018, |
|
"learning_rate": 1.0338280980324475e-05, |
|
"loss": 0.2767, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.4464119291705497, |
|
"grad_norm": 0.20606107162974335, |
|
"learning_rate": 1.0251984811874353e-05, |
|
"loss": 0.2737, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.451071761416589, |
|
"grad_norm": 0.171023290951825, |
|
"learning_rate": 1.0165688643424233e-05, |
|
"loss": 0.2714, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.455731593662628, |
|
"grad_norm": 0.18533903103307545, |
|
"learning_rate": 1.0079392474974112e-05, |
|
"loss": 0.2695, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.4603914259086674, |
|
"grad_norm": 0.17559078480116283, |
|
"learning_rate": 9.993096306523992e-06, |
|
"loss": 0.2717, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.4650512581547064, |
|
"grad_norm": 0.16308342288310101, |
|
"learning_rate": 9.90680013807387e-06, |
|
"loss": 0.2665, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.4697110904007458, |
|
"grad_norm": 0.17981260668345828, |
|
"learning_rate": 9.82050396962375e-06, |
|
"loss": 0.2811, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.4743709226467847, |
|
"grad_norm": 0.18246592302224546, |
|
"learning_rate": 9.734207801173628e-06, |
|
"loss": 0.2758, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.4790307548928237, |
|
"grad_norm": 0.17682925481784578, |
|
"learning_rate": 9.647911632723508e-06, |
|
"loss": 0.2707, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.483690587138863, |
|
"grad_norm": 0.17625800926177226, |
|
"learning_rate": 9.561615464273386e-06, |
|
"loss": 0.2767, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.488350419384902, |
|
"grad_norm": 0.17759777209359404, |
|
"learning_rate": 9.475319295823266e-06, |
|
"loss": 0.2763, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.4930102516309414, |
|
"grad_norm": 0.17167333274229002, |
|
"learning_rate": 9.389023127373144e-06, |
|
"loss": 0.2806, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.4976700838769803, |
|
"grad_norm": 0.18511179213868817, |
|
"learning_rate": 9.302726958923024e-06, |
|
"loss": 0.2766, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.5023299161230197, |
|
"grad_norm": 0.18392745158670212, |
|
"learning_rate": 9.216430790472904e-06, |
|
"loss": 0.2742, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.5069897483690586, |
|
"grad_norm": 0.17425534996379927, |
|
"learning_rate": 9.130134622022784e-06, |
|
"loss": 0.285, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.511649580615098, |
|
"grad_norm": 0.17772949274708227, |
|
"learning_rate": 9.043838453572662e-06, |
|
"loss": 0.2737, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.516309412861137, |
|
"grad_norm": 0.17917065742735563, |
|
"learning_rate": 8.957542285122542e-06, |
|
"loss": 0.272, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.5209692451071763, |
|
"grad_norm": 0.18701421523883227, |
|
"learning_rate": 8.87124611667242e-06, |
|
"loss": 0.2753, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.5256290773532153, |
|
"grad_norm": 0.17820051032936174, |
|
"learning_rate": 8.7849499482223e-06, |
|
"loss": 0.2775, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.5302889095992542, |
|
"grad_norm": 0.1813581411815883, |
|
"learning_rate": 8.698653779772179e-06, |
|
"loss": 0.2744, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.5349487418452936, |
|
"grad_norm": 0.1723367219389619, |
|
"learning_rate": 8.612357611322058e-06, |
|
"loss": 0.2825, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.5396085740913326, |
|
"grad_norm": 0.1825597003307237, |
|
"learning_rate": 8.526061442871937e-06, |
|
"loss": 0.2658, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.544268406337372, |
|
"grad_norm": 0.17953226914783166, |
|
"learning_rate": 8.439765274421817e-06, |
|
"loss": 0.2728, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.548928238583411, |
|
"grad_norm": 0.17282370480806067, |
|
"learning_rate": 8.353469105971695e-06, |
|
"loss": 0.2745, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.5535880708294503, |
|
"grad_norm": 0.19131767967715044, |
|
"learning_rate": 8.267172937521575e-06, |
|
"loss": 0.2702, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.558247903075489, |
|
"grad_norm": 0.18682723476428004, |
|
"learning_rate": 8.180876769071453e-06, |
|
"loss": 0.2744, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.562907735321528, |
|
"grad_norm": 0.169741564959811, |
|
"learning_rate": 8.094580600621333e-06, |
|
"loss": 0.2745, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.5675675675675675, |
|
"grad_norm": 0.19046968644655327, |
|
"learning_rate": 8.008284432171211e-06, |
|
"loss": 0.2753, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.572227399813607, |
|
"grad_norm": 0.18938757625869188, |
|
"learning_rate": 7.921988263721091e-06, |
|
"loss": 0.2819, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.576887232059646, |
|
"grad_norm": 0.196838655491514, |
|
"learning_rate": 7.835692095270971e-06, |
|
"loss": 0.2817, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.581547064305685, |
|
"grad_norm": 0.16669933003980705, |
|
"learning_rate": 7.749395926820851e-06, |
|
"loss": 0.269, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.586206896551724, |
|
"grad_norm": 0.19086740445946684, |
|
"learning_rate": 7.663099758370729e-06, |
|
"loss": 0.2727, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.590866728797763, |
|
"grad_norm": 0.18171222782724403, |
|
"learning_rate": 7.576803589920608e-06, |
|
"loss": 0.2811, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.5955265610438025, |
|
"grad_norm": 0.16766433754538815, |
|
"learning_rate": 7.490507421470487e-06, |
|
"loss": 0.2749, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.6001863932898415, |
|
"grad_norm": 0.16595154157280068, |
|
"learning_rate": 7.4042112530203655e-06, |
|
"loss": 0.28, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.604846225535881, |
|
"grad_norm": 0.17432293982115463, |
|
"learning_rate": 7.317915084570245e-06, |
|
"loss": 0.2802, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.60950605778192, |
|
"grad_norm": 0.17218627894292712, |
|
"learning_rate": 7.231618916120124e-06, |
|
"loss": 0.2719, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.6141658900279587, |
|
"grad_norm": 0.1634524658646518, |
|
"learning_rate": 7.1453227476700035e-06, |
|
"loss": 0.2801, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.618825722273998, |
|
"grad_norm": 0.17195997129276708, |
|
"learning_rate": 7.059026579219883e-06, |
|
"loss": 0.2723, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.6234855545200375, |
|
"grad_norm": 0.16683386409994144, |
|
"learning_rate": 6.9727304107697625e-06, |
|
"loss": 0.276, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.6281453867660765, |
|
"grad_norm": 0.18509321690413388, |
|
"learning_rate": 6.886434242319641e-06, |
|
"loss": 0.2839, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.6328052190121154, |
|
"grad_norm": 0.1897849621452287, |
|
"learning_rate": 6.800138073869521e-06, |
|
"loss": 0.281, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.637465051258155, |
|
"grad_norm": 0.17600147151565254, |
|
"learning_rate": 6.713841905419399e-06, |
|
"loss": 0.2754, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.6421248835041937, |
|
"grad_norm": 0.18358933299417948, |
|
"learning_rate": 6.627545736969279e-06, |
|
"loss": 0.2804, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.646784715750233, |
|
"grad_norm": 0.16580864272367096, |
|
"learning_rate": 6.541249568519157e-06, |
|
"loss": 0.2759, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.651444547996272, |
|
"grad_norm": 0.17576030625851174, |
|
"learning_rate": 6.454953400069037e-06, |
|
"loss": 0.2713, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.6561043802423114, |
|
"grad_norm": 0.18166243959913597, |
|
"learning_rate": 6.368657231618916e-06, |
|
"loss": 0.2802, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.6607642124883504, |
|
"grad_norm": 0.1750653292432041, |
|
"learning_rate": 6.282361063168796e-06, |
|
"loss": 0.274, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.6654240447343893, |
|
"grad_norm": 0.17633698735987563, |
|
"learning_rate": 6.196064894718675e-06, |
|
"loss": 0.282, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.6700838769804287, |
|
"grad_norm": 0.17201632030903413, |
|
"learning_rate": 6.109768726268554e-06, |
|
"loss": 0.2759, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.674743709226468, |
|
"grad_norm": 0.17683982317722033, |
|
"learning_rate": 6.023472557818433e-06, |
|
"loss": 0.2751, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.679403541472507, |
|
"grad_norm": 0.19223844859069536, |
|
"learning_rate": 5.937176389368312e-06, |
|
"loss": 0.2855, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.684063373718546, |
|
"grad_norm": 0.17369015071677205, |
|
"learning_rate": 5.850880220918191e-06, |
|
"loss": 0.2806, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.6887232059645854, |
|
"grad_norm": 0.18737304387850326, |
|
"learning_rate": 5.76458405246807e-06, |
|
"loss": 0.2745, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.6933830382106243, |
|
"grad_norm": 0.17932224811861935, |
|
"learning_rate": 5.67828788401795e-06, |
|
"loss": 0.2795, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.6980428704566637, |
|
"grad_norm": 0.17549128501841824, |
|
"learning_rate": 5.591991715567829e-06, |
|
"loss": 0.2756, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.7027027027027026, |
|
"grad_norm": 0.1889897215557741, |
|
"learning_rate": 5.5056955471177085e-06, |
|
"loss": 0.271, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.707362534948742, |
|
"grad_norm": 0.176712715251824, |
|
"learning_rate": 5.4193993786675876e-06, |
|
"loss": 0.2745, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.712022367194781, |
|
"grad_norm": 0.17488780229267484, |
|
"learning_rate": 5.333103210217467e-06, |
|
"loss": 0.2817, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.71668219944082, |
|
"grad_norm": 0.17578405904251992, |
|
"learning_rate": 5.246807041767346e-06, |
|
"loss": 0.2716, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.7213420316868593, |
|
"grad_norm": 0.16722817323501898, |
|
"learning_rate": 5.160510873317225e-06, |
|
"loss": 0.2697, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.7260018639328987, |
|
"grad_norm": 0.1662095852540087, |
|
"learning_rate": 5.074214704867105e-06, |
|
"loss": 0.2731, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.7306616961789376, |
|
"grad_norm": 0.17382804388755072, |
|
"learning_rate": 4.987918536416984e-06, |
|
"loss": 0.2827, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.7353215284249766, |
|
"grad_norm": 0.15989103315824374, |
|
"learning_rate": 4.901622367966863e-06, |
|
"loss": 0.2731, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.739981360671016, |
|
"grad_norm": 0.17481274883444473, |
|
"learning_rate": 4.815326199516742e-06, |
|
"loss": 0.2764, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.744641192917055, |
|
"grad_norm": 0.1727388307954847, |
|
"learning_rate": 4.729030031066621e-06, |
|
"loss": 0.2758, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.7493010251630943, |
|
"grad_norm": 0.17485814833895671, |
|
"learning_rate": 4.6427338626165e-06, |
|
"loss": 0.2795, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.753960857409133, |
|
"grad_norm": 0.16668606182641446, |
|
"learning_rate": 4.556437694166379e-06, |
|
"loss": 0.2693, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.7586206896551726, |
|
"grad_norm": 0.17170622218020629, |
|
"learning_rate": 4.470141525716258e-06, |
|
"loss": 0.2716, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.7632805219012115, |
|
"grad_norm": 0.17175973509703307, |
|
"learning_rate": 4.383845357266138e-06, |
|
"loss": 0.2763, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.7679403541472505, |
|
"grad_norm": 0.1761256767698172, |
|
"learning_rate": 4.297549188816017e-06, |
|
"loss": 0.2767, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.77260018639329, |
|
"grad_norm": 0.17126934009577965, |
|
"learning_rate": 4.211253020365896e-06, |
|
"loss": 0.2751, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.7772600186393293, |
|
"grad_norm": 0.16670798049129018, |
|
"learning_rate": 4.124956851915775e-06, |
|
"loss": 0.2723, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.781919850885368, |
|
"grad_norm": 0.16820690239197159, |
|
"learning_rate": 4.0386606834656544e-06, |
|
"loss": 0.2801, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.786579683131407, |
|
"grad_norm": 0.16780719956071383, |
|
"learning_rate": 3.9523645150155335e-06, |
|
"loss": 0.2753, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.7912395153774465, |
|
"grad_norm": 0.16662889959771904, |
|
"learning_rate": 3.8660683465654126e-06, |
|
"loss": 0.2707, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.7958993476234855, |
|
"grad_norm": 0.17034765846059502, |
|
"learning_rate": 3.779772178115292e-06, |
|
"loss": 0.2716, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.800559179869525, |
|
"grad_norm": 0.16987804960948094, |
|
"learning_rate": 3.693476009665171e-06, |
|
"loss": 0.2826, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.805219012115564, |
|
"grad_norm": 0.16709134754280006, |
|
"learning_rate": 3.6071798412150506e-06, |
|
"loss": 0.2852, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.809878844361603, |
|
"grad_norm": 0.17959504858137004, |
|
"learning_rate": 3.5208836727649297e-06, |
|
"loss": 0.2719, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.814538676607642, |
|
"grad_norm": 0.17694435961673835, |
|
"learning_rate": 3.434587504314809e-06, |
|
"loss": 0.2729, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.819198508853681, |
|
"grad_norm": 0.16858615519409365, |
|
"learning_rate": 3.348291335864688e-06, |
|
"loss": 0.2814, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.8238583410997204, |
|
"grad_norm": 0.16472556370426156, |
|
"learning_rate": 3.2619951674145674e-06, |
|
"loss": 0.2887, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.8285181733457594, |
|
"grad_norm": 0.17090660814144984, |
|
"learning_rate": 3.1756989989644464e-06, |
|
"loss": 0.2789, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.8331780055917988, |
|
"grad_norm": 0.1664335592724551, |
|
"learning_rate": 3.089402830514325e-06, |
|
"loss": 0.2745, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.8378378378378377, |
|
"grad_norm": 0.17059332709614805, |
|
"learning_rate": 3.0031066620642046e-06, |
|
"loss": 0.2738, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.842497670083877, |
|
"grad_norm": 0.17478620942656362, |
|
"learning_rate": 2.9168104936140837e-06, |
|
"loss": 0.2768, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.847157502329916, |
|
"grad_norm": 0.1745160502610121, |
|
"learning_rate": 2.8305143251639627e-06, |
|
"loss": 0.2738, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.8518173345759554, |
|
"grad_norm": 0.16465774847578624, |
|
"learning_rate": 2.7442181567138422e-06, |
|
"loss": 0.2715, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.8564771668219944, |
|
"grad_norm": 0.15867443448135737, |
|
"learning_rate": 2.6579219882637213e-06, |
|
"loss": 0.2719, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.8611369990680338, |
|
"grad_norm": 0.16521075659713555, |
|
"learning_rate": 2.5716258198136004e-06, |
|
"loss": 0.2715, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.8657968313140727, |
|
"grad_norm": 0.1718805282092452, |
|
"learning_rate": 2.4853296513634795e-06, |
|
"loss": 0.2748, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.8704566635601116, |
|
"grad_norm": 0.17162943950096435, |
|
"learning_rate": 2.399033482913359e-06, |
|
"loss": 0.2807, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.875116495806151, |
|
"grad_norm": 0.16105504621841749, |
|
"learning_rate": 2.312737314463238e-06, |
|
"loss": 0.2701, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.87977632805219, |
|
"grad_norm": 0.17198488326251693, |
|
"learning_rate": 2.226441146013117e-06, |
|
"loss": 0.2704, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.8844361602982294, |
|
"grad_norm": 0.15655415826821228, |
|
"learning_rate": 2.140144977562996e-06, |
|
"loss": 0.2738, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.8890959925442683, |
|
"grad_norm": 0.1672514927854638, |
|
"learning_rate": 2.0538488091128757e-06, |
|
"loss": 0.2714, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.8937558247903077, |
|
"grad_norm": 0.16501089822891354, |
|
"learning_rate": 1.9675526406627547e-06, |
|
"loss": 0.2686, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.8984156570363466, |
|
"grad_norm": 0.16797199409520505, |
|
"learning_rate": 1.8812564722126338e-06, |
|
"loss": 0.2798, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.9030754892823856, |
|
"grad_norm": 0.16204249680723293, |
|
"learning_rate": 1.794960303762513e-06, |
|
"loss": 0.2669, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.907735321528425, |
|
"grad_norm": 0.16489785712987573, |
|
"learning_rate": 1.7086641353123924e-06, |
|
"loss": 0.2777, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.9123951537744643, |
|
"grad_norm": 0.16188984157048358, |
|
"learning_rate": 1.6223679668622715e-06, |
|
"loss": 0.2693, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.9170549860205033, |
|
"grad_norm": 0.17351613142325312, |
|
"learning_rate": 1.5360717984121505e-06, |
|
"loss": 0.2686, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.9217148182665422, |
|
"grad_norm": 0.16722557848064562, |
|
"learning_rate": 1.4497756299620296e-06, |
|
"loss": 0.2723, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.9263746505125816, |
|
"grad_norm": 0.16574285596328986, |
|
"learning_rate": 1.363479461511909e-06, |
|
"loss": 0.2692, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.9310344827586206, |
|
"grad_norm": 0.15943206131907792, |
|
"learning_rate": 1.277183293061788e-06, |
|
"loss": 0.2731, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.93569431500466, |
|
"grad_norm": 0.16550622891866895, |
|
"learning_rate": 1.1908871246116673e-06, |
|
"loss": 0.2748, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.940354147250699, |
|
"grad_norm": 0.16722277195463936, |
|
"learning_rate": 1.1045909561615463e-06, |
|
"loss": 0.274, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.9450139794967383, |
|
"grad_norm": 0.16466126003616866, |
|
"learning_rate": 1.0182947877114256e-06, |
|
"loss": 0.2767, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.949673811742777, |
|
"grad_norm": 0.16026111253278752, |
|
"learning_rate": 9.319986192613048e-07, |
|
"loss": 0.2694, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.954333643988816, |
|
"grad_norm": 0.1669111211801107, |
|
"learning_rate": 8.45702450811184e-07, |
|
"loss": 0.2723, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.9589934762348555, |
|
"grad_norm": 0.16252544121483595, |
|
"learning_rate": 7.594062823610632e-07, |
|
"loss": 0.2756, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.963653308480895, |
|
"grad_norm": 0.16741227468406691, |
|
"learning_rate": 6.731101139109423e-07, |
|
"loss": 0.2789, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.968313140726934, |
|
"grad_norm": 0.15948583399835667, |
|
"learning_rate": 5.868139454608215e-07, |
|
"loss": 0.2726, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.972972972972973, |
|
"grad_norm": 0.1566399337168256, |
|
"learning_rate": 5.005177770107007e-07, |
|
"loss": 0.2725, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.977632805219012, |
|
"grad_norm": 0.16159042104615806, |
|
"learning_rate": 4.142216085605799e-07, |
|
"loss": 0.2726, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.982292637465051, |
|
"grad_norm": 0.16116182995208048, |
|
"learning_rate": 3.279254401104591e-07, |
|
"loss": 0.28, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.9869524697110905, |
|
"grad_norm": 0.16435368780395243, |
|
"learning_rate": 2.416292716603383e-07, |
|
"loss": 0.2735, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.9916123019571295, |
|
"grad_norm": 0.17079151019873065, |
|
"learning_rate": 1.5533310321021747e-07, |
|
"loss": 0.2772, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.996272134203169, |
|
"grad_norm": 0.1643652958381842, |
|
"learning_rate": 6.903693476009665e-08, |
|
"loss": 0.2751, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 3219, |
|
"total_flos": 2.755855971073917e+18, |
|
"train_loss": 0.4105892218429504, |
|
"train_runtime": 42482.7051, |
|
"train_samples_per_second": 1.212, |
|
"train_steps_per_second": 0.076 |
|
} |
|
], |
"logging_steps": 5,
"max_steps": 3219,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.755855971073917e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}