|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.8313847752663029, |
|
"eval_steps": 500, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 35.52880096435547, |
|
"learning_rate": 5e-06, |
|
"loss": 13.913, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 27.649765014648438, |
|
"learning_rate": 1e-05, |
|
"loss": 13.4311, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.23821258544922, |
|
"learning_rate": 1.5e-05, |
|
"loss": 12.2243, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 11.717832565307617, |
|
"learning_rate": 2e-05, |
|
"loss": 10.8725, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.961292266845703, |
|
"learning_rate": 2.5e-05, |
|
"loss": 9.8498, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.860273838043213, |
|
"learning_rate": 3e-05, |
|
"loss": 9.0782, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.4377548694610596, |
|
"learning_rate": 3.5e-05, |
|
"loss": 8.516, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.771414041519165, |
|
"learning_rate": 4e-05, |
|
"loss": 8.1584, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.5369564294815063, |
|
"learning_rate": 4.5e-05, |
|
"loss": 7.9712, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.1230754852294922, |
|
"learning_rate": 5e-05, |
|
"loss": 7.9062, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.8305569291114807, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 7.8964, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.7593777775764465, |
|
"learning_rate": 6e-05, |
|
"loss": 7.8687, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.6929013133049011, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 7.8158, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.3948832750320435, |
|
"learning_rate": 7e-05, |
|
"loss": 7.722, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.8355083465576172, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 7.5869, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.5804309844970703, |
|
"learning_rate": 8e-05, |
|
"loss": 7.4761, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4202959537506104, |
|
"learning_rate": 8.5e-05, |
|
"loss": 7.3409, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.6333388090133667, |
|
"learning_rate": 9e-05, |
|
"loss": 7.148, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.2520595788955688, |
|
"learning_rate": 9.5e-05, |
|
"loss": 7.0504, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.1779870986938477, |
|
"learning_rate": 0.0001, |
|
"loss": 6.8871, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.1255111694335938, |
|
"learning_rate": 9.999972205865686e-05, |
|
"loss": 6.7967, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.3718830347061157, |
|
"learning_rate": 9.999888823771751e-05, |
|
"loss": 6.6346, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.3273447751998901, |
|
"learning_rate": 9.999749854645204e-05, |
|
"loss": 6.485, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.462202310562134, |
|
"learning_rate": 9.99955530003106e-05, |
|
"loss": 6.4167, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.3976832628250122, |
|
"learning_rate": 9.99930516209231e-05, |
|
"loss": 6.2702, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9608620405197144, |
|
"learning_rate": 9.998999443609897e-05, |
|
"loss": 6.2682, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.4369174242019653, |
|
"learning_rate": 9.998638147982696e-05, |
|
"loss": 6.099, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.3051707744598389, |
|
"learning_rate": 9.998221279227467e-05, |
|
"loss": 6.0768, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.865341305732727, |
|
"learning_rate": 9.997748841978812e-05, |
|
"loss": 5.9998, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.810935616493225, |
|
"learning_rate": 9.997220841489122e-05, |
|
"loss": 5.8743, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.2615277767181396, |
|
"learning_rate": 9.996637283628528e-05, |
|
"loss": 5.8808, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.114527702331543, |
|
"learning_rate": 9.995998174884821e-05, |
|
"loss": 5.7969, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.029925584793091, |
|
"learning_rate": 9.995303522363394e-05, |
|
"loss": 5.7036, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.2772178649902344, |
|
"learning_rate": 9.99455333378715e-05, |
|
"loss": 5.6773, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.9576845169067383, |
|
"learning_rate": 9.993747617496428e-05, |
|
"loss": 5.6935, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.008150577545166, |
|
"learning_rate": 9.9928863824489e-05, |
|
"loss": 5.6154, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.7725847959518433, |
|
"learning_rate": 9.99196963821948e-05, |
|
"loss": 5.6392, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.334122896194458, |
|
"learning_rate": 9.990997395000217e-05, |
|
"loss": 5.548, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.318779468536377, |
|
"learning_rate": 9.989969663600169e-05, |
|
"loss": 5.4917, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.765774130821228, |
|
"learning_rate": 9.9888864554453e-05, |
|
"loss": 5.4576, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.9322354793548584, |
|
"learning_rate": 9.987747782578342e-05, |
|
"loss": 5.4416, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.2729501724243164, |
|
"learning_rate": 9.986553657658668e-05, |
|
"loss": 5.4067, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.7171967029571533, |
|
"learning_rate": 9.985304093962145e-05, |
|
"loss": 5.2989, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.9652690887451172, |
|
"learning_rate": 9.983999105380988e-05, |
|
"loss": 5.2895, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.945024847984314, |
|
"learning_rate": 9.982638706423608e-05, |
|
"loss": 5.2129, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.4888384342193604, |
|
"learning_rate": 9.98122291221445e-05, |
|
"loss": 5.2575, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.6458323001861572, |
|
"learning_rate": 9.979751738493826e-05, |
|
"loss": 5.2492, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.904470920562744, |
|
"learning_rate": 9.978225201617732e-05, |
|
"loss": 5.2304, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.577502489089966, |
|
"learning_rate": 9.976643318557678e-05, |
|
"loss": 5.1574, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.2518582344055176, |
|
"learning_rate": 9.975006106900495e-05, |
|
"loss": 5.1468, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.210632562637329, |
|
"learning_rate": 9.973313584848132e-05, |
|
"loss": 5.1903, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.289008855819702, |
|
"learning_rate": 9.971565771217464e-05, |
|
"loss": 5.0538, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.9316537380218506, |
|
"learning_rate": 9.969762685440076e-05, |
|
"loss": 5.0137, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.8232439756393433, |
|
"learning_rate": 9.967904347562054e-05, |
|
"loss": 5.0465, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.632053852081299, |
|
"learning_rate": 9.965990778243755e-05, |
|
"loss": 4.9713, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.431483745574951, |
|
"learning_rate": 9.964021998759577e-05, |
|
"loss": 4.9716, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.3636586666107178, |
|
"learning_rate": 9.961998030997733e-05, |
|
"loss": 5.0058, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.994718551635742, |
|
"learning_rate": 9.95991889745999e-05, |
|
"loss": 4.9891, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.4368574619293213, |
|
"learning_rate": 9.957784621261441e-05, |
|
"loss": 4.9757, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.8038134574890137, |
|
"learning_rate": 9.955595226130226e-05, |
|
"loss": 4.9921, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.0562264919281006, |
|
"learning_rate": 9.953350736407282e-05, |
|
"loss": 4.9411, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.29128360748291, |
|
"learning_rate": 9.951051177046069e-05, |
|
"loss": 4.9772, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.8482352495193481, |
|
"learning_rate": 9.948696573612292e-05, |
|
"loss": 4.9491, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.426596164703369, |
|
"learning_rate": 9.946286952283618e-05, |
|
"loss": 4.9207, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.412191152572632, |
|
"learning_rate": 9.943822339849381e-05, |
|
"loss": 4.8813, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.520648956298828, |
|
"learning_rate": 9.941302763710288e-05, |
|
"loss": 4.8326, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.1452250480651855, |
|
"learning_rate": 9.938728251878116e-05, |
|
"loss": 4.8624, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.7072198390960693, |
|
"learning_rate": 9.936098832975393e-05, |
|
"loss": 4.7228, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.610759973526001, |
|
"learning_rate": 9.933414536235091e-05, |
|
"loss": 4.7795, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.72112238407135, |
|
"learning_rate": 9.93067539150029e-05, |
|
"loss": 4.8786, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.8034067153930664, |
|
"learning_rate": 9.927881429223853e-05, |
|
"loss": 4.7931, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.0501575469970703, |
|
"learning_rate": 9.925032680468085e-05, |
|
"loss": 4.7893, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.5287954807281494, |
|
"learning_rate": 9.922129176904388e-05, |
|
"loss": 4.7427, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.001993417739868, |
|
"learning_rate": 9.919170950812911e-05, |
|
"loss": 4.6804, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.9089677333831787, |
|
"learning_rate": 9.916158035082184e-05, |
|
"loss": 4.7201, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.031919479370117, |
|
"learning_rate": 9.913090463208763e-05, |
|
"loss": 4.5473, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.068758964538574, |
|
"learning_rate": 9.90996826929685e-05, |
|
"loss": 4.637, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.9847966432571411, |
|
"learning_rate": 9.906791488057916e-05, |
|
"loss": 4.6055, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.768234372138977, |
|
"learning_rate": 9.903560154810313e-05, |
|
"loss": 4.6769, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.6738271713256836, |
|
"learning_rate": 9.900274305478887e-05, |
|
"loss": 4.6804, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.7261288166046143, |
|
"learning_rate": 9.896933976594572e-05, |
|
"loss": 4.7266, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.4627445936203003, |
|
"learning_rate": 9.893539205293989e-05, |
|
"loss": 4.6272, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.5622859001159668, |
|
"learning_rate": 9.890090029319028e-05, |
|
"loss": 4.6135, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.9910693168640137, |
|
"learning_rate": 9.886586487016433e-05, |
|
"loss": 4.5982, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.1940295696258545, |
|
"learning_rate": 9.883028617337378e-05, |
|
"loss": 4.6473, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.3309640884399414, |
|
"learning_rate": 9.879416459837022e-05, |
|
"loss": 4.5478, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.0572140216827393, |
|
"learning_rate": 9.875750054674082e-05, |
|
"loss": 4.5874, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.139042854309082, |
|
"learning_rate": 9.872029442610382e-05, |
|
"loss": 4.5338, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.662381887435913, |
|
"learning_rate": 9.8682546650104e-05, |
|
"loss": 4.5524, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.8986903429031372, |
|
"learning_rate": 9.864425763840802e-05, |
|
"loss": 4.6181, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.9163240194320679, |
|
"learning_rate": 9.860542781669988e-05, |
|
"loss": 4.5864, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.5074962377548218, |
|
"learning_rate": 9.85660576166761e-05, |
|
"loss": 4.456, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8273178339004517, |
|
"learning_rate": 9.852614747604093e-05, |
|
"loss": 4.4551, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.927778720855713, |
|
"learning_rate": 9.848569783850145e-05, |
|
"loss": 4.423, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.81380295753479, |
|
"learning_rate": 9.844470915376278e-05, |
|
"loss": 4.4809, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.4844441413879395, |
|
"learning_rate": 9.840318187752292e-05, |
|
"loss": 4.4751, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.5609166622161865, |
|
"learning_rate": 9.836111647146771e-05, |
|
"loss": 4.4466, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.7383570671081543, |
|
"learning_rate": 9.831851340326577e-05, |
|
"loss": 4.4678, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.1461851596832275, |
|
"learning_rate": 9.82753731465633e-05, |
|
"loss": 4.423, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8402971029281616, |
|
"learning_rate": 9.823169618097871e-05, |
|
"loss": 4.4111, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8172292709350586, |
|
"learning_rate": 9.81874829920974e-05, |
|
"loss": 4.4032, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.862669825553894, |
|
"learning_rate": 9.814273407146623e-05, |
|
"loss": 4.4133, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.58668851852417, |
|
"learning_rate": 9.809744991658829e-05, |
|
"loss": 4.4304, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.267015218734741, |
|
"learning_rate": 9.805163103091708e-05, |
|
"loss": 4.4452, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.892336130142212, |
|
"learning_rate": 9.800527792385112e-05, |
|
"loss": 4.3714, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.2870523929595947, |
|
"learning_rate": 9.79583911107282e-05, |
|
"loss": 4.3809, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7279003858566284, |
|
"learning_rate": 9.791097111281968e-05, |
|
"loss": 4.4289, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.5451871156692505, |
|
"learning_rate": 9.786301845732467e-05, |
|
"loss": 4.3959, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.040153741836548, |
|
"learning_rate": 9.781453367736418e-05, |
|
"loss": 4.3958, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7074291706085205, |
|
"learning_rate": 9.776551731197524e-05, |
|
"loss": 4.445, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.0845582485198975, |
|
"learning_rate": 9.771596990610478e-05, |
|
"loss": 4.321, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.7364312410354614, |
|
"learning_rate": 9.766589201060372e-05, |
|
"loss": 4.3121, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.838957667350769, |
|
"learning_rate": 9.761528418222077e-05, |
|
"loss": 4.3749, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.750223994255066, |
|
"learning_rate": 9.756414698359624e-05, |
|
"loss": 4.3366, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.4889832735061646, |
|
"learning_rate": 9.75124809832558e-05, |
|
"loss": 4.3415, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.8685040473937988, |
|
"learning_rate": 9.746028675560413e-05, |
|
"loss": 4.3689, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.534946084022522, |
|
"learning_rate": 9.740756488091861e-05, |
|
"loss": 4.3685, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.5127125978469849, |
|
"learning_rate": 9.735431594534277e-05, |
|
"loss": 4.3401, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.5884028673171997, |
|
"learning_rate": 9.730054054087983e-05, |
|
"loss": 4.3424, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.3165289163589478, |
|
"learning_rate": 9.724623926538612e-05, |
|
"loss": 4.2607, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4285833835601807, |
|
"learning_rate": 9.719141272256443e-05, |
|
"loss": 4.3209, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.1819188594818115, |
|
"learning_rate": 9.713606152195726e-05, |
|
"loss": 4.3364, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.726144552230835, |
|
"learning_rate": 9.708018627894011e-05, |
|
"loss": 4.3645, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.9843640327453613, |
|
"learning_rate": 9.702378761471456e-05, |
|
"loss": 4.238, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.620071291923523, |
|
"learning_rate": 9.696686615630146e-05, |
|
"loss": 4.2553, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4403271675109863, |
|
"learning_rate": 9.690942253653385e-05, |
|
"loss": 4.3202, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4145867824554443, |
|
"learning_rate": 9.685145739405002e-05, |
|
"loss": 4.2826, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.439998745918274, |
|
"learning_rate": 9.679297137328634e-05, |
|
"loss": 4.2418, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.131375789642334, |
|
"learning_rate": 9.673396512447013e-05, |
|
"loss": 4.2036, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.8813244104385376, |
|
"learning_rate": 9.667443930361247e-05, |
|
"loss": 4.1535, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.4201704263687134, |
|
"learning_rate": 9.661439457250076e-05, |
|
"loss": 4.2536, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3782165050506592, |
|
"learning_rate": 9.655383159869158e-05, |
|
"loss": 4.1932, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.094642162322998, |
|
"learning_rate": 9.649275105550309e-05, |
|
"loss": 4.1604, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.7626591920852661, |
|
"learning_rate": 9.643115362200762e-05, |
|
"loss": 4.0819, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.609782099723816, |
|
"learning_rate": 9.636903998302409e-05, |
|
"loss": 4.1571, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.287858486175537, |
|
"learning_rate": 9.630641082911045e-05, |
|
"loss": 4.1096, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6848094463348389, |
|
"learning_rate": 9.624326685655593e-05, |
|
"loss": 4.2902, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.5279600620269775, |
|
"learning_rate": 9.617960876737337e-05, |
|
"loss": 4.1994, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3572136163711548, |
|
"learning_rate": 9.611543726929134e-05, |
|
"loss": 4.1961, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6097009181976318, |
|
"learning_rate": 9.605075307574635e-05, |
|
"loss": 4.1777, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.7889363765716553, |
|
"learning_rate": 9.598555690587487e-05, |
|
"loss": 4.1743, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6500908136367798, |
|
"learning_rate": 9.591984948450532e-05, |
|
"loss": 4.1654, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.0012271404266357, |
|
"learning_rate": 9.585363154215008e-05, |
|
"loss": 4.1007, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.140477418899536, |
|
"learning_rate": 9.578690381499728e-05, |
|
"loss": 4.2186, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5439093112945557, |
|
"learning_rate": 9.571966704490271e-05, |
|
"loss": 4.1307, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.7154203653335571, |
|
"learning_rate": 9.565192197938148e-05, |
|
"loss": 4.1912, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.53816556930542, |
|
"learning_rate": 9.558366937159977e-05, |
|
"loss": 4.2425, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3727734088897705, |
|
"learning_rate": 9.551490998036646e-05, |
|
"loss": 4.2211, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5313302278518677, |
|
"learning_rate": 9.544564457012463e-05, |
|
"loss": 4.1654, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.9445953369140625, |
|
"learning_rate": 9.537587391094314e-05, |
|
"loss": 4.2171, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.5916863679885864, |
|
"learning_rate": 9.5305598778508e-05, |
|
"loss": 4.1061, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3092683553695679, |
|
"learning_rate": 9.52348199541138e-05, |
|
"loss": 4.2096, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3142445087432861, |
|
"learning_rate": 9.516353822465504e-05, |
|
"loss": 4.1241, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.357508897781372, |
|
"learning_rate": 9.509175438261726e-05, |
|
"loss": 4.1138, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.302988052368164, |
|
"learning_rate": 9.501946922606838e-05, |
|
"loss": 4.1249, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.1296546459198, |
|
"learning_rate": 9.494668355864973e-05, |
|
"loss": 4.1248, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.0370583534240723, |
|
"learning_rate": 9.487339818956716e-05, |
|
"loss": 4.1247, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.6055999994277954, |
|
"learning_rate": 9.479961393358203e-05, |
|
"loss": 3.9874, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.8076801300048828, |
|
"learning_rate": 9.472533161100215e-05, |
|
"loss": 4.1358, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.474900245666504, |
|
"learning_rate": 9.465055204767265e-05, |
|
"loss": 4.0485, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.3105722665786743, |
|
"learning_rate": 9.457527607496685e-05, |
|
"loss": 4.1364, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.3706189393997192, |
|
"learning_rate": 9.44995045297769e-05, |
|
"loss": 4.0926, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.493538737297058, |
|
"learning_rate": 9.442323825450464e-05, |
|
"loss": 4.0733, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.29449462890625, |
|
"learning_rate": 9.43464780970521e-05, |
|
"loss": 4.0523, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.291658878326416, |
|
"learning_rate": 9.426922491081212e-05, |
|
"loss": 4.1155, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2312451601028442, |
|
"learning_rate": 9.419147955465888e-05, |
|
"loss": 4.006, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.4466781616210938, |
|
"learning_rate": 9.411324289293832e-05, |
|
"loss": 4.1482, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2307556867599487, |
|
"learning_rate": 9.403451579545859e-05, |
|
"loss": 4.086, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3094658851623535, |
|
"learning_rate": 9.395529913748025e-05, |
|
"loss": 4.0652, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.4318170547485352, |
|
"learning_rate": 9.387559379970672e-05, |
|
"loss": 4.1354, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2737038135528564, |
|
"learning_rate": 9.379540066827431e-05, |
|
"loss": 4.0278, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1153333187103271, |
|
"learning_rate": 9.371472063474248e-05, |
|
"loss": 4.0898, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2244912385940552, |
|
"learning_rate": 9.363355459608394e-05, |
|
"loss": 3.9959, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1208878755569458, |
|
"learning_rate": 9.355190345467457e-05, |
|
"loss": 4.0623, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1840647459030151, |
|
"learning_rate": 9.346976811828352e-05, |
|
"loss": 4.045, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.4008674621582031, |
|
"learning_rate": 9.338714950006297e-05, |
|
"loss": 4.0959, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3445881605148315, |
|
"learning_rate": 9.330404851853817e-05, |
|
"loss": 4.02, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1713682413101196, |
|
"learning_rate": 9.3220466097597e-05, |
|
"loss": 4.0312, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2331210374832153, |
|
"learning_rate": 9.313640316647991e-05, |
|
"loss": 4.0289, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.279057502746582, |
|
"learning_rate": 9.305186065976945e-05, |
|
"loss": 4.0998, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3163552284240723, |
|
"learning_rate": 9.296683951737993e-05, |
|
"loss": 4.0448, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3067126274108887, |
|
"learning_rate": 9.288134068454697e-05, |
|
"loss": 4.029, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.145782470703125, |
|
"learning_rate": 9.2795365111817e-05, |
|
"loss": 4.0305, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.6717885732650757, |
|
"learning_rate": 9.270891375503665e-05, |
|
"loss": 3.9886, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.3663681745529175, |
|
"learning_rate": 9.262198757534218e-05, |
|
"loss": 3.9891, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.4202651977539062, |
|
"learning_rate": 9.253458753914874e-05, |
|
"loss": 3.9358, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2852869033813477, |
|
"learning_rate": 9.244671461813969e-05, |
|
"loss": 4.0289, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.138818383216858, |
|
"learning_rate": 9.235836978925572e-05, |
|
"loss": 4.0046, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1434671878814697, |
|
"learning_rate": 9.226955403468406e-05, |
|
"loss": 4.0873, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.3317791223526, |
|
"learning_rate": 9.21802683418475e-05, |
|
"loss": 3.963, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2954212427139282, |
|
"learning_rate": 9.209051370339347e-05, |
|
"loss": 3.9772, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.265075922012329, |
|
"learning_rate": 9.200029111718295e-05, |
|
"loss": 3.9925, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2149906158447266, |
|
"learning_rate": 9.190960158627941e-05, |
|
"loss": 4.0012, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2477954626083374, |
|
"learning_rate": 9.181844611893766e-05, |
|
"loss": 3.9611, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.324196696281433, |
|
"learning_rate": 9.172682572859261e-05, |
|
"loss": 3.9677, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.3045045137405396, |
|
"learning_rate": 9.163474143384806e-05, |
|
"loss": 3.918, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.8825814723968506, |
|
"learning_rate": 9.154219425846528e-05, |
|
"loss": 4.0018, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.4024105072021484, |
|
"learning_rate": 9.144918523135175e-05, |
|
"loss": 3.8843, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1396621465682983, |
|
"learning_rate": 9.13557153865496e-05, |
|
"loss": 3.9899, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.4695543050765991, |
|
"learning_rate": 9.12617857632242e-05, |
|
"loss": 3.9376, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0880409479141235, |
|
"learning_rate": 9.116739740565259e-05, |
|
"loss": 3.9491, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1712977886199951, |
|
"learning_rate": 9.107255136321184e-05, |
|
"loss": 3.8714, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2302933931350708, |
|
"learning_rate": 9.09772486903674e-05, |
|
"loss": 3.8528, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.258522629737854, |
|
"learning_rate": 9.08814904466614e-05, |
|
"loss": 3.9281, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.213845133781433, |
|
"learning_rate": 9.078527769670085e-05, |
|
"loss": 3.9133, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2096773386001587, |
|
"learning_rate": 9.068861151014575e-05, |
|
"loss": 3.9051, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3456385135650635, |
|
"learning_rate": 9.05914929616973e-05, |
|
"loss": 3.8499, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3120944499969482, |
|
"learning_rate": 9.04939231310859e-05, |
|
"loss": 3.932, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2625679969787598, |
|
"learning_rate": 9.039590310305914e-05, |
|
"loss": 3.851, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.229634165763855, |
|
"learning_rate": 9.029743396736974e-05, |
|
"loss": 3.8906, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3108216524124146, |
|
"learning_rate": 9.019851681876348e-05, |
|
"loss": 3.8976, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.382756233215332, |
|
"learning_rate": 9.009915275696693e-05, |
|
"loss": 3.9716, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.498990774154663, |
|
"learning_rate": 8.999934288667534e-05, |
|
"loss": 3.931, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2947123050689697, |
|
"learning_rate": 8.989908831754028e-05, |
|
"loss": 3.8841, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.247287392616272, |
|
"learning_rate": 8.979839016415735e-05, |
|
"loss": 3.7915, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2066890001296997, |
|
"learning_rate": 8.969724954605373e-05, |
|
"loss": 3.922, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.2116167545318604, |
|
"learning_rate": 8.959566758767581e-05, |
|
"loss": 3.8854, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1037461757659912, |
|
"learning_rate": 8.949364541837661e-05, |
|
"loss": 3.8996, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.324324369430542, |
|
"learning_rate": 8.939118417240329e-05, |
|
"loss": 3.9283, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.3123801946640015, |
|
"learning_rate": 8.92882849888845e-05, |
|
"loss": 3.8173, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.2384531497955322, |
|
"learning_rate": 8.918494901181773e-05, |
|
"loss": 3.9028, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1092414855957031, |
|
"learning_rate": 8.908117739005659e-05, |
|
"loss": 3.783, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.040840983390808, |
|
"learning_rate": 8.897697127729805e-05, |
|
"loss": 3.867, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.2289977073669434, |
|
"learning_rate": 8.887233183206957e-05, |
|
"loss": 3.8533, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.9840658903121948, |
|
"learning_rate": 8.876726021771627e-05, |
|
"loss": 3.851, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1351091861724854, |
|
"learning_rate": 8.866175760238798e-05, |
|
"loss": 3.792, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.4369754791259766, |
|
"learning_rate": 8.855582515902625e-05, |
|
"loss": 3.8564, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2083522081375122, |
|
"learning_rate": 8.844946406535131e-05, |
|
"loss": 3.8876, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2528421878814697, |
|
"learning_rate": 8.834267550384893e-05, |
|
"loss": 3.7933, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.289838433265686, |
|
"learning_rate": 8.823546066175741e-05, |
|
"loss": 3.8005, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1734122037887573, |
|
"learning_rate": 8.81278207310542e-05, |
|
"loss": 3.7707, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1725057363510132, |
|
"learning_rate": 8.801975690844278e-05, |
|
"loss": 3.8424, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.9867896437644958, |
|
"learning_rate": 8.791127039533934e-05, |
|
"loss": 3.7997, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1472907066345215, |
|
"learning_rate": 8.780236239785935e-05, |
|
"loss": 3.8205, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1044718027114868, |
|
"learning_rate": 8.76930341268042e-05, |
|
"loss": 3.789, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2645314931869507, |
|
"learning_rate": 8.758328679764776e-05, |
|
"loss": 3.9305, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1153810024261475, |
|
"learning_rate": 8.747312163052284e-05, |
|
"loss": 3.7777, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1703031063079834, |
|
"learning_rate": 8.736253985020761e-05, |
|
"loss": 3.819, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2039633989334106, |
|
"learning_rate": 8.725154268611203e-05, |
|
"loss": 3.742, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.057329773902893, |
|
"learning_rate": 8.714013137226411e-05, |
|
"loss": 3.7937, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1288293600082397, |
|
"learning_rate": 8.702830714729628e-05, |
|
"loss": 3.8379, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.081825613975525, |
|
"learning_rate": 8.691607125443153e-05, |
|
"loss": 3.8629, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1750764846801758, |
|
"learning_rate": 8.680342494146967e-05, |
|
"loss": 3.83, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1391602754592896, |
|
"learning_rate": 8.66903694607734e-05, |
|
"loss": 3.8315, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.4016056060791016, |
|
"learning_rate": 8.65769060692544e-05, |
|
"loss": 3.7994, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0848188400268555, |
|
"learning_rate": 8.646303602835936e-05, |
|
"loss": 3.7555, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.025410532951355, |
|
"learning_rate": 8.634876060405597e-05, |
|
"loss": 3.8574, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1180078983306885, |
|
"learning_rate": 8.623408106681884e-05, |
|
"loss": 3.8189, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.166062593460083, |
|
"learning_rate": 8.611899869161535e-05, |
|
"loss": 3.6929, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.2254574298858643, |
|
"learning_rate": 8.600351475789147e-05, |
|
"loss": 3.8875, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.343238353729248, |
|
"learning_rate": 8.588763054955764e-05, |
|
"loss": 3.7674, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1699622869491577, |
|
"learning_rate": 8.57713473549743e-05, |
|
"loss": 3.7034, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0328434705734253, |
|
"learning_rate": 8.565466646693778e-05, |
|
"loss": 3.8051, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.2840907573699951, |
|
"learning_rate": 8.553758918266578e-05, |
|
"loss": 3.756, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2463798522949219, |
|
"learning_rate": 8.5420116803783e-05, |
|
"loss": 3.8213, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.457021951675415, |
|
"learning_rate": 8.530225063630668e-05, |
|
"loss": 3.8307, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.196196436882019, |
|
"learning_rate": 8.518399199063205e-05, |
|
"loss": 3.7173, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2081055641174316, |
|
"learning_rate": 8.50653421815178e-05, |
|
"loss": 3.76, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0329689979553223, |
|
"learning_rate": 8.494630252807138e-05, |
|
"loss": 3.7521, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0779180526733398, |
|
"learning_rate": 8.482687435373449e-05, |
|
"loss": 3.694, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2917273044586182, |
|
"learning_rate": 8.470705898626817e-05, |
|
"loss": 3.7758, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.575312852859497, |
|
"learning_rate": 8.458685775773822e-05, |
|
"loss": 3.7461, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2756739854812622, |
|
"learning_rate": 8.446627200450025e-05, |
|
"loss": 3.764, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.1574435234069824, |
|
"learning_rate": 8.434530306718493e-05, |
|
"loss": 3.776, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1152746677398682, |
|
"learning_rate": 8.4223952290683e-05, |
|
"loss": 3.8227, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.215476393699646, |
|
"learning_rate": 8.41022210241304e-05, |
|
"loss": 3.684, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2368085384368896, |
|
"learning_rate": 8.398011062089316e-05, |
|
"loss": 3.7279, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1022911071777344, |
|
"learning_rate": 8.385762243855249e-05, |
|
"loss": 3.7327, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.9960315227508545, |
|
"learning_rate": 8.373475783888958e-05, |
|
"loss": 3.6113, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.3093613386154175, |
|
"learning_rate": 8.36115181878705e-05, |
|
"loss": 3.7385, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1868112087249756, |
|
"learning_rate": 8.348790485563101e-05, |
|
"loss": 3.7108, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.20425546169281, |
|
"learning_rate": 8.336391921646134e-05, |
|
"loss": 3.6743, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1599810123443604, |
|
"learning_rate": 8.323956264879089e-05, |
|
"loss": 3.7433, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2243181467056274, |
|
"learning_rate": 8.311483653517294e-05, |
|
"loss": 3.6934, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.073176383972168, |
|
"learning_rate": 8.298974226226919e-05, |
|
"loss": 3.697, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.175176739692688, |
|
"learning_rate": 8.28642812208345e-05, |
|
"loss": 3.7262, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.068116307258606, |
|
"learning_rate": 8.273845480570123e-05, |
|
"loss": 3.6871, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1474370956420898, |
|
"learning_rate": 8.26122644157639e-05, |
|
"loss": 3.7306, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.140490174293518, |
|
"learning_rate": 8.248571145396362e-05, |
|
"loss": 3.6987, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1814143657684326, |
|
"learning_rate": 8.235879732727236e-05, |
|
"loss": 3.7757, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2589894533157349, |
|
"learning_rate": 8.223152344667745e-05, |
|
"loss": 3.7821, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0849429368972778, |
|
"learning_rate": 8.21038912271658e-05, |
|
"loss": 3.6301, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1057863235473633, |
|
"learning_rate": 8.197590208770824e-05, |
|
"loss": 3.7313, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1446341276168823, |
|
"learning_rate": 8.184755745124371e-05, |
|
"loss": 3.7866, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1112569570541382, |
|
"learning_rate": 8.171885874466342e-05, |
|
"loss": 3.6763, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0507431030273438, |
|
"learning_rate": 8.158980739879507e-05, |
|
"loss": 3.7954, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.2522321939468384, |
|
"learning_rate": 8.146040484838677e-05, |
|
"loss": 3.6357, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.060377836227417, |
|
"learning_rate": 8.133065253209132e-05, |
|
"loss": 3.6315, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.035387396812439, |
|
"learning_rate": 8.120055189245e-05, |
|
"loss": 3.6919, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0047308206558228, |
|
"learning_rate": 8.10701043758767e-05, |
|
"loss": 3.6445, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.10782790184021, |
|
"learning_rate": 8.093931143264174e-05, |
|
"loss": 3.6457, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0221713781356812, |
|
"learning_rate": 8.080817451685576e-05, |
|
"loss": 3.743, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0856046676635742, |
|
"learning_rate": 8.067669508645356e-05, |
|
"loss": 3.6569, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1059452295303345, |
|
"learning_rate": 8.054487460317797e-05, |
|
"loss": 3.7369, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1251044273376465, |
|
"learning_rate": 8.041271453256345e-05, |
|
"loss": 3.6478, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0519945621490479, |
|
"learning_rate": 8.02802163439199e-05, |
|
"loss": 3.6413, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.137978196144104, |
|
"learning_rate": 8.01473815103163e-05, |
|
"loss": 3.6362, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.045811653137207, |
|
"learning_rate": 8.001421150856434e-05, |
|
"loss": 3.6628, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.188126802444458, |
|
"learning_rate": 7.988070781920197e-05, |
|
"loss": 3.7502, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1069587469100952, |
|
"learning_rate": 7.9746871926477e-05, |
|
"loss": 3.6779, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0773390531539917, |
|
"learning_rate": 7.961270531833052e-05, |
|
"loss": 3.6953, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0676312446594238, |
|
"learning_rate": 7.947820948638045e-05, |
|
"loss": 3.7452, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0171785354614258, |
|
"learning_rate": 7.934338592590486e-05, |
|
"loss": 3.6311, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0171067714691162, |
|
"learning_rate": 7.92082361358254e-05, |
|
"loss": 3.6453, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1444722414016724, |
|
"learning_rate": 7.907276161869065e-05, |
|
"loss": 3.6623, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1217622756958008, |
|
"learning_rate": 7.893696388065936e-05, |
|
"loss": 3.6992, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0387611389160156, |
|
"learning_rate": 7.88008444314838e-05, |
|
"loss": 3.6233, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.9862802028656006, |
|
"learning_rate": 7.866440478449283e-05, |
|
"loss": 3.5765, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1087441444396973, |
|
"learning_rate": 7.852764645657522e-05, |
|
"loss": 3.6472, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0384817123413086, |
|
"learning_rate": 7.839057096816271e-05, |
|
"loss": 3.7186, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0545164346694946, |
|
"learning_rate": 7.82531798432131e-05, |
|
"loss": 3.6654, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1012707948684692, |
|
"learning_rate": 7.811547460919333e-05, |
|
"loss": 3.6155, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9383161067962646, |
|
"learning_rate": 7.797745679706254e-05, |
|
"loss": 3.6554, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9553996920585632, |
|
"learning_rate": 7.783912794125496e-05, |
|
"loss": 3.6888, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.117811679840088, |
|
"learning_rate": 7.770048957966291e-05, |
|
"loss": 3.6646, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0329620838165283, |
|
"learning_rate": 7.756154325361967e-05, |
|
"loss": 3.5898, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.8986888527870178, |
|
"learning_rate": 7.74222905078824e-05, |
|
"loss": 3.585, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9581660628318787, |
|
"learning_rate": 7.728273289061489e-05, |
|
"loss": 3.6762, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9644125699996948, |
|
"learning_rate": 7.714287195337044e-05, |
|
"loss": 3.5898, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0127379894256592, |
|
"learning_rate": 7.700270925107448e-05, |
|
"loss": 3.7051, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0924148559570312, |
|
"learning_rate": 7.686224634200742e-05, |
|
"loss": 3.7056, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0332456827163696, |
|
"learning_rate": 7.672148478778722e-05, |
|
"loss": 3.6393, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.129856824874878, |
|
"learning_rate": 7.658042615335212e-05, |
|
"loss": 3.6422, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.075238585472107, |
|
"learning_rate": 7.643907200694318e-05, |
|
"loss": 3.5333, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1756746768951416, |
|
"learning_rate": 7.629742392008684e-05, |
|
"loss": 3.5834, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1802172660827637, |
|
"learning_rate": 7.615548346757749e-05, |
|
"loss": 3.5762, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1827501058578491, |
|
"learning_rate": 7.60132522274599e-05, |
|
"loss": 3.6299, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.2258820533752441, |
|
"learning_rate": 7.587073178101178e-05, |
|
"loss": 3.7182, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.035818338394165, |
|
"learning_rate": 7.572792371272609e-05, |
|
"loss": 3.5232, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0153076648712158, |
|
"learning_rate": 7.55848296102935e-05, |
|
"loss": 3.6142, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1396682262420654, |
|
"learning_rate": 7.544145106458465e-05, |
|
"loss": 3.6179, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9990943074226379, |
|
"learning_rate": 7.529778966963259e-05, |
|
"loss": 3.6462, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1192935705184937, |
|
"learning_rate": 7.515384702261496e-05, |
|
"loss": 3.6308, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.3080693483352661, |
|
"learning_rate": 7.500962472383627e-05, |
|
"loss": 3.6369, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.019212245941162, |
|
"learning_rate": 7.486512437671011e-05, |
|
"loss": 3.6282, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9433236122131348, |
|
"learning_rate": 7.472034758774128e-05, |
|
"loss": 3.5127, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0534073114395142, |
|
"learning_rate": 7.457529596650797e-05, |
|
"loss": 3.6323, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1378173828125, |
|
"learning_rate": 7.442997112564392e-05, |
|
"loss": 3.5825, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0004600286483765, |
|
"learning_rate": 7.428437468082037e-05, |
|
"loss": 3.524, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1865243911743164, |
|
"learning_rate": 7.413850825072817e-05, |
|
"loss": 3.6502, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0776444673538208, |
|
"learning_rate": 7.39923734570598e-05, |
|
"loss": 3.5899, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0138128995895386, |
|
"learning_rate": 7.384597192449126e-05, |
|
"loss": 3.6673, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1027905941009521, |
|
"learning_rate": 7.369930528066412e-05, |
|
"loss": 3.6207, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.036793828010559, |
|
"learning_rate": 7.355237515616732e-05, |
|
"loss": 3.5804, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0655146837234497, |
|
"learning_rate": 7.340518318451914e-05, |
|
"loss": 3.6235, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9288371205329895, |
|
"learning_rate": 7.325773100214893e-05, |
|
"loss": 3.5938, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9640668630599976, |
|
"learning_rate": 7.311002024837899e-05, |
|
"loss": 3.5945, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9216758012771606, |
|
"learning_rate": 7.296205256540633e-05, |
|
"loss": 3.6156, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1537082195281982, |
|
"learning_rate": 7.281382959828443e-05, |
|
"loss": 3.5763, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9384089112281799, |
|
"learning_rate": 7.26653529949049e-05, |
|
"loss": 3.6599, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0455132722854614, |
|
"learning_rate": 7.25166244059792e-05, |
|
"loss": 3.592, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.1599528789520264, |
|
"learning_rate": 7.236764548502029e-05, |
|
"loss": 3.6206, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.2807819843292236, |
|
"learning_rate": 7.221841788832421e-05, |
|
"loss": 3.573, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.999480128288269, |
|
"learning_rate": 7.206894327495173e-05, |
|
"loss": 3.6414, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0415242910385132, |
|
"learning_rate": 7.191922330670982e-05, |
|
"loss": 3.5397, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9353726506233215, |
|
"learning_rate": 7.176925964813326e-05, |
|
"loss": 3.6543, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9449454545974731, |
|
"learning_rate": 7.161905396646607e-05, |
|
"loss": 3.5735, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9990008473396301, |
|
"learning_rate": 7.146860793164299e-05, |
|
"loss": 3.5328, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.05113685131073, |
|
"learning_rate": 7.131792321627098e-05, |
|
"loss": 3.5401, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9677740335464478, |
|
"learning_rate": 7.116700149561048e-05, |
|
"loss": 3.577, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9737529754638672, |
|
"learning_rate": 7.101584444755696e-05, |
|
"loss": 3.5982, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9464470148086548, |
|
"learning_rate": 7.086445375262212e-05, |
|
"loss": 3.4938, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9980365633964539, |
|
"learning_rate": 7.071283109391528e-05, |
|
"loss": 3.554, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9686562418937683, |
|
"learning_rate": 7.056097815712466e-05, |
|
"loss": 3.494, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9243375658988953, |
|
"learning_rate": 7.040889663049862e-05, |
|
"loss": 3.5385, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0450866222381592, |
|
"learning_rate": 7.025658820482693e-05, |
|
"loss": 3.604, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4128719568252563, |
|
"learning_rate": 7.010405457342192e-05, |
|
"loss": 3.6343, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9513347148895264, |
|
"learning_rate": 6.995129743209967e-05, |
|
"loss": 3.4879, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.8341937065124512, |
|
"learning_rate": 6.97983184791612e-05, |
|
"loss": 3.5948, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9494589567184448, |
|
"learning_rate": 6.964511941537355e-05, |
|
"loss": 3.4882, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9091277718544006, |
|
"learning_rate": 6.949170194395083e-05, |
|
"loss": 3.6224, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9695673584938049, |
|
"learning_rate": 6.933806777053536e-05, |
|
"loss": 3.5524, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0510302782058716, |
|
"learning_rate": 6.918421860317872e-05, |
|
"loss": 3.5681, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9663901329040527, |
|
"learning_rate": 6.903015615232263e-05, |
|
"loss": 3.5877, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.8571495413780212, |
|
"learning_rate": 6.887588213078012e-05, |
|
"loss": 3.4845, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9498350620269775, |
|
"learning_rate": 6.87213982537163e-05, |
|
"loss": 3.6167, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9438138604164124, |
|
"learning_rate": 6.856670623862943e-05, |
|
"loss": 3.6043, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.8803019523620605, |
|
"learning_rate": 6.841180780533179e-05, |
|
"loss": 3.5352, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9197990894317627, |
|
"learning_rate": 6.82567046759305e-05, |
|
"loss": 3.5607, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9768213033676147, |
|
"learning_rate": 6.810139857480844e-05, |
|
"loss": 3.5614, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.995598316192627, |
|
"learning_rate": 6.794589122860509e-05, |
|
"loss": 3.4622, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.8904504776000977, |
|
"learning_rate": 6.779018436619725e-05, |
|
"loss": 3.4692, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9814966320991516, |
|
"learning_rate": 6.763427971867992e-05, |
|
"loss": 3.5498, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9564259052276611, |
|
"learning_rate": 6.747817901934699e-05, |
|
"loss": 3.5178, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0320782661437988, |
|
"learning_rate": 6.732188400367197e-05, |
|
"loss": 3.585, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.04094660282135, |
|
"learning_rate": 6.716539640928871e-05, |
|
"loss": 3.6019, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9763439893722534, |
|
"learning_rate": 6.70087179759721e-05, |
|
"loss": 3.5412, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9772986173629761, |
|
"learning_rate": 6.685185044561874e-05, |
|
"loss": 3.4934, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9123693108558655, |
|
"learning_rate": 6.669479556222747e-05, |
|
"loss": 3.5201, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0170071125030518, |
|
"learning_rate": 6.653755507188013e-05, |
|
"loss": 3.4898, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9429283142089844, |
|
"learning_rate": 6.638013072272205e-05, |
|
"loss": 3.5073, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9589207768440247, |
|
"learning_rate": 6.622252426494259e-05, |
|
"loss": 3.5705, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9630715847015381, |
|
"learning_rate": 6.606473745075581e-05, |
|
"loss": 3.5051, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9157508611679077, |
|
"learning_rate": 6.590677203438084e-05, |
|
"loss": 3.4814, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9560885429382324, |
|
"learning_rate": 6.574862977202252e-05, |
|
"loss": 3.4907, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.8377314209938049, |
|
"learning_rate": 6.559031242185174e-05, |
|
"loss": 3.4926, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.048364281654358, |
|
"learning_rate": 6.543182174398597e-05, |
|
"loss": 3.4779, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9476514458656311, |
|
"learning_rate": 6.52731595004697e-05, |
|
"loss": 3.5114, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9079380631446838, |
|
"learning_rate": 6.51143274552548e-05, |
|
"loss": 3.4975, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9813776612281799, |
|
"learning_rate": 6.495532737418098e-05, |
|
"loss": 3.529, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.8631868362426758, |
|
"learning_rate": 6.479616102495605e-05, |
|
"loss": 3.5393, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9297569394111633, |
|
"learning_rate": 6.463683017713638e-05, |
|
"loss": 3.5401, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9774935245513916, |
|
"learning_rate": 6.447733660210715e-05, |
|
"loss": 3.4963, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0512644052505493, |
|
"learning_rate": 6.431768207306272e-05, |
|
"loss": 3.506, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9815961122512817, |
|
"learning_rate": 6.415786836498684e-05, |
|
"loss": 3.5325, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0608659982681274, |
|
"learning_rate": 6.399789725463298e-05, |
|
"loss": 3.4853, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9495364427566528, |
|
"learning_rate": 6.383777052050458e-05, |
|
"loss": 3.5141, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.97361820936203, |
|
"learning_rate": 6.367748994283518e-05, |
|
"loss": 3.5453, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9082745909690857, |
|
"learning_rate": 6.351705730356877e-05, |
|
"loss": 3.4761, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8285298943519592, |
|
"learning_rate": 6.335647438633987e-05, |
|
"loss": 3.5024, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8225826025009155, |
|
"learning_rate": 6.319574297645374e-05, |
|
"loss": 3.5347, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9011406898498535, |
|
"learning_rate": 6.303486486086654e-05, |
|
"loss": 3.5243, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9474372267723083, |
|
"learning_rate": 6.287384182816546e-05, |
|
"loss": 3.4822, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0432955026626587, |
|
"learning_rate": 6.271267566854883e-05, |
|
"loss": 3.5558, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8962119817733765, |
|
"learning_rate": 6.255136817380618e-05, |
|
"loss": 3.506, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8903326392173767, |
|
"learning_rate": 6.23899211372984e-05, |
|
"loss": 3.5284, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9178863167762756, |
|
"learning_rate": 6.222833635393772e-05, |
|
"loss": 3.5501, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.8843467235565186, |
|
"learning_rate": 6.206661562016782e-05, |
|
"loss": 3.47, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8805639147758484, |
|
"learning_rate": 6.190476073394382e-05, |
|
"loss": 3.5827, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8859938383102417, |
|
"learning_rate": 6.17427734947123e-05, |
|
"loss": 3.4916, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9646418690681458, |
|
"learning_rate": 6.158065570339127e-05, |
|
"loss": 3.5029, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8537634015083313, |
|
"learning_rate": 6.141840916235021e-05, |
|
"loss": 3.5093, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.956503689289093, |
|
"learning_rate": 6.125603567539001e-05, |
|
"loss": 3.5195, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0732929706573486, |
|
"learning_rate": 6.109353704772284e-05, |
|
"loss": 3.5231, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9116024374961853, |
|
"learning_rate": 6.0930915085952164e-05, |
|
"loss": 3.4417, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.8402517437934875, |
|
"learning_rate": 6.076817159805267e-05, |
|
"loss": 3.4525, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.928032398223877, |
|
"learning_rate": 6.06053083933501e-05, |
|
"loss": 3.4627, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9409497976303101, |
|
"learning_rate": 6.044232728250116e-05, |
|
"loss": 3.5356, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1118775606155396, |
|
"learning_rate": 6.027923007747339e-05, |
|
"loss": 3.4684, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9488819241523743, |
|
"learning_rate": 6.011601859152506e-05, |
|
"loss": 3.495, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8869526982307434, |
|
"learning_rate": 5.995269463918495e-05, |
|
"loss": 3.4509, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8808426856994629, |
|
"learning_rate": 5.97892600362322e-05, |
|
"loss": 3.4256, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9103177785873413, |
|
"learning_rate": 5.962571659967614e-05, |
|
"loss": 3.4464, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9502519369125366, |
|
"learning_rate": 5.946206614773606e-05, |
|
"loss": 3.4684, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.091119408607483, |
|
"learning_rate": 5.929831049982103e-05, |
|
"loss": 3.5087, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9751874804496765, |
|
"learning_rate": 5.9134451476509633e-05, |
|
"loss": 3.3998, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9701889753341675, |
|
"learning_rate": 5.897049089952974e-05, |
|
"loss": 3.5021, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1176971197128296, |
|
"learning_rate": 5.880643059173826e-05, |
|
"loss": 3.459, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.982542097568512, |
|
"learning_rate": 5.864227237710093e-05, |
|
"loss": 3.3349, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9292042255401611, |
|
"learning_rate": 5.847801808067189e-05, |
|
"loss": 3.422, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.939666748046875, |
|
"learning_rate": 5.831366952857357e-05, |
|
"loss": 3.4601, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9310621619224548, |
|
"learning_rate": 5.814922854797622e-05, |
|
"loss": 3.4736, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9938127994537354, |
|
"learning_rate": 5.798469696707775e-05, |
|
"loss": 3.5266, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0024408102035522, |
|
"learning_rate": 5.782007661508331e-05, |
|
"loss": 3.5674, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9908074140548706, |
|
"learning_rate": 5.765536932218495e-05, |
|
"loss": 3.4635, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9737017750740051, |
|
"learning_rate": 5.7490576919541315e-05, |
|
"loss": 3.4767, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8988921046257019, |
|
"learning_rate": 5.732570123925729e-05, |
|
"loss": 3.4504, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8467938899993896, |
|
"learning_rate": 5.7160744114363593e-05, |
|
"loss": 3.4944, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0359325408935547, |
|
"learning_rate": 5.699570737879641e-05, |
|
"loss": 3.4789, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0919708013534546, |
|
"learning_rate": 5.683059286737702e-05, |
|
"loss": 3.4604, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.081618309020996, |
|
"learning_rate": 5.666540241579139e-05, |
|
"loss": 3.4671, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9050670266151428, |
|
"learning_rate": 5.6500137860569766e-05, |
|
"loss": 3.5274, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9571909308433533, |
|
"learning_rate": 5.633480103906624e-05, |
|
"loss": 3.5328, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8787181973457336, |
|
"learning_rate": 5.616939378943834e-05, |
|
"loss": 3.4774, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.8168122172355652, |
|
"learning_rate": 5.6003917950626595e-05, |
|
"loss": 3.4833, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9859710931777954, |
|
"learning_rate": 5.583837536233407e-05, |
|
"loss": 3.4339, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9478222131729126, |
|
"learning_rate": 5.567276786500596e-05, |
|
"loss": 3.4099, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.020331621170044, |
|
"learning_rate": 5.5507097299809054e-05, |
|
"loss": 3.4251, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9651831388473511, |
|
"learning_rate": 5.534136550861133e-05, |
|
"loss": 3.5135, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0853240489959717, |
|
"learning_rate": 5.5175574333961465e-05, |
|
"loss": 3.4398, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.000195026397705, |
|
"learning_rate": 5.500972561906832e-05, |
|
"loss": 3.4936, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8865319490432739, |
|
"learning_rate": 5.484382120778048e-05, |
|
"loss": 3.5513, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8763468861579895, |
|
"learning_rate": 5.467786294456575e-05, |
|
"loss": 3.398, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.861831545829773, |
|
"learning_rate": 5.451185267449061e-05, |
|
"loss": 3.4215, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9110276699066162, |
|
"learning_rate": 5.43457922431998e-05, |
|
"loss": 3.4848, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.135488748550415, |
|
"learning_rate": 5.417968349689566e-05, |
|
"loss": 3.4418, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9430781602859497, |
|
"learning_rate": 5.401352828231772e-05, |
|
"loss": 3.4883, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8660580515861511, |
|
"learning_rate": 5.384732844672211e-05, |
|
"loss": 3.4674, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8980595469474792, |
|
"learning_rate": 5.368108583786107e-05, |
|
"loss": 3.5509, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8920612931251526, |
|
"learning_rate": 5.3514802303962344e-05, |
|
"loss": 3.4593, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.3392480611801147, |
|
"learning_rate": 5.334847969370868e-05, |
|
"loss": 3.5069, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1390236616134644, |
|
"learning_rate": 5.3182119856217284e-05, |
|
"loss": 3.4267, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8977495431900024, |
|
"learning_rate": 5.3015724641019214e-05, |
|
"loss": 3.4173, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8903648853302002, |
|
"learning_rate": 5.284929589803884e-05, |
|
"loss": 3.4099, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.926061749458313, |
|
"learning_rate": 5.2682835477573336e-05, |
|
"loss": 3.3682, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.061814546585083, |
|
"learning_rate": 5.2516345230271965e-05, |
|
"loss": 3.4584, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.6410764455795288, |
|
"learning_rate": 5.234982700711569e-05, |
|
"loss": 3.4347, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8893712759017944, |
|
"learning_rate": 5.218328265939643e-05, |
|
"loss": 3.453, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.878666341304779, |
|
"learning_rate": 5.201671403869657e-05, |
|
"loss": 3.4353, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8680508732795715, |
|
"learning_rate": 5.1850122996868366e-05, |
|
"loss": 3.355, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.049208402633667, |
|
"learning_rate": 5.168351138601334e-05, |
|
"loss": 3.4118, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9009265899658203, |
|
"learning_rate": 5.1516881058461675e-05, |
|
"loss": 3.4656, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8777670860290527, |
|
"learning_rate": 5.135023386675166e-05, |
|
"loss": 3.3768, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8929083943367004, |
|
"learning_rate": 5.118357166360906e-05, |
|
"loss": 3.4334, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8720307946205139, |
|
"learning_rate": 5.101689630192655e-05, |
|
"loss": 3.414, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8738992214202881, |
|
"learning_rate": 5.085020963474307e-05, |
|
"loss": 3.4644, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.6024993658065796, |
|
"learning_rate": 5.068351351522329e-05, |
|
"loss": 3.3989, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9233328700065613, |
|
"learning_rate": 5.0516809796636935e-05, |
|
"loss": 3.493, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9974755644798279, |
|
"learning_rate": 5.035010033233821e-05, |
|
"loss": 3.4787, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9158952832221985, |
|
"learning_rate": 5.018338697574523e-05, |
|
"loss": 3.4315, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8736887574195862, |
|
"learning_rate": 5.0016671580319354e-05, |
|
"loss": 3.5099, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8213635087013245, |
|
"learning_rate": 4.984995599954461e-05, |
|
"loss": 3.3743, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9550583362579346, |
|
"learning_rate": 4.968324208690712e-05, |
|
"loss": 3.3991, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8275715708732605, |
|
"learning_rate": 4.951653169587441e-05, |
|
"loss": 3.4345, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9144462943077087, |
|
"learning_rate": 4.93498266798749e-05, |
|
"loss": 3.4527, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9548483490943909, |
|
"learning_rate": 4.918312889227722e-05, |
|
"loss": 3.3642, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9061784148216248, |
|
"learning_rate": 4.901644018636966e-05, |
|
"loss": 3.4216, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9008503556251526, |
|
"learning_rate": 4.8849762415339526e-05, |
|
"loss": 3.4249, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.996560275554657, |
|
"learning_rate": 4.868309743225256e-05, |
|
"loss": 3.4145, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9953149557113647, |
|
"learning_rate": 4.851644709003233e-05, |
|
"loss": 3.544, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8433133363723755, |
|
"learning_rate": 4.834981324143964e-05, |
|
"loss": 3.4518, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9231517910957336, |
|
"learning_rate": 4.818319773905191e-05, |
|
"loss": 3.3918, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9039626121520996, |
|
"learning_rate": 4.801660243524261e-05, |
|
"loss": 3.3498, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8915175795555115, |
|
"learning_rate": 4.7850029182160626e-05, |
|
"loss": 3.386, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0723402500152588, |
|
"learning_rate": 4.768347983170973e-05, |
|
"loss": 3.3958, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9475720524787903, |
|
"learning_rate": 4.7516956235527884e-05, |
|
"loss": 3.3698, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8335372805595398, |
|
"learning_rate": 4.735046024496682e-05, |
|
"loss": 3.3022, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8657789826393127, |
|
"learning_rate": 4.7183993711071286e-05, |
|
"loss": 3.4883, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.009423017501831, |
|
"learning_rate": 4.7017558484558554e-05, |
|
"loss": 3.3994, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9131430983543396, |
|
"learning_rate": 4.6851156415797844e-05, |
|
"loss": 3.4176, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8437486886978149, |
|
"learning_rate": 4.6684789354789746e-05, |
|
"loss": 3.37, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8856005072593689, |
|
"learning_rate": 4.651845915114563e-05, |
|
"loss": 3.4163, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8783836960792542, |
|
"learning_rate": 4.6352167654067095e-05, |
|
"loss": 3.3511, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9118834733963013, |
|
"learning_rate": 4.618591671232544e-05, |
|
"loss": 3.444, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8706138730049133, |
|
"learning_rate": 4.601970817424106e-05, |
|
"loss": 3.3979, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8939517140388489, |
|
"learning_rate": 4.585354388766292e-05, |
|
"loss": 3.3708, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8935962915420532, |
|
"learning_rate": 4.568742569994802e-05, |
|
"loss": 3.3578, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8912794589996338, |
|
"learning_rate": 4.552135545794086e-05, |
|
"loss": 3.5254, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8823184967041016, |
|
"learning_rate": 4.535533500795288e-05, |
|
"loss": 3.4387, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9021984338760376, |
|
"learning_rate": 4.5189366195741953e-05, |
|
"loss": 3.3914, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8895813226699829, |
|
"learning_rate": 4.502345086649186e-05, |
|
"loss": 3.4546, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8331109285354614, |
|
"learning_rate": 4.485759086479179e-05, |
|
"loss": 3.4014, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8797712922096252, |
|
"learning_rate": 4.469178803461579e-05, |
|
"loss": 3.4068, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8429707884788513, |
|
"learning_rate": 4.4526044219302326e-05, |
|
"loss": 3.473, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.94316565990448, |
|
"learning_rate": 4.4360361261533745e-05, |
|
"loss": 3.3132, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8813465237617493, |
|
"learning_rate": 4.419474100331579e-05, |
|
"loss": 3.4597, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8824898600578308, |
|
"learning_rate": 4.402918528595715e-05, |
|
"loss": 3.4065, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8799294829368591, |
|
"learning_rate": 4.386369595004896e-05, |
|
"loss": 3.4918, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9282616972923279, |
|
"learning_rate": 4.3698274835444354e-05, |
|
"loss": 3.339, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8519152402877808, |
|
"learning_rate": 4.3532923781238e-05, |
|
"loss": 3.448, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9007513523101807, |
|
"learning_rate": 4.336764462574566e-05, |
|
"loss": 3.3405, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.895147442817688, |
|
"learning_rate": 4.320243920648376e-05, |
|
"loss": 3.3407, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8479480147361755, |
|
"learning_rate": 4.303730936014894e-05, |
|
"loss": 3.4386, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9062541723251343, |
|
"learning_rate": 4.287225692259765e-05, |
|
"loss": 3.3353, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9131118655204773, |
|
"learning_rate": 4.270728372882575e-05, |
|
"loss": 3.3456, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9297282695770264, |
|
"learning_rate": 4.254239161294804e-05, |
|
"loss": 3.4446, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9151642322540283, |
|
"learning_rate": 4.237758240817802e-05, |
|
"loss": 3.4011, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9209034442901611, |
|
"learning_rate": 4.2212857946807336e-05, |
|
"loss": 3.3808, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8558739423751831, |
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 3.475, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9316113591194153, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 3.4478, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9140479564666748, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 3.4139, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8449094295501709, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 3.4462, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9819065928459167, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 3.4237, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9675536751747131, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 3.4678, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.955564022064209, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 3.4339, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8302602767944336, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 3.3893, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0030620098114014, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 3.403, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8429064750671387, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 3.3268, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9952207803726196, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 3.419, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8415958881378174, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 3.3845, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8832328915596008, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 3.4452, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8209018707275391, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 3.3467, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7963742613792419, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 3.4648, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8554068207740784, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 3.402, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8575777411460876, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 3.4545, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9378578066825867, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 3.4027, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8902561664581299, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 3.4738, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8349782228469849, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 3.2739, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9909433722496033, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 3.3343, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9836429953575134, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 3.3772, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9544408917427063, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 3.4217, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9274853467941284, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 3.3485, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8512804508209229, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 3.3691, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8450992107391357, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 3.3275, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8768813610076904, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 3.3906, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8551543354988098, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 3.3034, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8286839127540588, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 3.3475, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0059750080108643, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 3.399, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9001671671867371, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 3.368, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9101215600967407, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 3.4451, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.885056734085083, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 3.4028, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8770620226860046, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 3.2998, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9198933839797974, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 3.4097, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8237200975418091, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 3.4916, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8838125467300415, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 3.3887, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8582887053489685, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 3.4103, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.83761066198349, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 3.3979, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9017011523246765, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 3.4158, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8480706810951233, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 3.332, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8610851168632507, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 3.36, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8271325826644897, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 3.3321, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9846967458724976, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 3.3163, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9517627358436584, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 3.437, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9230513572692871, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 3.354, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9414743185043335, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 3.3626, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8482127785682678, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 3.3172, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8926299214363098, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 3.4264, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8302354216575623, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 3.3653, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8593830466270447, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 3.353, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9003075361251831, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 3.3689, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9126880168914795, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 3.3892, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8957822918891907, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 3.4007, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8402200937271118, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 3.2873, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9142640829086304, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 3.2816, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9357993602752686, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 3.2826, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9008080363273621, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 3.3283, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8646254539489746, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 3.3901, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0222582817077637, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 3.3172, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8637706637382507, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 3.3113, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0547915697097778, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 3.3446, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0070993900299072, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 3.4087, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8321356773376465, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 3.3127, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8560150861740112, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 3.3001, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9279139041900635, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 3.2725, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.896747887134552, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 3.3268, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9544872045516968, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 3.2712, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9028177261352539, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 3.3019, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9091874957084656, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 3.3588, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8359854817390442, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 3.2409, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.070448875427246, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 3.317, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8524084091186523, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 3.3603, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8775138258934021, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 3.3665, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8480380773544312, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 3.254, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.938522219657898, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 3.4022, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.827492356300354, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 3.2898, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8760508298873901, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 3.4141, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8683328032493591, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 3.3617, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8718876838684082, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 3.3372, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8450455665588379, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 3.3932, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8650111556053162, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 3.3839, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.054282307624817, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 3.369, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9530842900276184, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 3.3099, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9338812828063965, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 3.4111, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9333266615867615, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 3.3992, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8644818067550659, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 3.3228, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9469544887542725, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 3.3703, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8587395548820496, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 3.3823, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8624106049537659, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 3.2556, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8285986185073853, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 3.3055, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.897678554058075, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 3.244, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8292384743690491, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 3.3844, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8837412595748901, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 3.3891, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8819624781608582, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 3.3456, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9516217708587646, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 3.317, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8784796595573425, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 3.3318, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8507090210914612, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 3.4276, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8158341646194458, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 3.2964, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8636460304260254, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 3.3046, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8573281168937683, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 3.3494, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8206995725631714, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 3.2589, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7886756658554077, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 3.3254, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8958569765090942, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 3.349, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8987053036689758, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 3.3414, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9279022812843323, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 3.2571, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.979684054851532, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 3.3664, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8469237685203552, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 3.3882, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.938224196434021, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 3.313, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8933103680610657, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 3.2644, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9098454117774963, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 3.2601, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.832898736000061, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 3.3678, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8103572726249695, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 3.3714, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8854662775993347, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 3.3424, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8286263942718506, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 3.3551, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9545296430587769, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 3.3823, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9493871927261353, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 3.396, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8324505686759949, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 3.3239, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.857708215713501, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 3.3066, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8161062002182007, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 3.3485, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8786885738372803, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 3.3984, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.870347797870636, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 3.3084, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.833512544631958, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 3.3442, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8745336532592773, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 3.3367, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8376188278198242, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 3.3021, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8755170106887817, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 3.3799, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8672605752944946, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 3.3517, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8204872608184814, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 3.2989, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8279518485069275, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 3.35, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8202655911445618, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 3.2457, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9008398056030273, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 3.4337, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.840510368347168, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 3.3695, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8364140391349792, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 3.3407, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.881549060344696, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 3.3053, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.827682375907898, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 3.2035, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8124437928199768, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 3.272, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.5780527591705322, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 3.304, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8243139982223511, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 3.3359, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8204466700553894, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 3.2912, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9210676550865173, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 3.3311, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8979406952857971, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 3.2767, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8867124319076538, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 3.3362, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8429162502288818, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 3.2875, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8652162551879883, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 3.3248, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.830157995223999, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 3.3039, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.851249098777771, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 3.228, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8676759004592896, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 3.2683, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8468626737594604, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 3.2965, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8902848362922668, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 3.3187, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8139768242835999, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 3.2649, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8436658978462219, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 3.2641, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8619456887245178, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 3.2942, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8965754508972168, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 3.2437, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8351267576217651, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 3.2898, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8266618847846985, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 3.3272, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9140317440032959, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 3.3165, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8285201191902161, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 3.2977, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8304650783538818, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 3.2944, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8460079431533813, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 3.2678, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8778470754623413, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 3.3551, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8502268195152283, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 3.3525, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8323951363563538, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 3.3441, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8833833336830139, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 3.349, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8423852920532227, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 3.2798, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8736124634742737, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 3.2949, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8281068801879883, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 3.2283, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8401610851287842, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 3.4024, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7921164035797119, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 3.2893, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8460646867752075, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 3.288, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8160696029663086, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 3.2195, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8647582530975342, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 3.3154, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8196638822555542, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 3.3802, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8931661248207092, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 3.2419, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8836420774459839, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 3.3038, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8200202584266663, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 3.336, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8285385370254517, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 3.3422, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.863246738910675, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 3.3437, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.81345534324646, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 3.4152, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8124732375144958, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 3.3005, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8287694454193115, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 3.2783, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8340721726417542, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 3.2003, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8558523058891296, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 3.3051, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8867878317832947, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 3.3998, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8162564039230347, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 3.3834, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8151681423187256, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 3.247, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.825150191783905, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 3.3278, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8217668533325195, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 3.345, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8449623584747314, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 3.2981, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8323595523834229, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 3.3461, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8136734366416931, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 3.3347, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8722835779190063, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 3.2841, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.897956907749176, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 3.3148, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9544664621353149, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 3.3538, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8626210689544678, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 3.3596, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8736276030540466, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 3.2998, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8064122796058655, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 3.273, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8083548545837402, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 3.251, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8138768672943115, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 3.2628, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9377182722091675, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 3.235, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.7928673624992371, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 3.2746, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8481169939041138, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 3.2695, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9184083938598633, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 3.2278, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8539621233940125, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 3.2818, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8199257850646973, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 3.2726, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.810304582118988, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 3.2467, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8342577219009399, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 3.3297, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8215441107749939, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 3.3136, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8240970969200134, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 3.2634, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8182936906814575, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 3.3069, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8966035842895508, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 3.3569, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.876420795917511, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 3.2909, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8695542216300964, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 3.3204, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8503195643424988, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 3.1899, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.807569146156311, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 3.3462, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8479645252227783, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 3.3846, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.7932925224304199, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 3.2313, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8063516616821289, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 3.2578, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8032195568084717, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 3.3719, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8684897422790527, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 3.2291, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8473917841911316, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 3.2538, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9202986359596252, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 3.3475, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8170850276947021, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 3.3528, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.7920132875442505, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 3.1723, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8625448942184448, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 3.2547, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8212171196937561, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 3.3117, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8107876777648926, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 3.2923, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8409060835838318, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 3.2746, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8198840022087097, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 3.287, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8378563523292542, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 3.3435, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8228487372398376, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 3.2075, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8124101161956787, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 3.3079, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8262704014778137, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 3.3592, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8362215161323547, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 3.2244, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8361976146697998, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 3.3835, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8179517388343811, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 3.4081, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.816852867603302, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 3.3189, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8509985208511353, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 3.2415, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8221267461776733, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 3.2113, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8320128321647644, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 3.3388, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8314358592033386, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 3.3267, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.822598397731781, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 3.351, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8570499420166016, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 3.2219, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.816481351852417, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 3.2443, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8919932842254639, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 3.2184, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8713335990905762, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 3.3185, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.4435936212539673, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 3.2908, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8382534980773926, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 3.2048, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8237469792366028, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 3.2839, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8565986156463623, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 3.2783, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8280335068702698, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 3.2714, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8038430213928223, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 3.3384, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7964035868644714, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 3.1829, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8033803701400757, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 3.3429, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8323132395744324, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 3.3593, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.7978505492210388, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 3.2938, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8222649097442627, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 3.2629, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8070308566093445, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 3.2682, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8659495115280151, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 3.283, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.817457377910614, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 3.2556, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8457687497138977, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 3.3078, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8201070427894592, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 3.2487, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8550896644592285, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 3.3523, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.0865380348133376e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |