|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1072, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0046641791044776115, |
|
"grad_norm": 1.9737565699550477, |
|
"learning_rate": 4.6296296296296296e-06, |
|
"loss": 0.8462, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.009328358208955223, |
|
"grad_norm": 1.4313045221260292, |
|
"learning_rate": 9.259259259259259e-06, |
|
"loss": 0.8235, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013992537313432836, |
|
"grad_norm": 1.1346996876854334, |
|
"learning_rate": 1.388888888888889e-05, |
|
"loss": 0.7682, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.018656716417910446, |
|
"grad_norm": 0.6828641731714449, |
|
"learning_rate": 1.8518518518518518e-05, |
|
"loss": 0.7273, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02332089552238806, |
|
"grad_norm": 0.5132486218762813, |
|
"learning_rate": 2.314814814814815e-05, |
|
"loss": 0.6962, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.027985074626865673, |
|
"grad_norm": 0.48646435519095227, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.6736, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03264925373134328, |
|
"grad_norm": 0.4320653730399992, |
|
"learning_rate": 3.240740740740741e-05, |
|
"loss": 0.6448, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03731343283582089, |
|
"grad_norm": 0.46506998468975, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 0.638, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04197761194029851, |
|
"grad_norm": 0.4190129866016933, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 0.6334, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04664179104477612, |
|
"grad_norm": 0.4197147311398817, |
|
"learning_rate": 4.62962962962963e-05, |
|
"loss": 0.615, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.051305970149253734, |
|
"grad_norm": 0.454150411866044, |
|
"learning_rate": 4.999989285883431e-05, |
|
"loss": 0.6343, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.055970149253731345, |
|
"grad_norm": 0.4976545347994734, |
|
"learning_rate": 4.999614302517356e-05, |
|
"loss": 0.6274, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06063432835820896, |
|
"grad_norm": 0.45598284917949133, |
|
"learning_rate": 4.99870371535606e-05, |
|
"loss": 0.6063, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06529850746268656, |
|
"grad_norm": 0.5227086559500899, |
|
"learning_rate": 4.997257741198456e-05, |
|
"loss": 0.6353, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06996268656716417, |
|
"grad_norm": 0.6174850179632522, |
|
"learning_rate": 4.9952767243121146e-05, |
|
"loss": 0.6128, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07462686567164178, |
|
"grad_norm": 0.5880731170032467, |
|
"learning_rate": 4.992761136351291e-05, |
|
"loss": 0.6087, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07929104477611941, |
|
"grad_norm": 0.6375946180801447, |
|
"learning_rate": 4.989711576244639e-05, |
|
"loss": 0.6006, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08395522388059702, |
|
"grad_norm": 0.48790239748391595, |
|
"learning_rate": 4.986128770052603e-05, |
|
"loss": 0.5899, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08861940298507463, |
|
"grad_norm": 0.5318335956060131, |
|
"learning_rate": 4.9820135707945634e-05, |
|
"loss": 0.5917, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09328358208955224, |
|
"grad_norm": 0.5656690396811953, |
|
"learning_rate": 4.9773669582457364e-05, |
|
"loss": 0.6048, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09794776119402986, |
|
"grad_norm": 0.4691363598516851, |
|
"learning_rate": 4.972190038703905e-05, |
|
"loss": 0.6103, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10261194029850747, |
|
"grad_norm": 0.49198235678257524, |
|
"learning_rate": 4.966484044726024e-05, |
|
"loss": 0.5973, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.10727611940298508, |
|
"grad_norm": 0.4828283304659039, |
|
"learning_rate": 4.9602503348347625e-05, |
|
"loss": 0.5879, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11194029850746269, |
|
"grad_norm": 0.5222495128976433, |
|
"learning_rate": 4.953490393195063e-05, |
|
"loss": 0.5953, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1166044776119403, |
|
"grad_norm": 0.5173262535486767, |
|
"learning_rate": 4.9462058292607735e-05, |
|
"loss": 0.5924, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.12126865671641791, |
|
"grad_norm": 0.49449726128671867, |
|
"learning_rate": 4.938398377391461e-05, |
|
"loss": 0.5853, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1259328358208955, |
|
"grad_norm": 0.3981488723042445, |
|
"learning_rate": 4.930069896439485e-05, |
|
"loss": 0.5848, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.13059701492537312, |
|
"grad_norm": 0.5616720737443401, |
|
"learning_rate": 4.921222369307427e-05, |
|
"loss": 0.5869, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13526119402985073, |
|
"grad_norm": 0.5645188109079204, |
|
"learning_rate": 4.9118579024759854e-05, |
|
"loss": 0.5896, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13992537313432835, |
|
"grad_norm": 0.4528930971557964, |
|
"learning_rate": 4.901978725502454e-05, |
|
"loss": 0.5733, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14458955223880596, |
|
"grad_norm": 0.5119372178393736, |
|
"learning_rate": 4.891587190489891e-05, |
|
"loss": 0.5779, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14925373134328357, |
|
"grad_norm": 0.545404890594189, |
|
"learning_rate": 4.880685771527114e-05, |
|
"loss": 0.5855, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.15391791044776118, |
|
"grad_norm": 0.5516112369637339, |
|
"learning_rate": 4.869277064099654e-05, |
|
"loss": 0.5814, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.15858208955223882, |
|
"grad_norm": 0.42360505235042945, |
|
"learning_rate": 4.8573637844718e-05, |
|
"loss": 0.5918, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.16324626865671643, |
|
"grad_norm": 0.41597973453963927, |
|
"learning_rate": 4.844948769039896e-05, |
|
"loss": 0.5859, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16791044776119404, |
|
"grad_norm": 0.44936882842852305, |
|
"learning_rate": 4.83203497365703e-05, |
|
"loss": 0.5817, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.17257462686567165, |
|
"grad_norm": 0.46299205203501975, |
|
"learning_rate": 4.818625472929286e-05, |
|
"loss": 0.5746, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.17723880597014927, |
|
"grad_norm": 0.4673688881878735, |
|
"learning_rate": 4.8047234594837143e-05, |
|
"loss": 0.5684, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.18190298507462688, |
|
"grad_norm": 0.4435776856144207, |
|
"learning_rate": 4.7903322432082185e-05, |
|
"loss": 0.5686, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1865671641791045, |
|
"grad_norm": 0.4058371589175082, |
|
"learning_rate": 4.775455250463507e-05, |
|
"loss": 0.5684, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1912313432835821, |
|
"grad_norm": 0.49977241209761997, |
|
"learning_rate": 4.760096023267322e-05, |
|
"loss": 0.5821, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1958955223880597, |
|
"grad_norm": 0.43523914588954843, |
|
"learning_rate": 4.744258218451135e-05, |
|
"loss": 0.5739, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.20055970149253732, |
|
"grad_norm": 0.7085255215907048, |
|
"learning_rate": 4.7279456067895e-05, |
|
"loss": 0.5716, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.20522388059701493, |
|
"grad_norm": 0.5653424685417476, |
|
"learning_rate": 4.71116207210228e-05, |
|
"loss": 0.5674, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.20988805970149255, |
|
"grad_norm": 0.5244121990272786, |
|
"learning_rate": 4.6939116103299655e-05, |
|
"loss": 0.56, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.21455223880597016, |
|
"grad_norm": 0.5077725577171148, |
|
"learning_rate": 4.676198328582288e-05, |
|
"loss": 0.5688, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.21921641791044777, |
|
"grad_norm": 0.4425026395421933, |
|
"learning_rate": 4.6580264441603724e-05, |
|
"loss": 0.576, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.22388059701492538, |
|
"grad_norm": 0.4341800111451734, |
|
"learning_rate": 4.6394002835526535e-05, |
|
"loss": 0.5752, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.228544776119403, |
|
"grad_norm": 0.4996634399185324, |
|
"learning_rate": 4.6203242814047946e-05, |
|
"loss": 0.5603, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2332089552238806, |
|
"grad_norm": 0.5912347448686226, |
|
"learning_rate": 4.6008029794638596e-05, |
|
"loss": 0.5607, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.23787313432835822, |
|
"grad_norm": 0.43002434255127664, |
|
"learning_rate": 4.580841025496974e-05, |
|
"loss": 0.5581, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.24253731343283583, |
|
"grad_norm": 0.4485234966770534, |
|
"learning_rate": 4.560443172184763e-05, |
|
"loss": 0.5677, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.24720149253731344, |
|
"grad_norm": 0.37138561390050095, |
|
"learning_rate": 4.539614275989793e-05, |
|
"loss": 0.5565, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.251865671641791, |
|
"grad_norm": 0.4133626295112158, |
|
"learning_rate": 4.5183592960003104e-05, |
|
"loss": 0.5548, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.25652985074626866, |
|
"grad_norm": 0.45815619256972684, |
|
"learning_rate": 4.496683292749555e-05, |
|
"loss": 0.5592, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.26119402985074625, |
|
"grad_norm": 0.5382642082030428, |
|
"learning_rate": 4.4745914270109055e-05, |
|
"loss": 0.5627, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2658582089552239, |
|
"grad_norm": 0.46388012094196474, |
|
"learning_rate": 4.4520889585691705e-05, |
|
"loss": 0.5677, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.27052238805970147, |
|
"grad_norm": 0.4651562768167999, |
|
"learning_rate": 4.429181244968301e-05, |
|
"loss": 0.5518, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2751865671641791, |
|
"grad_norm": 0.3962720245968155, |
|
"learning_rate": 4.4058737402358295e-05, |
|
"loss": 0.5458, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.2798507462686567, |
|
"grad_norm": 0.4883867917348728, |
|
"learning_rate": 4.38217199358434e-05, |
|
"loss": 0.5616, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.28451492537313433, |
|
"grad_norm": 0.4225077326378658, |
|
"learning_rate": 4.3580816480902656e-05, |
|
"loss": 0.5459, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.2891791044776119, |
|
"grad_norm": 0.3662378172000391, |
|
"learning_rate": 4.3336084393503545e-05, |
|
"loss": 0.5542, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.29384328358208955, |
|
"grad_norm": 0.4446516792918299, |
|
"learning_rate": 4.308758194116094e-05, |
|
"loss": 0.5574, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.29850746268656714, |
|
"grad_norm": 0.4359436549059787, |
|
"learning_rate": 4.283536828906436e-05, |
|
"loss": 0.5686, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3031716417910448, |
|
"grad_norm": 0.562147315461778, |
|
"learning_rate": 4.2579503485991567e-05, |
|
"loss": 0.5529, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.30783582089552236, |
|
"grad_norm": 0.5107829000076599, |
|
"learning_rate": 4.2320048450011684e-05, |
|
"loss": 0.5597, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.3125, |
|
"grad_norm": 0.4735116577263397, |
|
"learning_rate": 4.205706495398143e-05, |
|
"loss": 0.5553, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.31716417910447764, |
|
"grad_norm": 0.5036979789350788, |
|
"learning_rate": 4.179061561083777e-05, |
|
"loss": 0.5414, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.3218283582089552, |
|
"grad_norm": 0.3917546906129853, |
|
"learning_rate": 4.1520763858690644e-05, |
|
"loss": 0.5558, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.32649253731343286, |
|
"grad_norm": 0.5111185433617897, |
|
"learning_rate": 4.124757394571914e-05, |
|
"loss": 0.552, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.33115671641791045, |
|
"grad_norm": 0.45005650174171213, |
|
"learning_rate": 4.097111091487486e-05, |
|
"loss": 0.5461, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3358208955223881, |
|
"grad_norm": 0.4185868959063299, |
|
"learning_rate": 4.069144058839605e-05, |
|
"loss": 0.5638, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.34048507462686567, |
|
"grad_norm": 0.4385018433239723, |
|
"learning_rate": 4.040862955213615e-05, |
|
"loss": 0.5579, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3451492537313433, |
|
"grad_norm": 0.3658683971284041, |
|
"learning_rate": 4.012274513971061e-05, |
|
"loss": 0.5598, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3498134328358209, |
|
"grad_norm": 0.44442062333546467, |
|
"learning_rate": 3.9833855416465624e-05, |
|
"loss": 0.5473, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.35447761194029853, |
|
"grad_norm": 0.45116064271828604, |
|
"learning_rate": 3.954202916327264e-05, |
|
"loss": 0.5463, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3591417910447761, |
|
"grad_norm": 0.36498533110429815, |
|
"learning_rate": 3.924733586015257e-05, |
|
"loss": 0.545, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.36380597014925375, |
|
"grad_norm": 0.39587844840520986, |
|
"learning_rate": 3.894984566973346e-05, |
|
"loss": 0.5484, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.36847014925373134, |
|
"grad_norm": 0.4001846361793331, |
|
"learning_rate": 3.864962942054572e-05, |
|
"loss": 0.5549, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.373134328358209, |
|
"grad_norm": 0.38715828447829004, |
|
"learning_rate": 3.834675859015876e-05, |
|
"loss": 0.5466, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37779850746268656, |
|
"grad_norm": 0.4275094946094768, |
|
"learning_rate": 3.804130528816312e-05, |
|
"loss": 0.549, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3824626865671642, |
|
"grad_norm": 0.5465207240404546, |
|
"learning_rate": 3.77333422390021e-05, |
|
"loss": 0.5347, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3871268656716418, |
|
"grad_norm": 0.3810626185760908, |
|
"learning_rate": 3.7422942764657054e-05, |
|
"loss": 0.5462, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3917910447761194, |
|
"grad_norm": 0.42546821730605433, |
|
"learning_rate": 3.711018076719034e-05, |
|
"loss": 0.5513, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.396455223880597, |
|
"grad_norm": 0.40660329464977896, |
|
"learning_rate": 3.679513071115025e-05, |
|
"loss": 0.5545, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.40111940298507465, |
|
"grad_norm": 0.4901995684742552, |
|
"learning_rate": 3.647786760584194e-05, |
|
"loss": 0.5514, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.40578358208955223, |
|
"grad_norm": 0.4561473177324483, |
|
"learning_rate": 3.615846698746869e-05, |
|
"loss": 0.5551, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.41044776119402987, |
|
"grad_norm": 0.4545066058248777, |
|
"learning_rate": 3.583700490114776e-05, |
|
"loss": 0.5475, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.41511194029850745, |
|
"grad_norm": 0.3567683329232947, |
|
"learning_rate": 3.5513557882805e-05, |
|
"loss": 0.5457, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4197761194029851, |
|
"grad_norm": 0.4935516493631999, |
|
"learning_rate": 3.518820294095267e-05, |
|
"loss": 0.5483, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.4244402985074627, |
|
"grad_norm": 0.4278828841184784, |
|
"learning_rate": 3.486101753835468e-05, |
|
"loss": 0.5411, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.4291044776119403, |
|
"grad_norm": 0.3831980820409706, |
|
"learning_rate": 3.453207957358377e-05, |
|
"loss": 0.5369, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4337686567164179, |
|
"grad_norm": 0.35346791316580967, |
|
"learning_rate": 3.420146736247487e-05, |
|
"loss": 0.5278, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.43843283582089554, |
|
"grad_norm": 0.4341647119894046, |
|
"learning_rate": 3.386925961947906e-05, |
|
"loss": 0.552, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4430970149253731, |
|
"grad_norm": 0.38104723235081495, |
|
"learning_rate": 3.353553543892277e-05, |
|
"loss": 0.5513, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.44776119402985076, |
|
"grad_norm": 0.42615290307324083, |
|
"learning_rate": 3.320037427617639e-05, |
|
"loss": 0.5432, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.45242537313432835, |
|
"grad_norm": 0.3462873785234133, |
|
"learning_rate": 3.2863855928737026e-05, |
|
"loss": 0.5381, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.457089552238806, |
|
"grad_norm": 0.3586109539340555, |
|
"learning_rate": 3.252606051722972e-05, |
|
"loss": 0.5527, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.46175373134328357, |
|
"grad_norm": 0.3606896599871606, |
|
"learning_rate": 3.218706846633183e-05, |
|
"loss": 0.5367, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4664179104477612, |
|
"grad_norm": 0.4086083805613451, |
|
"learning_rate": 3.1846960485624886e-05, |
|
"loss": 0.5327, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4710820895522388, |
|
"grad_norm": 0.39895267005686896, |
|
"learning_rate": 3.150581755037877e-05, |
|
"loss": 0.5449, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.47574626865671643, |
|
"grad_norm": 0.32662220814932813, |
|
"learning_rate": 3.1163720882272516e-05, |
|
"loss": 0.5397, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.480410447761194, |
|
"grad_norm": 0.3375934491741763, |
|
"learning_rate": 3.08207519300565e-05, |
|
"loss": 0.5385, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.48507462686567165, |
|
"grad_norm": 0.34787210971968974, |
|
"learning_rate": 3.047699235016056e-05, |
|
"loss": 0.53, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.48973880597014924, |
|
"grad_norm": 0.39620710906533535, |
|
"learning_rate": 3.0132523987252658e-05, |
|
"loss": 0.5335, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4944029850746269, |
|
"grad_norm": 0.3784201727069867, |
|
"learning_rate": 2.9787428854752736e-05, |
|
"loss": 0.5234, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.49906716417910446, |
|
"grad_norm": 0.36372530063574576, |
|
"learning_rate": 2.9441789115306402e-05, |
|
"loss": 0.5449, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.503731343283582, |
|
"grad_norm": 0.33385903740532513, |
|
"learning_rate": 2.9095687061223058e-05, |
|
"loss": 0.5411, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5083955223880597, |
|
"grad_norm": 0.3634795150059451, |
|
"learning_rate": 2.874920509488319e-05, |
|
"loss": 0.544, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5130597014925373, |
|
"grad_norm": 0.3740217231457619, |
|
"learning_rate": 2.8402425709119435e-05, |
|
"loss": 0.5392, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5177238805970149, |
|
"grad_norm": 0.32673424561583636, |
|
"learning_rate": 2.8055431467576106e-05, |
|
"loss": 0.5372, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5223880597014925, |
|
"grad_norm": 0.32989889996277566, |
|
"learning_rate": 2.7708304985051868e-05, |
|
"loss": 0.5406, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5270522388059702, |
|
"grad_norm": 0.34578592805977293, |
|
"learning_rate": 2.7361128907830253e-05, |
|
"loss": 0.5315, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5317164179104478, |
|
"grad_norm": 0.3355523828461531, |
|
"learning_rate": 2.7013985894002623e-05, |
|
"loss": 0.5219, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5363805970149254, |
|
"grad_norm": 0.32374626691721103, |
|
"learning_rate": 2.6666958593788405e-05, |
|
"loss": 0.5373, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5410447761194029, |
|
"grad_norm": 0.3498631050042572, |
|
"learning_rate": 2.6320129629857093e-05, |
|
"loss": 0.5415, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5457089552238806, |
|
"grad_norm": 0.31152996247408016, |
|
"learning_rate": 2.597358157765692e-05, |
|
"loss": 0.5293, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5503731343283582, |
|
"grad_norm": 0.3008488970775489, |
|
"learning_rate": 2.56273969457547e-05, |
|
"loss": 0.5269, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5550373134328358, |
|
"grad_norm": 0.3679099326165536, |
|
"learning_rate": 2.528165815619162e-05, |
|
"loss": 0.5367, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5597014925373134, |
|
"grad_norm": 0.3216377349194825, |
|
"learning_rate": 2.4936447524859625e-05, |
|
"loss": 0.5278, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5643656716417911, |
|
"grad_norm": 0.3087685158708632, |
|
"learning_rate": 2.459184724190308e-05, |
|
"loss": 0.5202, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5690298507462687, |
|
"grad_norm": 0.3391573089678486, |
|
"learning_rate": 2.4247939352150386e-05, |
|
"loss": 0.5345, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5736940298507462, |
|
"grad_norm": 0.3255460099079437, |
|
"learning_rate": 2.390480573558012e-05, |
|
"loss": 0.5411, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5783582089552238, |
|
"grad_norm": 0.35983471442574044, |
|
"learning_rate": 2.3562528087826573e-05, |
|
"loss": 0.5257, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5830223880597015, |
|
"grad_norm": 0.32544671257325375, |
|
"learning_rate": 2.3221187900729003e-05, |
|
"loss": 0.5293, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5876865671641791, |
|
"grad_norm": 0.34956657987333445, |
|
"learning_rate": 2.2880866442929544e-05, |
|
"loss": 0.531, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5923507462686567, |
|
"grad_norm": 0.32518866529909685, |
|
"learning_rate": 2.254164474052416e-05, |
|
"loss": 0.5342, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5970149253731343, |
|
"grad_norm": 0.33351983201189994, |
|
"learning_rate": 2.2203603557771447e-05, |
|
"loss": 0.5183, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.601679104477612, |
|
"grad_norm": 0.3254337838136604, |
|
"learning_rate": 2.186682337786365e-05, |
|
"loss": 0.5306, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6063432835820896, |
|
"grad_norm": 0.3010923556830084, |
|
"learning_rate": 2.153138438376473e-05, |
|
"loss": 0.5183, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6110074626865671, |
|
"grad_norm": 0.3728436902441161, |
|
"learning_rate": 2.119736643911979e-05, |
|
"loss": 0.5271, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.6156716417910447, |
|
"grad_norm": 0.32934721950944257, |
|
"learning_rate": 2.0864849069240645e-05, |
|
"loss": 0.531, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6203358208955224, |
|
"grad_norm": 0.32241891261617145, |
|
"learning_rate": 2.0533911442171805e-05, |
|
"loss": 0.527, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 0.34418976349661284, |
|
"learning_rate": 2.0204632349841667e-05, |
|
"loss": 0.5243, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.6296641791044776, |
|
"grad_norm": 0.3108019920881547, |
|
"learning_rate": 1.9877090189303182e-05, |
|
"loss": 0.5316, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.6343283582089553, |
|
"grad_norm": 0.31926070186961547, |
|
"learning_rate": 1.9551362944068462e-05, |
|
"loss": 0.5243, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6389925373134329, |
|
"grad_norm": 0.3155811744495747, |
|
"learning_rate": 1.922752816554204e-05, |
|
"loss": 0.5241, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6436567164179104, |
|
"grad_norm": 0.2937615368459596, |
|
"learning_rate": 1.890566295455678e-05, |
|
"loss": 0.518, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.648320895522388, |
|
"grad_norm": 0.29046163409682196, |
|
"learning_rate": 1.858584394301728e-05, |
|
"loss": 0.5221, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6529850746268657, |
|
"grad_norm": 0.3145402617953687, |
|
"learning_rate": 1.8268147275654707e-05, |
|
"loss": 0.5394, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6576492537313433, |
|
"grad_norm": 0.29543998229149865, |
|
"learning_rate": 1.7952648591897858e-05, |
|
"loss": 0.522, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6623134328358209, |
|
"grad_norm": 0.36052987636160755, |
|
"learning_rate": 1.7639423007864252e-05, |
|
"loss": 0.5138, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6669776119402985, |
|
"grad_norm": 0.3209855069869533, |
|
"learning_rate": 1.7328545098476106e-05, |
|
"loss": 0.5173, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6716417910447762, |
|
"grad_norm": 0.308357685515476, |
|
"learning_rate": 1.702008887970491e-05, |
|
"loss": 0.5197, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6763059701492538, |
|
"grad_norm": 0.3329034887769105, |
|
"learning_rate": 1.671412779094926e-05, |
|
"loss": 0.5235, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6809701492537313, |
|
"grad_norm": 0.3047861045310467, |
|
"learning_rate": 1.6410734677549872e-05, |
|
"loss": 0.5197, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6856343283582089, |
|
"grad_norm": 0.31416211733218163, |
|
"learning_rate": 1.6109981773446036e-05, |
|
"loss": 0.521, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6902985074626866, |
|
"grad_norm": 0.28132655200766216, |
|
"learning_rate": 1.58119406839777e-05, |
|
"loss": 0.5195, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6949626865671642, |
|
"grad_norm": 0.2814236111750379, |
|
"learning_rate": 1.5516682368837133e-05, |
|
"loss": 0.5149, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6996268656716418, |
|
"grad_norm": 0.30912591121226046, |
|
"learning_rate": 1.5224277125174388e-05, |
|
"loss": 0.5313, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.7042910447761194, |
|
"grad_norm": 0.30995429354155263, |
|
"learning_rate": 1.4934794570860416e-05, |
|
"loss": 0.5322, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.7089552238805971, |
|
"grad_norm": 0.3087346682651283, |
|
"learning_rate": 1.464830362791204e-05, |
|
"loss": 0.5183, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.7136194029850746, |
|
"grad_norm": 0.3167565478672606, |
|
"learning_rate": 1.4364872506082425e-05, |
|
"loss": 0.526, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.7182835820895522, |
|
"grad_norm": 0.3133737836701582, |
|
"learning_rate": 1.4084568686621314e-05, |
|
"loss": 0.5292, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7229477611940298, |
|
"grad_norm": 0.30698622747486176, |
|
"learning_rate": 1.3807458906208546e-05, |
|
"loss": 0.536, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7276119402985075, |
|
"grad_norm": 0.292344961823975, |
|
"learning_rate": 1.3533609141065008e-05, |
|
"loss": 0.5153, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7322761194029851, |
|
"grad_norm": 0.2958271040668866, |
|
"learning_rate": 1.326308459124447e-05, |
|
"loss": 0.5185, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7369402985074627, |
|
"grad_norm": 0.29371243569106203, |
|
"learning_rate": 1.299594966511038e-05, |
|
"loss": 0.5272, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7416044776119403, |
|
"grad_norm": 0.2917942436195178, |
|
"learning_rate": 1.2732267964001033e-05, |
|
"loss": 0.5153, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.746268656716418, |
|
"grad_norm": 0.30518780549956975, |
|
"learning_rate": 1.2472102267086904e-05, |
|
"loss": 0.5217, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.7509328358208955, |
|
"grad_norm": 0.2723694367867143, |
|
"learning_rate": 1.2215514516423813e-05, |
|
"loss": 0.5355, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7555970149253731, |
|
"grad_norm": 0.30521862254139703, |
|
"learning_rate": 1.1962565802205255e-05, |
|
"loss": 0.5144, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.7602611940298507, |
|
"grad_norm": 0.2820686752314443, |
|
"learning_rate": 1.1713316348217673e-05, |
|
"loss": 0.5193, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7649253731343284, |
|
"grad_norm": 0.3048405563450722, |
|
"learning_rate": 1.1467825497501954e-05, |
|
"loss": 0.5039, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.769589552238806, |
|
"grad_norm": 0.27703352458545016, |
|
"learning_rate": 1.1226151698224597e-05, |
|
"loss": 0.5249, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7742537313432836, |
|
"grad_norm": 0.2806197704847487, |
|
"learning_rate": 1.0988352489762006e-05, |
|
"loss": 0.5289, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7789179104477612, |
|
"grad_norm": 0.2748109945673585, |
|
"learning_rate": 1.0754484489001085e-05, |
|
"loss": 0.5192, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7835820895522388, |
|
"grad_norm": 0.29771643664474096, |
|
"learning_rate": 1.052460337685951e-05, |
|
"loss": 0.518, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7882462686567164, |
|
"grad_norm": 0.27387569498834374, |
|
"learning_rate": 1.0298763885028839e-05, |
|
"loss": 0.5141, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.792910447761194, |
|
"grad_norm": 0.2670369542440758, |
|
"learning_rate": 1.0077019782943584e-05, |
|
"loss": 0.5196, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7975746268656716, |
|
"grad_norm": 0.262789307862455, |
|
"learning_rate": 9.859423864979441e-06, |
|
"loss": 0.5081, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.8022388059701493, |
|
"grad_norm": 0.28509078304386815, |
|
"learning_rate": 9.646027937883622e-06, |
|
"loss": 0.5272, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.8069029850746269, |
|
"grad_norm": 0.30907587460324537, |
|
"learning_rate": 9.436882808440334e-06, |
|
"loss": 0.5285, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.8115671641791045, |
|
"grad_norm": 0.26048833998062026, |
|
"learning_rate": 9.232038271374377e-06, |
|
"loss": 0.5105, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.816231343283582, |
|
"grad_norm": 0.26919840136919315, |
|
"learning_rate": 9.031543097495638e-06, |
|
"loss": 0.515, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.8208955223880597, |
|
"grad_norm": 0.2992648125729761, |
|
"learning_rate": 8.835445022087426e-06, |
|
"loss": 0.5131, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.8255597014925373, |
|
"grad_norm": 0.2723483896878325, |
|
"learning_rate": 8.6437907335413e-06, |
|
"loss": 0.5231, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8302238805970149, |
|
"grad_norm": 0.262308992285445, |
|
"learning_rate": 8.456625862241193e-06, |
|
"loss": 0.5253, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.8348880597014925, |
|
"grad_norm": 0.27940346598038684, |
|
"learning_rate": 8.273994969699394e-06, |
|
"loss": 0.5195, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8395522388059702, |
|
"grad_norm": 0.2573250994947132, |
|
"learning_rate": 8.095941537947057e-06, |
|
"loss": 0.5224, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8442164179104478, |
|
"grad_norm": 0.2746489902348466, |
|
"learning_rate": 7.922507959181673e-06, |
|
"loss": 0.5107, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8488805970149254, |
|
"grad_norm": 0.2546718927089244, |
|
"learning_rate": 7.753735525674059e-06, |
|
"loss": 0.5169, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8535447761194029, |
|
"grad_norm": 0.3128597706256909, |
|
"learning_rate": 7.58966441993719e-06, |
|
"loss": 0.5135, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8582089552238806, |
|
"grad_norm": 0.29147238348317533, |
|
"learning_rate": 7.430333705159286e-06, |
|
"loss": 0.5346, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8628731343283582, |
|
"grad_norm": 0.3039841418661845, |
|
"learning_rate": 7.275781315903374e-06, |
|
"loss": 0.533, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8675373134328358, |
|
"grad_norm": 0.26748082202802587, |
|
"learning_rate": 7.126044049075548e-06, |
|
"loss": 0.5244, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8722014925373134, |
|
"grad_norm": 0.2574220292531603, |
|
"learning_rate": 6.9811575551641224e-06, |
|
"loss": 0.512, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8768656716417911, |
|
"grad_norm": 0.2697051023022897, |
|
"learning_rate": 6.8411563297516995e-06, |
|
"loss": 0.506, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8815298507462687, |
|
"grad_norm": 0.2727834127820519, |
|
"learning_rate": 6.706073705302254e-06, |
|
"loss": 0.5175, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8861940298507462, |
|
"grad_norm": 0.2726393047900149, |
|
"learning_rate": 6.575941843225068e-06, |
|
"loss": 0.5078, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8908582089552238, |
|
"grad_norm": 0.25953360021866434, |
|
"learning_rate": 6.450791726217538e-06, |
|
"loss": 0.5153, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8955223880597015, |
|
"grad_norm": 0.25309315963307893, |
|
"learning_rate": 6.330653150888617e-06, |
|
"loss": 0.519, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.9001865671641791, |
|
"grad_norm": 0.25585840582334424, |
|
"learning_rate": 6.215554720664598e-06, |
|
"loss": 0.5251, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.9048507462686567, |
|
"grad_norm": 0.2710531901317892, |
|
"learning_rate": 6.105523838979022e-06, |
|
"loss": 0.525, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.9095149253731343, |
|
"grad_norm": 0.2602937977745067, |
|
"learning_rate": 6.000586702748301e-06, |
|
"loss": 0.5108, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.914179104477612, |
|
"grad_norm": 0.254290516488359, |
|
"learning_rate": 5.900768296134551e-06, |
|
"loss": 0.5125, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.9188432835820896, |
|
"grad_norm": 0.2734352638891298, |
|
"learning_rate": 5.8060923845971825e-06, |
|
"loss": 0.5206, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.9235074626865671, |
|
"grad_norm": 0.24782218153928054, |
|
"learning_rate": 5.7165815092346825e-06, |
|
"loss": 0.5065, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9281716417910447, |
|
"grad_norm": 0.32481746469443235, |
|
"learning_rate": 5.632256981417845e-06, |
|
"loss": 0.5148, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9328358208955224, |
|
"grad_norm": 0.26237184302764716, |
|
"learning_rate": 5.553138877715833e-06, |
|
"loss": 0.5168, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 0.24993877270733902, |
|
"learning_rate": 5.479246035116201e-06, |
|
"loss": 0.5096, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.9421641791044776, |
|
"grad_norm": 0.2577951338789917, |
|
"learning_rate": 5.410596046540051e-06, |
|
"loss": 0.5119, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.9468283582089553, |
|
"grad_norm": 0.27362141957349273, |
|
"learning_rate": 5.347205256653387e-06, |
|
"loss": 0.5146, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.9514925373134329, |
|
"grad_norm": 0.284901166454586, |
|
"learning_rate": 5.28908875797568e-06, |
|
"loss": 0.5078, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9561567164179104, |
|
"grad_norm": 0.2547318958730913, |
|
"learning_rate": 5.236260387286509e-06, |
|
"loss": 0.514, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.960820895522388, |
|
"grad_norm": 0.25616709172231145, |
|
"learning_rate": 5.1887327223312296e-06, |
|
"loss": 0.5108, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.9654850746268657, |
|
"grad_norm": 0.24877070884034994, |
|
"learning_rate": 5.1465170788263595e-06, |
|
"loss": 0.5187, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.9701492537313433, |
|
"grad_norm": 0.26291431270775706, |
|
"learning_rate": 5.109623507765466e-06, |
|
"loss": 0.5143, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.9748134328358209, |
|
"grad_norm": 0.26168106195147633, |
|
"learning_rate": 5.07806079302615e-06, |
|
"loss": 0.5214, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.9794776119402985, |
|
"grad_norm": 0.2525330885754495, |
|
"learning_rate": 5.051836449278715e-06, |
|
"loss": 0.5161, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9841417910447762, |
|
"grad_norm": 0.26797294104450187, |
|
"learning_rate": 5.030956720197035e-06, |
|
"loss": 0.5086, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.9888059701492538, |
|
"grad_norm": 0.2522869759491293, |
|
"learning_rate": 5.015426576972003e-06, |
|
"loss": 0.5263, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9934701492537313, |
|
"grad_norm": 0.27283752347105966, |
|
"learning_rate": 5.005249717127964e-06, |
|
"loss": 0.5173, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9981343283582089, |
|
"grad_norm": 0.26831346058816563, |
|
"learning_rate": 5.000428563642382e-06, |
|
"loss": 0.5177, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1072, |
|
"total_flos": 488507298545664.0, |
|
"train_loss": 0.5501598766935405, |
|
"train_runtime": 34536.877, |
|
"train_samples_per_second": 0.993, |
|
"train_steps_per_second": 0.031 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1072, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 488507298545664.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |