{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 978, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003067484662576687, |
|
"grad_norm": 177.21839775031285, |
|
"learning_rate": 2.0408163265306121e-07, |
|
"loss": 3.0795, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.015337423312883436, |
|
"grad_norm": 81.30898025729103, |
|
"learning_rate": 1.0204081632653063e-06, |
|
"loss": 2.7402, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03067484662576687, |
|
"grad_norm": 107.55431230634855, |
|
"learning_rate": 2.0408163265306125e-06, |
|
"loss": 2.5637, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.046012269938650305, |
|
"grad_norm": 17.843083480868863, |
|
"learning_rate": 3.0612244897959185e-06, |
|
"loss": 2.3819, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.06134969325153374, |
|
"grad_norm": 5.541698118155843, |
|
"learning_rate": 4.081632653061225e-06, |
|
"loss": 1.9572, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07668711656441718, |
|
"grad_norm": 10.409288444104131, |
|
"learning_rate": 5.1020408163265315e-06, |
|
"loss": 1.8593, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.09202453987730061, |
|
"grad_norm": 5.082002576187776, |
|
"learning_rate": 6.122448979591837e-06, |
|
"loss": 1.6901, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.10736196319018405, |
|
"grad_norm": 6.765872287855098, |
|
"learning_rate": 7.1428571428571436e-06, |
|
"loss": 1.6053, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.12269938650306748, |
|
"grad_norm": 11.409653818105163, |
|
"learning_rate": 8.16326530612245e-06, |
|
"loss": 1.5093, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.13803680981595093, |
|
"grad_norm": 6.4162354324510895, |
|
"learning_rate": 9.183673469387756e-06, |
|
"loss": 1.5069, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.15337423312883436, |
|
"grad_norm": 3.7675519341487136, |
|
"learning_rate": 1.0204081632653063e-05, |
|
"loss": 1.575, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1687116564417178, |
|
"grad_norm": 4.369929690874893, |
|
"learning_rate": 1.1224489795918367e-05, |
|
"loss": 1.464, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.18404907975460122, |
|
"grad_norm": 5.732870449440104, |
|
"learning_rate": 1.2244897959183674e-05, |
|
"loss": 1.4131, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.19938650306748465, |
|
"grad_norm": 2.9718012621338654, |
|
"learning_rate": 1.326530612244898e-05, |
|
"loss": 1.3903, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.2147239263803681, |
|
"grad_norm": 2.986883686963562, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 1.3699, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.23006134969325154, |
|
"grad_norm": 3.0609005774260116, |
|
"learning_rate": 1.530612244897959e-05, |
|
"loss": 1.4099, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.24539877300613497, |
|
"grad_norm": 3.391633696280962, |
|
"learning_rate": 1.63265306122449e-05, |
|
"loss": 1.4246, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2607361963190184, |
|
"grad_norm": 2.446544689335844, |
|
"learning_rate": 1.7346938775510206e-05, |
|
"loss": 1.3225, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.27607361963190186, |
|
"grad_norm": 2.892905300954704, |
|
"learning_rate": 1.836734693877551e-05, |
|
"loss": 1.4131, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.29141104294478526, |
|
"grad_norm": 2.7276895053980588, |
|
"learning_rate": 1.9387755102040817e-05, |
|
"loss": 1.2981, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.3067484662576687, |
|
"grad_norm": 3.1096897823921212, |
|
"learning_rate": 1.9999745104274995e-05, |
|
"loss": 1.2867, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.3220858895705521, |
|
"grad_norm": 2.6017423936904605, |
|
"learning_rate": 1.9996877676598733e-05, |
|
"loss": 1.3923, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3374233128834356, |
|
"grad_norm": 2.357782746709104, |
|
"learning_rate": 1.9990825118233958e-05, |
|
"loss": 1.2889, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.35276073619631904, |
|
"grad_norm": 4.4128065982273155, |
|
"learning_rate": 1.9981589357601727e-05, |
|
"loss": 1.4107, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.36809815950920244, |
|
"grad_norm": 3.425193387107373, |
|
"learning_rate": 1.9969173337331283e-05, |
|
"loss": 1.3417, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3834355828220859, |
|
"grad_norm": 2.8732197303122518, |
|
"learning_rate": 1.9953581013322503e-05, |
|
"loss": 1.3121, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.3987730061349693, |
|
"grad_norm": 3.121331048192887, |
|
"learning_rate": 1.99348173534855e-05, |
|
"loss": 1.2892, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.41411042944785276, |
|
"grad_norm": 2.340124072432182, |
|
"learning_rate": 1.9912888336157793e-05, |
|
"loss": 1.3019, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.4294478527607362, |
|
"grad_norm": 2.5077068756467393, |
|
"learning_rate": 1.9887800948199496e-05, |
|
"loss": 1.2821, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4447852760736196, |
|
"grad_norm": 5.649859070977053, |
|
"learning_rate": 1.9859563182767268e-05, |
|
"loss": 1.2652, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4601226993865031, |
|
"grad_norm": 2.852076057226547, |
|
"learning_rate": 1.9828184036767556e-05, |
|
"loss": 1.2215, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4754601226993865, |
|
"grad_norm": 2.3152692963670853, |
|
"learning_rate": 1.9793673507990086e-05, |
|
"loss": 1.2821, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.49079754601226994, |
|
"grad_norm": 2.9501006805800682, |
|
"learning_rate": 1.9756042591922436e-05, |
|
"loss": 1.3246, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.5061349693251533, |
|
"grad_norm": 2.208339912352479, |
|
"learning_rate": 1.9715303278246724e-05, |
|
"loss": 1.3105, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.5214723926380368, |
|
"grad_norm": 2.377033071477205, |
|
"learning_rate": 1.9671468547019575e-05, |
|
"loss": 1.2158, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.5368098159509203, |
|
"grad_norm": 2.2769169863909706, |
|
"learning_rate": 1.9624552364536472e-05, |
|
"loss": 1.3063, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5521472392638037, |
|
"grad_norm": 3.488802301164089, |
|
"learning_rate": 1.9574569678881965e-05, |
|
"loss": 1.3641, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5674846625766872, |
|
"grad_norm": 3.2567021860207923, |
|
"learning_rate": 1.952153641516698e-05, |
|
"loss": 1.1764, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5828220858895705, |
|
"grad_norm": 2.5611122809445117, |
|
"learning_rate": 1.94654694704549e-05, |
|
"loss": 1.187, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.598159509202454, |
|
"grad_norm": 3.2553565189736906, |
|
"learning_rate": 1.9406386708377956e-05, |
|
"loss": 1.3129, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.6134969325153374, |
|
"grad_norm": 2.866192118065629, |
|
"learning_rate": 1.9344306953445632e-05, |
|
"loss": 1.3816, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.6288343558282209, |
|
"grad_norm": 2.729878771251503, |
|
"learning_rate": 1.9279249985046948e-05, |
|
"loss": 1.3208, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6441717791411042, |
|
"grad_norm": 2.1585127050514115, |
|
"learning_rate": 1.92112365311485e-05, |
|
"loss": 1.2483, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6595092024539877, |
|
"grad_norm": 2.884411746638962, |
|
"learning_rate": 1.9140288261690278e-05, |
|
"loss": 1.2221, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6748466257668712, |
|
"grad_norm": 2.3400822227419704, |
|
"learning_rate": 1.9066427781681314e-05, |
|
"loss": 1.1909, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6901840490797546, |
|
"grad_norm": 2.2424426077461224, |
|
"learning_rate": 1.8989678623997506e-05, |
|
"loss": 1.2427, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.7055214723926381, |
|
"grad_norm": 2.3532187548733847, |
|
"learning_rate": 1.891006524188368e-05, |
|
"loss": 1.3086, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.7208588957055214, |
|
"grad_norm": 2.1575864354340557, |
|
"learning_rate": 1.8827613001162534e-05, |
|
"loss": 1.2051, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.7361963190184049, |
|
"grad_norm": 2.2528114135340807, |
|
"learning_rate": 1.8742348172152728e-05, |
|
"loss": 1.3883, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.7515337423312883, |
|
"grad_norm": 2.20955115974882, |
|
"learning_rate": 1.8654297921298862e-05, |
|
"loss": 1.2169, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7668711656441718, |
|
"grad_norm": 2.1026008601865933, |
|
"learning_rate": 1.856349030251589e-05, |
|
"loss": 1.2356, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7822085889570553, |
|
"grad_norm": 4.721033676079731, |
|
"learning_rate": 1.846995424825079e-05, |
|
"loss": 1.1575, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7975460122699386, |
|
"grad_norm": 2.1852216414908976, |
|
"learning_rate": 1.837371956026433e-05, |
|
"loss": 1.2681, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.8128834355828221, |
|
"grad_norm": 2.4315513970788682, |
|
"learning_rate": 1.8274816900135842e-05, |
|
"loss": 1.2805, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.8282208588957055, |
|
"grad_norm": 1.9970132500864828, |
|
"learning_rate": 1.817327777949407e-05, |
|
"loss": 1.2204, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.843558282208589, |
|
"grad_norm": 2.438536266127501, |
|
"learning_rate": 1.806913454997717e-05, |
|
"loss": 1.2388, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.8588957055214724, |
|
"grad_norm": 1.9096886848188332, |
|
"learning_rate": 1.7962420392925066e-05, |
|
"loss": 1.3021, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8742331288343558, |
|
"grad_norm": 2.0120912351402875, |
|
"learning_rate": 1.785316930880745e-05, |
|
"loss": 1.1835, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8895705521472392, |
|
"grad_norm": 1.982796410249312, |
|
"learning_rate": 1.7741416106390828e-05, |
|
"loss": 1.1753, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.9049079754601227, |
|
"grad_norm": 2.4058710177193525, |
|
"learning_rate": 1.7627196391647982e-05, |
|
"loss": 1.2109, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.9202453987730062, |
|
"grad_norm": 1.9621072259264853, |
|
"learning_rate": 1.75105465564135e-05, |
|
"loss": 1.2202, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.9355828220858896, |
|
"grad_norm": 8.348303745211982, |
|
"learning_rate": 1.739150376678883e-05, |
|
"loss": 1.299, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.950920245398773, |
|
"grad_norm": 2.188760659995286, |
|
"learning_rate": 1.727010595130074e-05, |
|
"loss": 1.3906, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.9662576687116564, |
|
"grad_norm": 2.2138363998997828, |
|
"learning_rate": 1.714639178881678e-05, |
|
"loss": 1.2704, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9815950920245399, |
|
"grad_norm": 7.207718966765089, |
|
"learning_rate": 1.7020400696221737e-05, |
|
"loss": 1.227, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9969325153374233, |
|
"grad_norm": 2.033313094726782, |
|
"learning_rate": 1.6892172815858896e-05, |
|
"loss": 1.265, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.214716911315918, |
|
"eval_runtime": 7.4506, |
|
"eval_samples_per_second": 21.341, |
|
"eval_steps_per_second": 5.369, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.0122699386503067, |
|
"grad_norm": 2.246404161643353, |
|
"learning_rate": 1.6761749002740195e-05, |
|
"loss": 1.1231, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.0276073619631902, |
|
"grad_norm": 2.6307191299518595, |
|
"learning_rate": 1.662917081152932e-05, |
|
"loss": 1.0474, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.0429447852760736, |
|
"grad_norm": 2.2466529263098596, |
|
"learning_rate": 1.6494480483301836e-05, |
|
"loss": 0.9948, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.058282208588957, |
|
"grad_norm": 2.0590187620332308, |
|
"learning_rate": 1.635772093208669e-05, |
|
"loss": 0.9675, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.0736196319018405, |
|
"grad_norm": 2.5079641466397864, |
|
"learning_rate": 1.6218935731193223e-05, |
|
"loss": 0.9397, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.0889570552147239, |
|
"grad_norm": 2.2015613521602053, |
|
"learning_rate": 1.6078169099328196e-05, |
|
"loss": 0.9576, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.1042944785276074, |
|
"grad_norm": 2.5678723465584303, |
|
"learning_rate": 1.5935465886507143e-05, |
|
"loss": 1.1066, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.1196319018404908, |
|
"grad_norm": 2.1152311130980976, |
|
"learning_rate": 1.579087155976459e-05, |
|
"loss": 1.1337, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.1349693251533743, |
|
"grad_norm": 2.36694508137547, |
|
"learning_rate": 1.5644432188667695e-05, |
|
"loss": 1.0043, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.1503067484662577, |
|
"grad_norm": 2.257597203782575, |
|
"learning_rate": 1.5496194430637903e-05, |
|
"loss": 0.9548, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.165644171779141, |
|
"grad_norm": 2.224173969408768, |
|
"learning_rate": 1.5346205516085305e-05, |
|
"loss": 1.0267, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.1809815950920246, |
|
"grad_norm": 2.785039079502963, |
|
"learning_rate": 1.5194513233360439e-05, |
|
"loss": 0.9385, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.196319018404908, |
|
"grad_norm": 2.8341094388368715, |
|
"learning_rate": 1.504116591352832e-05, |
|
"loss": 1.1401, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.2116564417177913, |
|
"grad_norm": 2.5109029462747303, |
|
"learning_rate": 1.4886212414969551e-05, |
|
"loss": 0.9811, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.2269938650306749, |
|
"grad_norm": 2.364253368402221, |
|
"learning_rate": 1.4729702107813438e-05, |
|
"loss": 0.9896, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.2423312883435582, |
|
"grad_norm": 2.833077039900493, |
|
"learning_rate": 1.4571684858208045e-05, |
|
"loss": 1.0038, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.2576687116564418, |
|
"grad_norm": 2.986726012938249, |
|
"learning_rate": 1.4412211012432213e-05, |
|
"loss": 1.0369, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.2730061349693251, |
|
"grad_norm": 2.19472166570541, |
|
"learning_rate": 1.4251331380854602e-05, |
|
"loss": 1.0644, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.2883435582822087, |
|
"grad_norm": 2.539259944541222, |
|
"learning_rate": 1.408909722174487e-05, |
|
"loss": 0.9615, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.303680981595092, |
|
"grad_norm": 2.3203428811857667, |
|
"learning_rate": 1.3925560224942145e-05, |
|
"loss": 0.933, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.3190184049079754, |
|
"grad_norm": 2.190402503264761, |
|
"learning_rate": 1.3760772495385998e-05, |
|
"loss": 0.9614, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.334355828220859, |
|
"grad_norm": 2.4516332947183055, |
|
"learning_rate": 1.3594786536515154e-05, |
|
"loss": 0.9784, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.3496932515337423, |
|
"grad_norm": 2.1838918342014897, |
|
"learning_rate": 1.3427655233539227e-05, |
|
"loss": 0.9477, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.3650306748466257, |
|
"grad_norm": 2.1974830315305867, |
|
"learning_rate": 1.3259431836588843e-05, |
|
"loss": 0.9802, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.3803680981595092, |
|
"grad_norm": 2.337562201817739, |
|
"learning_rate": 1.3090169943749475e-05, |
|
"loss": 1.001, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.3957055214723926, |
|
"grad_norm": 2.657566030569729, |
|
"learning_rate": 1.2919923483984415e-05, |
|
"loss": 0.9072, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.4110429447852761, |
|
"grad_norm": 2.5556378147753787, |
|
"learning_rate": 1.2748746699952338e-05, |
|
"loss": 1.0344, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.4263803680981595, |
|
"grad_norm": 2.490197731186031, |
|
"learning_rate": 1.2576694130724905e-05, |
|
"loss": 1.1441, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.441717791411043, |
|
"grad_norm": 2.456053324999455, |
|
"learning_rate": 1.2403820594409926e-05, |
|
"loss": 0.9665, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.4570552147239264, |
|
"grad_norm": 2.7963958883475586, |
|
"learning_rate": 1.2230181170685636e-05, |
|
"loss": 0.9733, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.4723926380368098, |
|
"grad_norm": 2.3556048604718955, |
|
"learning_rate": 1.2055831183251608e-05, |
|
"loss": 1.0198, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.4877300613496933, |
|
"grad_norm": 2.7492055859532365, |
|
"learning_rate": 1.1880826182201926e-05, |
|
"loss": 0.9002, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.5030674846625767, |
|
"grad_norm": 2.138357362754652, |
|
"learning_rate": 1.170522192632624e-05, |
|
"loss": 0.9411, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.51840490797546, |
|
"grad_norm": 2.1341509025323644, |
|
"learning_rate": 1.1529074365344302e-05, |
|
"loss": 1.0553, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.5337423312883436, |
|
"grad_norm": 2.1458405128765627, |
|
"learning_rate": 1.1352439622079689e-05, |
|
"loss": 1.0077, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.5490797546012272, |
|
"grad_norm": 2.1132360957639698, |
|
"learning_rate": 1.1175373974578378e-05, |
|
"loss": 0.9168, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.5644171779141103, |
|
"grad_norm": 2.5113223130286686, |
|
"learning_rate": 1.0997933838177828e-05, |
|
"loss": 1.0312, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.5797546012269938, |
|
"grad_norm": 2.0666811534447906, |
|
"learning_rate": 1.0820175747532373e-05, |
|
"loss": 1.0088, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.5950920245398774, |
|
"grad_norm": 2.18910972259404, |
|
"learning_rate": 1.064215633860055e-05, |
|
"loss": 0.9566, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.6104294478527608, |
|
"grad_norm": 2.1335643657003764, |
|
"learning_rate": 1.0463932330600197e-05, |
|
"loss": 0.9537, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.6257668711656441, |
|
"grad_norm": 2.211695419294786, |
|
"learning_rate": 1.0285560507936962e-05, |
|
"loss": 0.9519, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.6411042944785277, |
|
"grad_norm": 1.9927545915886413, |
|
"learning_rate": 1.010709770211212e-05, |
|
"loss": 1.0909, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.656441717791411, |
|
"grad_norm": 2.1799108916751266, |
|
"learning_rate": 9.928600773615306e-06, |
|
"loss": 1.0668, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.6717791411042944, |
|
"grad_norm": 2.0870390082509824, |
|
"learning_rate": 9.750126593808083e-06, |
|
"loss": 0.9425, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.687116564417178, |
|
"grad_norm": 2.219919825754393, |
|
"learning_rate": 9.571732026803978e-06, |
|
"loss": 0.9223, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.7024539877300615, |
|
"grad_norm": 2.0158507556929712, |
|
"learning_rate": 9.393473911350895e-06, |
|
"loss": 0.9779, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.7177914110429446, |
|
"grad_norm": 2.146438648027479, |
|
"learning_rate": 9.215409042721553e-06, |
|
"loss": 0.9929, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.7331288343558282, |
|
"grad_norm": 2.2820045196023115, |
|
"learning_rate": 9.037594154617811e-06, |
|
"loss": 0.9746, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.7484662576687118, |
|
"grad_norm": 2.1811945127821626, |
|
"learning_rate": 8.860085901094595e-06, |
|
"loss": 0.9463, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.7638036809815951, |
|
"grad_norm": 2.2195282023793976, |
|
"learning_rate": 8.682940838509206e-06, |
|
"loss": 0.8636, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.7791411042944785, |
|
"grad_norm": 1.9502264600710417, |
|
"learning_rate": 8.50621540750175e-06, |
|
"loss": 0.9595, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.794478527607362, |
|
"grad_norm": 2.1028286450122784, |
|
"learning_rate": 8.329965915012451e-06, |
|
"loss": 1.0149, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.8098159509202454, |
|
"grad_norm": 1.996134849337679, |
|
"learning_rate": 8.154248516341547e-06, |
|
"loss": 0.9714, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.8251533742331287, |
|
"grad_norm": 2.0229355706203673, |
|
"learning_rate": 7.979119197257505e-06, |
|
"loss": 1.0042, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.8404907975460123, |
|
"grad_norm": 2.198065583918555, |
|
"learning_rate": 7.804633756159258e-06, |
|
"loss": 0.9354, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.8558282208588959, |
|
"grad_norm": 2.0001193503472248, |
|
"learning_rate": 7.63084778629813e-06, |
|
"loss": 0.9796, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.871165644171779, |
|
"grad_norm": 2.150238538587084, |
|
"learning_rate": 7.4578166580651335e-06, |
|
"loss": 0.9495, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.8865030674846626, |
|
"grad_norm": 2.1692276656591543, |
|
"learning_rate": 7.285595501349259e-06, |
|
"loss": 0.972, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.9018404907975461, |
|
"grad_norm": 2.2030115157082073, |
|
"learning_rate": 7.114239187972416e-06, |
|
"loss": 0.8763, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.9171779141104295, |
|
"grad_norm": 13.25110262390118, |
|
"learning_rate": 6.94380231420656e-06, |
|
"loss": 0.9911, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.9325153374233128, |
|
"grad_norm": 2.128565521517256, |
|
"learning_rate": 6.774339183378663e-06, |
|
"loss": 0.98, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.9478527607361964, |
|
"grad_norm": 2.446455411382673, |
|
"learning_rate": 6.605903788568962e-06, |
|
"loss": 0.9163, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.9631901840490797, |
|
"grad_norm": 3.6457472773842055, |
|
"learning_rate": 6.438549795408107e-06, |
|
"loss": 0.893, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.978527607361963, |
|
"grad_norm": 2.1918272282922615, |
|
"learning_rate": 6.272330524978613e-06, |
|
"loss": 0.9862, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.9938650306748467, |
|
"grad_norm": 2.173238130186426, |
|
"learning_rate": 6.107298936826086e-06, |
|
"loss": 0.8983, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.211948275566101, |
|
"eval_runtime": 6.699, |
|
"eval_samples_per_second": 23.735, |
|
"eval_steps_per_second": 5.971, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.0092024539877302, |
|
"grad_norm": 2.2524003786276254, |
|
"learning_rate": 5.943507612085661e-06, |
|
"loss": 0.8223, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.0245398773006134, |
|
"grad_norm": 1.9974383389908232, |
|
"learning_rate": 5.781008736728975e-06, |
|
"loss": 0.6493, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.039877300613497, |
|
"grad_norm": 2.9682960559686897, |
|
"learning_rate": 5.619854084937085e-06, |
|
"loss": 0.6784, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.0552147239263805, |
|
"grad_norm": 2.2655157887127024, |
|
"learning_rate": 5.460095002604533e-06, |
|
"loss": 0.6748, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.0705521472392636, |
|
"grad_norm": 2.2085119385865863, |
|
"learning_rate": 5.3017823909799295e-06, |
|
"loss": 0.7027, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.085889570552147, |
|
"grad_norm": 2.27766004579248, |
|
"learning_rate": 5.144966690448159e-06, |
|
"loss": 0.7278, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.1012269938650308, |
|
"grad_norm": 2.128219709033872, |
|
"learning_rate": 4.9896978644594516e-06, |
|
"loss": 0.5989, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.116564417177914, |
|
"grad_norm": 2.4873822979923315, |
|
"learning_rate": 4.836025383610382e-06, |
|
"loss": 0.7357, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.1319018404907975, |
|
"grad_norm": 2.1568784749495244, |
|
"learning_rate": 4.683998209881943e-06, |
|
"loss": 0.714, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.147239263803681, |
|
"grad_norm": 2.3234411443951126, |
|
"learning_rate": 4.533664781039622e-06, |
|
"loss": 0.7281, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.1625766871165646, |
|
"grad_norm": 2.735689720360169, |
|
"learning_rate": 4.385072995200532e-06, |
|
"loss": 0.6257, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.1779141104294477, |
|
"grad_norm": 3.0839192153525934, |
|
"learning_rate": 4.2382701955724724e-06, |
|
"loss": 0.7162, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.1932515337423313, |
|
"grad_norm": 2.4589255584801624, |
|
"learning_rate": 4.093303155369771e-06, |
|
"loss": 0.6726, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.208588957055215, |
|
"grad_norm": 2.3530595892180637, |
|
"learning_rate": 3.950218062910776e-06, |
|
"loss": 0.6077, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.223926380368098, |
|
"grad_norm": 2.2994180114078375, |
|
"learning_rate": 3.8090605069016596e-06, |
|
"loss": 0.6484, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.2392638036809815, |
|
"grad_norm": 2.1614299270488844, |
|
"learning_rate": 3.6698754619112974e-06, |
|
"loss": 0.7606, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.254601226993865, |
|
"grad_norm": 2.148628125479035, |
|
"learning_rate": 3.53270727404179e-06, |
|
"loss": 0.6735, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.2699386503067487, |
|
"grad_norm": 2.791988335379792, |
|
"learning_rate": 3.3975996467992557e-06, |
|
"loss": 0.6685, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.285276073619632, |
|
"grad_norm": 2.4166794649978036, |
|
"learning_rate": 3.2645956271693257e-06, |
|
"loss": 0.6727, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.3006134969325154, |
|
"grad_norm": 2.270680126335755, |
|
"learning_rate": 3.133737591901864e-06, |
|
"loss": 0.6946, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.315950920245399, |
|
"grad_norm": 2.2773152491745043, |
|
"learning_rate": 3.0050672340091723e-06, |
|
"loss": 0.6436, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.331288343558282, |
|
"grad_norm": 2.351791962083414, |
|
"learning_rate": 2.878625549482084e-06, |
|
"loss": 0.671, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.3466257668711656, |
|
"grad_norm": 2.5602963650641604, |
|
"learning_rate": 2.7544528242281323e-06, |
|
"loss": 0.559, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.361963190184049, |
|
"grad_norm": 2.919982636047529, |
|
"learning_rate": 2.6325886212359496e-06, |
|
"loss": 0.608, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.3773006134969323, |
|
"grad_norm": 2.283862507375789, |
|
"learning_rate": 2.51307176797001e-06, |
|
"loss": 0.7129, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.392638036809816, |
|
"grad_norm": 2.5119018741262273, |
|
"learning_rate": 2.395940343999691e-06, |
|
"loss": 0.6617, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.4079754601226995, |
|
"grad_norm": 2.2326958137785997, |
|
"learning_rate": 2.2812316688666735e-06, |
|
"loss": 0.6001, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.4233128834355826, |
|
"grad_norm": 2.4717197828958875, |
|
"learning_rate": 2.1689822901944456e-06, |
|
"loss": 0.6849, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.438650306748466, |
|
"grad_norm": 2.428100475473941, |
|
"learning_rate": 2.0592279720437856e-06, |
|
"loss": 0.6541, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.4539877300613497, |
|
"grad_norm": 2.453498212161667, |
|
"learning_rate": 1.9520036835178667e-06, |
|
"loss": 0.7362, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.4693251533742333, |
|
"grad_norm": 2.202254537031373, |
|
"learning_rate": 1.8473435876206792e-06, |
|
"loss": 0.6467, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.4846625766871164, |
|
"grad_norm": 2.5704434258789557, |
|
"learning_rate": 1.74528103037226e-06, |
|
"loss": 0.7467, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.278793792368562, |
|
"learning_rate": 1.645848530184233e-06, |
|
"loss": 0.6641, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.5153374233128836, |
|
"grad_norm": 2.3206459000474338, |
|
"learning_rate": 1.5490777674990376e-06, |
|
"loss": 0.7399, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.530674846625767, |
|
"grad_norm": 2.2760200456954105, |
|
"learning_rate": 1.4549995746961332e-06, |
|
"loss": 0.6896, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.5460122699386503, |
|
"grad_norm": 2.0966897996478626, |
|
"learning_rate": 1.3636439262684299e-06, |
|
"loss": 0.5792, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.561349693251534, |
|
"grad_norm": 2.273137273352939, |
|
"learning_rate": 1.2750399292720284e-06, |
|
"loss": 0.6727, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.5766871165644174, |
|
"grad_norm": 2.232573385750787, |
|
"learning_rate": 1.1892158140523546e-06, |
|
"loss": 0.7411, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.5920245398773005, |
|
"grad_norm": 2.2477459570297427, |
|
"learning_rate": 1.1061989252496053e-06, |
|
"loss": 0.637, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.607361963190184, |
|
"grad_norm": 2.4503366494149152, |
|
"learning_rate": 1.0260157130864178e-06, |
|
"loss": 0.6591, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.6226993865030677, |
|
"grad_norm": 2.2727030330816707, |
|
"learning_rate": 9.486917249404815e-07, |
|
"loss": 0.6791, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.638036809815951, |
|
"grad_norm": 2.4331538301144695, |
|
"learning_rate": 8.742515972048404e-07, |
|
"loss": 0.689, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.6533742331288344, |
|
"grad_norm": 2.171632535993997, |
|
"learning_rate": 8.027190474384127e-07, |
|
"loss": 0.588, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.668711656441718, |
|
"grad_norm": 2.274712548175332, |
|
"learning_rate": 7.341168668092857e-07, |
|
"loss": 0.5984, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.684049079754601, |
|
"grad_norm": 2.187602871418788, |
|
"learning_rate": 6.684669128331655e-07, |
|
"loss": 0.7899, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.6993865030674846, |
|
"grad_norm": 2.172889231318168, |
|
"learning_rate": 6.057901024092949e-07, |
|
"loss": 0.6107, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.714723926380368, |
|
"grad_norm": 5.181833794013852, |
|
"learning_rate": 5.461064051560705e-07, |
|
"loss": 0.747, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.7300613496932513, |
|
"grad_norm": 2.1284344076378163, |
|
"learning_rate": 4.894348370484648e-07, |
|
"loss": 0.6449, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.745398773006135, |
|
"grad_norm": 2.742405951465162, |
|
"learning_rate": 4.3579345435930454e-07, |
|
"loss": 0.7748, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.7607361963190185, |
|
"grad_norm": 2.453933407406173, |
|
"learning_rate": 3.851993479063154e-07, |
|
"loss": 0.7247, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.7760736196319016, |
|
"grad_norm": 2.3349240625236307, |
|
"learning_rate": 3.3766863760676947e-07, |
|
"loss": 0.6742, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.791411042944785, |
|
"grad_norm": 2.346238889067428, |
|
"learning_rate": 2.93216467341475e-07, |
|
"loss": 0.6736, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.8067484662576687, |
|
"grad_norm": 2.682770493121025, |
|
"learning_rate": 2.5185700012975603e-07, |
|
"loss": 0.7891, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.8220858895705523, |
|
"grad_norm": 2.418319757792411, |
|
"learning_rate": 2.1360341361692517e-07, |
|
"loss": 0.6781, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.837423312883436, |
|
"grad_norm": 2.1865263805130737, |
|
"learning_rate": 1.784678958757291e-07, |
|
"loss": 0.6458, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.852760736196319, |
|
"grad_norm": 2.15121097086734, |
|
"learning_rate": 1.464616415230702e-07, |
|
"loss": 0.639, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.8680981595092025, |
|
"grad_norm": 2.1733157849896547, |
|
"learning_rate": 1.1759484815326294e-07, |
|
"loss": 0.6231, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.883435582822086, |
|
"grad_norm": 2.2028794055351235, |
|
"learning_rate": 9.187671308895418e-08, |
|
"loss": 0.6718, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.8987730061349692, |
|
"grad_norm": 2.7622530458915655, |
|
"learning_rate": 6.931543045073708e-08, |
|
"loss": 0.8227, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.914110429447853, |
|
"grad_norm": 2.1282608794755364, |
|
"learning_rate": 4.991818854640396e-08, |
|
"loss": 0.6131, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.9294478527607364, |
|
"grad_norm": 2.1900416244520295, |
|
"learning_rate": 3.369116758066171e-08, |
|
"loss": 0.6493, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.9447852760736195, |
|
"grad_norm": 2.3718267533912125, |
|
"learning_rate": 2.063953768603799e-08, |
|
"loss": 0.7364, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.960122699386503, |
|
"grad_norm": 2.2239316758606975, |
|
"learning_rate": 1.0767457275615567e-08, |
|
"loss": 0.6457, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.9754601226993866, |
|
"grad_norm": 2.6083276517224374, |
|
"learning_rate": 4.0780717181077015e-09, |
|
"loss": 0.7664, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.9907975460122698, |
|
"grad_norm": 2.554211412831243, |
|
"learning_rate": 5.735123357042405e-10, |
|
"loss": 0.7007, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.3528565168380737, |
|
"eval_runtime": 6.5488, |
|
"eval_samples_per_second": 24.279, |
|
"eval_steps_per_second": 6.108, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 978, |
|
"total_flos": 3642602029056.0, |
|
"train_loss": 1.0182966589927673, |
|
"train_runtime": 588.3296, |
|
"train_samples_per_second": 6.649, |
|
"train_steps_per_second": 1.662 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 978, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3642602029056.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |