|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9997593647228684, |
|
"eval_steps": 500, |
|
"global_step": 3116, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003208470361755033, |
|
"grad_norm": 6.442734116834532, |
|
"learning_rate": 1.9942233632862646e-05, |
|
"loss": 0.646, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.006416940723510066, |
|
"grad_norm": 16.670848007363354, |
|
"learning_rate": 1.9878048780487806e-05, |
|
"loss": 0.5538, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0096254110852651, |
|
"grad_norm": 5.007229537641644, |
|
"learning_rate": 1.9813863928112966e-05, |
|
"loss": 0.5532, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.012833881447020133, |
|
"grad_norm": 6.381096528206163, |
|
"learning_rate": 1.974967907573813e-05, |
|
"loss": 0.5403, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.016042351808775165, |
|
"grad_norm": 3.202143132985174, |
|
"learning_rate": 1.968549422336329e-05, |
|
"loss": 0.5484, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0192508221705302, |
|
"grad_norm": 5.974056462183794, |
|
"learning_rate": 1.962130937098845e-05, |
|
"loss": 0.5738, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.022459292532285232, |
|
"grad_norm": 8.235054035652611, |
|
"learning_rate": 1.955712451861361e-05, |
|
"loss": 0.5311, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.025667762894040266, |
|
"grad_norm": 8.918082235487985, |
|
"learning_rate": 1.9492939666238767e-05, |
|
"loss": 0.5128, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0288762332557953, |
|
"grad_norm": 7.299533718731339, |
|
"learning_rate": 1.942875481386393e-05, |
|
"loss": 0.5444, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03208470361755033, |
|
"grad_norm": 6.793642690075093, |
|
"learning_rate": 1.936456996148909e-05, |
|
"loss": 0.4792, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03529317397930536, |
|
"grad_norm": 5.35608624427688, |
|
"learning_rate": 1.930038510911425e-05, |
|
"loss": 0.4838, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0385016443410604, |
|
"grad_norm": 8.214012839502532, |
|
"learning_rate": 1.923620025673941e-05, |
|
"loss": 0.4947, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.04171011470281543, |
|
"grad_norm": 17.30363857344939, |
|
"learning_rate": 1.9172015404364574e-05, |
|
"loss": 0.539, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.044918585064570464, |
|
"grad_norm": 5.2411969812270405, |
|
"learning_rate": 1.910783055198973e-05, |
|
"loss": 0.4976, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.0481270554263255, |
|
"grad_norm": 5.825912105537252, |
|
"learning_rate": 1.904364569961489e-05, |
|
"loss": 0.5374, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05133552578808053, |
|
"grad_norm": 7.038630553849513, |
|
"learning_rate": 1.897946084724005e-05, |
|
"loss": 0.4682, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.054543996149835565, |
|
"grad_norm": 5.686461374890298, |
|
"learning_rate": 1.8915275994865214e-05, |
|
"loss": 0.5205, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.0577524665115906, |
|
"grad_norm": 5.179564550909498, |
|
"learning_rate": 1.8851091142490375e-05, |
|
"loss": 0.491, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06096093687334563, |
|
"grad_norm": 8.463629137447528, |
|
"learning_rate": 1.8786906290115535e-05, |
|
"loss": 0.5228, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.06416940723510066, |
|
"grad_norm": 4.011565557460287, |
|
"learning_rate": 1.8722721437740695e-05, |
|
"loss": 0.479, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.06737787759685569, |
|
"grad_norm": 7.7055557079808805, |
|
"learning_rate": 1.8658536585365855e-05, |
|
"loss": 0.4687, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07058634795861073, |
|
"grad_norm": 6.724744753360089, |
|
"learning_rate": 1.8594351732991015e-05, |
|
"loss": 0.4818, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.07379481832036576, |
|
"grad_norm": 4.678677086919036, |
|
"learning_rate": 1.8530166880616175e-05, |
|
"loss": 0.5139, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.0770032886821208, |
|
"grad_norm": 9.311365908507138, |
|
"learning_rate": 1.8465982028241335e-05, |
|
"loss": 0.4747, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08021175904387583, |
|
"grad_norm": 4.443842779885137, |
|
"learning_rate": 1.8401797175866495e-05, |
|
"loss": 0.4891, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.08342022940563086, |
|
"grad_norm": 8.33835692877245, |
|
"learning_rate": 1.833761232349166e-05, |
|
"loss": 0.47, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.0866286997673859, |
|
"grad_norm": 6.389873022971522, |
|
"learning_rate": 1.827342747111682e-05, |
|
"loss": 0.4484, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.08983717012914093, |
|
"grad_norm": 7.274203538153994, |
|
"learning_rate": 1.820924261874198e-05, |
|
"loss": 0.4517, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.09304564049089596, |
|
"grad_norm": 3.978695084684557, |
|
"learning_rate": 1.814505776636714e-05, |
|
"loss": 0.4807, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.096254110852651, |
|
"grad_norm": 5.846527325287561, |
|
"learning_rate": 1.80808729139923e-05, |
|
"loss": 0.4572, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.09946258121440603, |
|
"grad_norm": 4.899874671814943, |
|
"learning_rate": 1.801668806161746e-05, |
|
"loss": 0.4337, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.10267105157616106, |
|
"grad_norm": 7.441397827942358, |
|
"learning_rate": 1.795250320924262e-05, |
|
"loss": 0.4794, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.1058795219379161, |
|
"grad_norm": 4.8167626402693, |
|
"learning_rate": 1.788831835686778e-05, |
|
"loss": 0.4547, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.10908799229967113, |
|
"grad_norm": 4.830906300715028, |
|
"learning_rate": 1.7824133504492943e-05, |
|
"loss": 0.4155, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.11229646266142616, |
|
"grad_norm": 2.783779241391255, |
|
"learning_rate": 1.7759948652118103e-05, |
|
"loss": 0.4254, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.1155049330231812, |
|
"grad_norm": 4.482561979814794, |
|
"learning_rate": 1.7695763799743263e-05, |
|
"loss": 0.5048, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.11871340338493623, |
|
"grad_norm": 4.736082727818665, |
|
"learning_rate": 1.763157894736842e-05, |
|
"loss": 0.4088, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.12192187374669126, |
|
"grad_norm": 3.540097005993698, |
|
"learning_rate": 1.7567394094993584e-05, |
|
"loss": 0.5214, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.1251303441084463, |
|
"grad_norm": 4.695488227787937, |
|
"learning_rate": 1.7503209242618744e-05, |
|
"loss": 0.4434, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.12833881447020132, |
|
"grad_norm": 3.7024498981669662, |
|
"learning_rate": 1.7439024390243904e-05, |
|
"loss": 0.5021, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.13154728483195635, |
|
"grad_norm": 2.707105021471608, |
|
"learning_rate": 1.7374839537869064e-05, |
|
"loss": 0.4844, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.13475575519371139, |
|
"grad_norm": 5.70948950157765, |
|
"learning_rate": 1.7310654685494224e-05, |
|
"loss": 0.4903, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.13796422555546642, |
|
"grad_norm": 3.696922556933147, |
|
"learning_rate": 1.7246469833119384e-05, |
|
"loss": 0.4441, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.14117269591722145, |
|
"grad_norm": 4.483329616797758, |
|
"learning_rate": 1.7182284980744544e-05, |
|
"loss": 0.5387, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.14438116627897649, |
|
"grad_norm": 3.394450035525867, |
|
"learning_rate": 1.7118100128369705e-05, |
|
"loss": 0.4662, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.14758963664073152, |
|
"grad_norm": 8.726490331816862, |
|
"learning_rate": 1.7053915275994865e-05, |
|
"loss": 0.4507, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.15079810700248655, |
|
"grad_norm": 4.353194363650416, |
|
"learning_rate": 1.6989730423620028e-05, |
|
"loss": 0.4526, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.1540065773642416, |
|
"grad_norm": 3.950021174504244, |
|
"learning_rate": 1.692554557124519e-05, |
|
"loss": 0.4821, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.15721504772599662, |
|
"grad_norm": 3.5715866632711832, |
|
"learning_rate": 1.686136071887035e-05, |
|
"loss": 0.4288, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.16042351808775165, |
|
"grad_norm": 3.8300844137879917, |
|
"learning_rate": 1.679717586649551e-05, |
|
"loss": 0.5043, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1636319884495067, |
|
"grad_norm": 3.6133285758576235, |
|
"learning_rate": 1.673299101412067e-05, |
|
"loss": 0.4682, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.16684045881126172, |
|
"grad_norm": 6.472583456901733, |
|
"learning_rate": 1.666880616174583e-05, |
|
"loss": 0.4002, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.17004892917301676, |
|
"grad_norm": 3.701841978979134, |
|
"learning_rate": 1.660462130937099e-05, |
|
"loss": 0.5022, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.1732573995347718, |
|
"grad_norm": 4.773360444080344, |
|
"learning_rate": 1.654043645699615e-05, |
|
"loss": 0.5066, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.17646586989652682, |
|
"grad_norm": 13.276543301213977, |
|
"learning_rate": 1.6476251604621313e-05, |
|
"loss": 0.4648, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.17967434025828186, |
|
"grad_norm": 8.185811344239216, |
|
"learning_rate": 1.6412066752246473e-05, |
|
"loss": 0.4597, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.1828828106200369, |
|
"grad_norm": 3.870551639158725, |
|
"learning_rate": 1.6347881899871633e-05, |
|
"loss": 0.4644, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.18609128098179192, |
|
"grad_norm": 2.9435165160063628, |
|
"learning_rate": 1.6283697047496793e-05, |
|
"loss": 0.4996, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.18929975134354696, |
|
"grad_norm": 5.242572609854114, |
|
"learning_rate": 1.6219512195121953e-05, |
|
"loss": 0.4465, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.192508221705302, |
|
"grad_norm": 3.296578668491081, |
|
"learning_rate": 1.6155327342747113e-05, |
|
"loss": 0.404, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.19571669206705702, |
|
"grad_norm": 5.090117348527818, |
|
"learning_rate": 1.6091142490372273e-05, |
|
"loss": 0.4277, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.19892516242881206, |
|
"grad_norm": 2.888476032144689, |
|
"learning_rate": 1.6026957637997433e-05, |
|
"loss": 0.4783, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.2021336327905671, |
|
"grad_norm": 4.135431165940949, |
|
"learning_rate": 1.5962772785622594e-05, |
|
"loss": 0.4236, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.20534210315232213, |
|
"grad_norm": 4.833487869831366, |
|
"learning_rate": 1.5898587933247757e-05, |
|
"loss": 0.504, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.20855057351407716, |
|
"grad_norm": 4.282352689606544, |
|
"learning_rate": 1.5834403080872917e-05, |
|
"loss": 0.4277, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.2117590438758322, |
|
"grad_norm": 6.760216642975651, |
|
"learning_rate": 1.5770218228498074e-05, |
|
"loss": 0.4916, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.21496751423758723, |
|
"grad_norm": 4.349450853536778, |
|
"learning_rate": 1.5706033376123234e-05, |
|
"loss": 0.4863, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.21817598459934226, |
|
"grad_norm": 8.907091580804641, |
|
"learning_rate": 1.5641848523748397e-05, |
|
"loss": 0.4454, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.2213844549610973, |
|
"grad_norm": 12.495791509691397, |
|
"learning_rate": 1.5577663671373558e-05, |
|
"loss": 0.4374, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.22459292532285233, |
|
"grad_norm": 4.162195469033971, |
|
"learning_rate": 1.5513478818998718e-05, |
|
"loss": 0.398, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.22780139568460736, |
|
"grad_norm": 4.9440877367494585, |
|
"learning_rate": 1.5449293966623878e-05, |
|
"loss": 0.4386, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.2310098660463624, |
|
"grad_norm": 3.3193418621579194, |
|
"learning_rate": 1.5385109114249038e-05, |
|
"loss": 0.3993, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.23421833640811743, |
|
"grad_norm": 3.388886050375809, |
|
"learning_rate": 1.5320924261874198e-05, |
|
"loss": 0.3915, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.23742680676987246, |
|
"grad_norm": 3.758493220998932, |
|
"learning_rate": 1.525673940949936e-05, |
|
"loss": 0.4452, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.2406352771316275, |
|
"grad_norm": 5.132628959411491, |
|
"learning_rate": 1.519255455712452e-05, |
|
"loss": 0.3703, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.24384374749338253, |
|
"grad_norm": 6.2691579411030025, |
|
"learning_rate": 1.5128369704749682e-05, |
|
"loss": 0.4158, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.24705221785513756, |
|
"grad_norm": 4.103316818662405, |
|
"learning_rate": 1.5064184852374842e-05, |
|
"loss": 0.4233, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.2502606882168926, |
|
"grad_norm": 6.0422539237827975, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.4278, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.2534691585786476, |
|
"grad_norm": 2.7653877942568954, |
|
"learning_rate": 1.493581514762516e-05, |
|
"loss": 0.4095, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.25667762894040264, |
|
"grad_norm": 4.114902146610447, |
|
"learning_rate": 1.487163029525032e-05, |
|
"loss": 0.4584, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.25988609930215767, |
|
"grad_norm": 5.28663521958676, |
|
"learning_rate": 1.4807445442875482e-05, |
|
"loss": 0.4368, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.2630945696639127, |
|
"grad_norm": 3.8311121578946303, |
|
"learning_rate": 1.4743260590500643e-05, |
|
"loss": 0.4107, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.26630304002566774, |
|
"grad_norm": 3.318670161966376, |
|
"learning_rate": 1.4679075738125803e-05, |
|
"loss": 0.4245, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.26951151038742277, |
|
"grad_norm": 4.755364543588735, |
|
"learning_rate": 1.4614890885750963e-05, |
|
"loss": 0.4787, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.2727199807491778, |
|
"grad_norm": 2.786479482223897, |
|
"learning_rate": 1.4550706033376125e-05, |
|
"loss": 0.4336, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.27592845111093284, |
|
"grad_norm": 4.070784873347869, |
|
"learning_rate": 1.4486521181001285e-05, |
|
"loss": 0.4123, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.27913692147268787, |
|
"grad_norm": 5.342176013467221, |
|
"learning_rate": 1.4422336328626445e-05, |
|
"loss": 0.4497, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.2823453918344429, |
|
"grad_norm": 2.881520241461501, |
|
"learning_rate": 1.4358151476251605e-05, |
|
"loss": 0.4872, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.28555386219619794, |
|
"grad_norm": 4.478805078448799, |
|
"learning_rate": 1.4293966623876767e-05, |
|
"loss": 0.3984, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.28876233255795297, |
|
"grad_norm": 6.101899594897867, |
|
"learning_rate": 1.4229781771501927e-05, |
|
"loss": 0.4628, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.291970802919708, |
|
"grad_norm": 3.0826453501277715, |
|
"learning_rate": 1.4165596919127087e-05, |
|
"loss": 0.4432, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.29517927328146304, |
|
"grad_norm": 2.4770273632891953, |
|
"learning_rate": 1.4101412066752247e-05, |
|
"loss": 0.506, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.2983877436432181, |
|
"grad_norm": 3.422572341295809, |
|
"learning_rate": 1.4037227214377409e-05, |
|
"loss": 0.3826, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3015962140049731, |
|
"grad_norm": 6.410925492330843, |
|
"learning_rate": 1.3973042362002569e-05, |
|
"loss": 0.4976, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.30480468436672814, |
|
"grad_norm": 3.702470810813249, |
|
"learning_rate": 1.390885750962773e-05, |
|
"loss": 0.3977, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3080131547284832, |
|
"grad_norm": 4.496582483074578, |
|
"learning_rate": 1.384467265725289e-05, |
|
"loss": 0.3427, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3112216250902382, |
|
"grad_norm": 5.771712327672755, |
|
"learning_rate": 1.378048780487805e-05, |
|
"loss": 0.4011, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.31443009545199324, |
|
"grad_norm": 6.116944263448949, |
|
"learning_rate": 1.3716302952503211e-05, |
|
"loss": 0.3832, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.3176385658137483, |
|
"grad_norm": 5.217590349147242, |
|
"learning_rate": 1.3652118100128371e-05, |
|
"loss": 0.4484, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.3208470361755033, |
|
"grad_norm": 6.326859919098317, |
|
"learning_rate": 1.3587933247753531e-05, |
|
"loss": 0.4238, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.32405550653725834, |
|
"grad_norm": 3.6100683655298518, |
|
"learning_rate": 1.3523748395378692e-05, |
|
"loss": 0.4521, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.3272639768990134, |
|
"grad_norm": 8.35582852568852, |
|
"learning_rate": 1.3459563543003853e-05, |
|
"loss": 0.4277, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.3304724472607684, |
|
"grad_norm": 6.127881803928465, |
|
"learning_rate": 1.3395378690629014e-05, |
|
"loss": 0.3845, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.33368091762252344, |
|
"grad_norm": 5.346412016792554, |
|
"learning_rate": 1.3331193838254172e-05, |
|
"loss": 0.3598, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.3368893879842785, |
|
"grad_norm": 5.518712653284187, |
|
"learning_rate": 1.3267008985879332e-05, |
|
"loss": 0.4684, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.3400978583460335, |
|
"grad_norm": 3.0171159430683647, |
|
"learning_rate": 1.3202824133504496e-05, |
|
"loss": 0.402, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.34330632870778854, |
|
"grad_norm": 3.4525267211589585, |
|
"learning_rate": 1.3138639281129654e-05, |
|
"loss": 0.4587, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.3465147990695436, |
|
"grad_norm": 4.923842642383016, |
|
"learning_rate": 1.3074454428754814e-05, |
|
"loss": 0.4231, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.3497232694312986, |
|
"grad_norm": 3.982906316656253, |
|
"learning_rate": 1.3010269576379974e-05, |
|
"loss": 0.419, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.35293173979305364, |
|
"grad_norm": 3.9801555228114998, |
|
"learning_rate": 1.2946084724005136e-05, |
|
"loss": 0.3668, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.3561402101548087, |
|
"grad_norm": 4.030778915107762, |
|
"learning_rate": 1.2881899871630296e-05, |
|
"loss": 0.3739, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.3593486805165637, |
|
"grad_norm": 6.386525169017585, |
|
"learning_rate": 1.2817715019255456e-05, |
|
"loss": 0.4972, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.36255715087831875, |
|
"grad_norm": 4.20862662435033, |
|
"learning_rate": 1.2753530166880616e-05, |
|
"loss": 0.4154, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.3657656212400738, |
|
"grad_norm": 4.858924338028923, |
|
"learning_rate": 1.2689345314505778e-05, |
|
"loss": 0.4478, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.3689740916018288, |
|
"grad_norm": 5.608430455111102, |
|
"learning_rate": 1.2625160462130938e-05, |
|
"loss": 0.4599, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.37218256196358385, |
|
"grad_norm": 5.911587578207529, |
|
"learning_rate": 1.2560975609756098e-05, |
|
"loss": 0.3869, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.3753910323253389, |
|
"grad_norm": 4.283524740830092, |
|
"learning_rate": 1.2496790757381259e-05, |
|
"loss": 0.3837, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.3785995026870939, |
|
"grad_norm": 6.821302089832709, |
|
"learning_rate": 1.2432605905006419e-05, |
|
"loss": 0.442, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.38180797304884895, |
|
"grad_norm": 5.085358904961751, |
|
"learning_rate": 1.236842105263158e-05, |
|
"loss": 0.3264, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.385016443410604, |
|
"grad_norm": 4.605248058627308, |
|
"learning_rate": 1.230423620025674e-05, |
|
"loss": 0.3895, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.388224913772359, |
|
"grad_norm": 3.477144602759826, |
|
"learning_rate": 1.22400513478819e-05, |
|
"loss": 0.4405, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.39143338413411405, |
|
"grad_norm": 4.706693999402434, |
|
"learning_rate": 1.2175866495507061e-05, |
|
"loss": 0.4102, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.3946418544958691, |
|
"grad_norm": 3.204192427462165, |
|
"learning_rate": 1.2111681643132223e-05, |
|
"loss": 0.4244, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.3978503248576241, |
|
"grad_norm": 5.415400158823864, |
|
"learning_rate": 1.2047496790757383e-05, |
|
"loss": 0.389, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.40105879521937915, |
|
"grad_norm": 4.1016299695920555, |
|
"learning_rate": 1.1983311938382543e-05, |
|
"loss": 0.4348, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.4042672655811342, |
|
"grad_norm": 5.0119293140447265, |
|
"learning_rate": 1.1919127086007703e-05, |
|
"loss": 0.4076, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.4074757359428892, |
|
"grad_norm": 3.1843597604004144, |
|
"learning_rate": 1.1854942233632865e-05, |
|
"loss": 0.4093, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.41068420630464425, |
|
"grad_norm": 3.243310369644707, |
|
"learning_rate": 1.1790757381258025e-05, |
|
"loss": 0.3996, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.4138926766663993, |
|
"grad_norm": 6.5805185922112654, |
|
"learning_rate": 1.1726572528883185e-05, |
|
"loss": 0.4443, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.4171011470281543, |
|
"grad_norm": 3.5901965588799833, |
|
"learning_rate": 1.1662387676508344e-05, |
|
"loss": 0.4222, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.42030961738990935, |
|
"grad_norm": 3.5975503587085513, |
|
"learning_rate": 1.1598202824133507e-05, |
|
"loss": 0.3803, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.4235180877516644, |
|
"grad_norm": 3.8370137218295612, |
|
"learning_rate": 1.1534017971758667e-05, |
|
"loss": 0.377, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.4267265581134194, |
|
"grad_norm": 6.055162453845242, |
|
"learning_rate": 1.1469833119383826e-05, |
|
"loss": 0.4371, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.42993502847517445, |
|
"grad_norm": 4.303475483832273, |
|
"learning_rate": 1.1405648267008986e-05, |
|
"loss": 0.3991, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.4331434988369295, |
|
"grad_norm": 3.481300920221893, |
|
"learning_rate": 1.1341463414634146e-05, |
|
"loss": 0.408, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.4363519691986845, |
|
"grad_norm": 2.70794769803311, |
|
"learning_rate": 1.1277278562259308e-05, |
|
"loss": 0.4105, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.43956043956043955, |
|
"grad_norm": 4.540637646839311, |
|
"learning_rate": 1.1213093709884468e-05, |
|
"loss": 0.3762, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.4427689099221946, |
|
"grad_norm": 3.240923093729635, |
|
"learning_rate": 1.1148908857509628e-05, |
|
"loss": 0.3198, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.4459773802839496, |
|
"grad_norm": 4.079516203900489, |
|
"learning_rate": 1.1084724005134788e-05, |
|
"loss": 0.4255, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.44918585064570465, |
|
"grad_norm": 4.424913833083467, |
|
"learning_rate": 1.102053915275995e-05, |
|
"loss": 0.422, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.4523943210074597, |
|
"grad_norm": 4.215968513158678, |
|
"learning_rate": 1.095635430038511e-05, |
|
"loss": 0.3717, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.4556027913692147, |
|
"grad_norm": 7.080680051130726, |
|
"learning_rate": 1.089216944801027e-05, |
|
"loss": 0.4167, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.45881126173096975, |
|
"grad_norm": 54.87405885391659, |
|
"learning_rate": 1.082798459563543e-05, |
|
"loss": 0.3701, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.4620197320927248, |
|
"grad_norm": 4.745370324575341, |
|
"learning_rate": 1.0763799743260592e-05, |
|
"loss": 0.4192, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.4652282024544798, |
|
"grad_norm": 4.072739876459792, |
|
"learning_rate": 1.0699614890885752e-05, |
|
"loss": 0.3648, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.46843667281623486, |
|
"grad_norm": 5.361505931287111, |
|
"learning_rate": 1.0635430038510912e-05, |
|
"loss": 0.3908, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.4716451431779899, |
|
"grad_norm": 6.609039672159613, |
|
"learning_rate": 1.0571245186136072e-05, |
|
"loss": 0.3927, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.4748536135397449, |
|
"grad_norm": 5.257109634802722, |
|
"learning_rate": 1.0507060333761234e-05, |
|
"loss": 0.3901, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.47806208390149996, |
|
"grad_norm": 2.959492453957149, |
|
"learning_rate": 1.0442875481386394e-05, |
|
"loss": 0.3616, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.481270554263255, |
|
"grad_norm": 4.53017762648938, |
|
"learning_rate": 1.0378690629011554e-05, |
|
"loss": 0.4099, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.48447902462501, |
|
"grad_norm": 5.632280418736274, |
|
"learning_rate": 1.0314505776636715e-05, |
|
"loss": 0.4176, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.48768749498676506, |
|
"grad_norm": 3.4384598745003627, |
|
"learning_rate": 1.0250320924261875e-05, |
|
"loss": 0.3586, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.4908959653485201, |
|
"grad_norm": 3.024437029724823, |
|
"learning_rate": 1.0186136071887036e-05, |
|
"loss": 0.4255, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.4941044357102751, |
|
"grad_norm": 2.378291269172342, |
|
"learning_rate": 1.0121951219512197e-05, |
|
"loss": 0.3958, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.49731290607203016, |
|
"grad_norm": 4.62474422398616, |
|
"learning_rate": 1.0057766367137357e-05, |
|
"loss": 0.4006, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.5005213764337852, |
|
"grad_norm": 3.6745219934625144, |
|
"learning_rate": 9.993581514762517e-06, |
|
"loss": 0.3682, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.5037298467955402, |
|
"grad_norm": 4.448848307155705, |
|
"learning_rate": 9.929396662387677e-06, |
|
"loss": 0.4272, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.5069383171572952, |
|
"grad_norm": 3.8894278011874235, |
|
"learning_rate": 9.865211810012839e-06, |
|
"loss": 0.3569, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.5101467875190503, |
|
"grad_norm": 4.774303748597002, |
|
"learning_rate": 9.801026957637997e-06, |
|
"loss": 0.3933, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.5133552578808053, |
|
"grad_norm": 5.045270496853991, |
|
"learning_rate": 9.736842105263159e-06, |
|
"loss": 0.3719, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.5165637282425604, |
|
"grad_norm": 4.740584901110403, |
|
"learning_rate": 9.672657252888319e-06, |
|
"loss": 0.4254, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.5197721986043153, |
|
"grad_norm": 5.884473992854898, |
|
"learning_rate": 9.60847240051348e-06, |
|
"loss": 0.4251, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.5229806689660704, |
|
"grad_norm": 3.4560812191487233, |
|
"learning_rate": 9.54428754813864e-06, |
|
"loss": 0.3884, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.5261891393278254, |
|
"grad_norm": 4.436488938480811, |
|
"learning_rate": 9.480102695763801e-06, |
|
"loss": 0.3791, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.5293976096895805, |
|
"grad_norm": 5.122741798151655, |
|
"learning_rate": 9.415917843388961e-06, |
|
"loss": 0.4109, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.5326060800513355, |
|
"grad_norm": 2.8268137365317694, |
|
"learning_rate": 9.351732991014121e-06, |
|
"loss": 0.4002, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.5358145504130906, |
|
"grad_norm": 4.961028503814302, |
|
"learning_rate": 9.287548138639282e-06, |
|
"loss": 0.4117, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.5390230207748455, |
|
"grad_norm": 5.913940405056516, |
|
"learning_rate": 9.223363286264443e-06, |
|
"loss": 0.3837, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.5422314911366006, |
|
"grad_norm": 4.981120773191797, |
|
"learning_rate": 9.159178433889603e-06, |
|
"loss": 0.4258, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.5454399614983556, |
|
"grad_norm": 5.444655853775516, |
|
"learning_rate": 9.094993581514764e-06, |
|
"loss": 0.4301, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.5486484318601107, |
|
"grad_norm": 3.848013181214462, |
|
"learning_rate": 9.030808729139924e-06, |
|
"loss": 0.4004, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.5518569022218657, |
|
"grad_norm": 2.7256377363961692, |
|
"learning_rate": 8.966623876765084e-06, |
|
"loss": 0.4199, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.5550653725836208, |
|
"grad_norm": 3.9591345977979278, |
|
"learning_rate": 8.902439024390244e-06, |
|
"loss": 0.3923, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.5582738429453757, |
|
"grad_norm": 3.694097683677857, |
|
"learning_rate": 8.838254172015404e-06, |
|
"loss": 0.4245, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.5614823133071308, |
|
"grad_norm": 4.752754487463968, |
|
"learning_rate": 8.774069319640566e-06, |
|
"loss": 0.3623, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.5646907836688858, |
|
"grad_norm": 4.933398001346964, |
|
"learning_rate": 8.709884467265726e-06, |
|
"loss": 0.3529, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.5678992540306409, |
|
"grad_norm": 4.10267822290099, |
|
"learning_rate": 8.645699614890886e-06, |
|
"loss": 0.3903, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.5711077243923959, |
|
"grad_norm": 5.948577818513, |
|
"learning_rate": 8.581514762516046e-06, |
|
"loss": 0.3923, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.574316194754151, |
|
"grad_norm": 4.32270415025434, |
|
"learning_rate": 8.517329910141208e-06, |
|
"loss": 0.4162, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.5775246651159059, |
|
"grad_norm": 4.186201731866516, |
|
"learning_rate": 8.453145057766368e-06, |
|
"loss": 0.4006, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.580733135477661, |
|
"grad_norm": 4.818472247869851, |
|
"learning_rate": 8.388960205391528e-06, |
|
"loss": 0.3841, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.583941605839416, |
|
"grad_norm": 3.722377675945526, |
|
"learning_rate": 8.324775353016688e-06, |
|
"loss": 0.4015, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.5871500762011711, |
|
"grad_norm": 3.581092339067193, |
|
"learning_rate": 8.26059050064185e-06, |
|
"loss": 0.4194, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.5903585465629261, |
|
"grad_norm": 3.8711833078388134, |
|
"learning_rate": 8.19640564826701e-06, |
|
"loss": 0.5039, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.5935670169246812, |
|
"grad_norm": 5.312406861505809, |
|
"learning_rate": 8.13222079589217e-06, |
|
"loss": 0.3954, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.5967754872864361, |
|
"grad_norm": 5.128909473296003, |
|
"learning_rate": 8.06803594351733e-06, |
|
"loss": 0.3476, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.5999839576481912, |
|
"grad_norm": 6.225968269360909, |
|
"learning_rate": 8.00385109114249e-06, |
|
"loss": 0.3918, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.6031924280099462, |
|
"grad_norm": 4.660989653248528, |
|
"learning_rate": 7.93966623876765e-06, |
|
"loss": 0.4013, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.6064008983717013, |
|
"grad_norm": 4.299519878675749, |
|
"learning_rate": 7.875481386392811e-06, |
|
"loss": 0.3775, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.6096093687334563, |
|
"grad_norm": 4.680629637314835, |
|
"learning_rate": 7.811296534017973e-06, |
|
"loss": 0.3612, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.6128178390952114, |
|
"grad_norm": 4.152954271805758, |
|
"learning_rate": 7.747111681643133e-06, |
|
"loss": 0.3806, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.6160263094569663, |
|
"grad_norm": 4.6801179801552415, |
|
"learning_rate": 7.682926829268293e-06, |
|
"loss": 0.3506, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.6192347798187214, |
|
"grad_norm": 4.251504275426107, |
|
"learning_rate": 7.618741976893453e-06, |
|
"loss": 0.3589, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.6224432501804764, |
|
"grad_norm": 6.009730313403801, |
|
"learning_rate": 7.554557124518614e-06, |
|
"loss": 0.3968, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.6256517205422315, |
|
"grad_norm": 3.2723481829787464, |
|
"learning_rate": 7.490372272143774e-06, |
|
"loss": 0.3936, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.6288601909039865, |
|
"grad_norm": 4.097990240581078, |
|
"learning_rate": 7.426187419768935e-06, |
|
"loss": 0.4144, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.6320686612657416, |
|
"grad_norm": 2.7191742984555995, |
|
"learning_rate": 7.362002567394095e-06, |
|
"loss": 0.409, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.6352771316274965, |
|
"grad_norm": 4.40050340710905, |
|
"learning_rate": 7.297817715019256e-06, |
|
"loss": 0.3526, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.6384856019892516, |
|
"grad_norm": 3.5711401894801194, |
|
"learning_rate": 7.233632862644416e-06, |
|
"loss": 0.3865, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.6416940723510066, |
|
"grad_norm": 5.155189378251862, |
|
"learning_rate": 7.169448010269577e-06, |
|
"loss": 0.4037, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.6449025427127617, |
|
"grad_norm": 5.764606055822457, |
|
"learning_rate": 7.1052631578947375e-06, |
|
"loss": 0.3806, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.6481110130745167, |
|
"grad_norm": 3.3930437034797585, |
|
"learning_rate": 7.0410783055198984e-06, |
|
"loss": 0.3951, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.6513194834362718, |
|
"grad_norm": 4.922727726376936, |
|
"learning_rate": 6.9768934531450586e-06, |
|
"loss": 0.3847, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.6545279537980268, |
|
"grad_norm": 2.7890174522963367, |
|
"learning_rate": 6.9127086007702195e-06, |
|
"loss": 0.3479, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.6577364241597818, |
|
"grad_norm": 4.412823863289807, |
|
"learning_rate": 6.84852374839538e-06, |
|
"loss": 0.4173, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.6609448945215368, |
|
"grad_norm": 5.417820507568683, |
|
"learning_rate": 6.784338896020539e-06, |
|
"loss": 0.381, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.6641533648832919, |
|
"grad_norm": 3.8506255680933528, |
|
"learning_rate": 6.7201540436457e-06, |
|
"loss": 0.3693, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.6673618352450469, |
|
"grad_norm": 3.5416828656888786, |
|
"learning_rate": 6.65596919127086e-06, |
|
"loss": 0.358, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.670570305606802, |
|
"grad_norm": 3.5323170162835775, |
|
"learning_rate": 6.591784338896021e-06, |
|
"loss": 0.3237, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.673778775968557, |
|
"grad_norm": 6.128726604158042, |
|
"learning_rate": 6.527599486521181e-06, |
|
"loss": 0.3859, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.676987246330312, |
|
"grad_norm": 4.891887980181221, |
|
"learning_rate": 6.463414634146342e-06, |
|
"loss": 0.3223, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.680195716692067, |
|
"grad_norm": 3.565557213786112, |
|
"learning_rate": 6.399229781771502e-06, |
|
"loss": 0.363, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.6834041870538221, |
|
"grad_norm": 3.4294258572573337, |
|
"learning_rate": 6.335044929396663e-06, |
|
"loss": 0.3781, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.6866126574155771, |
|
"grad_norm": 4.518476500237087, |
|
"learning_rate": 6.270860077021823e-06, |
|
"loss": 0.4325, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.6898211277773322, |
|
"grad_norm": 4.743599740115472, |
|
"learning_rate": 6.206675224646984e-06, |
|
"loss": 0.411, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.6930295981390872, |
|
"grad_norm": 2.5630593956213015, |
|
"learning_rate": 6.142490372272144e-06, |
|
"loss": 0.3717, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.6962380685008422, |
|
"grad_norm": 6.2586555195558, |
|
"learning_rate": 6.078305519897305e-06, |
|
"loss": 0.3561, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.6994465388625972, |
|
"grad_norm": 4.894715427924892, |
|
"learning_rate": 6.0141206675224654e-06, |
|
"loss": 0.4164, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.7026550092243523, |
|
"grad_norm": 3.5252148379990573, |
|
"learning_rate": 5.949935815147626e-06, |
|
"loss": 0.3209, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.7058634795861073, |
|
"grad_norm": 3.256470353538883, |
|
"learning_rate": 5.885750962772786e-06, |
|
"loss": 0.3893, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.7090719499478624, |
|
"grad_norm": 3.4671867719647573, |
|
"learning_rate": 5.821566110397947e-06, |
|
"loss": 0.3836, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.7122804203096174, |
|
"grad_norm": 3.6300469654014806, |
|
"learning_rate": 5.757381258023107e-06, |
|
"loss": 0.3604, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.7154888906713724, |
|
"grad_norm": 4.8222353267885545, |
|
"learning_rate": 5.693196405648267e-06, |
|
"loss": 0.3402, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.7186973610331274, |
|
"grad_norm": 3.088736996240411, |
|
"learning_rate": 5.629011553273428e-06, |
|
"loss": 0.3571, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.7219058313948825, |
|
"grad_norm": 6.468377008869226, |
|
"learning_rate": 5.564826700898588e-06, |
|
"loss": 0.3857, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.7251143017566375, |
|
"grad_norm": 4.976006702523603, |
|
"learning_rate": 5.500641848523749e-06, |
|
"loss": 0.328, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.7283227721183926, |
|
"grad_norm": 7.44768864343581, |
|
"learning_rate": 5.436456996148909e-06, |
|
"loss": 0.3415, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.7315312424801476, |
|
"grad_norm": 6.342940977120239, |
|
"learning_rate": 5.37227214377407e-06, |
|
"loss": 0.3452, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.7347397128419026, |
|
"grad_norm": 4.281955030745344, |
|
"learning_rate": 5.30808729139923e-06, |
|
"loss": 0.4647, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.7379481832036576, |
|
"grad_norm": 3.678790025410135, |
|
"learning_rate": 5.243902439024391e-06, |
|
"loss": 0.3742, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.7411566535654127, |
|
"grad_norm": 3.3547281464794096, |
|
"learning_rate": 5.179717586649551e-06, |
|
"loss": 0.3835, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.7443651239271677, |
|
"grad_norm": 5.13222360440861, |
|
"learning_rate": 5.115532734274712e-06, |
|
"loss": 0.3859, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.7475735942889228, |
|
"grad_norm": 3.972710625172556, |
|
"learning_rate": 5.0513478818998715e-06, |
|
"loss": 0.3537, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.7507820646506778, |
|
"grad_norm": 3.2744384458301687, |
|
"learning_rate": 4.9871630295250324e-06, |
|
"loss": 0.3793, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.7539905350124328, |
|
"grad_norm": 3.8532031308587396, |
|
"learning_rate": 4.9229781771501926e-06, |
|
"loss": 0.3245, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.7571990053741878, |
|
"grad_norm": 6.097220741982411, |
|
"learning_rate": 4.8587933247753535e-06, |
|
"loss": 0.4381, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.7604074757359429, |
|
"grad_norm": 3.6161720053467743, |
|
"learning_rate": 4.794608472400514e-06, |
|
"loss": 0.3904, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.7636159460976979, |
|
"grad_norm": 3.2126570713513196, |
|
"learning_rate": 4.730423620025675e-06, |
|
"loss": 0.3698, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.766824416459453, |
|
"grad_norm": 4.440937942880446, |
|
"learning_rate": 4.666238767650835e-06, |
|
"loss": 0.3672, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.770032886821208, |
|
"grad_norm": 4.034010642616632, |
|
"learning_rate": 4.602053915275995e-06, |
|
"loss": 0.3453, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.773241357182963, |
|
"grad_norm": 4.320673481292839, |
|
"learning_rate": 4.537869062901156e-06, |
|
"loss": 0.3874, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.776449827544718, |
|
"grad_norm": 5.721110452311635, |
|
"learning_rate": 4.473684210526316e-06, |
|
"loss": 0.3361, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.7796582979064731, |
|
"grad_norm": 5.553593669289626, |
|
"learning_rate": 4.409499358151477e-06, |
|
"loss": 0.3761, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.7828667682682281, |
|
"grad_norm": 3.5931839661915506, |
|
"learning_rate": 4.345314505776637e-06, |
|
"loss": 0.3303, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.7860752386299832, |
|
"grad_norm": 3.191917593220325, |
|
"learning_rate": 4.281129653401798e-06, |
|
"loss": 0.4581, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.7892837089917382, |
|
"grad_norm": 2.071662511958508, |
|
"learning_rate": 4.216944801026958e-06, |
|
"loss": 0.3602, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.7924921793534933, |
|
"grad_norm": 4.1098396579418015, |
|
"learning_rate": 4.152759948652118e-06, |
|
"loss": 0.3658, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.7957006497152482, |
|
"grad_norm": 3.342343894793557, |
|
"learning_rate": 4.088575096277279e-06, |
|
"loss": 0.3533, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.7989091200770033, |
|
"grad_norm": 3.501852515883717, |
|
"learning_rate": 4.024390243902439e-06, |
|
"loss": 0.3086, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.8021175904387583, |
|
"grad_norm": 5.6740246031343275, |
|
"learning_rate": 3.9602053915276e-06, |
|
"loss": 0.3619, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.8053260608005134, |
|
"grad_norm": 3.6625993713157867, |
|
"learning_rate": 3.89602053915276e-06, |
|
"loss": 0.3536, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.8085345311622684, |
|
"grad_norm": 4.12817611506417, |
|
"learning_rate": 3.8318356867779205e-06, |
|
"loss": 0.3552, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.8117430015240235, |
|
"grad_norm": 6.752665333536269, |
|
"learning_rate": 3.767650834403081e-06, |
|
"loss": 0.3906, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.8149514718857784, |
|
"grad_norm": 4.0698833515045765, |
|
"learning_rate": 3.7034659820282416e-06, |
|
"loss": 0.369, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.8181599422475335, |
|
"grad_norm": 3.8061893249692167, |
|
"learning_rate": 3.639281129653402e-06, |
|
"loss": 0.391, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.8213684126092885, |
|
"grad_norm": 4.2054055524032075, |
|
"learning_rate": 3.5750962772785623e-06, |
|
"loss": 0.3781, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.8245768829710436, |
|
"grad_norm": 3.1509974167473294, |
|
"learning_rate": 3.510911424903723e-06, |
|
"loss": 0.4246, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.8277853533327986, |
|
"grad_norm": 3.344075353049933, |
|
"learning_rate": 3.4467265725288834e-06, |
|
"loss": 0.3839, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.8309938236945537, |
|
"grad_norm": 4.3862748314587625, |
|
"learning_rate": 3.382541720154044e-06, |
|
"loss": 0.3243, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.8342022940563086, |
|
"grad_norm": 4.430571971071977, |
|
"learning_rate": 3.3183568677792044e-06, |
|
"loss": 0.4219, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.8374107644180637, |
|
"grad_norm": 4.216572691360644, |
|
"learning_rate": 3.254172015404365e-06, |
|
"loss": 0.3453, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.8406192347798187, |
|
"grad_norm": 5.730663867597084, |
|
"learning_rate": 3.1899871630295255e-06, |
|
"loss": 0.3652, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.8438277051415738, |
|
"grad_norm": 3.152373466591917, |
|
"learning_rate": 3.125802310654686e-06, |
|
"loss": 0.3761, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.8470361755033288, |
|
"grad_norm": 4.049903812572876, |
|
"learning_rate": 3.061617458279846e-06, |
|
"loss": 0.4274, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.8502446458650839, |
|
"grad_norm": 3.7109620908295002, |
|
"learning_rate": 2.9974326059050067e-06, |
|
"loss": 0.3575, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.8534531162268388, |
|
"grad_norm": 6.151962412156622, |
|
"learning_rate": 2.9332477535301673e-06, |
|
"loss": 0.3935, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.8566615865885939, |
|
"grad_norm": 2.8483008748019474, |
|
"learning_rate": 2.869062901155328e-06, |
|
"loss": 0.3327, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.8598700569503489, |
|
"grad_norm": 3.884150141973327, |
|
"learning_rate": 2.8048780487804884e-06, |
|
"loss": 0.3222, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.863078527312104, |
|
"grad_norm": 5.091883011304281, |
|
"learning_rate": 2.740693196405648e-06, |
|
"loss": 0.4241, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.866286997673859, |
|
"grad_norm": 6.256322006655992, |
|
"learning_rate": 2.6765083440308086e-06, |
|
"loss": 0.3896, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.8694954680356141, |
|
"grad_norm": 3.185423190731702, |
|
"learning_rate": 2.612323491655969e-06, |
|
"loss": 0.3503, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.872703938397369, |
|
"grad_norm": 2.501677043437045, |
|
"learning_rate": 2.5481386392811297e-06, |
|
"loss": 0.308, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.8759124087591241, |
|
"grad_norm": 3.994307293016134, |
|
"learning_rate": 2.4839537869062902e-06, |
|
"loss": 0.3146, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.8791208791208791, |
|
"grad_norm": 6.3218538833338185, |
|
"learning_rate": 2.4197689345314508e-06, |
|
"loss": 0.3861, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.8823293494826342, |
|
"grad_norm": 4.2929895131508875, |
|
"learning_rate": 2.3555840821566113e-06, |
|
"loss": 0.3514, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.8855378198443892, |
|
"grad_norm": 5.013441973517175, |
|
"learning_rate": 2.2913992297817714e-06, |
|
"loss": 0.3394, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.8887462902061443, |
|
"grad_norm": 4.942588457664286, |
|
"learning_rate": 2.227214377406932e-06, |
|
"loss": 0.3916, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.8919547605678992, |
|
"grad_norm": 4.093044885114057, |
|
"learning_rate": 2.1630295250320925e-06, |
|
"loss": 0.3316, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.8951632309296543, |
|
"grad_norm": 3.6241454294578044, |
|
"learning_rate": 2.098844672657253e-06, |
|
"loss": 0.328, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.8983717012914093, |
|
"grad_norm": 3.7585099242216105, |
|
"learning_rate": 2.0346598202824136e-06, |
|
"loss": 0.313, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.9015801716531644, |
|
"grad_norm": 3.092191111039683, |
|
"learning_rate": 1.970474967907574e-06, |
|
"loss": 0.3207, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.9047886420149194, |
|
"grad_norm": 7.126196821569378, |
|
"learning_rate": 1.9062901155327343e-06, |
|
"loss": 0.339, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.9079971123766745, |
|
"grad_norm": 3.98517835436473, |
|
"learning_rate": 1.8421052631578948e-06, |
|
"loss": 0.3119, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.9112055827384294, |
|
"grad_norm": 5.155996828725616, |
|
"learning_rate": 1.7779204107830554e-06, |
|
"loss": 0.3524, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.9144140531001845, |
|
"grad_norm": 6.2418873185093116, |
|
"learning_rate": 1.7137355584082157e-06, |
|
"loss": 0.3697, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.9176225234619395, |
|
"grad_norm": 4.93642870905445, |
|
"learning_rate": 1.6495507060333762e-06, |
|
"loss": 0.3567, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.9208309938236946, |
|
"grad_norm": 3.4661449590899815, |
|
"learning_rate": 1.5853658536585368e-06, |
|
"loss": 0.3049, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.9240394641854496, |
|
"grad_norm": 3.9858026943112046, |
|
"learning_rate": 1.5211810012836973e-06, |
|
"loss": 0.342, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.9272479345472047, |
|
"grad_norm": 5.747224506777868, |
|
"learning_rate": 1.4569961489088577e-06, |
|
"loss": 0.3786, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.9304564049089596, |
|
"grad_norm": 4.2756510193950135, |
|
"learning_rate": 1.3928112965340182e-06, |
|
"loss": 0.345, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.9336648752707147, |
|
"grad_norm": 5.737301674162469, |
|
"learning_rate": 1.3286264441591783e-06, |
|
"loss": 0.3048, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.9368733456324697, |
|
"grad_norm": 4.5368921290576125, |
|
"learning_rate": 1.2644415917843389e-06, |
|
"loss": 0.3373, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.9400818159942248, |
|
"grad_norm": 6.281727114562743, |
|
"learning_rate": 1.2002567394094996e-06, |
|
"loss": 0.3713, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.9432902863559798, |
|
"grad_norm": 4.613863643534097, |
|
"learning_rate": 1.13607188703466e-06, |
|
"loss": 0.3115, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.9464987567177349, |
|
"grad_norm": 4.4955128133503415, |
|
"learning_rate": 1.0718870346598203e-06, |
|
"loss": 0.3544, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.9497072270794898, |
|
"grad_norm": 6.144202111803078, |
|
"learning_rate": 1.0077021822849808e-06, |
|
"loss": 0.3419, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.9529156974412449, |
|
"grad_norm": 3.416734695378003, |
|
"learning_rate": 9.435173299101414e-07, |
|
"loss": 0.3866, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.9561241678029999, |
|
"grad_norm": 2.9130051363163507, |
|
"learning_rate": 8.793324775353017e-07, |
|
"loss": 0.3294, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.959332638164755, |
|
"grad_norm": 4.1544972260670345, |
|
"learning_rate": 8.151476251604621e-07, |
|
"loss": 0.337, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.96254110852651, |
|
"grad_norm": 5.333751729311107, |
|
"learning_rate": 7.509627727856227e-07, |
|
"loss": 0.3707, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.9657495788882651, |
|
"grad_norm": 4.224378334244017, |
|
"learning_rate": 6.867779204107831e-07, |
|
"loss": 0.3266, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.96895804925002, |
|
"grad_norm": 4.68085662929714, |
|
"learning_rate": 6.225930680359436e-07, |
|
"loss": 0.3228, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.9721665196117751, |
|
"grad_norm": 2.44350959307893, |
|
"learning_rate": 5.584082156611041e-07, |
|
"loss": 0.3038, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.9753749899735301, |
|
"grad_norm": 5.5263577593599225, |
|
"learning_rate": 4.942233632862644e-07, |
|
"loss": 0.3073, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.9785834603352852, |
|
"grad_norm": 5.517219216862406, |
|
"learning_rate": 4.30038510911425e-07, |
|
"loss": 0.4016, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.9817919306970402, |
|
"grad_norm": 3.559703237499543, |
|
"learning_rate": 3.6585365853658536e-07, |
|
"loss": 0.4128, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.9850004010587953, |
|
"grad_norm": 3.2825888661809057, |
|
"learning_rate": 3.0166880616174585e-07, |
|
"loss": 0.2873, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.9882088714205502, |
|
"grad_norm": 3.755613548892917, |
|
"learning_rate": 2.374839537869063e-07, |
|
"loss": 0.3854, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.9914173417823053, |
|
"grad_norm": 4.172412303358024, |
|
"learning_rate": 1.7329910141206678e-07, |
|
"loss": 0.328, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.9946258121440603, |
|
"grad_norm": 6.547343932529505, |
|
"learning_rate": 1.0911424903722721e-07, |
|
"loss": 0.3947, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.9978342825058154, |
|
"grad_norm": 4.624930868093504, |
|
"learning_rate": 4.4929396662387685e-08, |
|
"loss": 0.3312, |
|
"step": 3110 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3116, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |