{
  "best_global_step": 66218,
  "best_metric": 0.9523090398381104,
  "best_model_checkpoint": "ai-vs-hum\\checkpoint-66218",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 66218,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007550816998399227,
      "grad_norm": 65.657958984375,
      "learning_rate": 7.550816998399228e-08,
      "loss": 0.7738,
      "step": 500
    },
    {
      "epoch": 0.015101633996798454,
      "grad_norm": 56.08807373046875,
      "learning_rate": 1.5101633996798456e-07,
      "loss": 0.715,
      "step": 1000
    },
    {
      "epoch": 0.02265245099519768,
      "grad_norm": 80.93883514404297,
      "learning_rate": 2.265245099519768e-07,
      "loss": 0.6352,
      "step": 1500
    },
    {
      "epoch": 0.03020326799359691,
      "grad_norm": 99.87760925292969,
      "learning_rate": 3.020326799359691e-07,
      "loss": 0.5265,
      "step": 2000
    },
    {
      "epoch": 0.03775408499199613,
      "grad_norm": 61.885841369628906,
      "learning_rate": 3.7754084991996136e-07,
      "loss": 0.4273,
      "step": 2500
    },
    {
      "epoch": 0.04530490199039536,
      "grad_norm": 704.5555419921875,
      "learning_rate": 4.530490199039536e-07,
      "loss": 0.4747,
      "step": 3000
    },
    {
      "epoch": 0.05285571898879459,
      "grad_norm": 0.1631409078836441,
      "learning_rate": 5.285571898879459e-07,
      "loss": 0.5541,
      "step": 3500
    },
    {
      "epoch": 0.06040653598719382,
      "grad_norm": 0.1648247241973877,
      "learning_rate": 6.040653598719382e-07,
      "loss": 0.7388,
      "step": 4000
    },
    {
      "epoch": 0.06795735298559304,
      "grad_norm": 2444.441650390625,
      "learning_rate": 6.795735298559304e-07,
      "loss": 0.6825,
      "step": 4500
    },
    {
      "epoch": 0.07550816998399226,
      "grad_norm": 0.37286192178726196,
      "learning_rate": 7.550816998399227e-07,
      "loss": 0.6872,
      "step": 5000
    },
    {
      "epoch": 0.08305898698239149,
      "grad_norm": 0.28558024764060974,
      "learning_rate": 8.305898698239149e-07,
      "loss": 0.7039,
      "step": 5500
    },
    {
      "epoch": 0.09060980398079072,
      "grad_norm": 182.67251586914062,
      "learning_rate": 9.060980398079072e-07,
      "loss": 0.6045,
      "step": 6000
    },
    {
      "epoch": 0.09816062097918994,
      "grad_norm": 0.0031864135526120663,
      "learning_rate": 9.816062097918996e-07,
      "loss": 0.5625,
      "step": 6500
    },
    {
      "epoch": 0.10571143797758918,
      "grad_norm": 0.03901828080415726,
      "learning_rate": 1.0571143797758919e-06,
      "loss": 0.5507,
      "step": 7000
    },
    {
      "epoch": 0.11326225497598841,
      "grad_norm": 0.004638838116079569,
      "learning_rate": 1.1326225497598842e-06,
      "loss": 0.5191,
      "step": 7500
    },
    {
      "epoch": 0.12081307197438763,
      "grad_norm": 0.14078642427921295,
      "learning_rate": 1.2081307197438765e-06,
      "loss": 0.5333,
      "step": 8000
    },
    {
      "epoch": 0.12836388897278686,
      "grad_norm": 0.07704892754554749,
      "learning_rate": 1.2836388897278688e-06,
      "loss": 0.5258,
      "step": 8500
    },
    {
      "epoch": 0.13591470597118607,
      "grad_norm": 0.03960805386304855,
      "learning_rate": 1.3591470597118609e-06,
      "loss": 0.4775,
      "step": 9000
    },
    {
      "epoch": 0.1434655229695853,
      "grad_norm": 0.8385183215141296,
      "learning_rate": 1.4346552296958531e-06,
      "loss": 0.4945,
      "step": 9500
    },
    {
      "epoch": 0.15101633996798453,
      "grad_norm": 574.739990234375,
      "learning_rate": 1.5101633996798454e-06,
      "loss": 0.5178,
      "step": 10000
    },
    {
      "epoch": 0.15856715696638377,
      "grad_norm": 0.06548255681991577,
      "learning_rate": 1.5856715696638377e-06,
      "loss": 0.4735,
      "step": 10500
    },
    {
      "epoch": 0.16611797396478298,
      "grad_norm": 1357.1788330078125,
      "learning_rate": 1.6611797396478298e-06,
      "loss": 0.5533,
      "step": 11000
    },
    {
      "epoch": 0.17366879096318222,
      "grad_norm": 0.12339621037244797,
      "learning_rate": 1.7366879096318223e-06,
      "loss": 0.4891,
      "step": 11500
    },
    {
      "epoch": 0.18121960796158143,
      "grad_norm": 0.3505186438560486,
      "learning_rate": 1.8121960796158144e-06,
      "loss": 0.4935,
      "step": 12000
    },
    {
      "epoch": 0.18877042495998067,
      "grad_norm": 1131.322509765625,
      "learning_rate": 1.887704249599807e-06,
      "loss": 0.4447,
      "step": 12500
    },
    {
      "epoch": 0.19632124195837988,
      "grad_norm": 0.030383553355932236,
      "learning_rate": 1.963212419583799e-06,
      "loss": 0.5052,
      "step": 13000
    },
    {
      "epoch": 0.20387205895677912,
      "grad_norm": 0.06966419517993927,
      "learning_rate": 2.0387205895677913e-06,
      "loss": 0.4477,
      "step": 13500
    },
    {
      "epoch": 0.21142287595517836,
      "grad_norm": 0.14627192914485931,
      "learning_rate": 2.1142287595517838e-06,
      "loss": 0.4535,
      "step": 14000
    },
    {
      "epoch": 0.21897369295357758,
      "grad_norm": 0.013991514220833778,
      "learning_rate": 2.189736929535776e-06,
      "loss": 0.414,
      "step": 14500
    },
    {
      "epoch": 0.22652450995197682,
      "grad_norm": 278.3962707519531,
      "learning_rate": 2.2652450995197684e-06,
      "loss": 0.3346,
      "step": 15000
    },
    {
      "epoch": 0.23407532695037603,
      "grad_norm": 0.012267685495316982,
      "learning_rate": 2.3407532695037605e-06,
      "loss": 0.426,
      "step": 15500
    },
    {
      "epoch": 0.24162614394877527,
      "grad_norm": 0.0350683256983757,
      "learning_rate": 2.416261439487753e-06,
      "loss": 0.3849,
      "step": 16000
    },
    {
      "epoch": 0.24917696094717448,
      "grad_norm": 0.01037506852298975,
      "learning_rate": 2.491769609471745e-06,
      "loss": 0.3726,
      "step": 16500
    },
    {
      "epoch": 0.2567277779455737,
      "grad_norm": 139.85562133789062,
      "learning_rate": 2.5672777794557375e-06,
      "loss": 0.4721,
      "step": 17000
    },
    {
      "epoch": 0.26427859494397293,
      "grad_norm": 0.11431169509887695,
      "learning_rate": 2.6427859494397296e-06,
      "loss": 0.4746,
      "step": 17500
    },
    {
      "epoch": 0.27182941194237215,
      "grad_norm": 0.024222318083047867,
      "learning_rate": 2.7182941194237217e-06,
      "loss": 0.3612,
      "step": 18000
    },
    {
      "epoch": 0.2793802289407714,
      "grad_norm": 0.0600462444126606,
      "learning_rate": 2.793802289407714e-06,
      "loss": 0.3716,
      "step": 18500
    },
    {
      "epoch": 0.2869310459391706,
      "grad_norm": 0.0730457752943039,
      "learning_rate": 2.8693104593917063e-06,
      "loss": 0.4519,
      "step": 19000
    },
    {
      "epoch": 0.29448186293756984,
      "grad_norm": 0.1063174456357956,
      "learning_rate": 2.944818629375699e-06,
      "loss": 0.3865,
      "step": 19500
    },
    {
      "epoch": 0.30203267993596905,
      "grad_norm": 0.009060272946953773,
      "learning_rate": 3.020326799359691e-06,
      "loss": 0.3653,
      "step": 20000
    },
    {
      "epoch": 0.3095834969343683,
      "grad_norm": 0.0033603431656956673,
      "learning_rate": 3.0958349693436834e-06,
      "loss": 0.4347,
      "step": 20500
    },
    {
      "epoch": 0.31713431393276753,
      "grad_norm": 0.011356896720826626,
      "learning_rate": 3.1713431393276755e-06,
      "loss": 0.3759,
      "step": 21000
    },
    {
      "epoch": 0.32468513093116674,
      "grad_norm": 0.2442641258239746,
      "learning_rate": 3.2468513093116675e-06,
      "loss": 0.3776,
      "step": 21500
    },
    {
      "epoch": 0.33223594792956596,
      "grad_norm": 0.029270794242620468,
      "learning_rate": 3.3223594792956596e-06,
      "loss": 0.3688,
      "step": 22000
    },
    {
      "epoch": 0.3397867649279652,
      "grad_norm": 0.28456059098243713,
      "learning_rate": 3.3978676492796526e-06,
      "loss": 0.3656,
      "step": 22500
    },
    {
      "epoch": 0.34733758192636444,
      "grad_norm": 0.07442634552717209,
      "learning_rate": 3.4733758192636446e-06,
      "loss": 0.4154,
      "step": 23000
    },
    {
      "epoch": 0.35488839892476365,
      "grad_norm": 0.029278529807925224,
      "learning_rate": 3.5488839892476367e-06,
      "loss": 0.4295,
      "step": 23500
    },
    {
      "epoch": 0.36243921592316286,
      "grad_norm": 128.08538818359375,
      "learning_rate": 3.624392159231629e-06,
      "loss": 0.376,
      "step": 24000
    },
    {
      "epoch": 0.36999003292156213,
      "grad_norm": 0.05300796404480934,
      "learning_rate": 3.6999003292156217e-06,
      "loss": 0.3514,
      "step": 24500
    },
    {
      "epoch": 0.37754084991996134,
      "grad_norm": 0.005980394314974546,
      "learning_rate": 3.775408499199614e-06,
      "loss": 0.4061,
      "step": 25000
    },
    {
      "epoch": 0.38509166691836055,
      "grad_norm": 121.62598419189453,
      "learning_rate": 3.8509166691836055e-06,
      "loss": 0.3857,
      "step": 25500
    },
    {
      "epoch": 0.39264248391675977,
      "grad_norm": 0.09926415979862213,
      "learning_rate": 3.926424839167598e-06,
      "loss": 0.3912,
      "step": 26000
    },
    {
      "epoch": 0.40019330091515903,
      "grad_norm": 0.11781750619411469,
      "learning_rate": 4.0019330091515905e-06,
      "loss": 0.4562,
      "step": 26500
    },
    {
      "epoch": 0.40774411791355825,
      "grad_norm": 0.034933462738990784,
      "learning_rate": 4.0774411791355826e-06,
      "loss": 0.3881,
      "step": 27000
    },
    {
      "epoch": 0.41529493491195746,
      "grad_norm": 0.07762602716684341,
      "learning_rate": 4.152949349119575e-06,
      "loss": 0.4326,
      "step": 27500
    },
    {
      "epoch": 0.4228457519103567,
      "grad_norm": 0.03593170642852783,
      "learning_rate": 4.2284575191035676e-06,
      "loss": 0.4169,
      "step": 28000
    },
    {
      "epoch": 0.43039656890875594,
      "grad_norm": 6.517802715301514,
      "learning_rate": 4.30396568908756e-06,
      "loss": 0.3935,
      "step": 28500
    },
    {
      "epoch": 0.43794738590715515,
      "grad_norm": 0.08031677454710007,
      "learning_rate": 4.379473859071552e-06,
      "loss": 0.3949,
      "step": 29000
    },
    {
      "epoch": 0.44549820290555436,
      "grad_norm": 0.00936515349894762,
      "learning_rate": 4.454982029055544e-06,
      "loss": 0.4852,
      "step": 29500
    },
    {
      "epoch": 0.45304901990395363,
      "grad_norm": 0.01062073465436697,
      "learning_rate": 4.530490199039537e-06,
      "loss": 0.3993,
      "step": 30000
    },
    {
      "epoch": 0.46059983690235284,
      "grad_norm": 0.0008572549559175968,
      "learning_rate": 4.605998369023529e-06,
      "loss": 0.4309,
      "step": 30500
    },
    {
      "epoch": 0.46815065390075206,
      "grad_norm": 0.042835455387830734,
      "learning_rate": 4.681506539007521e-06,
      "loss": 0.5426,
      "step": 31000
    },
    {
      "epoch": 0.47570147089915127,
      "grad_norm": 0.006536947563290596,
      "learning_rate": 4.757014708991513e-06,
      "loss": 0.3503,
      "step": 31500
    },
    {
      "epoch": 0.48325228789755054,
      "grad_norm": 0.16728435456752777,
      "learning_rate": 4.832522878975506e-06,
      "loss": 0.4025,
      "step": 32000
    },
    {
      "epoch": 0.49080310489594975,
      "grad_norm": 0.24009940028190613,
      "learning_rate": 4.908031048959498e-06,
      "loss": 0.373,
      "step": 32500
    },
    {
      "epoch": 0.49835392189434896,
      "grad_norm": 0.03488277643918991,
      "learning_rate": 4.98353921894349e-06,
      "loss": 0.4516,
      "step": 33000
    },
    {
      "epoch": 0.5059047388927482,
      "grad_norm": 0.07614383846521378,
      "learning_rate": 4.993439179008058e-06,
      "loss": 0.4391,
      "step": 33500
    },
    {
      "epoch": 0.5134555558911474,
      "grad_norm": 0.2857000231742859,
      "learning_rate": 4.98504938234317e-06,
      "loss": 0.4202,
      "step": 34000
    },
    {
      "epoch": 0.5210063728895467,
      "grad_norm": 0.005722880829125643,
      "learning_rate": 4.9766595856782815e-06,
      "loss": 0.3904,
      "step": 34500
    },
    {
      "epoch": 0.5285571898879459,
      "grad_norm": 0.08243716508150101,
      "learning_rate": 4.968269789013394e-06,
      "loss": 0.355,
      "step": 35000
    },
    {
      "epoch": 0.5361080068863451,
      "grad_norm": 764.4724731445312,
      "learning_rate": 4.959879992348506e-06,
      "loss": 0.4357,
      "step": 35500
    },
    {
      "epoch": 0.5436588238847443,
      "grad_norm": 0.0467102974653244,
      "learning_rate": 4.9514901956836175e-06,
      "loss": 0.4425,
      "step": 36000
    },
    {
      "epoch": 0.5512096408831435,
      "grad_norm": 0.4159618318080902,
      "learning_rate": 4.94310039901873e-06,
      "loss": 0.404,
      "step": 36500
    },
    {
      "epoch": 0.5587604578815428,
      "grad_norm": 0.0004953582538291812,
      "learning_rate": 4.934710602353842e-06,
      "loss": 0.3587,
      "step": 37000
    },
    {
      "epoch": 0.566311274879942,
      "grad_norm": 0.03575737774372101,
      "learning_rate": 4.926320805688954e-06,
      "loss": 0.3726,
      "step": 37500
    },
    {
      "epoch": 0.5738620918783413,
      "grad_norm": 0.02385396882891655,
      "learning_rate": 4.917931009024066e-06,
      "loss": 0.3771,
      "step": 38000
    },
    {
      "epoch": 0.5814129088767405,
      "grad_norm": 0.02760937251150608,
      "learning_rate": 4.909541212359178e-06,
      "loss": 0.4108,
      "step": 38500
    },
    {
      "epoch": 0.5889637258751397,
      "grad_norm": 0.7694607377052307,
      "learning_rate": 4.901151415694289e-06,
      "loss": 0.3603,
      "step": 39000
    },
    {
      "epoch": 0.5965145428735389,
      "grad_norm": 1522.3602294921875,
      "learning_rate": 4.892761619029401e-06,
      "loss": 0.3335,
      "step": 39500
    },
    {
      "epoch": 0.6040653598719381,
      "grad_norm": 0.3060073256492615,
      "learning_rate": 4.884371822364514e-06,
      "loss": 0.3266,
      "step": 40000
    },
    {
      "epoch": 0.6116161768703374,
      "grad_norm": 0.07775181531906128,
      "learning_rate": 4.875982025699625e-06,
      "loss": 0.3596,
      "step": 40500
    },
    {
      "epoch": 0.6191669938687366,
      "grad_norm": 0.05560746416449547,
      "learning_rate": 4.867592229034738e-06,
      "loss": 0.4614,
      "step": 41000
    },
    {
      "epoch": 0.6267178108671358,
      "grad_norm": 0.11702022701501846,
      "learning_rate": 4.85920243236985e-06,
      "loss": 0.4036,
      "step": 41500
    },
    {
      "epoch": 0.6342686278655351,
      "grad_norm": 0.20153406262397766,
      "learning_rate": 4.850812635704961e-06,
      "loss": 0.3323,
      "step": 42000
    },
    {
      "epoch": 0.6418194448639343,
      "grad_norm": 1046.9708251953125,
      "learning_rate": 4.842422839040073e-06,
      "loss": 0.423,
      "step": 42500
    },
    {
      "epoch": 0.6493702618623335,
      "grad_norm": 0.005386498291045427,
      "learning_rate": 4.834033042375186e-06,
      "loss": 0.3607,
      "step": 43000
    },
    {
      "epoch": 0.6569210788607327,
      "grad_norm": 0.0019247422460466623,
      "learning_rate": 4.825643245710297e-06,
      "loss": 0.3117,
      "step": 43500
    },
    {
      "epoch": 0.6644718958591319,
      "grad_norm": 0.03873920440673828,
      "learning_rate": 4.817253449045409e-06,
      "loss": 0.4158,
      "step": 44000
    },
    {
      "epoch": 0.6720227128575312,
      "grad_norm": 0.00777060491964221,
      "learning_rate": 4.808863652380522e-06,
      "loss": 0.3424,
      "step": 44500
    },
    {
      "epoch": 0.6795735298559304,
      "grad_norm": 0.0689612627029419,
      "learning_rate": 4.800473855715633e-06,
      "loss": 0.3596,
      "step": 45000
    },
    {
      "epoch": 0.6871243468543297,
      "grad_norm": 164.3638916015625,
      "learning_rate": 4.792084059050745e-06,
      "loss": 0.3271,
      "step": 45500
    },
    {
      "epoch": 0.6946751638527289,
      "grad_norm": 0.9296920895576477,
      "learning_rate": 4.783694262385858e-06,
      "loss": 0.4008,
      "step": 46000
    },
    {
      "epoch": 0.7022259808511281,
      "grad_norm": 0.008974584750831127,
      "learning_rate": 4.775304465720969e-06,
      "loss": 0.3232,
      "step": 46500
    },
    {
      "epoch": 0.7097767978495273,
      "grad_norm": 126.27271270751953,
      "learning_rate": 4.766914669056081e-06,
      "loss": 0.3716,
      "step": 47000
    },
    {
      "epoch": 0.7173276148479265,
      "grad_norm": 0.003123954404145479,
      "learning_rate": 4.758524872391193e-06,
      "loss": 0.3153,
      "step": 47500
    },
    {
      "epoch": 0.7248784318463257,
      "grad_norm": 0.008505255915224552,
      "learning_rate": 4.750135075726304e-06,
      "loss": 0.3477,
      "step": 48000
    },
    {
      "epoch": 0.732429248844725,
      "grad_norm": 0.07402774691581726,
      "learning_rate": 4.741745279061417e-06,
      "loss": 0.3588,
      "step": 48500
    },
    {
      "epoch": 0.7399800658431243,
      "grad_norm": 0.029175467789173126,
      "learning_rate": 4.733355482396529e-06,
      "loss": 0.353,
      "step": 49000
    },
    {
      "epoch": 0.7475308828415235,
      "grad_norm": 229.5810089111328,
      "learning_rate": 4.724965685731641e-06,
      "loss": 0.361,
      "step": 49500
    },
    {
      "epoch": 0.7550816998399227,
      "grad_norm": 0.14088426530361176,
      "learning_rate": 4.716575889066753e-06,
      "loss": 0.3988,
      "step": 50000
    },
    {
      "epoch": 0.7626325168383219,
      "grad_norm": 0.5763397216796875,
      "learning_rate": 4.7081860924018655e-06,
      "loss": 0.3646,
      "step": 50500
    },
    {
      "epoch": 0.7701833338367211,
      "grad_norm": 4.555429458618164,
      "learning_rate": 4.699796295736977e-06,
      "loss": 0.3308,
      "step": 51000
    },
    {
      "epoch": 0.7777341508351203,
      "grad_norm": 0.031139669939875603,
      "learning_rate": 4.691406499072089e-06,
      "loss": 0.3487,
      "step": 51500
    },
    {
      "epoch": 0.7852849678335195,
      "grad_norm": 1.3805643320083618,
      "learning_rate": 4.683016702407201e-06,
      "loss": 0.3193,
      "step": 52000
    },
    {
      "epoch": 0.7928357848319189,
      "grad_norm": 0.00282275746576488,
      "learning_rate": 4.674626905742312e-06,
      "loss": 0.4172,
      "step": 52500
    },
    {
      "epoch": 0.8003866018303181,
      "grad_norm": 0.0008711374830454588,
      "learning_rate": 4.666237109077425e-06,
      "loss": 0.3291,
      "step": 53000
    },
    {
      "epoch": 0.8079374188287173,
      "grad_norm": 0.007124012336134911,
      "learning_rate": 4.657847312412537e-06,
      "loss": 0.3363,
      "step": 53500
    },
    {
      "epoch": 0.8154882358271165,
      "grad_norm": 0.3629874587059021,
      "learning_rate": 4.649457515747648e-06,
      "loss": 0.3525,
      "step": 54000
    },
    {
      "epoch": 0.8230390528255157,
      "grad_norm": 89.42652130126953,
      "learning_rate": 4.641067719082761e-06,
      "loss": 0.3133,
      "step": 54500
    },
    {
      "epoch": 0.8305898698239149,
      "grad_norm": 0.0034131056163460016,
      "learning_rate": 4.6326779224178726e-06,
      "loss": 0.3381,
      "step": 55000
    },
    {
      "epoch": 0.8381406868223141,
      "grad_norm": 8.344011306762695,
      "learning_rate": 4.624288125752985e-06,
      "loss": 0.2783,
      "step": 55500
    },
    {
      "epoch": 0.8456915038207135,
      "grad_norm": 0.015363700687885284,
      "learning_rate": 4.615898329088097e-06,
      "loss": 0.3976,
      "step": 56000
    },
    {
      "epoch": 0.8532423208191127,
      "grad_norm": 218.98626708984375,
      "learning_rate": 4.6075085324232085e-06,
      "loss": 0.3916,
      "step": 56500
    },
    {
      "epoch": 0.8607931378175119,
      "grad_norm": 0.052222587168216705,
      "learning_rate": 4.59911873575832e-06,
      "loss": 0.3614,
      "step": 57000
    },
    {
      "epoch": 0.8683439548159111,
      "grad_norm": 0.042011819779872894,
      "learning_rate": 4.590728939093432e-06,
      "loss": 0.422,
      "step": 57500
    },
    {
      "epoch": 0.8758947718143103,
      "grad_norm": 0.2174474447965622,
      "learning_rate": 4.5823391424285445e-06,
      "loss": 0.3086,
      "step": 58000
    },
    {
      "epoch": 0.8834455888127095,
      "grad_norm": 0.11423001438379288,
      "learning_rate": 4.573949345763656e-06,
      "loss": 0.3596,
      "step": 58500
    },
    {
      "epoch": 0.8909964058111087,
      "grad_norm": 0.1473008543252945,
      "learning_rate": 4.565559549098769e-06,
      "loss": 0.3845,
      "step": 59000
    },
    {
      "epoch": 0.8985472228095079,
      "grad_norm": 0.6927057504653931,
      "learning_rate": 4.5571697524338805e-06,
      "loss": 0.3272,
      "step": 59500
    },
    {
      "epoch": 0.9060980398079073,
      "grad_norm": 0.052435796707868576,
      "learning_rate": 4.548779955768992e-06,
      "loss": 0.3124,
      "step": 60000
    },
    {
      "epoch": 0.9136488568063065,
      "grad_norm": 0.0011638773139566183,
      "learning_rate": 4.540390159104105e-06,
      "loss": 0.3051,
      "step": 60500
    },
    {
      "epoch": 0.9211996738047057,
      "grad_norm": 0.002878799568861723,
      "learning_rate": 4.5320003624392165e-06,
      "loss": 0.3196,
      "step": 61000
    },
    {
      "epoch": 0.9287504908031049,
      "grad_norm": 0.022700993344187737,
      "learning_rate": 4.523610565774328e-06,
      "loss": 0.3307,
      "step": 61500
    },
    {
      "epoch": 0.9363013078015041,
      "grad_norm": 18.183610916137695,
      "learning_rate": 4.51522076910944e-06,
      "loss": 0.3215,
      "step": 62000
    },
    {
      "epoch": 0.9438521247999033,
      "grad_norm": 0.006543469615280628,
      "learning_rate": 4.5068309724445524e-06,
      "loss": 0.3398,
      "step": 62500
    },
    {
      "epoch": 0.9514029417983025,
      "grad_norm": 0.07925090938806534,
      "learning_rate": 4.498441175779664e-06,
      "loss": 0.2937,
      "step": 63000
    },
    {
      "epoch": 0.9589537587967017,
      "grad_norm": 0.1537381261587143,
      "learning_rate": 4.490051379114776e-06,
      "loss": 0.2819,
      "step": 63500
    },
    {
      "epoch": 0.9665045757951011,
      "grad_norm": 0.01872635819017887,
      "learning_rate": 4.481661582449888e-06,
      "loss": 0.2671,
      "step": 64000
    },
    {
      "epoch": 0.9740553927935003,
      "grad_norm": 0.024023612961173058,
      "learning_rate": 4.473271785785e-06,
      "loss": 0.3206,
      "step": 64500
    },
    {
      "epoch": 0.9816062097918995,
      "grad_norm": 0.059889055788517,
      "learning_rate": 4.464881989120113e-06,
      "loss": 0.2976,
      "step": 65000
    },
    {
      "epoch": 0.9891570267902987,
      "grad_norm": 0.002819158136844635,
      "learning_rate": 4.456492192455224e-06,
      "loss": 0.3465,
      "step": 65500
    },
    {
      "epoch": 0.9967078437886979,
      "grad_norm": 0.02669268473982811,
      "learning_rate": 4.448102395790336e-06,
      "loss": 0.3076,
      "step": 66000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9523090398381104,
      "eval_loss": 0.3044677674770355,
      "eval_runtime": 202.6207,
      "eval_samples_per_second": 163.404,
      "eval_steps_per_second": 40.855,
      "step": 66218
    }
  ],
  "logging_steps": 500,
  "max_steps": 331090,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1092242844591276e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}