{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9965397923875432,
"eval_steps": 500,
"global_step": 144,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.006920415224913495,
"grad_norm": 5.909927412110894,
"learning_rate": 4.000000000000001e-06,
"loss": 0.1796,
"step": 1
},
{
"epoch": 0.01384083044982699,
"grad_norm": 5.814173094234023,
"learning_rate": 8.000000000000001e-06,
"loss": 0.1745,
"step": 2
},
{
"epoch": 0.020761245674740483,
"grad_norm": 7.527394332552472,
"learning_rate": 1.2e-05,
"loss": 0.1664,
"step": 3
},
{
"epoch": 0.02768166089965398,
"grad_norm": 6.411723498568909,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.1111,
"step": 4
},
{
"epoch": 0.03460207612456748,
"grad_norm": 29.463780179754565,
"learning_rate": 2e-05,
"loss": 0.3775,
"step": 5
},
{
"epoch": 0.04152249134948097,
"grad_norm": 25.873155743529786,
"learning_rate": 1.999744599547812e-05,
"loss": 0.5048,
"step": 6
},
{
"epoch": 0.04844290657439446,
"grad_norm": 6.21107647977612,
"learning_rate": 1.9989785286500294e-05,
"loss": 0.1919,
"step": 7
},
{
"epoch": 0.05536332179930796,
"grad_norm": 2.467176199809234,
"learning_rate": 1.99770217861636e-05,
"loss": 0.1488,
"step": 8
},
{
"epoch": 0.06228373702422145,
"grad_norm": 3.511922315343916,
"learning_rate": 1.9959162014075553e-05,
"loss": 0.1299,
"step": 9
},
{
"epoch": 0.06920415224913495,
"grad_norm": 1.5637990196383564,
"learning_rate": 1.9936215093023884e-05,
"loss": 0.1313,
"step": 10
},
{
"epoch": 0.07612456747404844,
"grad_norm": 5.108268285094946,
"learning_rate": 1.990819274431662e-05,
"loss": 0.092,
"step": 11
},
{
"epoch": 0.08304498269896193,
"grad_norm": 4.9693481281925465,
"learning_rate": 1.9875109281794828e-05,
"loss": 0.1171,
"step": 12
},
{
"epoch": 0.08996539792387544,
"grad_norm": 7.995002644534826,
"learning_rate": 1.9836981604521077e-05,
"loss": 0.1178,
"step": 13
},
{
"epoch": 0.09688581314878893,
"grad_norm": 3.4504191020747883,
"learning_rate": 1.9793829188147406e-05,
"loss": 0.0989,
"step": 14
},
{
"epoch": 0.10380622837370242,
"grad_norm": 8.263808135194267,
"learning_rate": 1.974567407496712e-05,
"loss": 0.1486,
"step": 15
},
{
"epoch": 0.11072664359861592,
"grad_norm": 1.367572738961488,
"learning_rate": 1.9692540862655587e-05,
"loss": 0.0847,
"step": 16
},
{
"epoch": 0.11764705882352941,
"grad_norm": 5.4076849647712635,
"learning_rate": 1.9634456691705705e-05,
"loss": 0.1004,
"step": 17
},
{
"epoch": 0.1245674740484429,
"grad_norm": 1.455150235552633,
"learning_rate": 1.9571451231564523e-05,
"loss": 0.0776,
"step": 18
},
{
"epoch": 0.1314878892733564,
"grad_norm": 0.9765353662178354,
"learning_rate": 1.9503556665478066e-05,
"loss": 0.0501,
"step": 19
},
{
"epoch": 0.1384083044982699,
"grad_norm": 3.1660835804785594,
"learning_rate": 1.9430807674052092e-05,
"loss": 0.0676,
"step": 20
},
{
"epoch": 0.1453287197231834,
"grad_norm": 4.601906713563019,
"learning_rate": 1.9353241417537216e-05,
"loss": 0.1148,
"step": 21
},
{
"epoch": 0.1522491349480969,
"grad_norm": 0.9410185517641271,
"learning_rate": 1.9270897516847406e-05,
"loss": 0.0644,
"step": 22
},
{
"epoch": 0.15916955017301038,
"grad_norm": 2.003001609282354,
"learning_rate": 1.9183818033321612e-05,
"loss": 0.0365,
"step": 23
},
{
"epoch": 0.16608996539792387,
"grad_norm": 3.8004697116265125,
"learning_rate": 1.9092047447238775e-05,
"loss": 0.0601,
"step": 24
},
{
"epoch": 0.17301038062283736,
"grad_norm": 1.505231605536048,
"learning_rate": 1.899563263509725e-05,
"loss": 0.032,
"step": 25
},
{
"epoch": 0.17993079584775087,
"grad_norm": 1.4483444591893908,
"learning_rate": 1.8894622845670282e-05,
"loss": 0.0814,
"step": 26
},
{
"epoch": 0.18685121107266436,
"grad_norm": 1.769291133502307,
"learning_rate": 1.878906967484966e-05,
"loss": 0.0835,
"step": 27
},
{
"epoch": 0.19377162629757785,
"grad_norm": 1.6052993878109532,
"learning_rate": 1.86790270392905e-05,
"loss": 0.0508,
"step": 28
},
{
"epoch": 0.20069204152249134,
"grad_norm": 3.4046119290691355,
"learning_rate": 1.856455114887056e-05,
"loss": 0.0769,
"step": 29
},
{
"epoch": 0.20761245674740483,
"grad_norm": 2.3275130236844026,
"learning_rate": 1.8445700477978207e-05,
"loss": 0.1246,
"step": 30
},
{
"epoch": 0.21453287197231835,
"grad_norm": 3.059909729816116,
"learning_rate": 1.8322535735643604e-05,
"loss": 0.07,
"step": 31
},
{
"epoch": 0.22145328719723184,
"grad_norm": 4.494006378640971,
"learning_rate": 1.8195119834528535e-05,
"loss": 0.0606,
"step": 32
},
{
"epoch": 0.22837370242214533,
"grad_norm": 1.1539839844534825,
"learning_rate": 1.8063517858790517e-05,
"loss": 0.0648,
"step": 33
},
{
"epoch": 0.23529411764705882,
"grad_norm": 1.5380191902785147,
"learning_rate": 1.792779703083777e-05,
"loss": 0.0913,
"step": 34
},
{
"epoch": 0.2422145328719723,
"grad_norm": 0.7918508463404255,
"learning_rate": 1.778802667699196e-05,
"loss": 0.0299,
"step": 35
},
{
"epoch": 0.2491349480968858,
"grad_norm": 2.1853629013025704,
"learning_rate": 1.764427819207624e-05,
"loss": 0.0583,
"step": 36
},
{
"epoch": 0.2560553633217993,
"grad_norm": 2.316866722784186,
"learning_rate": 1.7496625002946702e-05,
"loss": 0.0543,
"step": 37
},
{
"epoch": 0.2629757785467128,
"grad_norm": 0.6907292526164213,
"learning_rate": 1.734514253098589e-05,
"loss": 0.0436,
"step": 38
},
{
"epoch": 0.2698961937716263,
"grad_norm": 0.6929541264038483,
"learning_rate": 1.7189908153577473e-05,
"loss": 0.059,
"step": 39
},
{
"epoch": 0.2768166089965398,
"grad_norm": 0.766614669335446,
"learning_rate": 1.7031001164581828e-05,
"loss": 0.0364,
"step": 40
},
{
"epoch": 0.2837370242214533,
"grad_norm": 1.1432954678059355,
"learning_rate": 1.6868502733832647e-05,
"loss": 0.0639,
"step": 41
},
{
"epoch": 0.2906574394463668,
"grad_norm": 1.4246783694756413,
"learning_rate": 1.670249586567531e-05,
"loss": 0.028,
"step": 42
},
{
"epoch": 0.2975778546712803,
"grad_norm": 0.6960925404059692,
"learning_rate": 1.6533065356568206e-05,
"loss": 0.0563,
"step": 43
},
{
"epoch": 0.3044982698961938,
"grad_norm": 0.917801830468414,
"learning_rate": 1.636029775176862e-05,
"loss": 0.0645,
"step": 44
},
{
"epoch": 0.31141868512110726,
"grad_norm": 1.147360995244464,
"learning_rate": 1.618428130112533e-05,
"loss": 0.0483,
"step": 45
},
{
"epoch": 0.31833910034602075,
"grad_norm": 1.0760514929547755,
"learning_rate": 1.6005105914000508e-05,
"loss": 0.0466,
"step": 46
},
{
"epoch": 0.32525951557093424,
"grad_norm": 2.0668922589924006,
"learning_rate": 1.5822863113343934e-05,
"loss": 0.0343,
"step": 47
},
{
"epoch": 0.33217993079584773,
"grad_norm": 1.1035230066017743,
"learning_rate": 1.5637645988943008e-05,
"loss": 0.0198,
"step": 48
},
{
"epoch": 0.3391003460207612,
"grad_norm": 0.5094633252436404,
"learning_rate": 1.544954914987238e-05,
"loss": 0.0405,
"step": 49
},
{
"epoch": 0.3460207612456747,
"grad_norm": 1.4387866581529392,
"learning_rate": 1.5258668676167548e-05,
"loss": 0.046,
"step": 50
},
{
"epoch": 0.35294117647058826,
"grad_norm": 0.7482935385793746,
"learning_rate": 1.5065102069747117e-05,
"loss": 0.027,
"step": 51
},
{
"epoch": 0.35986159169550175,
"grad_norm": 3.7792094686292246,
"learning_rate": 1.48689482046087e-05,
"loss": 0.0581,
"step": 52
},
{
"epoch": 0.36678200692041524,
"grad_norm": 1.3731751352374644,
"learning_rate": 1.467030727632401e-05,
"loss": 0.0228,
"step": 53
},
{
"epoch": 0.3737024221453287,
"grad_norm": 0.7887909443777626,
"learning_rate": 1.4469280750858854e-05,
"loss": 0.0739,
"step": 54
},
{
"epoch": 0.3806228373702422,
"grad_norm": 0.8647483470113037,
"learning_rate": 1.4265971312744252e-05,
"loss": 0.0551,
"step": 55
},
{
"epoch": 0.3875432525951557,
"grad_norm": 1.2742301466453625,
"learning_rate": 1.4060482812625055e-05,
"loss": 0.0364,
"step": 56
},
{
"epoch": 0.3944636678200692,
"grad_norm": 0.37834231118319656,
"learning_rate": 1.3852920214212966e-05,
"loss": 0.0229,
"step": 57
},
{
"epoch": 0.4013840830449827,
"grad_norm": 0.38818516965237315,
"learning_rate": 1.3643389540670963e-05,
"loss": 0.0188,
"step": 58
},
{
"epoch": 0.4083044982698962,
"grad_norm": 1.1203564083475976,
"learning_rate": 1.3431997820456592e-05,
"loss": 0.0295,
"step": 59
},
{
"epoch": 0.41522491349480967,
"grad_norm": 2.7193984196484697,
"learning_rate": 1.3218853032651719e-05,
"loss": 0.0592,
"step": 60
},
{
"epoch": 0.42214532871972316,
"grad_norm": 0.8457919661498918,
"learning_rate": 1.3004064051806712e-05,
"loss": 0.0559,
"step": 61
},
{
"epoch": 0.4290657439446367,
"grad_norm": 1.9644511219937209,
"learning_rate": 1.2787740592327232e-05,
"loss": 0.0755,
"step": 62
},
{
"epoch": 0.4359861591695502,
"grad_norm": 2.8810823496302977,
"learning_rate": 1.2569993152432028e-05,
"loss": 0.0771,
"step": 63
},
{
"epoch": 0.4429065743944637,
"grad_norm": 1.237673547458137,
"learning_rate": 1.2350932957710322e-05,
"loss": 0.0161,
"step": 64
},
{
"epoch": 0.44982698961937717,
"grad_norm": 1.0351256257207866,
"learning_rate": 1.2130671904307692e-05,
"loss": 0.0285,
"step": 65
},
{
"epoch": 0.45674740484429066,
"grad_norm": 2.390190069024017,
"learning_rate": 1.1909322501769407e-05,
"loss": 0.0595,
"step": 66
},
{
"epoch": 0.46366782006920415,
"grad_norm": 0.4602512676359961,
"learning_rate": 1.1686997815570473e-05,
"loss": 0.0565,
"step": 67
},
{
"epoch": 0.47058823529411764,
"grad_norm": 0.5724949291122662,
"learning_rate": 1.1463811409361667e-05,
"loss": 0.0306,
"step": 68
},
{
"epoch": 0.47750865051903113,
"grad_norm": 0.6234367847233119,
"learning_rate": 1.1239877286961123e-05,
"loss": 0.041,
"step": 69
},
{
"epoch": 0.4844290657439446,
"grad_norm": 0.6525151506863333,
"learning_rate": 1.1015309834121083e-05,
"loss": 0.0315,
"step": 70
},
{
"epoch": 0.4913494809688581,
"grad_norm": 1.2107180201820262,
"learning_rate": 1.079022376009955e-05,
"loss": 0.0463,
"step": 71
},
{
"epoch": 0.4982698961937716,
"grad_norm": 0.36904102477700823,
"learning_rate": 1.05647340390667e-05,
"loss": 0.0354,
"step": 72
},
{
"epoch": 0.5051903114186851,
"grad_norm": 0.4623564155552934,
"learning_rate": 1.0338955851375962e-05,
"loss": 0.0401,
"step": 73
},
{
"epoch": 0.5121107266435986,
"grad_norm": 0.4464635221070512,
"learning_rate": 1.01130045247298e-05,
"loss": 0.0178,
"step": 74
},
{
"epoch": 0.5190311418685121,
"grad_norm": 0.8151026231175525,
"learning_rate": 9.886995475270205e-06,
"loss": 0.0339,
"step": 75
},
{
"epoch": 0.5259515570934256,
"grad_norm": 0.8737807325813413,
"learning_rate": 9.661044148624038e-06,
"loss": 0.0431,
"step": 76
},
{
"epoch": 0.532871972318339,
"grad_norm": 0.5650128573973728,
"learning_rate": 9.435265960933304e-06,
"loss": 0.0429,
"step": 77
},
{
"epoch": 0.5397923875432526,
"grad_norm": 2.3196808467192658,
"learning_rate": 9.209776239900453e-06,
"loss": 0.0744,
"step": 78
},
{
"epoch": 0.5467128027681661,
"grad_norm": 0.6763919787895529,
"learning_rate": 8.98469016587892e-06,
"loss": 0.0273,
"step": 79
},
{
"epoch": 0.5536332179930796,
"grad_norm": 0.73804041555226,
"learning_rate": 8.76012271303888e-06,
"loss": 0.0414,
"step": 80
},
{
"epoch": 0.5605536332179931,
"grad_norm": 0.4375261353904343,
"learning_rate": 8.536188590638334e-06,
"loss": 0.0489,
"step": 81
},
{
"epoch": 0.5674740484429066,
"grad_norm": 1.9813329567182527,
"learning_rate": 8.313002184429529e-06,
"loss": 0.0455,
"step": 82
},
{
"epoch": 0.5743944636678201,
"grad_norm": 0.448941103281106,
"learning_rate": 8.090677498230598e-06,
"loss": 0.0432,
"step": 83
},
{
"epoch": 0.5813148788927336,
"grad_norm": 0.9180761063192592,
"learning_rate": 7.869328095692313e-06,
"loss": 0.0234,
"step": 84
},
{
"epoch": 0.5882352941176471,
"grad_norm": 0.5883629574508615,
"learning_rate": 7.649067042289681e-06,
"loss": 0.0509,
"step": 85
},
{
"epoch": 0.5951557093425606,
"grad_norm": 0.8009555104412699,
"learning_rate": 7.430006847567972e-06,
"loss": 0.0282,
"step": 86
},
{
"epoch": 0.6020761245674741,
"grad_norm": 1.3285299052953212,
"learning_rate": 7.2122594076727705e-06,
"loss": 0.0514,
"step": 87
},
{
"epoch": 0.6089965397923875,
"grad_norm": 0.8852543518710448,
"learning_rate": 6.995935948193294e-06,
"loss": 0.0248,
"step": 88
},
{
"epoch": 0.615916955017301,
"grad_norm": 2.3353752448807725,
"learning_rate": 6.781146967348283e-06,
"loss": 0.0693,
"step": 89
},
{
"epoch": 0.6228373702422145,
"grad_norm": 1.3882240308976475,
"learning_rate": 6.568002179543409e-06,
"loss": 0.0407,
"step": 90
},
{
"epoch": 0.629757785467128,
"grad_norm": 1.183088785309517,
"learning_rate": 6.356610459329038e-06,
"loss": 0.0234,
"step": 91
},
{
"epoch": 0.6366782006920415,
"grad_norm": 1.0365553239602958,
"learning_rate": 6.147079785787038e-06,
"loss": 0.0672,
"step": 92
},
{
"epoch": 0.643598615916955,
"grad_norm": 0.6479773351825772,
"learning_rate": 5.93951718737495e-06,
"loss": 0.0516,
"step": 93
},
{
"epoch": 0.6505190311418685,
"grad_norm": 0.5248885554897967,
"learning_rate": 5.7340286872557515e-06,
"loss": 0.0302,
"step": 94
},
{
"epoch": 0.657439446366782,
"grad_norm": 0.6857902639461477,
"learning_rate": 5.530719249141148e-06,
"loss": 0.0418,
"step": 95
},
{
"epoch": 0.6643598615916955,
"grad_norm": 0.7622230565978657,
"learning_rate": 5.329692723675994e-06,
"loss": 0.0302,
"step": 96
},
{
"epoch": 0.671280276816609,
"grad_norm": 0.5694955322805975,
"learning_rate": 5.131051795391302e-06,
"loss": 0.0297,
"step": 97
},
{
"epoch": 0.6782006920415224,
"grad_norm": 0.48280429993709745,
"learning_rate": 4.934897930252887e-06,
"loss": 0.0467,
"step": 98
},
{
"epoch": 0.6851211072664359,
"grad_norm": 0.7969826708541765,
"learning_rate": 4.7413313238324556e-06,
"loss": 0.0431,
"step": 99
},
{
"epoch": 0.6920415224913494,
"grad_norm": 0.40791439903766624,
"learning_rate": 4.550450850127626e-06,
"loss": 0.0296,
"step": 100
},
{
"epoch": 0.698961937716263,
"grad_norm": 0.8229666980540179,
"learning_rate": 4.3623540110569935e-06,
"loss": 0.068,
"step": 101
},
{
"epoch": 0.7058823529411765,
"grad_norm": 0.3873805504039957,
"learning_rate": 4.177136886656067e-06,
"loss": 0.0395,
"step": 102
},
{
"epoch": 0.71280276816609,
"grad_norm": 0.6160253305799199,
"learning_rate": 3.9948940859994964e-06,
"loss": 0.0426,
"step": 103
},
{
"epoch": 0.7197231833910035,
"grad_norm": 0.437203350354127,
"learning_rate": 3.815718698874672e-06,
"loss": 0.0493,
"step": 104
},
{
"epoch": 0.726643598615917,
"grad_norm": 0.970248480584735,
"learning_rate": 3.6397022482313804e-06,
"loss": 0.0157,
"step": 105
},
{
"epoch": 0.7335640138408305,
"grad_norm": 1.0489044138676327,
"learning_rate": 3.466934643431795e-06,
"loss": 0.0223,
"step": 106
},
{
"epoch": 0.740484429065744,
"grad_norm": 0.8154744910449274,
"learning_rate": 3.2975041343246937e-06,
"loss": 0.0372,
"step": 107
},
{
"epoch": 0.7474048442906575,
"grad_norm": 0.8284356354573794,
"learning_rate": 3.1314972661673572e-06,
"loss": 0.0595,
"step": 108
},
{
"epoch": 0.754325259515571,
"grad_norm": 0.891688730913265,
"learning_rate": 2.9689988354181742e-06,
"loss": 0.0371,
"step": 109
},
{
"epoch": 0.7612456747404844,
"grad_norm": 0.5373575786911854,
"learning_rate": 2.8100918464225304e-06,
"loss": 0.0359,
"step": 110
},
{
"epoch": 0.7681660899653979,
"grad_norm": 1.5437209149978486,
"learning_rate": 2.654857469014113e-06,
"loss": 0.0377,
"step": 111
},
{
"epoch": 0.7750865051903114,
"grad_norm": 0.8917767418171114,
"learning_rate": 2.5033749970533015e-06,
"loss": 0.0489,
"step": 112
},
{
"epoch": 0.7820069204152249,
"grad_norm": 0.6967641711242976,
"learning_rate": 2.3557218079237608e-06,
"loss": 0.0412,
"step": 113
},
{
"epoch": 0.7889273356401384,
"grad_norm": 0.6010523621864098,
"learning_rate": 2.211973323008041e-06,
"loss": 0.0496,
"step": 114
},
{
"epoch": 0.7958477508650519,
"grad_norm": 0.9326834234151709,
"learning_rate": 2.072202969162234e-06,
"loss": 0.0382,
"step": 115
},
{
"epoch": 0.8027681660899654,
"grad_norm": 0.28932924745655303,
"learning_rate": 1.936482141209486e-06,
"loss": 0.0246,
"step": 116
},
{
"epoch": 0.8096885813148789,
"grad_norm": 0.3646152590947324,
"learning_rate": 1.8048801654714687e-06,
"loss": 0.0363,
"step": 117
},
{
"epoch": 0.8166089965397924,
"grad_norm": 0.8316990911384655,
"learning_rate": 1.6774642643563955e-06,
"loss": 0.015,
"step": 118
},
{
"epoch": 0.8235294117647058,
"grad_norm": 0.7109476986868478,
"learning_rate": 1.5542995220217961e-06,
"loss": 0.0466,
"step": 119
},
{
"epoch": 0.8304498269896193,
"grad_norm": 0.6558256138579013,
"learning_rate": 1.4354488511294418e-06,
"loss": 0.0493,
"step": 120
},
{
"epoch": 0.8373702422145328,
"grad_norm": 0.6071205282852766,
"learning_rate": 1.3209729607095022e-06,
"loss": 0.0185,
"step": 121
},
{
"epoch": 0.8442906574394463,
"grad_norm": 0.8574874934268059,
"learning_rate": 1.2109303251503434e-06,
"loss": 0.0409,
"step": 122
},
{
"epoch": 0.8512110726643599,
"grad_norm": 0.26203721412837916,
"learning_rate": 1.1053771543297198e-06,
"loss": 0.03,
"step": 123
},
{
"epoch": 0.8581314878892734,
"grad_norm": 0.36209922289830276,
"learning_rate": 1.0043673649027519e-06,
"loss": 0.014,
"step": 124
},
{
"epoch": 0.8650519031141869,
"grad_norm": 0.37375093500576706,
"learning_rate": 9.079525527612321e-07,
"loss": 0.0208,
"step": 125
},
{
"epoch": 0.8719723183391004,
"grad_norm": 0.9164555453155439,
"learning_rate": 8.161819666783888e-07,
"loss": 0.0563,
"step": 126
},
{
"epoch": 0.8788927335640139,
"grad_norm": 0.7396537836277686,
"learning_rate": 7.291024831525961e-07,
"loss": 0.0516,
"step": 127
},
{
"epoch": 0.8858131487889274,
"grad_norm": 0.7564027962900852,
"learning_rate": 6.467585824627886e-07,
"loss": 0.0797,
"step": 128
},
{
"epoch": 0.8927335640138409,
"grad_norm": 0.701813356416935,
"learning_rate": 5.691923259479093e-07,
"loss": 0.0323,
"step": 129
},
{
"epoch": 0.8996539792387543,
"grad_norm": 0.6642690626780311,
"learning_rate": 4.964433345219354e-07,
"loss": 0.027,
"step": 130
},
{
"epoch": 0.9065743944636678,
"grad_norm": 0.5832787512217212,
"learning_rate": 4.285487684354772e-07,
"loss": 0.0227,
"step": 131
},
{
"epoch": 0.9134948096885813,
"grad_norm": 1.301649161039514,
"learning_rate": 3.6554330829429716e-07,
"loss": 0.0778,
"step": 132
},
{
"epoch": 0.9204152249134948,
"grad_norm": 0.8528660745704756,
"learning_rate": 3.0745913734441357e-07,
"loss": 0.033,
"step": 133
},
{
"epoch": 0.9273356401384083,
"grad_norm": 1.1082351009137044,
"learning_rate": 2.5432592503288e-07,
"loss": 0.0306,
"step": 134
},
{
"epoch": 0.9342560553633218,
"grad_norm": 0.9544686893620227,
"learning_rate": 2.0617081185259512e-07,
"loss": 0.0434,
"step": 135
},
{
"epoch": 0.9411764705882353,
"grad_norm": 0.4186137702661263,
"learning_rate": 1.630183954789233e-07,
"loss": 0.0275,
"step": 136
},
{
"epoch": 0.9480968858131488,
"grad_norm": 0.8661220155566242,
"learning_rate": 1.2489071820517394e-07,
"loss": 0.0397,
"step": 137
},
{
"epoch": 0.9550173010380623,
"grad_norm": 0.9542879421585415,
"learning_rate": 9.180725568338045e-08,
"loss": 0.0177,
"step": 138
},
{
"epoch": 0.9619377162629758,
"grad_norm": 1.221781165267405,
"learning_rate": 6.378490697611761e-08,
"loss": 0.0439,
"step": 139
},
{
"epoch": 0.9688581314878892,
"grad_norm": 0.47755949195114916,
"learning_rate": 4.083798592444899e-08,
"loss": 0.0573,
"step": 140
},
{
"epoch": 0.9757785467128027,
"grad_norm": 0.562330479749174,
"learning_rate": 2.2978213836400974e-08,
"loss": 0.045,
"step": 141
},
{
"epoch": 0.9826989619377162,
"grad_norm": 0.5335816510886274,
"learning_rate": 1.0214713499706596e-08,
"loss": 0.0416,
"step": 142
},
{
"epoch": 0.9896193771626297,
"grad_norm": 0.3303782652790597,
"learning_rate": 2.5540045218819256e-09,
"loss": 0.0314,
"step": 143
},
{
"epoch": 0.9965397923875432,
"grad_norm": 0.8440597744595137,
"learning_rate": 0.0,
"loss": 0.0266,
"step": 144
},
{
"epoch": 0.9965397923875432,
"step": 144,
"total_flos": 4.85462032515072e+16,
"train_loss": 0.05981730037860365,
"train_runtime": 2168.0646,
"train_samples_per_second": 8.526,
"train_steps_per_second": 0.066
}
],
"logging_steps": 1.0,
"max_steps": 144,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.85462032515072e+16,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
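
A minimal sketch of how one might inspect this training log, assuming the JSON above is saved locally as "trainer_state.json" (the filename and path are an assumption, not part of the original file):

import json

# Assumption: the trainer state shown above was saved as "trainer_state.json".
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live in "log_history"; the final entry carries the run summary.
steps = [entry for entry in state["log_history"] if "loss" in entry]
print(f'steps logged: {len(steps)} of {state["global_step"]}')
print(f'loss at first/last step: {steps[0]["loss"]:.4f} -> {steps[-1]["loss"]:.4f}')
print(f'mean train loss (reported): {state["log_history"][-1]["train_loss"]:.4f}')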