{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 35.0,
"eval_steps": 100,
"global_step": 980,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03571428571428571,
"grad_norm": 9.759769439697266,
"learning_rate": 0.0,
"loss": 2.7414,
"step": 1
},
{
"epoch": 0.07142857142857142,
"grad_norm": 9.035130500793457,
"learning_rate": 1.0204081632653061e-07,
"loss": 2.532,
"step": 2
},
{
"epoch": 0.10714285714285714,
"grad_norm": 8.679366111755371,
"learning_rate": 2.0408163265306121e-07,
"loss": 2.439,
"step": 3
},
{
"epoch": 0.14285714285714285,
"grad_norm": 9.50596809387207,
"learning_rate": 3.0612244897959183e-07,
"loss": 2.5109,
"step": 4
},
{
"epoch": 0.17857142857142858,
"grad_norm": 9.989800453186035,
"learning_rate": 4.0816326530612243e-07,
"loss": 2.5067,
"step": 5
},
{
"epoch": 0.21428571428571427,
"grad_norm": 9.429203033447266,
"learning_rate": 5.102040816326531e-07,
"loss": 2.6609,
"step": 6
},
{
"epoch": 0.25,
"grad_norm": 10.171276092529297,
"learning_rate": 6.122448979591837e-07,
"loss": 2.5307,
"step": 7
},
{
"epoch": 0.2857142857142857,
"grad_norm": 9.263684272766113,
"learning_rate": 7.142857142857143e-07,
"loss": 2.6476,
"step": 8
},
{
"epoch": 0.32142857142857145,
"grad_norm": 8.816690444946289,
"learning_rate": 8.163265306122449e-07,
"loss": 2.5897,
"step": 9
},
{
"epoch": 0.35714285714285715,
"grad_norm": 8.883913040161133,
"learning_rate": 9.183673469387756e-07,
"loss": 2.5036,
"step": 10
},
{
"epoch": 0.39285714285714285,
"grad_norm": 8.780312538146973,
"learning_rate": 1.0204081632653063e-06,
"loss": 2.3625,
"step": 11
},
{
"epoch": 0.42857142857142855,
"grad_norm": 8.389798164367676,
"learning_rate": 1.122448979591837e-06,
"loss": 2.3566,
"step": 12
},
{
"epoch": 0.4642857142857143,
"grad_norm": 9.69943904876709,
"learning_rate": 1.2244897959183673e-06,
"loss": 2.3067,
"step": 13
},
{
"epoch": 0.5,
"grad_norm": 7.483558654785156,
"learning_rate": 1.3265306122448982e-06,
"loss": 2.4596,
"step": 14
},
{
"epoch": 0.5357142857142857,
"grad_norm": 6.695621967315674,
"learning_rate": 1.4285714285714286e-06,
"loss": 2.4559,
"step": 15
},
{
"epoch": 0.5714285714285714,
"grad_norm": 6.5322794914245605,
"learning_rate": 1.5306122448979593e-06,
"loss": 2.2781,
"step": 16
},
{
"epoch": 0.6071428571428571,
"grad_norm": 6.274178504943848,
"learning_rate": 1.6326530612244897e-06,
"loss": 2.3636,
"step": 17
},
{
"epoch": 0.6428571428571429,
"grad_norm": 6.396870136260986,
"learning_rate": 1.7346938775510206e-06,
"loss": 2.4038,
"step": 18
},
{
"epoch": 0.6785714285714286,
"grad_norm": 4.775918960571289,
"learning_rate": 1.8367346938775512e-06,
"loss": 2.2931,
"step": 19
},
{
"epoch": 0.7142857142857143,
"grad_norm": 4.627754211425781,
"learning_rate": 1.938775510204082e-06,
"loss": 2.0346,
"step": 20
},
{
"epoch": 0.75,
"grad_norm": 4.323748588562012,
"learning_rate": 2.0408163265306125e-06,
"loss": 2.2192,
"step": 21
},
{
"epoch": 0.7857142857142857,
"grad_norm": 4.8098955154418945,
"learning_rate": 2.1428571428571427e-06,
"loss": 2.1561,
"step": 22
},
{
"epoch": 0.8214285714285714,
"grad_norm": 4.36919641494751,
"learning_rate": 2.244897959183674e-06,
"loss": 2.2667,
"step": 23
},
{
"epoch": 0.8571428571428571,
"grad_norm": 4.16939115524292,
"learning_rate": 2.3469387755102044e-06,
"loss": 2.0838,
"step": 24
},
{
"epoch": 0.8928571428571429,
"grad_norm": 4.2570366859436035,
"learning_rate": 2.4489795918367347e-06,
"loss": 2.102,
"step": 25
},
{
"epoch": 0.9285714285714286,
"grad_norm": 3.462320566177368,
"learning_rate": 2.5510204081632657e-06,
"loss": 2.0475,
"step": 26
},
{
"epoch": 0.9642857142857143,
"grad_norm": 2.8186793327331543,
"learning_rate": 2.6530612244897964e-06,
"loss": 2.0281,
"step": 27
},
{
"epoch": 1.0,
"grad_norm": 3.044743776321411,
"learning_rate": 2.7551020408163266e-06,
"loss": 2.019,
"step": 28
},
{
"epoch": 1.0357142857142858,
"grad_norm": 2.4265270233154297,
"learning_rate": 2.8571428571428573e-06,
"loss": 1.9081,
"step": 29
},
{
"epoch": 1.0714285714285714,
"grad_norm": 2.3639659881591797,
"learning_rate": 2.959183673469388e-06,
"loss": 1.9358,
"step": 30
},
{
"epoch": 1.1071428571428572,
"grad_norm": 2.0860698223114014,
"learning_rate": 3.0612244897959185e-06,
"loss": 1.9145,
"step": 31
},
{
"epoch": 1.1428571428571428,
"grad_norm": 1.9538745880126953,
"learning_rate": 3.1632653061224496e-06,
"loss": 1.7714,
"step": 32
},
{
"epoch": 1.1785714285714286,
"grad_norm": 1.7645140886306763,
"learning_rate": 3.2653061224489794e-06,
"loss": 1.7249,
"step": 33
},
{
"epoch": 1.2142857142857142,
"grad_norm": 1.7323063611984253,
"learning_rate": 3.3673469387755105e-06,
"loss": 1.7585,
"step": 34
},
{
"epoch": 1.25,
"grad_norm": 1.3425657749176025,
"learning_rate": 3.469387755102041e-06,
"loss": 1.5646,
"step": 35
},
{
"epoch": 1.2857142857142856,
"grad_norm": 1.5468051433563232,
"learning_rate": 3.5714285714285718e-06,
"loss": 1.6348,
"step": 36
},
{
"epoch": 1.3214285714285714,
"grad_norm": 1.569055438041687,
"learning_rate": 3.6734693877551024e-06,
"loss": 1.7431,
"step": 37
},
{
"epoch": 1.3571428571428572,
"grad_norm": 1.3902649879455566,
"learning_rate": 3.7755102040816327e-06,
"loss": 1.7093,
"step": 38
},
{
"epoch": 1.3928571428571428,
"grad_norm": 1.4763963222503662,
"learning_rate": 3.877551020408164e-06,
"loss": 1.7049,
"step": 39
},
{
"epoch": 1.4285714285714286,
"grad_norm": 1.287192463874817,
"learning_rate": 3.979591836734694e-06,
"loss": 1.6828,
"step": 40
},
{
"epoch": 1.4642857142857144,
"grad_norm": 1.175764799118042,
"learning_rate": 4.081632653061225e-06,
"loss": 1.6261,
"step": 41
},
{
"epoch": 1.5,
"grad_norm": 1.1271580457687378,
"learning_rate": 4.183673469387755e-06,
"loss": 1.6354,
"step": 42
},
{
"epoch": 1.5357142857142856,
"grad_norm": 1.1049038171768188,
"learning_rate": 4.2857142857142855e-06,
"loss": 1.7451,
"step": 43
},
{
"epoch": 1.5714285714285714,
"grad_norm": 0.9844851493835449,
"learning_rate": 4.3877551020408165e-06,
"loss": 1.5587,
"step": 44
},
{
"epoch": 1.6071428571428572,
"grad_norm": 0.9872157573699951,
"learning_rate": 4.489795918367348e-06,
"loss": 1.6485,
"step": 45
},
{
"epoch": 1.6428571428571428,
"grad_norm": 0.9424026012420654,
"learning_rate": 4.591836734693878e-06,
"loss": 1.5298,
"step": 46
},
{
"epoch": 1.6785714285714286,
"grad_norm": 1.15427827835083,
"learning_rate": 4.693877551020409e-06,
"loss": 1.5205,
"step": 47
},
{
"epoch": 1.7142857142857144,
"grad_norm": 1.1777286529541016,
"learning_rate": 4.795918367346939e-06,
"loss": 1.4686,
"step": 48
},
{
"epoch": 1.75,
"grad_norm": 0.9405274987220764,
"learning_rate": 4.897959183673469e-06,
"loss": 1.5902,
"step": 49
},
{
"epoch": 1.7857142857142856,
"grad_norm": 0.9954162240028381,
"learning_rate": 5e-06,
"loss": 1.513,
"step": 50
},
{
"epoch": 1.8214285714285714,
"grad_norm": 1.252631425857544,
"learning_rate": 5.1020408163265315e-06,
"loss": 1.5964,
"step": 51
},
{
"epoch": 1.8571428571428572,
"grad_norm": 1.0796560049057007,
"learning_rate": 5.204081632653062e-06,
"loss": 1.4724,
"step": 52
},
{
"epoch": 1.8928571428571428,
"grad_norm": 1.2125780582427979,
"learning_rate": 5.306122448979593e-06,
"loss": 1.589,
"step": 53
},
{
"epoch": 1.9285714285714286,
"grad_norm": 0.9952357411384583,
"learning_rate": 5.408163265306123e-06,
"loss": 1.5026,
"step": 54
},
{
"epoch": 1.9642857142857144,
"grad_norm": 1.0817049741744995,
"learning_rate": 5.510204081632653e-06,
"loss": 1.5709,
"step": 55
},
{
"epoch": 2.0,
"grad_norm": 1.2521437406539917,
"learning_rate": 5.6122448979591834e-06,
"loss": 1.528,
"step": 56
},
{
"epoch": 2.0357142857142856,
"grad_norm": 1.1188162565231323,
"learning_rate": 5.7142857142857145e-06,
"loss": 1.5065,
"step": 57
},
{
"epoch": 2.0714285714285716,
"grad_norm": 0.8846582174301147,
"learning_rate": 5.816326530612246e-06,
"loss": 1.5931,
"step": 58
},
{
"epoch": 2.107142857142857,
"grad_norm": 1.2342886924743652,
"learning_rate": 5.918367346938776e-06,
"loss": 1.5268,
"step": 59
},
{
"epoch": 2.142857142857143,
"grad_norm": 1.337417483329773,
"learning_rate": 6.020408163265307e-06,
"loss": 1.4429,
"step": 60
},
{
"epoch": 2.1785714285714284,
"grad_norm": 1.1548265218734741,
"learning_rate": 6.122448979591837e-06,
"loss": 1.4426,
"step": 61
},
{
"epoch": 2.2142857142857144,
"grad_norm": 1.0125023126602173,
"learning_rate": 6.224489795918368e-06,
"loss": 1.4092,
"step": 62
},
{
"epoch": 2.25,
"grad_norm": 1.1323378086090088,
"learning_rate": 6.326530612244899e-06,
"loss": 1.4018,
"step": 63
},
{
"epoch": 2.2857142857142856,
"grad_norm": 1.0492571592330933,
"learning_rate": 6.4285714285714295e-06,
"loss": 1.3959,
"step": 64
},
{
"epoch": 2.3214285714285716,
"grad_norm": 1.0407048463821411,
"learning_rate": 6.530612244897959e-06,
"loss": 1.3596,
"step": 65
},
{
"epoch": 2.357142857142857,
"grad_norm": 0.8995230197906494,
"learning_rate": 6.63265306122449e-06,
"loss": 1.4646,
"step": 66
},
{
"epoch": 2.392857142857143,
"grad_norm": 0.8292604088783264,
"learning_rate": 6.734693877551021e-06,
"loss": 1.4945,
"step": 67
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.9430536031723022,
"learning_rate": 6.836734693877551e-06,
"loss": 1.3554,
"step": 68
},
{
"epoch": 2.4642857142857144,
"grad_norm": 0.9344185590744019,
"learning_rate": 6.938775510204082e-06,
"loss": 1.3483,
"step": 69
},
{
"epoch": 2.5,
"grad_norm": 0.8723719716072083,
"learning_rate": 7.0408163265306125e-06,
"loss": 1.3678,
"step": 70
},
{
"epoch": 2.5357142857142856,
"grad_norm": 0.8184205889701843,
"learning_rate": 7.1428571428571436e-06,
"loss": 1.4009,
"step": 71
},
{
"epoch": 2.571428571428571,
"grad_norm": 0.9736836552619934,
"learning_rate": 7.244897959183675e-06,
"loss": 1.3519,
"step": 72
},
{
"epoch": 2.607142857142857,
"grad_norm": 0.9170386791229248,
"learning_rate": 7.346938775510205e-06,
"loss": 1.4351,
"step": 73
},
{
"epoch": 2.642857142857143,
"grad_norm": 0.799105703830719,
"learning_rate": 7.448979591836736e-06,
"loss": 1.4096,
"step": 74
},
{
"epoch": 2.678571428571429,
"grad_norm": 1.1402575969696045,
"learning_rate": 7.551020408163265e-06,
"loss": 1.4303,
"step": 75
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.8628937602043152,
"learning_rate": 7.653061224489796e-06,
"loss": 1.2549,
"step": 76
},
{
"epoch": 2.75,
"grad_norm": 1.0535259246826172,
"learning_rate": 7.755102040816327e-06,
"loss": 1.303,
"step": 77
},
{
"epoch": 2.7857142857142856,
"grad_norm": 1.0219699144363403,
"learning_rate": 7.857142857142858e-06,
"loss": 1.3732,
"step": 78
},
{
"epoch": 2.821428571428571,
"grad_norm": 0.8952599167823792,
"learning_rate": 7.959183673469388e-06,
"loss": 1.4765,
"step": 79
},
{
"epoch": 2.857142857142857,
"grad_norm": 0.8639037609100342,
"learning_rate": 8.06122448979592e-06,
"loss": 1.3302,
"step": 80
},
{
"epoch": 2.892857142857143,
"grad_norm": 1.001290202140808,
"learning_rate": 8.16326530612245e-06,
"loss": 1.3691,
"step": 81
},
{
"epoch": 2.928571428571429,
"grad_norm": 0.9045864939689636,
"learning_rate": 8.26530612244898e-06,
"loss": 1.3872,
"step": 82
},
{
"epoch": 2.9642857142857144,
"grad_norm": 0.8884599208831787,
"learning_rate": 8.36734693877551e-06,
"loss": 1.3095,
"step": 83
},
{
"epoch": 3.0,
"grad_norm": 0.8961983919143677,
"learning_rate": 8.469387755102042e-06,
"loss": 1.3005,
"step": 84
},
{
"epoch": 3.0357142857142856,
"grad_norm": 0.8967141509056091,
"learning_rate": 8.571428571428571e-06,
"loss": 1.3534,
"step": 85
},
{
"epoch": 3.0714285714285716,
"grad_norm": 0.7872421741485596,
"learning_rate": 8.673469387755103e-06,
"loss": 1.2637,
"step": 86
},
{
"epoch": 3.107142857142857,
"grad_norm": 0.7589600682258606,
"learning_rate": 8.775510204081633e-06,
"loss": 1.2964,
"step": 87
},
{
"epoch": 3.142857142857143,
"grad_norm": 0.8907157778739929,
"learning_rate": 8.877551020408163e-06,
"loss": 1.26,
"step": 88
},
{
"epoch": 3.1785714285714284,
"grad_norm": 0.8499204516410828,
"learning_rate": 8.979591836734695e-06,
"loss": 1.2085,
"step": 89
},
{
"epoch": 3.2142857142857144,
"grad_norm": 0.816241979598999,
"learning_rate": 9.081632653061225e-06,
"loss": 1.313,
"step": 90
},
{
"epoch": 3.25,
"grad_norm": 0.9863104820251465,
"learning_rate": 9.183673469387756e-06,
"loss": 1.2019,
"step": 91
},
{
"epoch": 3.2857142857142856,
"grad_norm": 0.8051828145980835,
"learning_rate": 9.285714285714288e-06,
"loss": 1.2688,
"step": 92
},
{
"epoch": 3.3214285714285716,
"grad_norm": 0.8511350154876709,
"learning_rate": 9.387755102040818e-06,
"loss": 1.2488,
"step": 93
},
{
"epoch": 3.357142857142857,
"grad_norm": 0.8502587676048279,
"learning_rate": 9.489795918367348e-06,
"loss": 1.1932,
"step": 94
},
{
"epoch": 3.392857142857143,
"grad_norm": 0.8357100486755371,
"learning_rate": 9.591836734693878e-06,
"loss": 1.2833,
"step": 95
},
{
"epoch": 3.4285714285714284,
"grad_norm": 0.8697457313537598,
"learning_rate": 9.693877551020408e-06,
"loss": 1.1389,
"step": 96
},
{
"epoch": 3.4642857142857144,
"grad_norm": 0.8650133609771729,
"learning_rate": 9.795918367346939e-06,
"loss": 1.1914,
"step": 97
},
{
"epoch": 3.5,
"grad_norm": 0.8051957488059998,
"learning_rate": 9.89795918367347e-06,
"loss": 1.1436,
"step": 98
},
{
"epoch": 3.5357142857142856,
"grad_norm": 0.9405279755592346,
"learning_rate": 1e-05,
"loss": 1.3277,
"step": 99
},
{
"epoch": 3.571428571428571,
"grad_norm": 0.7801042795181274,
"learning_rate": 9.999968282268043e-06,
"loss": 1.1546,
"step": 100
},
{
"epoch": 3.571428571428571,
"eval_loss": 1.2822846174240112,
"eval_runtime": 0.47,
"eval_samples_per_second": 297.86,
"eval_steps_per_second": 2.128,
"step": 100
},
{
"epoch": 3.607142857142857,
"grad_norm": 0.861859142780304,
"learning_rate": 9.999873129474573e-06,
"loss": 1.2382,
"step": 101
},
{
"epoch": 3.642857142857143,
"grad_norm": 0.8621125221252441,
"learning_rate": 9.999714542826806e-06,
"loss": 1.3204,
"step": 102
},
{
"epoch": 3.678571428571429,
"grad_norm": 0.7925216555595398,
"learning_rate": 9.999492524336743e-06,
"loss": 1.1399,
"step": 103
},
{
"epoch": 3.7142857142857144,
"grad_norm": 0.9186500906944275,
"learning_rate": 9.999207076821155e-06,
"loss": 1.1609,
"step": 104
},
{
"epoch": 3.75,
"grad_norm": 0.9390427470207214,
"learning_rate": 9.99885820390154e-06,
"loss": 1.1709,
"step": 105
},
{
"epoch": 3.7857142857142856,
"grad_norm": 0.8102933764457703,
"learning_rate": 9.998445910004082e-06,
"loss": 1.2304,
"step": 106
},
{
"epoch": 3.821428571428571,
"grad_norm": 0.9541983604431152,
"learning_rate": 9.997970200359592e-06,
"loss": 1.2702,
"step": 107
},
{
"epoch": 3.857142857142857,
"grad_norm": 0.8650714755058289,
"learning_rate": 9.99743108100344e-06,
"loss": 1.2122,
"step": 108
},
{
"epoch": 3.892857142857143,
"grad_norm": 0.8606563210487366,
"learning_rate": 9.996828558775486e-06,
"loss": 1.2045,
"step": 109
},
{
"epoch": 3.928571428571429,
"grad_norm": 0.8290749192237854,
"learning_rate": 9.996162641319985e-06,
"loss": 1.1833,
"step": 110
},
{
"epoch": 3.9642857142857144,
"grad_norm": 0.8623219132423401,
"learning_rate": 9.995433337085492e-06,
"loss": 1.1361,
"step": 111
},
{
"epoch": 4.0,
"grad_norm": 0.7909784913063049,
"learning_rate": 9.994640655324758e-06,
"loss": 1.1751,
"step": 112
},
{
"epoch": 4.035714285714286,
"grad_norm": 0.7679526209831238,
"learning_rate": 9.993784606094612e-06,
"loss": 1.0578,
"step": 113
},
{
"epoch": 4.071428571428571,
"grad_norm": 0.8180373907089233,
"learning_rate": 9.992865200255829e-06,
"loss": 1.0855,
"step": 114
},
{
"epoch": 4.107142857142857,
"grad_norm": 0.8258321285247803,
"learning_rate": 9.991882449472994e-06,
"loss": 1.0501,
"step": 115
},
{
"epoch": 4.142857142857143,
"grad_norm": 0.9095039963722229,
"learning_rate": 9.99083636621436e-06,
"loss": 1.0903,
"step": 116
},
{
"epoch": 4.178571428571429,
"grad_norm": 0.8757370710372925,
"learning_rate": 9.989726963751683e-06,
"loss": 1.0142,
"step": 117
},
{
"epoch": 4.214285714285714,
"grad_norm": 0.9213536381721497,
"learning_rate": 9.988554256160052e-06,
"loss": 1.03,
"step": 118
},
{
"epoch": 4.25,
"grad_norm": 1.1767222881317139,
"learning_rate": 9.987318258317718e-06,
"loss": 1.0143,
"step": 119
},
{
"epoch": 4.285714285714286,
"grad_norm": 0.9031118154525757,
"learning_rate": 9.986018985905901e-06,
"loss": 0.9545,
"step": 120
},
{
"epoch": 4.321428571428571,
"grad_norm": 0.9330540299415588,
"learning_rate": 9.984656455408591e-06,
"loss": 0.969,
"step": 121
},
{
"epoch": 4.357142857142857,
"grad_norm": 1.1439694166183472,
"learning_rate": 9.983230684112338e-06,
"loss": 1.1886,
"step": 122
},
{
"epoch": 4.392857142857143,
"grad_norm": 0.9566424489021301,
"learning_rate": 9.981741690106035e-06,
"loss": 0.9773,
"step": 123
},
{
"epoch": 4.428571428571429,
"grad_norm": 1.067845344543457,
"learning_rate": 9.980189492280688e-06,
"loss": 1.0976,
"step": 124
},
{
"epoch": 4.464285714285714,
"grad_norm": 1.0497171878814697,
"learning_rate": 9.978574110329174e-06,
"loss": 0.9478,
"step": 125
},
{
"epoch": 4.5,
"grad_norm": 0.9933088421821594,
"learning_rate": 9.976895564745993e-06,
"loss": 0.9368,
"step": 126
},
{
"epoch": 4.535714285714286,
"grad_norm": 0.8733711838722229,
"learning_rate": 9.975153876827008e-06,
"loss": 1.0447,
"step": 127
},
{
"epoch": 4.571428571428571,
"grad_norm": 0.9675903916358948,
"learning_rate": 9.973349068669178e-06,
"loss": 1.0044,
"step": 128
},
{
"epoch": 4.607142857142857,
"grad_norm": 1.0770503282546997,
"learning_rate": 9.97148116317027e-06,
"loss": 1.0712,
"step": 129
},
{
"epoch": 4.642857142857143,
"grad_norm": 0.8858775496482849,
"learning_rate": 9.969550184028572e-06,
"loss": 0.9705,
"step": 130
},
{
"epoch": 4.678571428571429,
"grad_norm": 1.0158929824829102,
"learning_rate": 9.9675561557426e-06,
"loss": 1.0405,
"step": 131
},
{
"epoch": 4.714285714285714,
"grad_norm": 1.1270577907562256,
"learning_rate": 9.965499103610775e-06,
"loss": 1.0283,
"step": 132
},
{
"epoch": 4.75,
"grad_norm": 0.8913384079933167,
"learning_rate": 9.963379053731104e-06,
"loss": 1.0334,
"step": 133
},
{
"epoch": 4.785714285714286,
"grad_norm": 1.0675349235534668,
"learning_rate": 9.961196033000862e-06,
"loss": 1.1302,
"step": 134
},
{
"epoch": 4.821428571428571,
"grad_norm": 0.9325812458992004,
"learning_rate": 9.95895006911623e-06,
"loss": 0.9444,
"step": 135
},
{
"epoch": 4.857142857142857,
"grad_norm": 1.0121493339538574,
"learning_rate": 9.956641190571967e-06,
"loss": 0.9612,
"step": 136
},
{
"epoch": 4.892857142857143,
"grad_norm": 0.8960527777671814,
"learning_rate": 9.954269426661023e-06,
"loss": 1.0558,
"step": 137
},
{
"epoch": 4.928571428571429,
"grad_norm": 0.9022102952003479,
"learning_rate": 9.951834807474191e-06,
"loss": 0.9476,
"step": 138
},
{
"epoch": 4.964285714285714,
"grad_norm": 0.921174168586731,
"learning_rate": 9.949337363899709e-06,
"loss": 0.994,
"step": 139
},
{
"epoch": 5.0,
"grad_norm": 0.8534979224205017,
"learning_rate": 9.946777127622874e-06,
"loss": 0.8216,
"step": 140
},
{
"epoch": 5.035714285714286,
"grad_norm": 0.9021621346473694,
"learning_rate": 9.944154131125643e-06,
"loss": 0.8672,
"step": 141
},
{
"epoch": 5.071428571428571,
"grad_norm": 0.9865545034408569,
"learning_rate": 9.941468407686216e-06,
"loss": 0.824,
"step": 142
},
{
"epoch": 5.107142857142857,
"grad_norm": 0.9824482798576355,
"learning_rate": 9.938719991378614e-06,
"loss": 0.8532,
"step": 143
},
{
"epoch": 5.142857142857143,
"grad_norm": 1.110496163368225,
"learning_rate": 9.935908917072253e-06,
"loss": 0.9036,
"step": 144
},
{
"epoch": 5.178571428571429,
"grad_norm": 1.095384120941162,
"learning_rate": 9.933035220431489e-06,
"loss": 0.8404,
"step": 145
},
{
"epoch": 5.214285714285714,
"grad_norm": 1.309322714805603,
"learning_rate": 9.930098937915177e-06,
"loss": 0.8397,
"step": 146
},
{
"epoch": 5.25,
"grad_norm": 1.5140193700790405,
"learning_rate": 9.927100106776213e-06,
"loss": 0.8126,
"step": 147
},
{
"epoch": 5.285714285714286,
"grad_norm": 1.2346681356430054,
"learning_rate": 9.924038765061042e-06,
"loss": 0.8103,
"step": 148
},
{
"epoch": 5.321428571428571,
"grad_norm": 1.0885480642318726,
"learning_rate": 9.920914951609189e-06,
"loss": 0.7544,
"step": 149
},
{
"epoch": 5.357142857142857,
"grad_norm": 1.0442003011703491,
"learning_rate": 9.917728706052765e-06,
"loss": 0.8543,
"step": 150
},
{
"epoch": 5.392857142857143,
"grad_norm": 1.0954079627990723,
"learning_rate": 9.914480068815964e-06,
"loss": 0.7868,
"step": 151
},
{
"epoch": 5.428571428571429,
"grad_norm": 1.0408991575241089,
"learning_rate": 9.91116908111455e-06,
"loss": 0.7768,
"step": 152
},
{
"epoch": 5.464285714285714,
"grad_norm": 1.1352790594100952,
"learning_rate": 9.907795784955327e-06,
"loss": 0.8077,
"step": 153
},
{
"epoch": 5.5,
"grad_norm": 1.1276353597640991,
"learning_rate": 9.90436022313562e-06,
"loss": 0.7622,
"step": 154
},
{
"epoch": 5.535714285714286,
"grad_norm": 1.1519376039505005,
"learning_rate": 9.900862439242719e-06,
"loss": 0.8162,
"step": 155
},
{
"epoch": 5.571428571428571,
"grad_norm": 1.187958836555481,
"learning_rate": 9.897302477653334e-06,
"loss": 0.7768,
"step": 156
},
{
"epoch": 5.607142857142857,
"grad_norm": 1.3001772165298462,
"learning_rate": 9.893680383533027e-06,
"loss": 0.8367,
"step": 157
},
{
"epoch": 5.642857142857143,
"grad_norm": 1.199255347251892,
"learning_rate": 9.889996202835642e-06,
"loss": 0.752,
"step": 158
},
{
"epoch": 5.678571428571429,
"grad_norm": 1.1505409479141235,
"learning_rate": 9.88624998230272e-06,
"loss": 0.878,
"step": 159
},
{
"epoch": 5.714285714285714,
"grad_norm": 1.1740809679031372,
"learning_rate": 9.882441769462911e-06,
"loss": 0.7647,
"step": 160
},
{
"epoch": 5.75,
"grad_norm": 1.0937939882278442,
"learning_rate": 9.878571612631364e-06,
"loss": 0.8364,
"step": 161
},
{
"epoch": 5.785714285714286,
"grad_norm": 1.0483719110488892,
"learning_rate": 9.874639560909118e-06,
"loss": 0.8453,
"step": 162
},
{
"epoch": 5.821428571428571,
"grad_norm": 1.0722523927688599,
"learning_rate": 9.870645664182478e-06,
"loss": 0.76,
"step": 163
},
{
"epoch": 5.857142857142857,
"grad_norm": 1.093754768371582,
"learning_rate": 9.86658997312238e-06,
"loss": 0.7596,
"step": 164
},
{
"epoch": 5.892857142857143,
"grad_norm": 1.1799498796463013,
"learning_rate": 9.862472539183757e-06,
"loss": 0.8612,
"step": 165
},
{
"epoch": 5.928571428571429,
"grad_norm": 1.1799345016479492,
"learning_rate": 9.858293414604871e-06,
"loss": 0.8048,
"step": 166
},
{
"epoch": 5.964285714285714,
"grad_norm": 1.090215802192688,
"learning_rate": 9.854052652406666e-06,
"loss": 0.7531,
"step": 167
},
{
"epoch": 6.0,
"grad_norm": 1.1735352277755737,
"learning_rate": 9.849750306392085e-06,
"loss": 0.6251,
"step": 168
},
{
"epoch": 6.035714285714286,
"grad_norm": 1.2174513339996338,
"learning_rate": 9.84538643114539e-06,
"loss": 0.6708,
"step": 169
},
{
"epoch": 6.071428571428571,
"grad_norm": 1.188399076461792,
"learning_rate": 9.840961082031473e-06,
"loss": 0.6973,
"step": 170
},
{
"epoch": 6.107142857142857,
"grad_norm": 1.3352035284042358,
"learning_rate": 9.836474315195148e-06,
"loss": 0.6517,
"step": 171
},
{
"epoch": 6.142857142857143,
"grad_norm": 1.464682698249817,
"learning_rate": 9.831926187560441e-06,
"loss": 0.6084,
"step": 172
},
{
"epoch": 6.178571428571429,
"grad_norm": 1.567628264427185,
"learning_rate": 9.827316756829871e-06,
"loss": 0.5943,
"step": 173
},
{
"epoch": 6.214285714285714,
"grad_norm": 1.671661376953125,
"learning_rate": 9.822646081483713e-06,
"loss": 0.6451,
"step": 174
},
{
"epoch": 6.25,
"grad_norm": 1.2910634279251099,
"learning_rate": 9.817914220779258e-06,
"loss": 0.6001,
"step": 175
},
{
"epoch": 6.285714285714286,
"grad_norm": 1.2736347913742065,
"learning_rate": 9.81312123475006e-06,
"loss": 0.5976,
"step": 176
},
{
"epoch": 6.321428571428571,
"grad_norm": 1.1851528882980347,
"learning_rate": 9.808267184205182e-06,
"loss": 0.5813,
"step": 177
},
{
"epoch": 6.357142857142857,
"grad_norm": 1.202125906944275,
"learning_rate": 9.80335213072841e-06,
"loss": 0.5511,
"step": 178
},
{
"epoch": 6.392857142857143,
"grad_norm": 1.1597651243209839,
"learning_rate": 9.798376136677486e-06,
"loss": 0.5579,
"step": 179
},
{
"epoch": 6.428571428571429,
"grad_norm": 1.2182499170303345,
"learning_rate": 9.793339265183303e-06,
"loss": 0.527,
"step": 180
},
{
"epoch": 6.464285714285714,
"grad_norm": 1.3637059926986694,
"learning_rate": 9.788241580149123e-06,
"loss": 0.5881,
"step": 181
},
{
"epoch": 6.5,
"grad_norm": 1.3525656461715698,
"learning_rate": 9.783083146249749e-06,
"loss": 0.5526,
"step": 182
},
{
"epoch": 6.535714285714286,
"grad_norm": 1.5944671630859375,
"learning_rate": 9.777864028930705e-06,
"loss": 0.5739,
"step": 183
},
{
"epoch": 6.571428571428571,
"grad_norm": 1.248140573501587,
"learning_rate": 9.77258429440742e-06,
"loss": 0.5514,
"step": 184
},
{
"epoch": 6.607142857142857,
"grad_norm": 1.1553500890731812,
"learning_rate": 9.767244009664376e-06,
"loss": 0.566,
"step": 185
},
{
"epoch": 6.642857142857143,
"grad_norm": 1.4417532682418823,
"learning_rate": 9.761843242454261e-06,
"loss": 0.596,
"step": 186
},
{
"epoch": 6.678571428571429,
"grad_norm": 1.140500783920288,
"learning_rate": 9.75638206129711e-06,
"loss": 0.5865,
"step": 187
},
{
"epoch": 6.714285714285714,
"grad_norm": 1.204921841621399,
"learning_rate": 9.750860535479434e-06,
"loss": 0.6362,
"step": 188
},
{
"epoch": 6.75,
"grad_norm": 1.2617542743682861,
"learning_rate": 9.745278735053345e-06,
"loss": 0.5731,
"step": 189
},
{
"epoch": 6.785714285714286,
"grad_norm": 1.2948684692382812,
"learning_rate": 9.73963673083566e-06,
"loss": 0.5541,
"step": 190
},
{
"epoch": 6.821428571428571,
"grad_norm": 1.2236193418502808,
"learning_rate": 9.733934594407012e-06,
"loss": 0.5517,
"step": 191
},
{
"epoch": 6.857142857142857,
"grad_norm": 1.3996909856796265,
"learning_rate": 9.728172398110935e-06,
"loss": 0.5111,
"step": 192
},
{
"epoch": 6.892857142857143,
"grad_norm": 1.3360153436660767,
"learning_rate": 9.722350215052946e-06,
"loss": 0.5726,
"step": 193
},
{
"epoch": 6.928571428571429,
"grad_norm": 1.3151594400405884,
"learning_rate": 9.716468119099626e-06,
"loss": 0.5435,
"step": 194
},
{
"epoch": 6.964285714285714,
"grad_norm": 1.371817708015442,
"learning_rate": 9.710526184877667e-06,
"loss": 0.5764,
"step": 195
},
{
"epoch": 7.0,
"grad_norm": 1.2646558284759521,
"learning_rate": 9.704524487772944e-06,
"loss": 0.4543,
"step": 196
},
{
"epoch": 7.035714285714286,
"grad_norm": 1.474683165550232,
"learning_rate": 9.698463103929542e-06,
"loss": 0.3993,
"step": 197
},
{
"epoch": 7.071428571428571,
"grad_norm": 1.3176275491714478,
"learning_rate": 9.692342110248802e-06,
"loss": 0.3759,
"step": 198
},
{
"epoch": 7.107142857142857,
"grad_norm": 1.3523168563842773,
"learning_rate": 9.68616158438834e-06,
"loss": 0.35,
"step": 199
},
{
"epoch": 7.142857142857143,
"grad_norm": 1.6409916877746582,
"learning_rate": 9.679921604761056e-06,
"loss": 0.4067,
"step": 200
},
{
"epoch": 7.142857142857143,
"eval_loss": 1.6618373394012451,
"eval_runtime": 0.4663,
"eval_samples_per_second": 300.222,
"eval_steps_per_second": 2.144,
"step": 200
},
{
"epoch": 7.178571428571429,
"grad_norm": 2.2635273933410645,
"learning_rate": 9.673622250534155e-06,
"loss": 0.3858,
"step": 201
},
{
"epoch": 7.214285714285714,
"grad_norm": 2.0102970600128174,
"learning_rate": 9.66726360162813e-06,
"loss": 0.3662,
"step": 202
},
{
"epoch": 7.25,
"grad_norm": 1.694830298423767,
"learning_rate": 9.660845738715743e-06,
"loss": 0.3926,
"step": 203
},
{
"epoch": 7.285714285714286,
"grad_norm": 1.3782461881637573,
"learning_rate": 9.654368743221022e-06,
"loss": 0.4011,
"step": 204
},
{
"epoch": 7.321428571428571,
"grad_norm": 1.4142459630966187,
"learning_rate": 9.647832697318207e-06,
"loss": 0.3561,
"step": 205
},
{
"epoch": 7.357142857142857,
"grad_norm": 1.3831766843795776,
"learning_rate": 9.641237683930722e-06,
"loss": 0.397,
"step": 206
},
{
"epoch": 7.392857142857143,
"grad_norm": 1.2713834047317505,
"learning_rate": 9.63458378673011e-06,
"loss": 0.3405,
"step": 207
},
{
"epoch": 7.428571428571429,
"grad_norm": 1.6474286317825317,
"learning_rate": 9.627871090134984e-06,
"loss": 0.327,
"step": 208
},
{
"epoch": 7.464285714285714,
"grad_norm": 1.477697730064392,
"learning_rate": 9.621099679309948e-06,
"loss": 0.3921,
"step": 209
},
{
"epoch": 7.5,
"grad_norm": 1.6822872161865234,
"learning_rate": 9.61426964016452e-06,
"loss": 0.3775,
"step": 210
},
{
"epoch": 7.535714285714286,
"grad_norm": 1.541839838027954,
"learning_rate": 9.60738105935204e-06,
"loss": 0.3586,
"step": 211
},
{
"epoch": 7.571428571428571,
"grad_norm": 1.4712790250778198,
"learning_rate": 9.60043402426857e-06,
"loss": 0.3626,
"step": 212
},
{
"epoch": 7.607142857142857,
"grad_norm": 1.3638650178909302,
"learning_rate": 9.593428623051793e-06,
"loss": 0.3889,
"step": 213
},
{
"epoch": 7.642857142857143,
"grad_norm": 1.3237727880477905,
"learning_rate": 9.58636494457988e-06,
"loss": 0.4192,
"step": 214
},
{
"epoch": 7.678571428571429,
"grad_norm": 1.4845259189605713,
"learning_rate": 9.57924307847038e-06,
"loss": 0.408,
"step": 215
},
{
"epoch": 7.714285714285714,
"grad_norm": 1.4447940587997437,
"learning_rate": 9.572063115079063e-06,
"loss": 0.3425,
"step": 216
},
{
"epoch": 7.75,
"grad_norm": 1.365172266960144,
"learning_rate": 9.564825145498795e-06,
"loss": 0.3447,
"step": 217
},
{
"epoch": 7.785714285714286,
"grad_norm": 1.419386625289917,
"learning_rate": 9.557529261558367e-06,
"loss": 0.3816,
"step": 218
},
{
"epoch": 7.821428571428571,
"grad_norm": 1.5688282251358032,
"learning_rate": 9.550175555821333e-06,
"loss": 0.3568,
"step": 219
},
{
"epoch": 7.857142857142857,
"grad_norm": 1.4586085081100464,
"learning_rate": 9.542764121584845e-06,
"loss": 0.4127,
"step": 220
},
{
"epoch": 7.892857142857143,
"grad_norm": 1.4946858882904053,
"learning_rate": 9.53529505287845e-06,
"loss": 0.3844,
"step": 221
},
{
"epoch": 7.928571428571429,
"grad_norm": 1.4703893661499023,
"learning_rate": 9.527768444462922e-06,
"loss": 0.3597,
"step": 222
},
{
"epoch": 7.964285714285714,
"grad_norm": 1.5224125385284424,
"learning_rate": 9.520184391829037e-06,
"loss": 0.4019,
"step": 223
},
{
"epoch": 8.0,
"grad_norm": 1.2209713459014893,
"learning_rate": 9.512542991196377e-06,
"loss": 0.2579,
"step": 224
},
{
"epoch": 8.035714285714286,
"grad_norm": 1.3363842964172363,
"learning_rate": 9.504844339512096e-06,
"loss": 0.2908,
"step": 225
},
{
"epoch": 8.071428571428571,
"grad_norm": 1.3185490369796753,
"learning_rate": 9.497088534449707e-06,
"loss": 0.2495,
"step": 226
},
{
"epoch": 8.107142857142858,
"grad_norm": 1.544686198234558,
"learning_rate": 9.489275674407826e-06,
"loss": 0.2688,
"step": 227
},
{
"epoch": 8.142857142857142,
"grad_norm": 1.9225260019302368,
"learning_rate": 9.481405858508935e-06,
"loss": 0.2262,
"step": 228
},
{
"epoch": 8.178571428571429,
"grad_norm": 1.690416693687439,
"learning_rate": 9.473479186598115e-06,
"loss": 0.2447,
"step": 229
},
{
"epoch": 8.214285714285714,
"grad_norm": 1.738194465637207,
"learning_rate": 9.465495759241793e-06,
"loss": 0.2446,
"step": 230
},
{
"epoch": 8.25,
"grad_norm": 1.5437583923339844,
"learning_rate": 9.457455677726447e-06,
"loss": 0.2381,
"step": 231
},
{
"epoch": 8.285714285714286,
"grad_norm": 1.5749611854553223,
"learning_rate": 9.449359044057344e-06,
"loss": 0.2048,
"step": 232
},
{
"epoch": 8.321428571428571,
"grad_norm": 1.3072010278701782,
"learning_rate": 9.441205960957221e-06,
"loss": 0.2285,
"step": 233
},
{
"epoch": 8.357142857142858,
"grad_norm": 1.231365442276001,
"learning_rate": 9.432996531865001e-06,
"loss": 0.2356,
"step": 234
},
{
"epoch": 8.392857142857142,
"grad_norm": 1.3257508277893066,
"learning_rate": 9.424730860934474e-06,
"loss": 0.2461,
"step": 235
},
{
"epoch": 8.428571428571429,
"grad_norm": 1.645837426185608,
"learning_rate": 9.416409053032971e-06,
"loss": 0.2078,
"step": 236
},
{
"epoch": 8.464285714285714,
"grad_norm": 1.4033600091934204,
"learning_rate": 9.408031213740045e-06,
"loss": 0.2085,
"step": 237
},
{
"epoch": 8.5,
"grad_norm": 1.4971022605895996,
"learning_rate": 9.399597449346119e-06,
"loss": 0.2101,
"step": 238
},
{
"epoch": 8.535714285714286,
"grad_norm": 1.5445584058761597,
"learning_rate": 9.391107866851143e-06,
"loss": 0.2305,
"step": 239
},
{
"epoch": 8.571428571428571,
"grad_norm": 1.6261149644851685,
"learning_rate": 9.382562573963238e-06,
"loss": 0.2392,
"step": 240
},
{
"epoch": 8.607142857142858,
"grad_norm": 1.9455870389938354,
"learning_rate": 9.37396167909733e-06,
"loss": 0.2314,
"step": 241
},
{
"epoch": 8.642857142857142,
"grad_norm": 1.6666280031204224,
"learning_rate": 9.365305291373769e-06,
"loss": 0.2831,
"step": 242
},
{
"epoch": 8.678571428571429,
"grad_norm": 1.313746690750122,
"learning_rate": 9.356593520616948e-06,
"loss": 0.2016,
"step": 243
},
{
"epoch": 8.714285714285714,
"grad_norm": 1.4457995891571045,
"learning_rate": 9.347826477353911e-06,
"loss": 0.1895,
"step": 244
},
{
"epoch": 8.75,
"grad_norm": 1.6301132440567017,
"learning_rate": 9.33900427281295e-06,
"loss": 0.2404,
"step": 245
},
{
"epoch": 8.785714285714286,
"grad_norm": 1.0887130498886108,
"learning_rate": 9.330127018922195e-06,
"loss": 0.2037,
"step": 246
},
{
"epoch": 8.821428571428571,
"grad_norm": 1.372310996055603,
"learning_rate": 9.321194828308185e-06,
"loss": 0.2165,
"step": 247
},
{
"epoch": 8.857142857142858,
"grad_norm": 1.2410130500793457,
"learning_rate": 9.312207814294454e-06,
"loss": 0.2109,
"step": 248
},
{
"epoch": 8.892857142857142,
"grad_norm": 1.3588364124298096,
"learning_rate": 9.303166090900082e-06,
"loss": 0.212,
"step": 249
},
{
"epoch": 8.928571428571429,
"grad_norm": 1.1505169868469238,
"learning_rate": 9.294069772838253e-06,
"loss": 0.2078,
"step": 250
},
{
"epoch": 8.964285714285714,
"grad_norm": 1.456135869026184,
"learning_rate": 9.284918975514798e-06,
"loss": 0.2207,
"step": 251
},
{
"epoch": 9.0,
"grad_norm": 1.2700921297073364,
"learning_rate": 9.275713815026732e-06,
"loss": 0.1748,
"step": 252
},
{
"epoch": 9.035714285714286,
"grad_norm": 1.0988452434539795,
"learning_rate": 9.266454408160779e-06,
"loss": 0.1457,
"step": 253
},
{
"epoch": 9.071428571428571,
"grad_norm": 1.0384048223495483,
"learning_rate": 9.257140872391895e-06,
"loss": 0.1342,
"step": 254
},
{
"epoch": 9.107142857142858,
"grad_norm": 1.3684717416763306,
"learning_rate": 9.24777332588177e-06,
"loss": 0.1019,
"step": 255
},
{
"epoch": 9.142857142857142,
"grad_norm": 1.41575026512146,
"learning_rate": 9.238351887477338e-06,
"loss": 0.1322,
"step": 256
},
{
"epoch": 9.178571428571429,
"grad_norm": 2.176236391067505,
"learning_rate": 9.22887667670926e-06,
"loss": 0.1442,
"step": 257
},
{
"epoch": 9.214285714285714,
"grad_norm": 1.456347942352295,
"learning_rate": 9.219347813790416e-06,
"loss": 0.1149,
"step": 258
},
{
"epoch": 9.25,
"grad_norm": 1.751892328262329,
"learning_rate": 9.209765419614375e-06,
"loss": 0.1544,
"step": 259
},
{
"epoch": 9.285714285714286,
"grad_norm": 1.2597330808639526,
"learning_rate": 9.200129615753858e-06,
"loss": 0.1247,
"step": 260
},
{
"epoch": 9.321428571428571,
"grad_norm": 1.3868889808654785,
"learning_rate": 9.190440524459203e-06,
"loss": 0.1597,
"step": 261
},
{
"epoch": 9.357142857142858,
"grad_norm": 1.349303960800171,
"learning_rate": 9.180698268656814e-06,
"loss": 0.1262,
"step": 262
},
{
"epoch": 9.392857142857142,
"grad_norm": 1.2209405899047852,
"learning_rate": 9.170902971947589e-06,
"loss": 0.1283,
"step": 263
},
{
"epoch": 9.428571428571429,
"grad_norm": 1.1362848281860352,
"learning_rate": 9.16105475860537e-06,
"loss": 0.1142,
"step": 264
},
{
"epoch": 9.464285714285714,
"grad_norm": 1.333030343055725,
"learning_rate": 9.151153753575351e-06,
"loss": 0.1241,
"step": 265
},
{
"epoch": 9.5,
"grad_norm": 1.1623438596725464,
"learning_rate": 9.141200082472503e-06,
"loss": 0.1157,
"step": 266
},
{
"epoch": 9.535714285714286,
"grad_norm": 1.3490017652511597,
"learning_rate": 9.131193871579975e-06,
"loss": 0.1696,
"step": 267
},
{
"epoch": 9.571428571428571,
"grad_norm": 1.5136406421661377,
"learning_rate": 9.121135247847492e-06,
"loss": 0.1364,
"step": 268
},
{
"epoch": 9.607142857142858,
"grad_norm": 1.6852158308029175,
"learning_rate": 9.111024338889748e-06,
"loss": 0.1288,
"step": 269
},
{
"epoch": 9.642857142857142,
"grad_norm": 1.359726071357727,
"learning_rate": 9.10086127298478e-06,
"loss": 0.1219,
"step": 270
},
{
"epoch": 9.678571428571429,
"grad_norm": 1.0993460416793823,
"learning_rate": 9.090646179072352e-06,
"loss": 0.1314,
"step": 271
},
{
"epoch": 9.714285714285714,
"grad_norm": 1.2296324968338013,
"learning_rate": 9.080379186752304e-06,
"loss": 0.1295,
"step": 272
},
{
"epoch": 9.75,
"grad_norm": 1.2677600383758545,
"learning_rate": 9.070060426282924e-06,
"loss": 0.1494,
"step": 273
},
{
"epoch": 9.785714285714286,
"grad_norm": 0.98940509557724,
"learning_rate": 9.059690028579285e-06,
"loss": 0.1142,
"step": 274
},
{
"epoch": 9.821428571428571,
"grad_norm": 1.3205757141113281,
"learning_rate": 9.049268125211577e-06,
"loss": 0.1331,
"step": 275
},
{
"epoch": 9.857142857142858,
"grad_norm": 1.6608718633651733,
"learning_rate": 9.038794848403463e-06,
"loss": 0.158,
"step": 276
},
{
"epoch": 9.892857142857142,
"grad_norm": 1.2538622617721558,
"learning_rate": 9.028270331030373e-06,
"loss": 0.1281,
"step": 277
},
{
"epoch": 9.928571428571429,
"grad_norm": 1.2672852277755737,
"learning_rate": 9.017694706617836e-06,
"loss": 0.1477,
"step": 278
},
{
"epoch": 9.964285714285714,
"grad_norm": 1.1793338060379028,
"learning_rate": 9.007068109339783e-06,
"loss": 0.116,
"step": 279
},
{
"epoch": 10.0,
"grad_norm": 0.9627423286437988,
"learning_rate": 8.996390674016839e-06,
"loss": 0.0847,
"step": 280
},
{
"epoch": 10.035714285714286,
"grad_norm": 0.9174171686172485,
"learning_rate": 8.985662536114614e-06,
"loss": 0.0765,
"step": 281
},
{
"epoch": 10.071428571428571,
"grad_norm": 1.147916555404663,
"learning_rate": 8.97488383174199e-06,
"loss": 0.086,
"step": 282
},
{
"epoch": 10.107142857142858,
"grad_norm": 1.289789080619812,
"learning_rate": 8.964054697649389e-06,
"loss": 0.0736,
"step": 283
},
{
"epoch": 10.142857142857142,
"grad_norm": 0.9539849758148193,
"learning_rate": 8.953175271227042e-06,
"loss": 0.0702,
"step": 284
},
{
"epoch": 10.178571428571429,
"grad_norm": 1.0880526304244995,
"learning_rate": 8.94224569050324e-06,
"loss": 0.0662,
"step": 285
},
{
"epoch": 10.214285714285714,
"grad_norm": 1.3260632753372192,
"learning_rate": 8.931266094142588e-06,
"loss": 0.0878,
"step": 286
},
{
"epoch": 10.25,
"grad_norm": 1.0914793014526367,
"learning_rate": 8.920236621444243e-06,
"loss": 0.0716,
"step": 287
},
{
"epoch": 10.285714285714286,
"grad_norm": 1.0627952814102173,
"learning_rate": 8.90915741234015e-06,
"loss": 0.0769,
"step": 288
},
{
"epoch": 10.321428571428571,
"grad_norm": 0.9562764763832092,
"learning_rate": 8.89802860739326e-06,
"loss": 0.062,
"step": 289
},
{
"epoch": 10.357142857142858,
"grad_norm": 1.1574186086654663,
"learning_rate": 8.88685034779576e-06,
"loss": 0.0857,
"step": 290
},
{
"epoch": 10.392857142857142,
"grad_norm": 1.2380372285842896,
"learning_rate": 8.87562277536726e-06,
"loss": 0.0908,
"step": 291
},
{
"epoch": 10.428571428571429,
"grad_norm": 0.8159970045089722,
"learning_rate": 8.864346032553016e-06,
"loss": 0.068,
"step": 292
},
{
"epoch": 10.464285714285714,
"grad_norm": 1.1167945861816406,
"learning_rate": 8.853020262422111e-06,
"loss": 0.0696,
"step": 293
},
{
"epoch": 10.5,
"grad_norm": 1.2967480421066284,
"learning_rate": 8.84164560866564e-06,
"loss": 0.0803,
"step": 294
},
{
"epoch": 10.535714285714286,
"grad_norm": 1.5502207279205322,
"learning_rate": 8.83022221559489e-06,
"loss": 0.0984,
"step": 295
},
{
"epoch": 10.571428571428571,
"grad_norm": 0.9975540637969971,
"learning_rate": 8.818750228139513e-06,
"loss": 0.0675,
"step": 296
},
{
"epoch": 10.607142857142858,
"grad_norm": 0.9909189939498901,
"learning_rate": 8.807229791845673e-06,
"loss": 0.079,
"step": 297
},
{
"epoch": 10.642857142857142,
"grad_norm": 1.3548370599746704,
"learning_rate": 8.795661052874217e-06,
"loss": 0.0909,
"step": 298
},
{
"epoch": 10.678571428571429,
"grad_norm": 1.3748208284378052,
"learning_rate": 8.78404415799881e-06,
"loss": 0.079,
"step": 299
},
{
"epoch": 10.714285714285714,
"grad_norm": 1.6325222253799438,
"learning_rate": 8.772379254604074e-06,
"loss": 0.0978,
"step": 300
},
{
"epoch": 10.714285714285714,
"eval_loss": 2.0024502277374268,
"eval_runtime": 0.4759,
"eval_samples_per_second": 294.154,
"eval_steps_per_second": 2.101,
"step": 300
},
{
"epoch": 10.75,
"grad_norm": 1.1626653671264648,
"learning_rate": 8.76066649068372e-06,
"loss": 0.0926,
"step": 301
},
{
"epoch": 10.785714285714286,
"grad_norm": 1.312601089477539,
"learning_rate": 8.748906014838672e-06,
"loss": 0.0895,
"step": 302
},
{
"epoch": 10.821428571428571,
"grad_norm": 0.9997202754020691,
"learning_rate": 8.737097976275177e-06,
"loss": 0.0765,
"step": 303
},
{
"epoch": 10.857142857142858,
"grad_norm": 0.9879180788993835,
"learning_rate": 8.725242524802919e-06,
"loss": 0.081,
"step": 304
},
{
"epoch": 10.892857142857142,
"grad_norm": 1.0174518823623657,
"learning_rate": 8.713339810833105e-06,
"loss": 0.0714,
"step": 305
},
{
"epoch": 10.928571428571429,
"grad_norm": 1.340936541557312,
"learning_rate": 8.701389985376578e-06,
"loss": 0.0826,
"step": 306
},
{
"epoch": 10.964285714285714,
"grad_norm": 1.0532597303390503,
"learning_rate": 8.689393200041878e-06,
"loss": 0.0914,
"step": 307
},
{
"epoch": 11.0,
"grad_norm": 0.8435774445533752,
"learning_rate": 8.677349607033336e-06,
"loss": 0.0473,
"step": 308
},
{
"epoch": 11.035714285714286,
"grad_norm": 0.7733930349349976,
"learning_rate": 8.665259359149132e-06,
"loss": 0.0461,
"step": 309
},
{
"epoch": 11.071428571428571,
"grad_norm": 0.7377230525016785,
"learning_rate": 8.653122609779365e-06,
"loss": 0.0506,
"step": 310
},
{
"epoch": 11.107142857142858,
"grad_norm": 0.9703754186630249,
"learning_rate": 8.640939512904097e-06,
"loss": 0.044,
"step": 311
},
{
"epoch": 11.142857142857142,
"grad_norm": 0.9164966940879822,
"learning_rate": 8.62871022309141e-06,
"loss": 0.0442,
"step": 312
},
{
"epoch": 11.178571428571429,
"grad_norm": 0.5867542624473572,
"learning_rate": 8.61643489549544e-06,
"loss": 0.0356,
"step": 313
},
{
"epoch": 11.214285714285714,
"grad_norm": 0.9844388365745544,
"learning_rate": 8.604113685854407e-06,
"loss": 0.0465,
"step": 314
},
{
"epoch": 11.25,
"grad_norm": 0.8281243443489075,
"learning_rate": 8.591746750488639e-06,
"loss": 0.0485,
"step": 315
},
{
"epoch": 11.285714285714286,
"grad_norm": 1.1860142946243286,
"learning_rate": 8.579334246298593e-06,
"loss": 0.0623,
"step": 316
},
{
"epoch": 11.321428571428571,
"grad_norm": 0.7477958798408508,
"learning_rate": 8.566876330762861e-06,
"loss": 0.0485,
"step": 317
},
{
"epoch": 11.357142857142858,
"grad_norm": 1.0370478630065918,
"learning_rate": 8.554373161936176e-06,
"loss": 0.0549,
"step": 318
},
{
"epoch": 11.392857142857142,
"grad_norm": 0.7288575172424316,
"learning_rate": 8.541824898447399e-06,
"loss": 0.0419,
"step": 319
},
{
"epoch": 11.428571428571429,
"grad_norm": 0.7542941570281982,
"learning_rate": 8.529231699497512e-06,
"loss": 0.0538,
"step": 320
},
{
"epoch": 11.464285714285714,
"grad_norm": 0.7196518778800964,
"learning_rate": 8.516593724857598e-06,
"loss": 0.0476,
"step": 321
},
{
"epoch": 11.5,
"grad_norm": 0.6946846842765808,
"learning_rate": 8.503911134866819e-06,
"loss": 0.048,
"step": 322
},
{
"epoch": 11.535714285714286,
"grad_norm": 0.81142657995224,
"learning_rate": 8.491184090430365e-06,
"loss": 0.052,
"step": 323
},
{
"epoch": 11.571428571428571,
"grad_norm": 0.9150547981262207,
"learning_rate": 8.478412753017433e-06,
"loss": 0.0451,
"step": 324
},
{
"epoch": 11.607142857142858,
"grad_norm": 1.1183067560195923,
"learning_rate": 8.465597284659163e-06,
"loss": 0.0511,
"step": 325
},
{
"epoch": 11.642857142857142,
"grad_norm": 1.272926688194275,
"learning_rate": 8.452737847946597e-06,
"loss": 0.0612,
"step": 326
},
{
"epoch": 11.678571428571429,
"grad_norm": 0.9870100617408752,
"learning_rate": 8.439834606028594e-06,
"loss": 0.0663,
"step": 327
},
{
"epoch": 11.714285714285714,
"grad_norm": 0.9883282780647278,
"learning_rate": 8.426887722609787e-06,
"loss": 0.0538,
"step": 328
},
{
"epoch": 11.75,
"grad_norm": 1.1376686096191406,
"learning_rate": 8.413897361948484e-06,
"loss": 0.059,
"step": 329
},
{
"epoch": 11.785714285714286,
"grad_norm": 1.1064047813415527,
"learning_rate": 8.400863688854598e-06,
"loss": 0.0597,
"step": 330
},
{
"epoch": 11.821428571428571,
"grad_norm": 1.1057697534561157,
"learning_rate": 8.387786868687549e-06,
"loss": 0.0445,
"step": 331
},
{
"epoch": 11.857142857142858,
"grad_norm": 0.7781133651733398,
"learning_rate": 8.374667067354164e-06,
"loss": 0.0616,
"step": 332
},
{
"epoch": 11.892857142857142,
"grad_norm": 0.7614454627037048,
"learning_rate": 8.361504451306585e-06,
"loss": 0.0467,
"step": 333
},
{
"epoch": 11.928571428571429,
"grad_norm": 1.1504416465759277,
"learning_rate": 8.34829918754014e-06,
"loss": 0.0685,
"step": 334
},
{
"epoch": 11.964285714285714,
"grad_norm": 0.943291962146759,
"learning_rate": 8.335051443591236e-06,
"loss": 0.0503,
"step": 335
},
{
"epoch": 12.0,
"grad_norm": 0.6303163766860962,
"learning_rate": 8.321761387535231e-06,
"loss": 0.0429,
"step": 336
},
{
"epoch": 12.035714285714286,
"grad_norm": 0.5599375367164612,
"learning_rate": 8.308429187984298e-06,
"loss": 0.033,
"step": 337
},
{
"epoch": 12.071428571428571,
"grad_norm": 0.5403127074241638,
"learning_rate": 8.295055014085289e-06,
"loss": 0.0379,
"step": 338
},
{
"epoch": 12.107142857142858,
"grad_norm": 0.6652883887290955,
"learning_rate": 8.281639035517591e-06,
"loss": 0.0423,
"step": 339
},
{
"epoch": 12.142857142857142,
"grad_norm": 0.6625171899795532,
"learning_rate": 8.268181422490969e-06,
"loss": 0.0284,
"step": 340
},
{
"epoch": 12.178571428571429,
"grad_norm": 0.6045703291893005,
"learning_rate": 8.254682345743406e-06,
"loss": 0.0302,
"step": 341
},
{
"epoch": 12.214285714285714,
"grad_norm": 0.9115837216377258,
"learning_rate": 8.241141976538944e-06,
"loss": 0.0341,
"step": 342
},
{
"epoch": 12.25,
"grad_norm": 0.650475263595581,
"learning_rate": 8.227560486665498e-06,
"loss": 0.025,
"step": 343
},
{
"epoch": 12.285714285714286,
"grad_norm": 0.8756570816040039,
"learning_rate": 8.213938048432697e-06,
"loss": 0.0273,
"step": 344
},
{
"epoch": 12.321428571428571,
"grad_norm": 0.5917655229568481,
"learning_rate": 8.200274834669675e-06,
"loss": 0.0299,
"step": 345
},
{
"epoch": 12.357142857142858,
"grad_norm": 1.3080816268920898,
"learning_rate": 8.186571018722894e-06,
"loss": 0.0278,
"step": 346
},
{
"epoch": 12.392857142857142,
"grad_norm": 0.994071364402771,
"learning_rate": 8.172826774453937e-06,
"loss": 0.037,
"step": 347
},
{
"epoch": 12.428571428571429,
"grad_norm": 1.0529603958129883,
"learning_rate": 8.159042276237308e-06,
"loss": 0.0457,
"step": 348
},
{
"epoch": 12.464285714285714,
"grad_norm": 0.8449487686157227,
"learning_rate": 8.145217698958213e-06,
"loss": 0.0334,
"step": 349
},
{
"epoch": 12.5,
"grad_norm": 0.7479754090309143,
"learning_rate": 8.131353218010347e-06,
"loss": 0.0381,
"step": 350
},
{
"epoch": 12.535714285714286,
"grad_norm": 0.4850471317768097,
"learning_rate": 8.117449009293668e-06,
"loss": 0.0331,
"step": 351
},
{
"epoch": 12.571428571428571,
"grad_norm": 1.2365081310272217,
"learning_rate": 8.10350524921216e-06,
"loss": 0.0379,
"step": 352
},
{
"epoch": 12.607142857142858,
"grad_norm": 0.731280505657196,
"learning_rate": 8.089522114671603e-06,
"loss": 0.0349,
"step": 353
},
{
"epoch": 12.642857142857142,
"grad_norm": 0.6076593995094299,
"learning_rate": 8.075499783077321e-06,
"loss": 0.0354,
"step": 354
},
{
"epoch": 12.678571428571429,
"grad_norm": 0.5255799293518066,
"learning_rate": 8.061438432331935e-06,
"loss": 0.0358,
"step": 355
},
{
"epoch": 12.714285714285714,
"grad_norm": 0.8889122009277344,
"learning_rate": 8.047338240833108e-06,
"loss": 0.0485,
"step": 356
},
{
"epoch": 12.75,
"grad_norm": 0.5412372350692749,
"learning_rate": 8.033199387471278e-06,
"loss": 0.04,
"step": 357
},
{
"epoch": 12.785714285714286,
"grad_norm": 0.8177741765975952,
"learning_rate": 8.019022051627387e-06,
"loss": 0.0285,
"step": 358
},
{
"epoch": 12.821428571428571,
"grad_norm": 0.5282717347145081,
"learning_rate": 8.004806413170613e-06,
"loss": 0.0356,
"step": 359
},
{
"epoch": 12.857142857142858,
"grad_norm": 0.7756925225257874,
"learning_rate": 7.99055265245608e-06,
"loss": 0.0436,
"step": 360
},
{
"epoch": 12.892857142857142,
"grad_norm": 0.9730148911476135,
"learning_rate": 7.976260950322572e-06,
"loss": 0.0434,
"step": 361
},
{
"epoch": 12.928571428571429,
"grad_norm": 0.7948063015937805,
"learning_rate": 7.96193148809024e-06,
"loss": 0.0451,
"step": 362
},
{
"epoch": 12.964285714285714,
"grad_norm": 0.5865257978439331,
"learning_rate": 7.9475644475583e-06,
"loss": 0.0291,
"step": 363
},
{
"epoch": 13.0,
"grad_norm": 0.4693900942802429,
"learning_rate": 7.933160011002729e-06,
"loss": 0.0277,
"step": 364
},
{
"epoch": 13.035714285714286,
"grad_norm": 0.3828136920928955,
"learning_rate": 7.918718361173951e-06,
"loss": 0.02,
"step": 365
},
{
"epoch": 13.071428571428571,
"grad_norm": 0.596170961856842,
"learning_rate": 7.904239681294515e-06,
"loss": 0.0185,
"step": 366
},
{
"epoch": 13.107142857142858,
"grad_norm": 0.39570969343185425,
"learning_rate": 7.889724155056776e-06,
"loss": 0.0195,
"step": 367
},
{
"epoch": 13.142857142857142,
"grad_norm": 0.5549917221069336,
"learning_rate": 7.875171966620567e-06,
"loss": 0.0266,
"step": 368
},
{
"epoch": 13.178571428571429,
"grad_norm": 0.42615193128585815,
"learning_rate": 7.860583300610849e-06,
"loss": 0.0193,
"step": 369
},
{
"epoch": 13.214285714285714,
"grad_norm": 0.6593537926673889,
"learning_rate": 7.84595834211538e-06,
"loss": 0.0213,
"step": 370
},
{
"epoch": 13.25,
"grad_norm": 0.49755775928497314,
"learning_rate": 7.83129727668237e-06,
"loss": 0.0235,
"step": 371
},
{
"epoch": 13.285714285714286,
"grad_norm": 0.9553983807563782,
"learning_rate": 7.81660029031811e-06,
"loss": 0.0254,
"step": 372
},
{
"epoch": 13.321428571428571,
"grad_norm": 0.6323786377906799,
"learning_rate": 7.801867569484635e-06,
"loss": 0.0255,
"step": 373
},
{
"epoch": 13.357142857142858,
"grad_norm": 0.7185072302818298,
"learning_rate": 7.78709930109734e-06,
"loss": 0.0277,
"step": 374
},
{
"epoch": 13.392857142857142,
"grad_norm": 0.508331835269928,
"learning_rate": 7.772295672522615e-06,
"loss": 0.0285,
"step": 375
},
{
"epoch": 13.428571428571429,
"grad_norm": 0.5408092141151428,
"learning_rate": 7.75745687157547e-06,
"loss": 0.0236,
"step": 376
},
{
"epoch": 13.464285714285714,
"grad_norm": 0.7089282274246216,
"learning_rate": 7.742583086517151e-06,
"loss": 0.0184,
"step": 377
},
{
"epoch": 13.5,
"grad_norm": 0.5609864592552185,
"learning_rate": 7.727674506052744e-06,
"loss": 0.026,
"step": 378
},
{
"epoch": 13.535714285714286,
"grad_norm": 0.5647308826446533,
"learning_rate": 7.712731319328798e-06,
"loss": 0.0251,
"step": 379
},
{
"epoch": 13.571428571428571,
"grad_norm": 0.46377676725387573,
"learning_rate": 7.697753715930906e-06,
"loss": 0.0224,
"step": 380
},
{
"epoch": 13.607142857142858,
"grad_norm": 0.44435882568359375,
"learning_rate": 7.682741885881314e-06,
"loss": 0.0261,
"step": 381
},
{
"epoch": 13.642857142857142,
"grad_norm": 0.9688218832015991,
"learning_rate": 7.667696019636504e-06,
"loss": 0.0418,
"step": 382
},
{
"epoch": 13.678571428571429,
"grad_norm": 0.48680123686790466,
"learning_rate": 7.652616308084774e-06,
"loss": 0.0296,
"step": 383
},
{
"epoch": 13.714285714285714,
"grad_norm": 0.9336219429969788,
"learning_rate": 7.637502942543825e-06,
"loss": 0.0345,
"step": 384
},
{
"epoch": 13.75,
"grad_norm": 0.5719844102859497,
"learning_rate": 7.622356114758328e-06,
"loss": 0.0374,
"step": 385
},
{
"epoch": 13.785714285714286,
"grad_norm": 0.7421361207962036,
"learning_rate": 7.607176016897491e-06,
"loss": 0.0335,
"step": 386
},
{
"epoch": 13.821428571428571,
"grad_norm": 0.4875544011592865,
"learning_rate": 7.591962841552627e-06,
"loss": 0.0312,
"step": 387
},
{
"epoch": 13.857142857142858,
"grad_norm": 0.5115516185760498,
"learning_rate": 7.576716781734699e-06,
"loss": 0.0306,
"step": 388
},
{
"epoch": 13.892857142857142,
"grad_norm": 0.4477997422218323,
"learning_rate": 7.561438030871886e-06,
"loss": 0.0248,
"step": 389
},
{
"epoch": 13.928571428571429,
"grad_norm": 0.35774773359298706,
"learning_rate": 7.546126782807117e-06,
"loss": 0.0243,
"step": 390
},
{
"epoch": 13.964285714285714,
"grad_norm": 0.3818391263484955,
"learning_rate": 7.530783231795615e-06,
"loss": 0.0276,
"step": 391
},
{
"epoch": 14.0,
"grad_norm": 0.2867811620235443,
"learning_rate": 7.515407572502438e-06,
"loss": 0.016,
"step": 392
},
{
"epoch": 14.035714285714286,
"grad_norm": 0.33533164858818054,
"learning_rate": 7.500000000000001e-06,
"loss": 0.0185,
"step": 393
},
{
"epoch": 14.071428571428571,
"grad_norm": 0.27466756105422974,
"learning_rate": 7.484560709765605e-06,
"loss": 0.0171,
"step": 394
},
{
"epoch": 14.107142857142858,
"grad_norm": 0.4225900173187256,
"learning_rate": 7.469089897678958e-06,
"loss": 0.0227,
"step": 395
},
{
"epoch": 14.142857142857142,
"grad_norm": 1.1478960514068604,
"learning_rate": 7.453587760019691e-06,
"loss": 0.0308,
"step": 396
},
{
"epoch": 14.178571428571429,
"grad_norm": 0.3350810408592224,
"learning_rate": 7.438054493464859e-06,
"loss": 0.016,
"step": 397
},
{
"epoch": 14.214285714285714,
"grad_norm": 0.3689461946487427,
"learning_rate": 7.422490295086457e-06,
"loss": 0.0219,
"step": 398
},
{
"epoch": 14.25,
"grad_norm": 0.34574565291404724,
"learning_rate": 7.406895362348916e-06,
"loss": 0.0165,
"step": 399
},
{
"epoch": 14.285714285714286,
"grad_norm": 0.39184218645095825,
"learning_rate": 7.391269893106592e-06,
"loss": 0.0219,
"step": 400
},
{
"epoch": 14.285714285714286,
"eval_loss": 2.3083860874176025,
"eval_runtime": 0.4688,
"eval_samples_per_second": 298.631,
"eval_steps_per_second": 2.133,
"step": 400
},
{
"epoch": 14.321428571428571,
"grad_norm": 0.5275436043739319,
"learning_rate": 7.375614085601265e-06,
"loss": 0.0188,
"step": 401
},
{
"epoch": 14.357142857142858,
"grad_norm": 0.36986497044563293,
"learning_rate": 7.359928138459615e-06,
"loss": 0.0194,
"step": 402
},
{
"epoch": 14.392857142857142,
"grad_norm": 0.35321933031082153,
"learning_rate": 7.344212250690712e-06,
"loss": 0.0195,
"step": 403
},
{
"epoch": 14.428571428571429,
"grad_norm": 0.5846700072288513,
"learning_rate": 7.328466621683481e-06,
"loss": 0.0199,
"step": 404
},
{
"epoch": 14.464285714285714,
"grad_norm": 0.36776694655418396,
"learning_rate": 7.312691451204178e-06,
"loss": 0.0182,
"step": 405
},
{
"epoch": 14.5,
"grad_norm": 0.7046879529953003,
"learning_rate": 7.296886939393852e-06,
"loss": 0.0241,
"step": 406
},
{
"epoch": 14.535714285714286,
"grad_norm": 0.5038862824440002,
"learning_rate": 7.281053286765816e-06,
"loss": 0.0154,
"step": 407
},
{
"epoch": 14.571428571428571,
"grad_norm": 0.42418766021728516,
"learning_rate": 7.265190694203086e-06,
"loss": 0.0212,
"step": 408
},
{
"epoch": 14.607142857142858,
"grad_norm": 0.5494039058685303,
"learning_rate": 7.249299362955846e-06,
"loss": 0.0254,
"step": 409
},
{
"epoch": 14.642857142857142,
"grad_norm": 0.5212989449501038,
"learning_rate": 7.233379494638891e-06,
"loss": 0.0216,
"step": 410
},
{
"epoch": 14.678571428571429,
"grad_norm": 0.3063950836658478,
"learning_rate": 7.217431291229068e-06,
"loss": 0.0173,
"step": 411
},
{
"epoch": 14.714285714285714,
"grad_norm": 0.26393306255340576,
"learning_rate": 7.201454955062712e-06,
"loss": 0.0169,
"step": 412
},
{
"epoch": 14.75,
"grad_norm": 0.44368988275527954,
"learning_rate": 7.185450688833083e-06,
"loss": 0.0229,
"step": 413
},
{
"epoch": 14.785714285714286,
"grad_norm": 0.4474826753139496,
"learning_rate": 7.169418695587791e-06,
"loss": 0.0196,
"step": 414
},
{
"epoch": 14.821428571428571,
"grad_norm": 0.39892250299453735,
"learning_rate": 7.153359178726222e-06,
"loss": 0.0239,
"step": 415
},
{
"epoch": 14.857142857142858,
"grad_norm": 0.6791783571243286,
"learning_rate": 7.137272341996958e-06,
"loss": 0.0241,
"step": 416
},
{
"epoch": 14.892857142857142,
"grad_norm": 0.3901786804199219,
"learning_rate": 7.121158389495187e-06,
"loss": 0.0232,
"step": 417
},
{
"epoch": 14.928571428571429,
"grad_norm": 0.3575139045715332,
"learning_rate": 7.10501752566012e-06,
"loss": 0.0225,
"step": 418
},
{
"epoch": 14.964285714285714,
"grad_norm": 0.48408985137939453,
"learning_rate": 7.088849955272396e-06,
"loss": 0.0246,
"step": 419
},
{
"epoch": 15.0,
"grad_norm": 0.30433982610702515,
"learning_rate": 7.072655883451478e-06,
"loss": 0.0183,
"step": 420
},
{
"epoch": 15.035714285714286,
"grad_norm": 0.23058322072029114,
"learning_rate": 7.056435515653059e-06,
"loss": 0.0147,
"step": 421
},
{
"epoch": 15.071428571428571,
"grad_norm": 0.21939268708229065,
"learning_rate": 7.040189057666449e-06,
"loss": 0.0162,
"step": 422
},
{
"epoch": 15.107142857142858,
"grad_norm": 0.30123960971832275,
"learning_rate": 7.023916715611969e-06,
"loss": 0.0152,
"step": 423
},
{
"epoch": 15.142857142857142,
"grad_norm": 0.24728672206401825,
"learning_rate": 7.007618695938334e-06,
"loss": 0.0126,
"step": 424
},
{
"epoch": 15.178571428571429,
"grad_norm": 0.37947019934654236,
"learning_rate": 6.991295205420028e-06,
"loss": 0.0133,
"step": 425
},
{
"epoch": 15.214285714285714,
"grad_norm": 0.2239277958869934,
"learning_rate": 6.974946451154694e-06,
"loss": 0.0134,
"step": 426
},
{
"epoch": 15.25,
"grad_norm": 0.2562693655490875,
"learning_rate": 6.9585726405604915e-06,
"loss": 0.0159,
"step": 427
},
{
"epoch": 15.285714285714286,
"grad_norm": 0.32447507977485657,
"learning_rate": 6.942173981373474e-06,
"loss": 0.0141,
"step": 428
},
{
"epoch": 15.321428571428571,
"grad_norm": 0.30523914098739624,
"learning_rate": 6.925750681644954e-06,
"loss": 0.0145,
"step": 429
},
{
"epoch": 15.357142857142858,
"grad_norm": 0.33100420236587524,
"learning_rate": 6.90930294973886e-06,
"loss": 0.0158,
"step": 430
},
{
"epoch": 15.392857142857142,
"grad_norm": 0.3664904534816742,
"learning_rate": 6.892830994329089e-06,
"loss": 0.0187,
"step": 431
},
{
"epoch": 15.428571428571429,
"grad_norm": 0.4915764331817627,
"learning_rate": 6.876335024396872e-06,
"loss": 0.0141,
"step": 432
},
{
"epoch": 15.464285714285714,
"grad_norm": 0.3460884392261505,
"learning_rate": 6.859815249228106e-06,
"loss": 0.0206,
"step": 433
},
{
"epoch": 15.5,
"grad_norm": 0.5521484017372131,
"learning_rate": 6.8432718784107145e-06,
"loss": 0.0145,
"step": 434
},
{
"epoch": 15.535714285714286,
"grad_norm": 0.6945492625236511,
"learning_rate": 6.8267051218319766e-06,
"loss": 0.0168,
"step": 435
},
{
"epoch": 15.571428571428571,
"grad_norm": 0.4687165915966034,
"learning_rate": 6.81011518967587e-06,
"loss": 0.0184,
"step": 436
},
{
"epoch": 15.607142857142858,
"grad_norm": 0.30982425808906555,
"learning_rate": 6.793502292420402e-06,
"loss": 0.0191,
"step": 437
},
{
"epoch": 15.642857142857142,
"grad_norm": 0.3576700687408447,
"learning_rate": 6.7768666408349445e-06,
"loss": 0.0118,
"step": 438
},
{
"epoch": 15.678571428571429,
"grad_norm": 0.4062334895133972,
"learning_rate": 6.760208445977551e-06,
"loss": 0.0201,
"step": 439
},
{
"epoch": 15.714285714285714,
"grad_norm": 0.44516295194625854,
"learning_rate": 6.743527919192285e-06,
"loss": 0.0183,
"step": 440
},
{
"epoch": 15.75,
"grad_norm": 0.3430921137332916,
"learning_rate": 6.726825272106539e-06,
"loss": 0.0172,
"step": 441
},
{
"epoch": 15.785714285714286,
"grad_norm": 0.34668734669685364,
"learning_rate": 6.710100716628345e-06,
"loss": 0.0202,
"step": 442
},
{
"epoch": 15.821428571428571,
"grad_norm": 0.34746137261390686,
"learning_rate": 6.693354464943689e-06,
"loss": 0.0175,
"step": 443
},
{
"epoch": 15.857142857142858,
"grad_norm": 0.38835713267326355,
"learning_rate": 6.676586729513823e-06,
"loss": 0.0205,
"step": 444
},
{
"epoch": 15.892857142857142,
"grad_norm": 0.4056047797203064,
"learning_rate": 6.659797723072558e-06,
"loss": 0.0206,
"step": 445
},
{
"epoch": 15.928571428571429,
"grad_norm": 0.3119281530380249,
"learning_rate": 6.642987658623581e-06,
"loss": 0.0207,
"step": 446
},
{
"epoch": 15.964285714285714,
"grad_norm": 0.4128764271736145,
"learning_rate": 6.626156749437736e-06,
"loss": 0.0216,
"step": 447
},
{
"epoch": 16.0,
"grad_norm": 0.4920613765716553,
"learning_rate": 6.609305209050332e-06,
"loss": 0.0178,
"step": 448
},
{
"epoch": 16.035714285714285,
"grad_norm": 0.5353336930274963,
"learning_rate": 6.592433251258423e-06,
"loss": 0.0132,
"step": 449
},
{
"epoch": 16.071428571428573,
"grad_norm": 0.2517736554145813,
"learning_rate": 6.575541090118105e-06,
"loss": 0.0142,
"step": 450
},
{
"epoch": 16.107142857142858,
"grad_norm": 0.23888464272022247,
"learning_rate": 6.558628939941792e-06,
"loss": 0.0139,
"step": 451
},
{
"epoch": 16.142857142857142,
"grad_norm": 0.23904645442962646,
"learning_rate": 6.541697015295503e-06,
"loss": 0.0147,
"step": 452
},
{
"epoch": 16.178571428571427,
"grad_norm": 0.16995219886302948,
"learning_rate": 6.524745530996137e-06,
"loss": 0.0112,
"step": 453
},
{
"epoch": 16.214285714285715,
"grad_norm": 0.301095575094223,
"learning_rate": 6.507774702108748e-06,
"loss": 0.0138,
"step": 454
},
{
"epoch": 16.25,
"grad_norm": 0.19944924116134644,
"learning_rate": 6.490784743943819e-06,
"loss": 0.0111,
"step": 455
},
{
"epoch": 16.285714285714285,
"grad_norm": 0.2589744031429291,
"learning_rate": 6.473775872054522e-06,
"loss": 0.0149,
"step": 456
},
{
"epoch": 16.321428571428573,
"grad_norm": 0.25151684880256653,
"learning_rate": 6.456748302233995e-06,
"loss": 0.0134,
"step": 457
},
{
"epoch": 16.357142857142858,
"grad_norm": 0.23779137432575226,
"learning_rate": 6.439702250512596e-06,
"loss": 0.0153,
"step": 458
},
{
"epoch": 16.392857142857142,
"grad_norm": 0.30735448002815247,
"learning_rate": 6.4226379331551625e-06,
"loss": 0.0133,
"step": 459
},
{
"epoch": 16.428571428571427,
"grad_norm": 0.34077078104019165,
"learning_rate": 6.405555566658276e-06,
"loss": 0.0172,
"step": 460
},
{
"epoch": 16.464285714285715,
"grad_norm": 0.33910149335861206,
"learning_rate": 6.388455367747503e-06,
"loss": 0.013,
"step": 461
},
{
"epoch": 16.5,
"grad_norm": 0.5794575810432434,
"learning_rate": 6.3713375533746525e-06,
"loss": 0.0133,
"step": 462
},
{
"epoch": 16.535714285714285,
"grad_norm": 0.2994963228702545,
"learning_rate": 6.354202340715027e-06,
"loss": 0.0143,
"step": 463
},
{
"epoch": 16.571428571428573,
"grad_norm": 0.2810235023498535,
"learning_rate": 6.337049947164656e-06,
"loss": 0.0153,
"step": 464
},
{
"epoch": 16.607142857142858,
"grad_norm": 0.28916069865226746,
"learning_rate": 6.319880590337549e-06,
"loss": 0.0173,
"step": 465
},
{
"epoch": 16.642857142857142,
"grad_norm": 0.3143618106842041,
"learning_rate": 6.302694488062931e-06,
"loss": 0.0185,
"step": 466
},
{
"epoch": 16.678571428571427,
"grad_norm": 0.24434876441955566,
"learning_rate": 6.2854918583824745e-06,
"loss": 0.014,
"step": 467
},
{
"epoch": 16.714285714285715,
"grad_norm": 0.37958788871765137,
"learning_rate": 6.268272919547537e-06,
"loss": 0.0172,
"step": 468
},
{
"epoch": 16.75,
"grad_norm": 0.21300822496414185,
"learning_rate": 6.251037890016396e-06,
"loss": 0.0128,
"step": 469
},
{
"epoch": 16.785714285714285,
"grad_norm": 0.35435208678245544,
"learning_rate": 6.233786988451468e-06,
"loss": 0.0148,
"step": 470
},
{
"epoch": 16.821428571428573,
"grad_norm": 0.4400753080844879,
"learning_rate": 6.216520433716544e-06,
"loss": 0.0188,
"step": 471
},
{
"epoch": 16.857142857142858,
"grad_norm": 0.30804184079170227,
"learning_rate": 6.199238444874005e-06,
"loss": 0.0172,
"step": 472
},
{
"epoch": 16.892857142857142,
"grad_norm": 0.23466266691684723,
"learning_rate": 6.181941241182044e-06,
"loss": 0.0149,
"step": 473
},
{
"epoch": 16.928571428571427,
"grad_norm": 0.40080544352531433,
"learning_rate": 6.164629042091894e-06,
"loss": 0.0159,
"step": 474
},
{
"epoch": 16.964285714285715,
"grad_norm": 0.29989176988601685,
"learning_rate": 6.1473020672450275e-06,
"loss": 0.0175,
"step": 475
},
{
"epoch": 17.0,
"grad_norm": 0.23383177816867828,
"learning_rate": 6.1299605364703826e-06,
"loss": 0.0137,
"step": 476
},
{
"epoch": 17.035714285714285,
"grad_norm": 0.1993263214826584,
"learning_rate": 6.112604669781572e-06,
"loss": 0.0126,
"step": 477
},
{
"epoch": 17.071428571428573,
"grad_norm": 0.19340385496616364,
"learning_rate": 6.095234687374085e-06,
"loss": 0.0111,
"step": 478
},
{
"epoch": 17.107142857142858,
"grad_norm": 0.2317463755607605,
"learning_rate": 6.0778508096224985e-06,
"loss": 0.014,
"step": 479
},
{
"epoch": 17.142857142857142,
"grad_norm": 0.17298246920108795,
"learning_rate": 6.060453257077686e-06,
"loss": 0.011,
"step": 480
},
{
"epoch": 17.178571428571427,
"grad_norm": 0.17967282235622406,
"learning_rate": 6.043042250464005e-06,
"loss": 0.0113,
"step": 481
},
{
"epoch": 17.214285714285715,
"grad_norm": 0.14388182759284973,
"learning_rate": 6.025618010676516e-06,
"loss": 0.0076,
"step": 482
},
{
"epoch": 17.25,
"grad_norm": 0.32818639278411865,
"learning_rate": 6.008180758778167e-06,
"loss": 0.0116,
"step": 483
},
{
"epoch": 17.285714285714285,
"grad_norm": 0.29826852679252625,
"learning_rate": 5.990730715996989e-06,
"loss": 0.0152,
"step": 484
},
{
"epoch": 17.321428571428573,
"grad_norm": 0.2203068882226944,
"learning_rate": 5.973268103723293e-06,
"loss": 0.0116,
"step": 485
},
{
"epoch": 17.357142857142858,
"grad_norm": 0.19426509737968445,
"learning_rate": 5.955793143506863e-06,
"loss": 0.0108,
"step": 486
},
{
"epoch": 17.392857142857142,
"grad_norm": 0.18054582178592682,
"learning_rate": 5.938306057054139e-06,
"loss": 0.0094,
"step": 487
},
{
"epoch": 17.428571428571427,
"grad_norm": 0.23862619698047638,
"learning_rate": 5.920807066225409e-06,
"loss": 0.0135,
"step": 488
},
{
"epoch": 17.464285714285715,
"grad_norm": 0.21144980192184448,
"learning_rate": 5.903296393031996e-06,
"loss": 0.0109,
"step": 489
},
{
"epoch": 17.5,
"grad_norm": 0.32449236512184143,
"learning_rate": 5.885774259633432e-06,
"loss": 0.0151,
"step": 490
},
{
"epoch": 17.535714285714285,
"grad_norm": 0.7206212282180786,
"learning_rate": 5.8682408883346535e-06,
"loss": 0.0104,
"step": 491
},
{
"epoch": 17.571428571428573,
"grad_norm": 0.2740480303764343,
"learning_rate": 5.850696501583164e-06,
"loss": 0.0145,
"step": 492
},
{
"epoch": 17.607142857142858,
"grad_norm": 0.24609383940696716,
"learning_rate": 5.8331413219662295e-06,
"loss": 0.0153,
"step": 493
},
{
"epoch": 17.642857142857142,
"grad_norm": 0.296929270029068,
"learning_rate": 5.815575572208042e-06,
"loss": 0.018,
"step": 494
},
{
"epoch": 17.678571428571427,
"grad_norm": 0.7659111022949219,
"learning_rate": 5.797999475166897e-06,
"loss": 0.017,
"step": 495
},
{
"epoch": 17.714285714285715,
"grad_norm": 0.35650327801704407,
"learning_rate": 5.78041325383237e-06,
"loss": 0.0152,
"step": 496
},
{
"epoch": 17.75,
"grad_norm": 0.2899593412876129,
"learning_rate": 5.762817131322482e-06,
"loss": 0.0143,
"step": 497
},
{
"epoch": 17.785714285714285,
"grad_norm": 0.20666421949863434,
"learning_rate": 5.745211330880872e-06,
"loss": 0.0153,
"step": 498
},
{
"epoch": 17.821428571428573,
"grad_norm": 0.2033378630876541,
"learning_rate": 5.7275960758739655e-06,
"loss": 0.014,
"step": 499
},
{
"epoch": 17.857142857142858,
"grad_norm": 0.24258489906787872,
"learning_rate": 5.709971589788136e-06,
"loss": 0.0181,
"step": 500
},
{
"epoch": 17.857142857142858,
"eval_loss": 2.345672607421875,
"eval_runtime": 0.5108,
"eval_samples_per_second": 274.081,
"eval_steps_per_second": 1.958,
"step": 500
},
{
"epoch": 17.892857142857142,
"grad_norm": 0.28151920437812805,
"learning_rate": 5.69233809622687e-06,
"loss": 0.0181,
"step": 501
},
{
"epoch": 17.928571428571427,
"grad_norm": 0.3348303437232971,
"learning_rate": 5.674695818907943e-06,
"loss": 0.0161,
"step": 502
},
{
"epoch": 17.964285714285715,
"grad_norm": 0.7704312801361084,
"learning_rate": 5.65704498166056e-06,
"loss": 0.0176,
"step": 503
},
{
"epoch": 18.0,
"grad_norm": 0.2007305771112442,
"learning_rate": 5.6393858084225305e-06,
"loss": 0.0129,
"step": 504
},
{
"epoch": 18.035714285714285,
"grad_norm": 0.15058767795562744,
"learning_rate": 5.621718523237427e-06,
"loss": 0.0088,
"step": 505
},
{
"epoch": 18.071428571428573,
"grad_norm": 0.16612547636032104,
"learning_rate": 5.604043350251733e-06,
"loss": 0.0099,
"step": 506
},
{
"epoch": 18.107142857142858,
"grad_norm": 0.20791685581207275,
"learning_rate": 5.586360513712011e-06,
"loss": 0.0117,
"step": 507
},
{
"epoch": 18.142857142857142,
"grad_norm": 0.13226307928562164,
"learning_rate": 5.568670237962045e-06,
"loss": 0.009,
"step": 508
},
{
"epoch": 18.178571428571427,
"grad_norm": 0.2213960736989975,
"learning_rate": 5.550972747440007e-06,
"loss": 0.0124,
"step": 509
},
{
"epoch": 18.214285714285715,
"grad_norm": 0.19490720331668854,
"learning_rate": 5.533268266675601e-06,
"loss": 0.0128,
"step": 510
},
{
"epoch": 18.25,
"grad_norm": 0.1481325626373291,
"learning_rate": 5.515557020287219e-06,
"loss": 0.0078,
"step": 511
},
{
"epoch": 18.285714285714285,
"grad_norm": 0.196213498711586,
"learning_rate": 5.497839232979084e-06,
"loss": 0.0119,
"step": 512
},
{
"epoch": 18.321428571428573,
"grad_norm": 0.26103919744491577,
"learning_rate": 5.480115129538409e-06,
"loss": 0.0152,
"step": 513
},
{
"epoch": 18.357142857142858,
"grad_norm": 0.17787081003189087,
"learning_rate": 5.4623849348325396e-06,
"loss": 0.0102,
"step": 514
},
{
"epoch": 18.392857142857142,
"grad_norm": 0.21203866600990295,
"learning_rate": 5.444648873806101e-06,
"loss": 0.0122,
"step": 515
},
{
"epoch": 18.428571428571427,
"grad_norm": 0.1847071498632431,
"learning_rate": 5.426907171478143e-06,
"loss": 0.0099,
"step": 516
},
{
"epoch": 18.464285714285715,
"grad_norm": 0.2914210557937622,
"learning_rate": 5.409160052939292e-06,
"loss": 0.0133,
"step": 517
},
{
"epoch": 18.5,
"grad_norm": 0.30277934670448303,
"learning_rate": 5.391407743348884e-06,
"loss": 0.0165,
"step": 518
},
{
"epoch": 18.535714285714285,
"grad_norm": 0.22359944880008698,
"learning_rate": 5.373650467932122e-06,
"loss": 0.0121,
"step": 519
},
{
"epoch": 18.571428571428573,
"grad_norm": 0.3102841377258301,
"learning_rate": 5.355888451977204e-06,
"loss": 0.0163,
"step": 520
},
{
"epoch": 18.607142857142858,
"grad_norm": 0.26486146450042725,
"learning_rate": 5.3381219208324755e-06,
"loss": 0.0141,
"step": 521
},
{
"epoch": 18.642857142857142,
"grad_norm": 0.2617660462856293,
"learning_rate": 5.320351099903565e-06,
"loss": 0.0157,
"step": 522
},
{
"epoch": 18.678571428571427,
"grad_norm": 0.21751557290554047,
"learning_rate": 5.302576214650527e-06,
"loss": 0.0136,
"step": 523
},
{
"epoch": 18.714285714285715,
"grad_norm": 0.2128409892320633,
"learning_rate": 5.284797490584979e-06,
"loss": 0.0121,
"step": 524
},
{
"epoch": 18.75,
"grad_norm": 0.21959584951400757,
"learning_rate": 5.267015153267246e-06,
"loss": 0.0125,
"step": 525
},
{
"epoch": 18.785714285714285,
"grad_norm": 0.22850820422172546,
"learning_rate": 5.249229428303486e-06,
"loss": 0.0124,
"step": 526
},
{
"epoch": 18.821428571428573,
"grad_norm": 0.8751614689826965,
"learning_rate": 5.231440541342846e-06,
"loss": 0.0197,
"step": 527
},
{
"epoch": 18.857142857142858,
"grad_norm": 0.18895405530929565,
"learning_rate": 5.213648718074584e-06,
"loss": 0.0122,
"step": 528
},
{
"epoch": 18.892857142857142,
"grad_norm": 0.22877931594848633,
"learning_rate": 5.1958541842252145e-06,
"loss": 0.0136,
"step": 529
},
{
"epoch": 18.928571428571427,
"grad_norm": 0.31640875339508057,
"learning_rate": 5.178057165555636e-06,
"loss": 0.0143,
"step": 530
},
{
"epoch": 18.964285714285715,
"grad_norm": 0.2516610026359558,
"learning_rate": 5.160257887858278e-06,
"loss": 0.0144,
"step": 531
},
{
"epoch": 19.0,
"grad_norm": 0.17745497822761536,
"learning_rate": 5.142456576954225e-06,
"loss": 0.0104,
"step": 532
},
{
"epoch": 19.035714285714285,
"grad_norm": 0.1342853158712387,
"learning_rate": 5.1246534586903655e-06,
"loss": 0.0082,
"step": 533
},
{
"epoch": 19.071428571428573,
"grad_norm": 0.18636493384838104,
"learning_rate": 5.106848758936508e-06,
"loss": 0.0122,
"step": 534
},
{
"epoch": 19.107142857142858,
"grad_norm": 0.14282165467739105,
"learning_rate": 5.089042703582533e-06,
"loss": 0.0101,
"step": 535
},
{
"epoch": 19.142857142857142,
"grad_norm": 0.16291485726833344,
"learning_rate": 5.071235518535516e-06,
"loss": 0.0105,
"step": 536
},
{
"epoch": 19.178571428571427,
"grad_norm": 0.17317119240760803,
"learning_rate": 5.053427429716867e-06,
"loss": 0.0106,
"step": 537
},
{
"epoch": 19.214285714285715,
"grad_norm": 0.14252512156963348,
"learning_rate": 5.0356186630594585e-06,
"loss": 0.0076,
"step": 538
},
{
"epoch": 19.25,
"grad_norm": 0.17922668159008026,
"learning_rate": 5.017809444504768e-06,
"loss": 0.0117,
"step": 539
},
{
"epoch": 19.285714285714285,
"grad_norm": 0.2028946876525879,
"learning_rate": 5e-06,
"loss": 0.012,
"step": 540
},
{
"epoch": 19.321428571428573,
"grad_norm": 0.13679425418376923,
"learning_rate": 4.982190555495236e-06,
"loss": 0.0073,
"step": 541
},
{
"epoch": 19.357142857142858,
"grad_norm": 0.6376967430114746,
"learning_rate": 4.964381336940542e-06,
"loss": 0.0157,
"step": 542
},
{
"epoch": 19.392857142857142,
"grad_norm": 0.19504405558109283,
"learning_rate": 4.946572570283135e-06,
"loss": 0.0098,
"step": 543
},
{
"epoch": 19.428571428571427,
"grad_norm": 0.21073895692825317,
"learning_rate": 4.928764481464485e-06,
"loss": 0.0138,
"step": 544
},
{
"epoch": 19.464285714285715,
"grad_norm": 0.3294600546360016,
"learning_rate": 4.910957296417467e-06,
"loss": 0.0131,
"step": 545
},
{
"epoch": 19.5,
"grad_norm": 0.21520303189754486,
"learning_rate": 4.893151241063493e-06,
"loss": 0.0133,
"step": 546
},
{
"epoch": 19.535714285714285,
"grad_norm": 0.2246185541152954,
"learning_rate": 4.875346541309637e-06,
"loss": 0.0139,
"step": 547
},
{
"epoch": 19.571428571428573,
"grad_norm": 0.15642490983009338,
"learning_rate": 4.857543423045775e-06,
"loss": 0.0105,
"step": 548
},
{
"epoch": 19.607142857142858,
"grad_norm": 0.1819118857383728,
"learning_rate": 4.839742112141725e-06,
"loss": 0.0104,
"step": 549
},
{
"epoch": 19.642857142857142,
"grad_norm": 0.2114618718624115,
"learning_rate": 4.821942834444367e-06,
"loss": 0.0127,
"step": 550
},
{
"epoch": 19.678571428571427,
"grad_norm": 0.2271358221769333,
"learning_rate": 4.804145815774787e-06,
"loss": 0.0106,
"step": 551
},
{
"epoch": 19.714285714285715,
"grad_norm": 0.21014434099197388,
"learning_rate": 4.786351281925417e-06,
"loss": 0.0114,
"step": 552
},
{
"epoch": 19.75,
"grad_norm": 0.21880529820919037,
"learning_rate": 4.768559458657156e-06,
"loss": 0.0127,
"step": 553
},
{
"epoch": 19.785714285714285,
"grad_norm": 0.28759488463401794,
"learning_rate": 4.750770571696514e-06,
"loss": 0.0176,
"step": 554
},
{
"epoch": 19.821428571428573,
"grad_norm": 0.2287236750125885,
"learning_rate": 4.732984846732755e-06,
"loss": 0.0139,
"step": 555
},
{
"epoch": 19.857142857142858,
"grad_norm": 0.16419465839862823,
"learning_rate": 4.7152025094150214e-06,
"loss": 0.0098,
"step": 556
},
{
"epoch": 19.892857142857142,
"grad_norm": 0.18456807732582092,
"learning_rate": 4.697423785349475e-06,
"loss": 0.0112,
"step": 557
},
{
"epoch": 19.928571428571427,
"grad_norm": 0.24935705959796906,
"learning_rate": 4.679648900096436e-06,
"loss": 0.018,
"step": 558
},
{
"epoch": 19.964285714285715,
"grad_norm": 0.208778977394104,
"learning_rate": 4.661878079167527e-06,
"loss": 0.0134,
"step": 559
},
{
"epoch": 20.0,
"grad_norm": 0.16569219529628754,
"learning_rate": 4.644111548022798e-06,
"loss": 0.0077,
"step": 560
},
{
"epoch": 20.035714285714285,
"grad_norm": 0.16942204535007477,
"learning_rate": 4.626349532067879e-06,
"loss": 0.0101,
"step": 561
},
{
"epoch": 20.071428571428573,
"grad_norm": 0.143929585814476,
"learning_rate": 4.608592256651117e-06,
"loss": 0.0083,
"step": 562
},
{
"epoch": 20.107142857142858,
"grad_norm": 0.18715058267116547,
"learning_rate": 4.5908399470607106e-06,
"loss": 0.0109,
"step": 563
},
{
"epoch": 20.142857142857142,
"grad_norm": 0.14599822461605072,
"learning_rate": 4.573092828521857e-06,
"loss": 0.0101,
"step": 564
},
{
"epoch": 20.178571428571427,
"grad_norm": 0.1562821865081787,
"learning_rate": 4.555351126193901e-06,
"loss": 0.0097,
"step": 565
},
{
"epoch": 20.214285714285715,
"grad_norm": 0.34438711404800415,
"learning_rate": 4.537615065167461e-06,
"loss": 0.011,
"step": 566
},
{
"epoch": 20.25,
"grad_norm": 0.18419024348258972,
"learning_rate": 4.5198848704615915e-06,
"loss": 0.012,
"step": 567
},
{
"epoch": 20.285714285714285,
"grad_norm": 0.1408761739730835,
"learning_rate": 4.502160767020918e-06,
"loss": 0.0086,
"step": 568
},
{
"epoch": 20.321428571428573,
"grad_norm": 0.18469876050949097,
"learning_rate": 4.484442979712783e-06,
"loss": 0.0114,
"step": 569
},
{
"epoch": 20.357142857142858,
"grad_norm": 0.1582385003566742,
"learning_rate": 4.466731733324399e-06,
"loss": 0.0084,
"step": 570
},
{
"epoch": 20.392857142857142,
"grad_norm": 0.16665539145469666,
"learning_rate": 4.449027252559994e-06,
"loss": 0.0095,
"step": 571
},
{
"epoch": 20.428571428571427,
"grad_norm": 0.1675061285495758,
"learning_rate": 4.431329762037958e-06,
"loss": 0.009,
"step": 572
},
{
"epoch": 20.464285714285715,
"grad_norm": 0.22415050864219666,
"learning_rate": 4.413639486287992e-06,
"loss": 0.0147,
"step": 573
},
{
"epoch": 20.5,
"grad_norm": 0.24033626914024353,
"learning_rate": 4.395956649748269e-06,
"loss": 0.0144,
"step": 574
},
{
"epoch": 20.535714285714285,
"grad_norm": 0.18106594681739807,
"learning_rate": 4.3782814767625755e-06,
"loss": 0.0097,
"step": 575
},
{
"epoch": 20.571428571428573,
"grad_norm": 0.18736809492111206,
"learning_rate": 4.3606141915774695e-06,
"loss": 0.0107,
"step": 576
},
{
"epoch": 20.607142857142858,
"grad_norm": 0.1885957419872284,
"learning_rate": 4.342955018339442e-06,
"loss": 0.0125,
"step": 577
},
{
"epoch": 20.642857142857142,
"grad_norm": 0.17403589189052582,
"learning_rate": 4.3253041810920595e-06,
"loss": 0.0097,
"step": 578
},
{
"epoch": 20.678571428571427,
"grad_norm": 0.2727905511856079,
"learning_rate": 4.307661903773129e-06,
"loss": 0.0104,
"step": 579
},
{
"epoch": 20.714285714285715,
"grad_norm": 0.17853574454784393,
"learning_rate": 4.290028410211866e-06,
"loss": 0.0094,
"step": 580
},
{
"epoch": 20.75,
"grad_norm": 0.17175881564617157,
"learning_rate": 4.272403924126035e-06,
"loss": 0.0108,
"step": 581
},
{
"epoch": 20.785714285714285,
"grad_norm": 0.19546657800674438,
"learning_rate": 4.254788669119127e-06,
"loss": 0.0138,
"step": 582
},
{
"epoch": 20.821428571428573,
"grad_norm": 0.2268596589565277,
"learning_rate": 4.237182868677519e-06,
"loss": 0.0121,
"step": 583
},
{
"epoch": 20.857142857142858,
"grad_norm": 0.22344724833965302,
"learning_rate": 4.219586746167632e-06,
"loss": 0.0122,
"step": 584
},
{
"epoch": 20.892857142857142,
"grad_norm": 0.18819448351860046,
"learning_rate": 4.2020005248331056e-06,
"loss": 0.0115,
"step": 585
},
{
"epoch": 20.928571428571427,
"grad_norm": 0.27732348442077637,
"learning_rate": 4.18442442779196e-06,
"loss": 0.0163,
"step": 586
},
{
"epoch": 20.964285714285715,
"grad_norm": 0.18527792394161224,
"learning_rate": 4.166858678033771e-06,
"loss": 0.0113,
"step": 587
},
{
"epoch": 21.0,
"grad_norm": 0.18095916509628296,
"learning_rate": 4.149303498416838e-06,
"loss": 0.0107,
"step": 588
},
{
"epoch": 21.035714285714285,
"grad_norm": 0.1701073944568634,
"learning_rate": 4.131759111665349e-06,
"loss": 0.0105,
"step": 589
},
{
"epoch": 21.071428571428573,
"grad_norm": 0.14660249650478363,
"learning_rate": 4.114225740366569e-06,
"loss": 0.0073,
"step": 590
},
{
"epoch": 21.107142857142858,
"grad_norm": 0.1561962366104126,
"learning_rate": 4.096703606968007e-06,
"loss": 0.0095,
"step": 591
},
{
"epoch": 21.142857142857142,
"grad_norm": 0.15540745854377747,
"learning_rate": 4.079192933774592e-06,
"loss": 0.0096,
"step": 592
},
{
"epoch": 21.178571428571427,
"grad_norm": 0.14585469663143158,
"learning_rate": 4.061693942945863e-06,
"loss": 0.0088,
"step": 593
},
{
"epoch": 21.214285714285715,
"grad_norm": 0.15605901181697845,
"learning_rate": 4.04420685649314e-06,
"loss": 0.0092,
"step": 594
},
{
"epoch": 21.25,
"grad_norm": 0.14791828393936157,
"learning_rate": 4.026731896276708e-06,
"loss": 0.0088,
"step": 595
},
{
"epoch": 21.285714285714285,
"grad_norm": 0.16311927139759064,
"learning_rate": 4.009269284003014e-06,
"loss": 0.0105,
"step": 596
},
{
"epoch": 21.321428571428573,
"grad_norm": 0.1642966866493225,
"learning_rate": 3.991819241221836e-06,
"loss": 0.0096,
"step": 597
},
{
"epoch": 21.357142857142858,
"grad_norm": 0.15627528727054596,
"learning_rate": 3.974381989323484e-06,
"loss": 0.0087,
"step": 598
},
{
"epoch": 21.392857142857142,
"grad_norm": 0.1767517775297165,
"learning_rate": 3.956957749535997e-06,
"loss": 0.0111,
"step": 599
},
{
"epoch": 21.428571428571427,
"grad_norm": 0.1780816912651062,
"learning_rate": 3.939546742922318e-06,
"loss": 0.0108,
"step": 600
},
{
"epoch": 21.428571428571427,
"eval_loss": 2.483830213546753,
"eval_runtime": 0.4708,
"eval_samples_per_second": 297.392,
"eval_steps_per_second": 2.124,
"step": 600
},
{
"epoch": 21.464285714285715,
"grad_norm": 0.15922360122203827,
"learning_rate": 3.9221491903775014e-06,
"loss": 0.0088,
"step": 601
},
{
"epoch": 21.5,
"grad_norm": 0.14839443564414978,
"learning_rate": 3.904765312625916e-06,
"loss": 0.0093,
"step": 602
},
{
"epoch": 21.535714285714285,
"grad_norm": 0.20598483085632324,
"learning_rate": 3.887395330218429e-06,
"loss": 0.0122,
"step": 603
},
{
"epoch": 21.571428571428573,
"grad_norm": 0.2388192117214203,
"learning_rate": 3.8700394635296166e-06,
"loss": 0.0101,
"step": 604
},
{
"epoch": 21.607142857142858,
"grad_norm": 0.21544699370861053,
"learning_rate": 3.852697932754974e-06,
"loss": 0.0126,
"step": 605
},
{
"epoch": 21.642857142857142,
"grad_norm": 0.2086128294467926,
"learning_rate": 3.835370957908108e-06,
"loss": 0.0108,
"step": 606
},
{
"epoch": 21.678571428571427,
"grad_norm": 0.1632758378982544,
"learning_rate": 3.818058758817956e-06,
"loss": 0.0091,
"step": 607
},
{
"epoch": 21.714285714285715,
"grad_norm": 0.21566864848136902,
"learning_rate": 3.800761555125997e-06,
"loss": 0.0118,
"step": 608
},
{
"epoch": 21.75,
"grad_norm": 0.1939634531736374,
"learning_rate": 3.783479566283457e-06,
"loss": 0.0107,
"step": 609
},
{
"epoch": 21.785714285714285,
"grad_norm": 0.24293866753578186,
"learning_rate": 3.7662130115485317e-06,
"loss": 0.0147,
"step": 610
},
{
"epoch": 21.821428571428573,
"grad_norm": 0.22918657958507538,
"learning_rate": 3.748962109983605e-06,
"loss": 0.0125,
"step": 611
},
{
"epoch": 21.857142857142858,
"grad_norm": 0.1861945390701294,
"learning_rate": 3.731727080452464e-06,
"loss": 0.011,
"step": 612
},
{
"epoch": 21.892857142857142,
"grad_norm": 0.2174372524023056,
"learning_rate": 3.714508141617527e-06,
"loss": 0.0121,
"step": 613
},
{
"epoch": 21.928571428571427,
"grad_norm": 0.23144198954105377,
"learning_rate": 3.69730551193707e-06,
"loss": 0.0128,
"step": 614
},
{
"epoch": 21.964285714285715,
"grad_norm": 0.20174066722393036,
"learning_rate": 3.6801194096624515e-06,
"loss": 0.0121,
"step": 615
},
{
"epoch": 22.0,
"grad_norm": 0.16544412076473236,
"learning_rate": 3.6629500528353464e-06,
"loss": 0.0098,
"step": 616
},
{
"epoch": 22.035714285714285,
"grad_norm": 0.13743500411510468,
"learning_rate": 3.6457976592849753e-06,
"loss": 0.0082,
"step": 617
},
{
"epoch": 22.071428571428573,
"grad_norm": 0.14166727662086487,
"learning_rate": 3.6286624466253496e-06,
"loss": 0.0079,
"step": 618
},
{
"epoch": 22.107142857142858,
"grad_norm": 0.13137659430503845,
"learning_rate": 3.6115446322525007e-06,
"loss": 0.0074,
"step": 619
},
{
"epoch": 22.142857142857142,
"grad_norm": 0.1719367355108261,
"learning_rate": 3.594444433341725e-06,
"loss": 0.0113,
"step": 620
},
{
"epoch": 22.178571428571427,
"grad_norm": 0.14747175574302673,
"learning_rate": 3.5773620668448384e-06,
"loss": 0.0079,
"step": 621
},
{
"epoch": 22.214285714285715,
"grad_norm": 0.141871839761734,
"learning_rate": 3.560297749487407e-06,
"loss": 0.0077,
"step": 622
},
{
"epoch": 22.25,
"grad_norm": 0.15803030133247375,
"learning_rate": 3.543251697766006e-06,
"loss": 0.0097,
"step": 623
},
{
"epoch": 22.285714285714285,
"grad_norm": 0.12996134161949158,
"learning_rate": 3.526224127945479e-06,
"loss": 0.0074,
"step": 624
},
{
"epoch": 22.321428571428573,
"grad_norm": 0.14572900533676147,
"learning_rate": 3.5092152560561833e-06,
"loss": 0.0087,
"step": 625
},
{
"epoch": 22.357142857142858,
"grad_norm": 0.13245287537574768,
"learning_rate": 3.4922252978912523e-06,
"loss": 0.0069,
"step": 626
},
{
"epoch": 22.392857142857142,
"grad_norm": 0.1103530079126358,
"learning_rate": 3.475254469003865e-06,
"loss": 0.0076,
"step": 627
},
{
"epoch": 22.428571428571427,
"grad_norm": 0.15494345128536224,
"learning_rate": 3.4583029847044996e-06,
"loss": 0.0099,
"step": 628
},
{
"epoch": 22.464285714285715,
"grad_norm": 0.14390411972999573,
"learning_rate": 3.4413710600582096e-06,
"loss": 0.0092,
"step": 629
},
{
"epoch": 22.5,
"grad_norm": 0.18431764841079712,
"learning_rate": 3.424458909881897e-06,
"loss": 0.0113,
"step": 630
},
{
"epoch": 22.535714285714285,
"grad_norm": 0.17182418704032898,
"learning_rate": 3.4075667487415785e-06,
"loss": 0.0102,
"step": 631
},
{
"epoch": 22.571428571428573,
"grad_norm": 0.2069699615240097,
"learning_rate": 3.3906947909496696e-06,
"loss": 0.0126,
"step": 632
},
{
"epoch": 22.607142857142858,
"grad_norm": 0.17551185190677643,
"learning_rate": 3.3738432505622653e-06,
"loss": 0.0099,
"step": 633
},
{
"epoch": 22.642857142857142,
"grad_norm": 0.17832733690738678,
"learning_rate": 3.357012341376421e-06,
"loss": 0.0105,
"step": 634
},
{
"epoch": 22.678571428571427,
"grad_norm": 0.25220856070518494,
"learning_rate": 3.3402022769274422e-06,
"loss": 0.014,
"step": 635
},
{
"epoch": 22.714285714285715,
"grad_norm": 0.1663549244403839,
"learning_rate": 3.3234132704861786e-06,
"loss": 0.0094,
"step": 636
},
{
"epoch": 22.75,
"grad_norm": 0.23084159195423126,
"learning_rate": 3.306645535056312e-06,
"loss": 0.0127,
"step": 637
},
{
"epoch": 22.785714285714285,
"grad_norm": 0.18974913656711578,
"learning_rate": 3.289899283371657e-06,
"loss": 0.0125,
"step": 638
},
{
"epoch": 22.821428571428573,
"grad_norm": 0.17198923230171204,
"learning_rate": 3.273174727893463e-06,
"loss": 0.0098,
"step": 639
},
{
"epoch": 22.857142857142858,
"grad_norm": 0.2270413190126419,
"learning_rate": 3.2564720808077167e-06,
"loss": 0.0149,
"step": 640
},
{
"epoch": 22.892857142857142,
"grad_norm": 0.20339983701705933,
"learning_rate": 3.2397915540224493e-06,
"loss": 0.0115,
"step": 641
},
{
"epoch": 22.928571428571427,
"grad_norm": 0.2048344910144806,
"learning_rate": 3.2231333591650567e-06,
"loss": 0.0111,
"step": 642
},
{
"epoch": 22.964285714285715,
"grad_norm": 0.19488964974880219,
"learning_rate": 3.2064977075795988e-06,
"loss": 0.0117,
"step": 643
},
{
"epoch": 23.0,
"grad_norm": 0.14864051342010498,
"learning_rate": 3.189884810324133e-06,
"loss": 0.0086,
"step": 644
},
{
"epoch": 23.035714285714285,
"grad_norm": 0.18157891929149628,
"learning_rate": 3.173294878168025e-06,
"loss": 0.0107,
"step": 645
},
{
"epoch": 23.071428571428573,
"grad_norm": 0.09408193826675415,
"learning_rate": 3.1567281215892868e-06,
"loss": 0.0047,
"step": 646
},
{
"epoch": 23.107142857142858,
"grad_norm": 0.15541407465934753,
"learning_rate": 3.140184750771895e-06,
"loss": 0.009,
"step": 647
},
{
"epoch": 23.142857142857142,
"grad_norm": 0.15836073458194733,
"learning_rate": 3.12366497560313e-06,
"loss": 0.0091,
"step": 648
},
{
"epoch": 23.178571428571427,
"grad_norm": 0.12873704731464386,
"learning_rate": 3.1071690056709125e-06,
"loss": 0.0079,
"step": 649
},
{
"epoch": 23.214285714285715,
"grad_norm": 0.15572570264339447,
"learning_rate": 3.090697050261143e-06,
"loss": 0.0082,
"step": 650
},
{
"epoch": 23.25,
"grad_norm": 0.14964070916175842,
"learning_rate": 3.074249318355046e-06,
"loss": 0.0078,
"step": 651
},
{
"epoch": 23.285714285714285,
"grad_norm": 0.13191375136375427,
"learning_rate": 3.057826018626527e-06,
"loss": 0.0077,
"step": 652
},
{
"epoch": 23.321428571428573,
"grad_norm": 0.13573727011680603,
"learning_rate": 3.0414273594395106e-06,
"loss": 0.0073,
"step": 653
},
{
"epoch": 23.357142857142858,
"grad_norm": 0.17368032038211823,
"learning_rate": 3.0250535488453077e-06,
"loss": 0.0113,
"step": 654
},
{
"epoch": 23.392857142857142,
"grad_norm": 0.14922013878822327,
"learning_rate": 3.008704794579973e-06,
"loss": 0.0079,
"step": 655
},
{
"epoch": 23.428571428571427,
"grad_norm": 0.14999383687973022,
"learning_rate": 2.9923813040616685e-06,
"loss": 0.0088,
"step": 656
},
{
"epoch": 23.464285714285715,
"grad_norm": 0.19925668835639954,
"learning_rate": 2.976083284388031e-06,
"loss": 0.0111,
"step": 657
},
{
"epoch": 23.5,
"grad_norm": 0.19494231045246124,
"learning_rate": 2.959810942333552e-06,
"loss": 0.0117,
"step": 658
},
{
"epoch": 23.535714285714285,
"grad_norm": 0.14841331541538239,
"learning_rate": 2.9435644843469434e-06,
"loss": 0.008,
"step": 659
},
{
"epoch": 23.571428571428573,
"grad_norm": 0.15695911645889282,
"learning_rate": 2.9273441165485227e-06,
"loss": 0.0078,
"step": 660
},
{
"epoch": 23.607142857142858,
"grad_norm": 0.2552220821380615,
"learning_rate": 2.9111500447276053e-06,
"loss": 0.0141,
"step": 661
},
{
"epoch": 23.642857142857142,
"grad_norm": 0.17610006034374237,
"learning_rate": 2.8949824743398804e-06,
"loss": 0.0095,
"step": 662
},
{
"epoch": 23.678571428571427,
"grad_norm": 0.17815729975700378,
"learning_rate": 2.8788416105048124e-06,
"loss": 0.0104,
"step": 663
},
{
"epoch": 23.714285714285715,
"grad_norm": 0.21268025040626526,
"learning_rate": 2.862727658003042e-06,
"loss": 0.0122,
"step": 664
},
{
"epoch": 23.75,
"grad_norm": 0.1655074954032898,
"learning_rate": 2.8466408212737777e-06,
"loss": 0.0091,
"step": 665
},
{
"epoch": 23.785714285714285,
"grad_norm": 0.18781046569347382,
"learning_rate": 2.83058130441221e-06,
"loss": 0.0115,
"step": 666
},
{
"epoch": 23.821428571428573,
"grad_norm": 0.19021618366241455,
"learning_rate": 2.8145493111669186e-06,
"loss": 0.01,
"step": 667
},
{
"epoch": 23.857142857142858,
"grad_norm": 0.21351297199726105,
"learning_rate": 2.79854504493729e-06,
"loss": 0.0133,
"step": 668
},
{
"epoch": 23.892857142857142,
"grad_norm": 0.17441827058792114,
"learning_rate": 2.782568708770933e-06,
"loss": 0.0094,
"step": 669
},
{
"epoch": 23.928571428571427,
"grad_norm": 0.18894660472869873,
"learning_rate": 2.7666205053611097e-06,
"loss": 0.0126,
"step": 670
},
{
"epoch": 23.964285714285715,
"grad_norm": 0.19022874534130096,
"learning_rate": 2.7507006370441557e-06,
"loss": 0.012,
"step": 671
},
{
"epoch": 24.0,
"grad_norm": 0.17531032860279083,
"learning_rate": 2.734809305796915e-06,
"loss": 0.0101,
"step": 672
},
{
"epoch": 24.035714285714285,
"grad_norm": 0.17306911945343018,
"learning_rate": 2.718946713234185e-06,
"loss": 0.0113,
"step": 673
},
{
"epoch": 24.071428571428573,
"grad_norm": 0.16345195472240448,
"learning_rate": 2.7031130606061486e-06,
"loss": 0.0094,
"step": 674
},
{
"epoch": 24.107142857142858,
"grad_norm": 0.1428937315940857,
"learning_rate": 2.687308548795825e-06,
"loss": 0.0078,
"step": 675
},
{
"epoch": 24.142857142857142,
"grad_norm": 0.1553509682416916,
"learning_rate": 2.67153337831652e-06,
"loss": 0.0085,
"step": 676
},
{
"epoch": 24.178571428571427,
"grad_norm": 0.16722197830677032,
"learning_rate": 2.6557877493092885e-06,
"loss": 0.0098,
"step": 677
},
{
"epoch": 24.214285714285715,
"grad_norm": 0.14916226267814636,
"learning_rate": 2.6400718615403852e-06,
"loss": 0.0075,
"step": 678
},
{
"epoch": 24.25,
"grad_norm": 0.1756383627653122,
"learning_rate": 2.624385914398737e-06,
"loss": 0.0122,
"step": 679
},
{
"epoch": 24.285714285714285,
"grad_norm": 0.12129461765289307,
"learning_rate": 2.608730106893411e-06,
"loss": 0.0067,
"step": 680
},
{
"epoch": 24.321428571428573,
"grad_norm": 0.1598646193742752,
"learning_rate": 2.5931046376510875e-06,
"loss": 0.0077,
"step": 681
},
{
"epoch": 24.357142857142858,
"grad_norm": 0.13373102247714996,
"learning_rate": 2.5775097049135445e-06,
"loss": 0.0075,
"step": 682
},
{
"epoch": 24.392857142857142,
"grad_norm": 0.1555156260728836,
"learning_rate": 2.561945506535144e-06,
"loss": 0.0102,
"step": 683
},
{
"epoch": 24.428571428571427,
"grad_norm": 0.15757839381694794,
"learning_rate": 2.5464122399803126e-06,
"loss": 0.0099,
"step": 684
},
{
"epoch": 24.464285714285715,
"grad_norm": 0.19057762622833252,
"learning_rate": 2.5309101023210426e-06,
"loss": 0.0125,
"step": 685
},
{
"epoch": 24.5,
"grad_norm": 0.11190243065357208,
"learning_rate": 2.5154392902343966e-06,
"loss": 0.0061,
"step": 686
},
{
"epoch": 24.535714285714285,
"grad_norm": 0.1816374808549881,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.0104,
"step": 687
},
{
"epoch": 24.571428571428573,
"grad_norm": 0.148861825466156,
"learning_rate": 2.4845924274975625e-06,
"loss": 0.008,
"step": 688
},
{
"epoch": 24.607142857142858,
"grad_norm": 0.14404255151748657,
"learning_rate": 2.4692167682043855e-06,
"loss": 0.0072,
"step": 689
},
{
"epoch": 24.642857142857142,
"grad_norm": 0.20183296501636505,
"learning_rate": 2.4538732171928847e-06,
"loss": 0.0129,
"step": 690
},
{
"epoch": 24.678571428571427,
"grad_norm": 0.16051512956619263,
"learning_rate": 2.4385619691281144e-06,
"loss": 0.0093,
"step": 691
},
{
"epoch": 24.714285714285715,
"grad_norm": 0.18522028625011444,
"learning_rate": 2.4232832182653014e-06,
"loss": 0.01,
"step": 692
},
{
"epoch": 24.75,
"grad_norm": 0.1713511347770691,
"learning_rate": 2.408037158447375e-06,
"loss": 0.0092,
"step": 693
},
{
"epoch": 24.785714285714285,
"grad_norm": 0.18777602910995483,
"learning_rate": 2.39282398310251e-06,
"loss": 0.0099,
"step": 694
},
{
"epoch": 24.821428571428573,
"grad_norm": 0.15512120723724365,
"learning_rate": 2.3776438852416743e-06,
"loss": 0.009,
"step": 695
},
{
"epoch": 24.857142857142858,
"grad_norm": 0.17730368673801422,
"learning_rate": 2.3624970574561773e-06,
"loss": 0.0098,
"step": 696
},
{
"epoch": 24.892857142857142,
"grad_norm": 0.2074640393257141,
"learning_rate": 2.3473836919152267e-06,
"loss": 0.013,
"step": 697
},
{
"epoch": 24.928571428571427,
"grad_norm": 0.17625132203102112,
"learning_rate": 2.332303980363497e-06,
"loss": 0.0098,
"step": 698
},
{
"epoch": 24.964285714285715,
"grad_norm": 0.19898360967636108,
"learning_rate": 2.317258114118686e-06,
"loss": 0.0109,
"step": 699
},
{
"epoch": 25.0,
"grad_norm": 0.19583548605442047,
"learning_rate": 2.3022462840690933e-06,
"loss": 0.0119,
"step": 700
},
{
"epoch": 25.0,
"eval_loss": 2.5329432487487793,
"eval_runtime": 0.4711,
"eval_samples_per_second": 297.205,
"eval_steps_per_second": 2.123,
"step": 700
},
{
"epoch": 25.035714285714285,
"grad_norm": 0.1791287660598755,
"learning_rate": 2.2872686806712037e-06,
"loss": 0.0101,
"step": 701
},
{
"epoch": 25.071428571428573,
"grad_norm": 0.15796615183353424,
"learning_rate": 2.272325493947257e-06,
"loss": 0.0078,
"step": 702
},
{
"epoch": 25.107142857142858,
"grad_norm": 0.1644110381603241,
"learning_rate": 2.257416913482853e-06,
"loss": 0.0084,
"step": 703
},
{
"epoch": 25.142857142857142,
"grad_norm": 0.1331474632024765,
"learning_rate": 2.2425431284245302e-06,
"loss": 0.0071,
"step": 704
},
{
"epoch": 25.178571428571427,
"grad_norm": 0.152842178940773,
"learning_rate": 2.2277043274773856e-06,
"loss": 0.0085,
"step": 705
},
{
"epoch": 25.214285714285715,
"grad_norm": 0.15128417313098907,
"learning_rate": 2.2129006989026612e-06,
"loss": 0.0079,
"step": 706
},
{
"epoch": 25.25,
"grad_norm": 0.18646658957004547,
"learning_rate": 2.1981324305153644e-06,
"loss": 0.0105,
"step": 707
},
{
"epoch": 25.285714285714285,
"grad_norm": 0.1679907739162445,
"learning_rate": 2.1833997096818897e-06,
"loss": 0.0099,
"step": 708
},
{
"epoch": 25.321428571428573,
"grad_norm": 0.13102103769779205,
"learning_rate": 2.168702723317632e-06,
"loss": 0.0067,
"step": 709
},
{
"epoch": 25.357142857142858,
"grad_norm": 0.1524365246295929,
"learning_rate": 2.1540416578846207e-06,
"loss": 0.0083,
"step": 710
},
{
"epoch": 25.392857142857142,
"grad_norm": 0.20382919907569885,
"learning_rate": 2.139416699389153e-06,
"loss": 0.0091,
"step": 711
},
{
"epoch": 25.428571428571427,
"grad_norm": 0.14647948741912842,
"learning_rate": 2.1248280333794347e-06,
"loss": 0.0075,
"step": 712
},
{
"epoch": 25.464285714285715,
"grad_norm": 0.15126314759254456,
"learning_rate": 2.1102758449432233e-06,
"loss": 0.0075,
"step": 713
},
{
"epoch": 25.5,
"grad_norm": 0.14693264663219452,
"learning_rate": 2.095760318705487e-06,
"loss": 0.0081,
"step": 714
},
{
"epoch": 25.535714285714285,
"grad_norm": 0.18826764822006226,
"learning_rate": 2.081281638826052e-06,
"loss": 0.01,
"step": 715
},
{
"epoch": 25.571428571428573,
"grad_norm": 0.1897839456796646,
"learning_rate": 2.0668399889972717e-06,
"loss": 0.0136,
"step": 716
},
{
"epoch": 25.607142857142858,
"grad_norm": 0.17151887714862823,
"learning_rate": 2.0524355524417017e-06,
"loss": 0.009,
"step": 717
},
{
"epoch": 25.642857142857142,
"grad_norm": 0.1408870816230774,
"learning_rate": 2.038068511909762e-06,
"loss": 0.0076,
"step": 718
},
{
"epoch": 25.678571428571427,
"grad_norm": 0.2066071778535843,
"learning_rate": 2.0237390496774284e-06,
"loss": 0.0108,
"step": 719
},
{
"epoch": 25.714285714285715,
"grad_norm": 0.17692652344703674,
"learning_rate": 2.00944734754392e-06,
"loss": 0.009,
"step": 720
},
{
"epoch": 25.75,
"grad_norm": 0.2316911220550537,
"learning_rate": 1.995193586829387e-06,
"loss": 0.0139,
"step": 721
},
{
"epoch": 25.785714285714285,
"grad_norm": 0.1862197071313858,
"learning_rate": 1.980977948372612e-06,
"loss": 0.01,
"step": 722
},
{
"epoch": 25.821428571428573,
"grad_norm": 0.21679328382015228,
"learning_rate": 1.966800612528723e-06,
"loss": 0.0125,
"step": 723
},
{
"epoch": 25.857142857142858,
"grad_norm": 0.19006772339344025,
"learning_rate": 1.952661759166893e-06,
"loss": 0.0115,
"step": 724
},
{
"epoch": 25.892857142857142,
"grad_norm": 0.12936274707317352,
"learning_rate": 1.9385615676680663e-06,
"loss": 0.0064,
"step": 725
},
{
"epoch": 25.928571428571427,
"grad_norm": 0.2046034336090088,
"learning_rate": 1.9245002169226814e-06,
"loss": 0.0117,
"step": 726
},
{
"epoch": 25.964285714285715,
"grad_norm": 0.16442671418190002,
"learning_rate": 1.910477885328399e-06,
"loss": 0.0098,
"step": 727
},
{
"epoch": 26.0,
"grad_norm": 0.15325023233890533,
"learning_rate": 1.8964947507878401e-06,
"loss": 0.0086,
"step": 728
},
{
"epoch": 26.035714285714285,
"grad_norm": 0.17084500193595886,
"learning_rate": 1.8825509907063328e-06,
"loss": 0.0094,
"step": 729
},
{
"epoch": 26.071428571428573,
"grad_norm": 0.1273665577173233,
"learning_rate": 1.8686467819896542e-06,
"loss": 0.007,
"step": 730
},
{
"epoch": 26.107142857142858,
"grad_norm": 0.1403261423110962,
"learning_rate": 1.8547823010417876e-06,
"loss": 0.0071,
"step": 731
},
{
"epoch": 26.142857142857142,
"grad_norm": 0.15290212631225586,
"learning_rate": 1.8409577237626935e-06,
"loss": 0.0092,
"step": 732
},
{
"epoch": 26.178571428571427,
"grad_norm": 0.16614960134029388,
"learning_rate": 1.8271732255460644e-06,
"loss": 0.0095,
"step": 733
},
{
"epoch": 26.214285714285715,
"grad_norm": 0.12265990674495697,
"learning_rate": 1.8134289812771077e-06,
"loss": 0.0068,
"step": 734
},
{
"epoch": 26.25,
"grad_norm": 0.12230860441923141,
"learning_rate": 1.7997251653303249e-06,
"loss": 0.0058,
"step": 735
},
{
"epoch": 26.285714285714285,
"grad_norm": 0.15943826735019684,
"learning_rate": 1.7860619515673034e-06,
"loss": 0.0089,
"step": 736
},
{
"epoch": 26.321428571428573,
"grad_norm": 0.15436235070228577,
"learning_rate": 1.7724395133345025e-06,
"loss": 0.0078,
"step": 737
},
{
"epoch": 26.357142857142858,
"grad_norm": 0.19412128627300262,
"learning_rate": 1.7588580234610592e-06,
"loss": 0.0112,
"step": 738
},
{
"epoch": 26.392857142857142,
"grad_norm": 0.15104462206363678,
"learning_rate": 1.7453176542565958e-06,
"loss": 0.0078,
"step": 739
},
{
"epoch": 26.428571428571427,
"grad_norm": 0.1390601247549057,
"learning_rate": 1.7318185775090336e-06,
"loss": 0.0084,
"step": 740
},
{
"epoch": 26.464285714285715,
"grad_norm": 0.1711408495903015,
"learning_rate": 1.7183609644824096e-06,
"loss": 0.01,
"step": 741
},
{
"epoch": 26.5,
"grad_norm": 0.16743941605091095,
"learning_rate": 1.7049449859147121e-06,
"loss": 0.008,
"step": 742
},
{
"epoch": 26.535714285714285,
"grad_norm": 0.14585740864276886,
"learning_rate": 1.6915708120157042e-06,
"loss": 0.008,
"step": 743
},
{
"epoch": 26.571428571428573,
"grad_norm": 0.17908959090709686,
"learning_rate": 1.67823861246477e-06,
"loss": 0.0108,
"step": 744
},
{
"epoch": 26.607142857142858,
"grad_norm": 0.15890510380268097,
"learning_rate": 1.6649485564087646e-06,
"loss": 0.0088,
"step": 745
},
{
"epoch": 26.642857142857142,
"grad_norm": 0.16608744859695435,
"learning_rate": 1.6517008124598622e-06,
"loss": 0.0102,
"step": 746
},
{
"epoch": 26.678571428571427,
"grad_norm": 0.20837059617042542,
"learning_rate": 1.6384955486934157e-06,
"loss": 0.0123,
"step": 747
},
{
"epoch": 26.714285714285715,
"grad_norm": 0.1581769585609436,
"learning_rate": 1.6253329326458367e-06,
"loss": 0.0086,
"step": 748
},
{
"epoch": 26.75,
"grad_norm": 0.1689825803041458,
"learning_rate": 1.612213131312454e-06,
"loss": 0.0089,
"step": 749
},
{
"epoch": 26.785714285714285,
"grad_norm": 0.18127354979515076,
"learning_rate": 1.5991363111454023e-06,
"loss": 0.009,
"step": 750
},
{
"epoch": 26.821428571428573,
"grad_norm": 0.18165580928325653,
"learning_rate": 1.5861026380515165e-06,
"loss": 0.0098,
"step": 751
},
{
"epoch": 26.857142857142858,
"grad_norm": 0.18343548476696014,
"learning_rate": 1.5731122773902147e-06,
"loss": 0.0102,
"step": 752
},
{
"epoch": 26.892857142857142,
"grad_norm": 0.17770545184612274,
"learning_rate": 1.5601653939714073e-06,
"loss": 0.0098,
"step": 753
},
{
"epoch": 26.928571428571427,
"grad_norm": 0.19391901791095734,
"learning_rate": 1.547262152053406e-06,
"loss": 0.0112,
"step": 754
},
{
"epoch": 26.964285714285715,
"grad_norm": 0.19513018429279327,
"learning_rate": 1.5344027153408375e-06,
"loss": 0.0097,
"step": 755
},
{
"epoch": 27.0,
"grad_norm": 0.15814387798309326,
"learning_rate": 1.5215872469825682e-06,
"loss": 0.0085,
"step": 756
},
{
"epoch": 27.035714285714285,
"grad_norm": 0.17074698209762573,
"learning_rate": 1.5088159095696365e-06,
"loss": 0.0086,
"step": 757
},
{
"epoch": 27.071428571428573,
"grad_norm": 0.16025789082050323,
"learning_rate": 1.4960888651331833e-06,
"loss": 0.0088,
"step": 758
},
{
"epoch": 27.107142857142858,
"grad_norm": 0.1347316950559616,
"learning_rate": 1.4834062751424018e-06,
"loss": 0.008,
"step": 759
},
{
"epoch": 27.142857142857142,
"grad_norm": 0.1638944447040558,
"learning_rate": 1.4707683005024898e-06,
"loss": 0.0093,
"step": 760
},
{
"epoch": 27.178571428571427,
"grad_norm": 0.14700771868228912,
"learning_rate": 1.4581751015526035e-06,
"loss": 0.007,
"step": 761
},
{
"epoch": 27.214285714285715,
"grad_norm": 0.1447134166955948,
"learning_rate": 1.4456268380638262e-06,
"loss": 0.0079,
"step": 762
},
{
"epoch": 27.25,
"grad_norm": 0.1443662941455841,
"learning_rate": 1.4331236692371386e-06,
"loss": 0.0086,
"step": 763
},
{
"epoch": 27.285714285714285,
"grad_norm": 0.17928117513656616,
"learning_rate": 1.4206657537014078e-06,
"loss": 0.0087,
"step": 764
},
{
"epoch": 27.321428571428573,
"grad_norm": 0.13179831206798553,
"learning_rate": 1.4082532495113627e-06,
"loss": 0.0083,
"step": 765
},
{
"epoch": 27.357142857142858,
"grad_norm": 0.12459900975227356,
"learning_rate": 1.3958863141455937e-06,
"loss": 0.0068,
"step": 766
},
{
"epoch": 27.392857142857142,
"grad_norm": 0.17652547359466553,
"learning_rate": 1.38356510450456e-06,
"loss": 0.0098,
"step": 767
},
{
"epoch": 27.428571428571427,
"grad_norm": 0.20520183444023132,
"learning_rate": 1.3712897769085903e-06,
"loss": 0.0126,
"step": 768
},
{
"epoch": 27.464285714285715,
"grad_norm": 0.18517449498176575,
"learning_rate": 1.3590604870959046e-06,
"loss": 0.0093,
"step": 769
},
{
"epoch": 27.5,
"grad_norm": 0.19475194811820984,
"learning_rate": 1.3468773902206378e-06,
"loss": 0.011,
"step": 770
},
{
"epoch": 27.535714285714285,
"grad_norm": 0.1644594371318817,
"learning_rate": 1.3347406408508695e-06,
"loss": 0.0084,
"step": 771
},
{
"epoch": 27.571428571428573,
"grad_norm": 0.16756068170070648,
"learning_rate": 1.322650392966665e-06,
"loss": 0.0082,
"step": 772
},
{
"epoch": 27.607142857142858,
"grad_norm": 0.14502035081386566,
"learning_rate": 1.3106067999581224e-06,
"loss": 0.0084,
"step": 773
},
{
"epoch": 27.642857142857142,
"grad_norm": 0.14674372971057892,
"learning_rate": 1.298610014623423e-06,
"loss": 0.0076,
"step": 774
},
{
"epoch": 27.678571428571427,
"grad_norm": 0.17021062970161438,
"learning_rate": 1.2866601891668945e-06,
"loss": 0.0098,
"step": 775
},
{
"epoch": 27.714285714285715,
"grad_norm": 0.1700037717819214,
"learning_rate": 1.2747574751970826e-06,
"loss": 0.0077,
"step": 776
},
{
"epoch": 27.75,
"grad_norm": 0.19784575700759888,
"learning_rate": 1.2629020237248241e-06,
"loss": 0.0107,
"step": 777
},
{
"epoch": 27.785714285714285,
"grad_norm": 0.15924608707427979,
"learning_rate": 1.2510939851613285e-06,
"loss": 0.0096,
"step": 778
},
{
"epoch": 27.821428571428573,
"grad_norm": 0.1310548335313797,
"learning_rate": 1.239333509316281e-06,
"loss": 0.0067,
"step": 779
},
{
"epoch": 27.857142857142858,
"grad_norm": 0.17363598942756653,
"learning_rate": 1.2276207453959283e-06,
"loss": 0.0088,
"step": 780
},
{
"epoch": 27.892857142857142,
"grad_norm": 0.161713108420372,
"learning_rate": 1.2159558420011907e-06,
"loss": 0.0082,
"step": 781
},
{
"epoch": 27.928571428571427,
"grad_norm": 0.18847158551216125,
"learning_rate": 1.2043389471257833e-06,
"loss": 0.0105,
"step": 782
},
{
"epoch": 27.964285714285715,
"grad_norm": 0.16982363164424896,
"learning_rate": 1.1927702081543279e-06,
"loss": 0.01,
"step": 783
},
{
"epoch": 28.0,
"grad_norm": 0.15337583422660828,
"learning_rate": 1.1812497718604887e-06,
"loss": 0.0082,
"step": 784
},
{
"epoch": 28.035714285714285,
"grad_norm": 0.15224626660346985,
"learning_rate": 1.1697777844051105e-06,
"loss": 0.0076,
"step": 785
},
{
"epoch": 28.071428571428573,
"grad_norm": 0.1463271677494049,
"learning_rate": 1.158354391334362e-06,
"loss": 0.0082,
"step": 786
},
{
"epoch": 28.107142857142858,
"grad_norm": 0.1552111953496933,
"learning_rate": 1.1469797375778902e-06,
"loss": 0.0083,
"step": 787
},
{
"epoch": 28.142857142857142,
"grad_norm": 0.14375971257686615,
"learning_rate": 1.1356539674469852e-06,
"loss": 0.0064,
"step": 788
},
{
"epoch": 28.178571428571427,
"grad_norm": 0.14443305134773254,
"learning_rate": 1.1243772246327416e-06,
"loss": 0.009,
"step": 789
},
{
"epoch": 28.214285714285715,
"grad_norm": 0.18736590445041656,
"learning_rate": 1.1131496522042424e-06,
"loss": 0.0109,
"step": 790
},
{
"epoch": 28.25,
"grad_norm": 0.1467347890138626,
"learning_rate": 1.1019713926067394e-06,
"loss": 0.0083,
"step": 791
},
{
"epoch": 28.285714285714285,
"grad_norm": 0.1514614075422287,
"learning_rate": 1.0908425876598512e-06,
"loss": 0.0091,
"step": 792
},
{
"epoch": 28.321428571428573,
"grad_norm": 0.16135913133621216,
"learning_rate": 1.0797633785557582e-06,
"loss": 0.0093,
"step": 793
},
{
"epoch": 28.357142857142858,
"grad_norm": 0.12219899147748947,
"learning_rate": 1.068733905857413e-06,
"loss": 0.0057,
"step": 794
},
{
"epoch": 28.392857142857142,
"grad_norm": 0.1365957111120224,
"learning_rate": 1.0577543094967613e-06,
"loss": 0.0063,
"step": 795
},
{
"epoch": 28.428571428571427,
"grad_norm": 0.14362846314907074,
"learning_rate": 1.0468247287729593e-06,
"loss": 0.0072,
"step": 796
},
{
"epoch": 28.464285714285715,
"grad_norm": 0.20032982528209686,
"learning_rate": 1.0359453023506123e-06,
"loss": 0.0116,
"step": 797
},
{
"epoch": 28.5,
"grad_norm": 0.14230377972126007,
"learning_rate": 1.0251161682580125e-06,
"loss": 0.0074,
"step": 798
},
{
"epoch": 28.535714285714285,
"grad_norm": 0.17326034605503082,
"learning_rate": 1.0143374638853892e-06,
"loss": 0.0099,
"step": 799
},
{
"epoch": 28.571428571428573,
"grad_norm": 0.1817915439605713,
"learning_rate": 1.0036093259831624e-06,
"loss": 0.0106,
"step": 800
},
{
"epoch": 28.571428571428573,
"eval_loss": 2.567107677459717,
"eval_runtime": 0.4721,
"eval_samples_per_second": 296.568,
"eval_steps_per_second": 2.118,
"step": 800
},
{
"epoch": 28.607142857142858,
"grad_norm": 0.16237416863441467,
"learning_rate": 9.929318906602176e-07,
"loss": 0.0088,
"step": 801
},
{
"epoch": 28.642857142857142,
"grad_norm": 0.15039542317390442,
"learning_rate": 9.823052933821643e-07,
"loss": 0.0067,
"step": 802
},
{
"epoch": 28.678571428571427,
"grad_norm": 0.1549944132566452,
"learning_rate": 9.717296689696283e-07,
"loss": 0.0081,
"step": 803
},
{
"epoch": 28.714285714285715,
"grad_norm": 0.161531463265419,
"learning_rate": 9.612051515965388e-07,
"loss": 0.0069,
"step": 804
},
{
"epoch": 28.75,
"grad_norm": 0.1960175782442093,
"learning_rate": 9.507318747884243e-07,
"loss": 0.0107,
"step": 805
},
{
"epoch": 28.785714285714285,
"grad_norm": 0.23234865069389343,
"learning_rate": 9.403099714207175e-07,
"loss": 0.0137,
"step": 806
},
{
"epoch": 28.821428571428573,
"grad_norm": 0.16583284735679626,
"learning_rate": 9.299395737170758e-07,
"loss": 0.0096,
"step": 807
},
{
"epoch": 28.857142857142858,
"grad_norm": 0.14131559431552887,
"learning_rate": 9.196208132476963e-07,
"loss": 0.0068,
"step": 808
},
{
"epoch": 28.892857142857142,
"grad_norm": 0.18351143598556519,
"learning_rate": 9.093538209276487e-07,
"loss": 0.0074,
"step": 809
},
{
"epoch": 28.928571428571427,
"grad_norm": 0.18050339818000793,
"learning_rate": 8.991387270152202e-07,
"loss": 0.0105,
"step": 810
},
{
"epoch": 28.964285714285715,
"grad_norm": 0.16577477753162384,
"learning_rate": 8.88975661110254e-07,
"loss": 0.0079,
"step": 811
},
{
"epoch": 29.0,
"grad_norm": 0.1450648307800293,
"learning_rate": 8.78864752152509e-07,
"loss": 0.007,
"step": 812
},
{
"epoch": 29.035714285714285,
"grad_norm": 0.17009346187114716,
"learning_rate": 8.688061284200266e-07,
"loss": 0.0091,
"step": 813
},
{
"epoch": 29.071428571428573,
"grad_norm": 0.17218393087387085,
"learning_rate": 8.587999175274986e-07,
"loss": 0.0087,
"step": 814
},
{
"epoch": 29.107142857142858,
"grad_norm": 0.17585642635822296,
"learning_rate": 8.488462464246495e-07,
"loss": 0.0089,
"step": 815
},
{
"epoch": 29.142857142857142,
"grad_norm": 0.14578095078468323,
"learning_rate": 8.389452413946314e-07,
"loss": 0.0077,
"step": 816
},
{
"epoch": 29.178571428571427,
"grad_norm": 0.16501793265342712,
"learning_rate": 8.290970280524124e-07,
"loss": 0.0088,
"step": 817
},
{
"epoch": 29.214285714285715,
"grad_norm": 0.1998208463191986,
"learning_rate": 8.193017313431872e-07,
"loss": 0.0106,
"step": 818
},
{
"epoch": 29.25,
"grad_norm": 0.13896793127059937,
"learning_rate": 8.095594755407971e-07,
"loss": 0.0068,
"step": 819
},
{
"epoch": 29.285714285714285,
"grad_norm": 0.1557644158601761,
"learning_rate": 7.99870384246143e-07,
"loss": 0.0085,
"step": 820
},
{
"epoch": 29.321428571428573,
"grad_norm": 0.1574714481830597,
"learning_rate": 7.902345803856265e-07,
"loss": 0.0075,
"step": 821
},
{
"epoch": 29.357142857142858,
"grad_norm": 0.15094228088855743,
"learning_rate": 7.806521862095834e-07,
"loss": 0.0074,
"step": 822
},
{
"epoch": 29.392857142857142,
"grad_norm": 0.13187389075756073,
"learning_rate": 7.711233232907401e-07,
"loss": 0.0068,
"step": 823
},
{
"epoch": 29.428571428571427,
"grad_norm": 0.1584806889295578,
"learning_rate": 7.616481125226632e-07,
"loss": 0.0087,
"step": 824
},
{
"epoch": 29.464285714285715,
"grad_norm": 0.17923088371753693,
"learning_rate": 7.522266741182305e-07,
"loss": 0.0106,
"step": 825
},
{
"epoch": 29.5,
"grad_norm": 0.15848712623119354,
"learning_rate": 7.42859127608106e-07,
"loss": 0.0094,
"step": 826
},
{
"epoch": 29.535714285714285,
"grad_norm": 0.21657797694206238,
"learning_rate": 7.33545591839222e-07,
"loss": 0.0114,
"step": 827
},
{
"epoch": 29.571428571428573,
"grad_norm": 0.14506831765174866,
"learning_rate": 7.242861849732696e-07,
"loss": 0.0069,
"step": 828
},
{
"epoch": 29.607142857142858,
"grad_norm": 0.14687834680080414,
"learning_rate": 7.150810244852036e-07,
"loss": 0.0075,
"step": 829
},
{
"epoch": 29.642857142857142,
"grad_norm": 0.1327245533466339,
"learning_rate": 7.059302271617485e-07,
"loss": 0.0067,
"step": 830
},
{
"epoch": 29.678571428571427,
"grad_norm": 0.17178060114383698,
"learning_rate": 6.968339090999188e-07,
"loss": 0.0094,
"step": 831
},
{
"epoch": 29.714285714285715,
"grad_norm": 0.13178479671478271,
"learning_rate": 6.877921857055476e-07,
"loss": 0.0068,
"step": 832
},
{
"epoch": 29.75,
"grad_norm": 0.17513161897659302,
"learning_rate": 6.78805171691817e-07,
"loss": 0.009,
"step": 833
},
{
"epoch": 29.785714285714285,
"grad_norm": 0.16417403519153595,
"learning_rate": 6.698729810778065e-07,
"loss": 0.0088,
"step": 834
},
{
"epoch": 29.821428571428573,
"grad_norm": 0.1668260246515274,
"learning_rate": 6.609957271870505e-07,
"loss": 0.0087,
"step": 835
},
{
"epoch": 29.857142857142858,
"grad_norm": 0.13266846537590027,
"learning_rate": 6.521735226460901e-07,
"loss": 0.0065,
"step": 836
},
{
"epoch": 29.892857142857142,
"grad_norm": 0.18691495060920715,
"learning_rate": 6.43406479383053e-07,
"loss": 0.0107,
"step": 837
},
{
"epoch": 29.928571428571427,
"grad_norm": 0.1541401743888855,
"learning_rate": 6.346947086262323e-07,
"loss": 0.007,
"step": 838
},
{
"epoch": 29.964285714285715,
"grad_norm": 0.19448424875736237,
"learning_rate": 6.260383209026704e-07,
"loss": 0.0103,
"step": 839
},
{
"epoch": 30.0,
"grad_norm": 0.17279107868671417,
"learning_rate": 6.174374260367611e-07,
"loss": 0.0093,
"step": 840
},
{
"epoch": 30.035714285714285,
"grad_norm": 0.1862161010503769,
"learning_rate": 6.088921331488568e-07,
"loss": 0.0097,
"step": 841
},
{
"epoch": 30.071428571428573,
"grad_norm": 0.1417967826128006,
"learning_rate": 6.004025506538813e-07,
"loss": 0.0065,
"step": 842
},
{
"epoch": 30.107142857142858,
"grad_norm": 0.15217995643615723,
"learning_rate": 5.919687862599549e-07,
"loss": 0.008,
"step": 843
},
{
"epoch": 30.142857142857142,
"grad_norm": 0.1934983730316162,
"learning_rate": 5.835909469670292e-07,
"loss": 0.0091,
"step": 844
},
{
"epoch": 30.178571428571427,
"grad_norm": 0.11829309910535812,
"learning_rate": 5.752691390655279e-07,
"loss": 0.005,
"step": 845
},
{
"epoch": 30.214285714285715,
"grad_norm": 0.156152606010437,
"learning_rate": 5.670034681349995e-07,
"loss": 0.008,
"step": 846
},
{
"epoch": 30.25,
"grad_norm": 0.12935622036457062,
"learning_rate": 5.587940390427804e-07,
"loss": 0.0068,
"step": 847
},
{
"epoch": 30.285714285714285,
"grad_norm": 0.17266200482845306,
"learning_rate": 5.506409559426573e-07,
"loss": 0.0101,
"step": 848
},
{
"epoch": 30.321428571428573,
"grad_norm": 0.1491808146238327,
"learning_rate": 5.425443222735527e-07,
"loss": 0.0078,
"step": 849
},
{
"epoch": 30.357142857142858,
"grad_norm": 0.18063697218894958,
"learning_rate": 5.345042407582079e-07,
"loss": 0.009,
"step": 850
},
{
"epoch": 30.392857142857142,
"grad_norm": 0.13006682693958282,
"learning_rate": 5.265208134018851e-07,
"loss": 0.0063,
"step": 851
},
{
"epoch": 30.428571428571427,
"grad_norm": 0.16981807351112366,
"learning_rate": 5.185941414910673e-07,
"loss": 0.0095,
"step": 852
},
{
"epoch": 30.464285714285715,
"grad_norm": 0.16083835065364838,
"learning_rate": 5.107243255921746e-07,
"loss": 0.008,
"step": 853
},
{
"epoch": 30.5,
"grad_norm": 0.16899225115776062,
"learning_rate": 5.029114655502937e-07,
"loss": 0.0081,
"step": 854
},
{
"epoch": 30.535714285714285,
"grad_norm": 0.16869334876537323,
"learning_rate": 4.951556604879049e-07,
"loss": 0.0092,
"step": 855
},
{
"epoch": 30.571428571428573,
"grad_norm": 0.19343996047973633,
"learning_rate": 4.874570088036252e-07,
"loss": 0.01,
"step": 856
},
{
"epoch": 30.607142857142858,
"grad_norm": 0.1612548828125,
"learning_rate": 4.798156081709638e-07,
"loss": 0.0085,
"step": 857
},
{
"epoch": 30.642857142857142,
"grad_norm": 0.18179935216903687,
"learning_rate": 4.722315555370793e-07,
"loss": 0.0095,
"step": 858
},
{
"epoch": 30.678571428571427,
"grad_norm": 0.16212452948093414,
"learning_rate": 4.647049471215498e-07,
"loss": 0.0079,
"step": 859
},
{
"epoch": 30.714285714285715,
"grad_norm": 0.15953488647937775,
"learning_rate": 4.5723587841515707e-07,
"loss": 0.0082,
"step": 860
},
{
"epoch": 30.75,
"grad_norm": 0.17954635620117188,
"learning_rate": 4.4982444417866753e-07,
"loss": 0.0088,
"step": 861
},
{
"epoch": 30.785714285714285,
"grad_norm": 0.15342117846012115,
"learning_rate": 4.4247073844163434e-07,
"loss": 0.0096,
"step": 862
},
{
"epoch": 30.821428571428573,
"grad_norm": 0.1617392897605896,
"learning_rate": 4.351748545012058e-07,
"loss": 0.0086,
"step": 863
},
{
"epoch": 30.857142857142858,
"grad_norm": 0.17065538465976715,
"learning_rate": 4.279368849209381e-07,
"loss": 0.0096,
"step": 864
},
{
"epoch": 30.892857142857142,
"grad_norm": 0.15124979615211487,
"learning_rate": 4.2075692152962145e-07,
"loss": 0.0085,
"step": 865
},
{
"epoch": 30.928571428571427,
"grad_norm": 0.17575320601463318,
"learning_rate": 4.136350554201196e-07,
"loss": 0.0087,
"step": 866
},
{
"epoch": 30.964285714285715,
"grad_norm": 0.15727750957012177,
"learning_rate": 4.0657137694820826e-07,
"loss": 0.0081,
"step": 867
},
{
"epoch": 31.0,
"grad_norm": 0.17699168622493744,
"learning_rate": 3.9956597573142966e-07,
"loss": 0.0089,
"step": 868
},
{
"epoch": 31.035714285714285,
"grad_norm": 0.1416083723306656,
"learning_rate": 3.9261894064796136e-07,
"loss": 0.0079,
"step": 869
},
{
"epoch": 31.071428571428573,
"grad_norm": 0.14622581005096436,
"learning_rate": 3.8573035983548167e-07,
"loss": 0.0073,
"step": 870
},
{
"epoch": 31.107142857142858,
"grad_norm": 0.14799736440181732,
"learning_rate": 3.789003206900538e-07,
"loss": 0.0072,
"step": 871
},
{
"epoch": 31.142857142857142,
"grad_norm": 0.15523618459701538,
"learning_rate": 3.7212890986501773e-07,
"loss": 0.0082,
"step": 872
},
{
"epoch": 31.178571428571427,
"grad_norm": 0.17029112577438354,
"learning_rate": 3.6541621326989183e-07,
"loss": 0.0107,
"step": 873
},
{
"epoch": 31.214285714285715,
"grad_norm": 0.15336446464061737,
"learning_rate": 3.5876231606927936e-07,
"loss": 0.0071,
"step": 874
},
{
"epoch": 31.25,
"grad_norm": 0.17205260694026947,
"learning_rate": 3.5216730268179346e-07,
"loss": 0.0085,
"step": 875
},
{
"epoch": 31.285714285714285,
"grad_norm": 0.13131991028785706,
"learning_rate": 3.4563125677897936e-07,
"loss": 0.0075,
"step": 876
},
{
"epoch": 31.321428571428573,
"grad_norm": 0.1458451747894287,
"learning_rate": 3.3915426128425744e-07,
"loss": 0.0077,
"step": 877
},
{
"epoch": 31.357142857142858,
"grad_norm": 0.14860883355140686,
"learning_rate": 3.327363983718723e-07,
"loss": 0.007,
"step": 878
},
{
"epoch": 31.392857142857142,
"grad_norm": 0.18567205965518951,
"learning_rate": 3.263777494658449e-07,
"loss": 0.0106,
"step": 879
},
{
"epoch": 31.428571428571427,
"grad_norm": 0.16162905097007751,
"learning_rate": 3.200783952389447e-07,
"loss": 0.0091,
"step": 880
},
{
"epoch": 31.464285714285715,
"grad_norm": 0.16891200840473175,
"learning_rate": 3.138384156116614e-07,
"loss": 0.0083,
"step": 881
},
{
"epoch": 31.5,
"grad_norm": 0.1782492995262146,
"learning_rate": 3.076578897511978e-07,
"loss": 0.0083,
"step": 882
},
{
"epoch": 31.535714285714285,
"grad_norm": 0.1816483587026596,
"learning_rate": 3.015368960704584e-07,
"loss": 0.0093,
"step": 883
},
{
"epoch": 31.571428571428573,
"grad_norm": 0.1914282590150833,
"learning_rate": 2.954755122270564e-07,
"loss": 0.0107,
"step": 884
},
{
"epoch": 31.607142857142858,
"grad_norm": 0.16533005237579346,
"learning_rate": 2.894738151223331e-07,
"loss": 0.0094,
"step": 885
},
{
"epoch": 31.642857142857142,
"grad_norm": 0.12326805293560028,
"learning_rate": 2.835318809003751e-07,
"loss": 0.0054,
"step": 886
},
{
"epoch": 31.678571428571427,
"grad_norm": 0.15294621884822845,
"learning_rate": 2.776497849470544e-07,
"loss": 0.0071,
"step": 887
},
{
"epoch": 31.714285714285715,
"grad_norm": 0.15630222856998444,
"learning_rate": 2.71827601889067e-07,
"loss": 0.0069,
"step": 888
},
{
"epoch": 31.75,
"grad_norm": 0.15990734100341797,
"learning_rate": 2.6606540559298956e-07,
"loss": 0.0079,
"step": 889
},
{
"epoch": 31.785714285714285,
"grad_norm": 0.15448947250843048,
"learning_rate": 2.6036326916434153e-07,
"loss": 0.0081,
"step": 890
},
{
"epoch": 31.821428571428573,
"grad_norm": 0.14279963076114655,
"learning_rate": 2.547212649466568e-07,
"loss": 0.0067,
"step": 891
},
{
"epoch": 31.857142857142858,
"grad_norm": 0.16762612760066986,
"learning_rate": 2.491394645205669e-07,
"loss": 0.0084,
"step": 892
},
{
"epoch": 31.892857142857142,
"grad_norm": 0.1591629981994629,
"learning_rate": 2.436179387028903e-07,
"loss": 0.0079,
"step": 893
},
{
"epoch": 31.928571428571427,
"grad_norm": 0.16853336989879608,
"learning_rate": 2.3815675754573885e-07,
"loss": 0.0088,
"step": 894
},
{
"epoch": 31.964285714285715,
"grad_norm": 0.19901008903980255,
"learning_rate": 2.3275599033562414e-07,
"loss": 0.0102,
"step": 895
},
{
"epoch": 32.0,
"grad_norm": 0.12494003027677536,
"learning_rate": 2.274157055925802e-07,
"loss": 0.007,
"step": 896
},
{
"epoch": 32.035714285714285,
"grad_norm": 0.16306279599666595,
"learning_rate": 2.2213597106929608e-07,
"loss": 0.0093,
"step": 897
},
{
"epoch": 32.07142857142857,
"grad_norm": 0.1226641833782196,
"learning_rate": 2.1691685375025362e-07,
"loss": 0.007,
"step": 898
},
{
"epoch": 32.107142857142854,
"grad_norm": 0.16900447010993958,
"learning_rate": 2.117584198508771e-07,
"loss": 0.0079,
"step": 899
},
{
"epoch": 32.142857142857146,
"grad_norm": 0.16100150346755981,
"learning_rate": 2.0666073481669714e-07,
"loss": 0.0086,
"step": 900
},
{
"epoch": 32.142857142857146,
"eval_loss": 2.5882554054260254,
"eval_runtime": 0.4733,
"eval_samples_per_second": 295.815,
"eval_steps_per_second": 2.113,
"step": 900
},
{
"epoch": 32.17857142857143,
"grad_norm": 0.16585837304592133,
"learning_rate": 2.016238633225165e-07,
"loss": 0.008,
"step": 901
},
{
"epoch": 32.214285714285715,
"grad_norm": 0.15317648649215698,
"learning_rate": 1.9664786927159064e-07,
"loss": 0.008,
"step": 902
},
{
"epoch": 32.25,
"grad_norm": 0.18521113693714142,
"learning_rate": 1.9173281579481896e-07,
"loss": 0.01,
"step": 903
},
{
"epoch": 32.285714285714285,
"grad_norm": 0.1476742923259735,
"learning_rate": 1.8687876524993987e-07,
"loss": 0.0065,
"step": 904
},
{
"epoch": 32.32142857142857,
"grad_norm": 0.15810611844062805,
"learning_rate": 1.820857792207431e-07,
"loss": 0.0072,
"step": 905
},
{
"epoch": 32.357142857142854,
"grad_norm": 0.20258426666259766,
"learning_rate": 1.7735391851628814e-07,
"loss": 0.0105,
"step": 906
},
{
"epoch": 32.392857142857146,
"grad_norm": 0.16195912659168243,
"learning_rate": 1.7268324317012974e-07,
"loss": 0.0076,
"step": 907
},
{
"epoch": 32.42857142857143,
"grad_norm": 0.15066063404083252,
"learning_rate": 1.680738124395598e-07,
"loss": 0.0069,
"step": 908
},
{
"epoch": 32.464285714285715,
"grad_norm": 0.13371895253658295,
"learning_rate": 1.6352568480485277e-07,
"loss": 0.0063,
"step": 909
},
{
"epoch": 32.5,
"grad_norm": 0.1613711714744568,
"learning_rate": 1.5903891796852756e-07,
"loss": 0.0098,
"step": 910
},
{
"epoch": 32.535714285714285,
"grad_norm": 0.16096334159374237,
"learning_rate": 1.5461356885461077e-07,
"loss": 0.0068,
"step": 911
},
{
"epoch": 32.57142857142857,
"grad_norm": 0.14560391008853912,
"learning_rate": 1.5024969360791564e-07,
"loss": 0.0064,
"step": 912
},
{
"epoch": 32.607142857142854,
"grad_norm": 0.16068080067634583,
"learning_rate": 1.4594734759333484e-07,
"loss": 0.0075,
"step": 913
},
{
"epoch": 32.642857142857146,
"grad_norm": 0.1491153985261917,
"learning_rate": 1.4170658539512993e-07,
"loss": 0.008,
"step": 914
},
{
"epoch": 32.67857142857143,
"grad_norm": 0.16911394894123077,
"learning_rate": 1.375274608162447e-07,
"loss": 0.0085,
"step": 915
},
{
"epoch": 32.714285714285715,
"grad_norm": 0.14449740946292877,
"learning_rate": 1.3341002687762062e-07,
"loss": 0.0074,
"step": 916
},
{
"epoch": 32.75,
"grad_norm": 0.1513504683971405,
"learning_rate": 1.2935433581752365e-07,
"loss": 0.0082,
"step": 917
},
{
"epoch": 32.785714285714285,
"grad_norm": 0.1615770161151886,
"learning_rate": 1.253604390908819e-07,
"loss": 0.008,
"step": 918
},
{
"epoch": 32.82142857142857,
"grad_norm": 0.18504442274570465,
"learning_rate": 1.2142838736863562e-07,
"loss": 0.0097,
"step": 919
},
{
"epoch": 32.857142857142854,
"grad_norm": 0.17463642358779907,
"learning_rate": 1.175582305370887e-07,
"loss": 0.0094,
"step": 920
},
{
"epoch": 32.892857142857146,
"grad_norm": 0.16344715654850006,
"learning_rate": 1.1375001769728e-07,
"loss": 0.0092,
"step": 921
},
{
"epoch": 32.92857142857143,
"grad_norm": 0.11732794344425201,
"learning_rate": 1.1000379716435916e-07,
"loss": 0.0069,
"step": 922
},
{
"epoch": 32.964285714285715,
"grad_norm": 0.1688617467880249,
"learning_rate": 1.0631961646697387e-07,
"loss": 0.0101,
"step": 923
},
{
"epoch": 33.0,
"grad_norm": 0.15955369174480438,
"learning_rate": 1.0269752234666642e-07,
"loss": 0.009,
"step": 924
},
{
"epoch": 33.035714285714285,
"grad_norm": 0.14984415471553802,
"learning_rate": 9.913756075728088e-08,
"loss": 0.0078,
"step": 925
},
{
"epoch": 33.07142857142857,
"grad_norm": 0.20997703075408936,
"learning_rate": 9.563977686438019e-08,
"loss": 0.0104,
"step": 926
},
{
"epoch": 33.107142857142854,
"grad_norm": 0.17377646267414093,
"learning_rate": 9.22042150446728e-08,
"loss": 0.0093,
"step": 927
},
{
"epoch": 33.142857142857146,
"grad_norm": 0.15954937040805817,
"learning_rate": 8.883091888545136e-08,
"loss": 0.0077,
"step": 928
},
{
"epoch": 33.17857142857143,
"grad_norm": 0.18202899396419525,
"learning_rate": 8.551993118403656e-08,
"loss": 0.0105,
"step": 929
},
{
"epoch": 33.214285714285715,
"grad_norm": 0.16078250110149384,
"learning_rate": 8.227129394723643e-08,
"loss": 0.0082,
"step": 930
},
{
"epoch": 33.25,
"grad_norm": 0.15054886043071747,
"learning_rate": 7.908504839081343e-08,
"loss": 0.0071,
"step": 931
},
{
"epoch": 33.285714285714285,
"grad_norm": 0.1575610637664795,
"learning_rate": 7.59612349389599e-08,
"loss": 0.007,
"step": 932
},
{
"epoch": 33.32142857142857,
"grad_norm": 0.13964974880218506,
"learning_rate": 7.289989322378732e-08,
"loss": 0.0062,
"step": 933
},
{
"epoch": 33.357142857142854,
"grad_norm": 0.17797306180000305,
"learning_rate": 6.990106208482227e-08,
"loss": 0.0077,
"step": 934
},
{
"epoch": 33.392857142857146,
"grad_norm": 0.1711435765028,
"learning_rate": 6.696477956851356e-08,
"loss": 0.0095,
"step": 935
},
{
"epoch": 33.42857142857143,
"grad_norm": 0.19469492137432098,
"learning_rate": 6.409108292774912e-08,
"loss": 0.0109,
"step": 936
},
{
"epoch": 33.464285714285715,
"grad_norm": 0.16585536301136017,
"learning_rate": 6.12800086213866e-08,
"loss": 0.0083,
"step": 937
},
{
"epoch": 33.5,
"grad_norm": 0.1359519064426422,
"learning_rate": 5.853159231378469e-08,
"loss": 0.0078,
"step": 938
},
{
"epoch": 33.535714285714285,
"grad_norm": 0.1809779703617096,
"learning_rate": 5.584586887435739e-08,
"loss": 0.0104,
"step": 939
},
{
"epoch": 33.57142857142857,
"grad_norm": 0.13768798112869263,
"learning_rate": 5.322287237712664e-08,
"loss": 0.0055,
"step": 940
},
{
"epoch": 33.607142857142854,
"grad_norm": 0.16895705461502075,
"learning_rate": 5.0662636100292094e-08,
"loss": 0.0087,
"step": 941
},
{
"epoch": 33.642857142857146,
"grad_norm": 0.12327758222818375,
"learning_rate": 4.8165192525809754e-08,
"loss": 0.0063,
"step": 942
},
{
"epoch": 33.67857142857143,
"grad_norm": 0.15800786018371582,
"learning_rate": 4.573057333897679e-08,
"loss": 0.0072,
"step": 943
},
{
"epoch": 33.714285714285715,
"grad_norm": 0.1666928380727768,
"learning_rate": 4.335880942803405e-08,
"loss": 0.0088,
"step": 944
},
{
"epoch": 33.75,
"grad_norm": 0.16826018691062927,
"learning_rate": 4.104993088376974e-08,
"loss": 0.0081,
"step": 945
},
{
"epoch": 33.785714285714285,
"grad_norm": 0.12341219186782837,
"learning_rate": 3.8803966999139686e-08,
"loss": 0.0059,
"step": 946
},
{
"epoch": 33.82142857142857,
"grad_norm": 0.14304673671722412,
"learning_rate": 3.662094626889656e-08,
"loss": 0.0067,
"step": 947
},
{
"epoch": 33.857142857142854,
"grad_norm": 0.15671806037425995,
"learning_rate": 3.450089638922738e-08,
"loss": 0.008,
"step": 948
},
{
"epoch": 33.892857142857146,
"grad_norm": 0.16353370249271393,
"learning_rate": 3.2443844257400434e-08,
"loss": 0.008,
"step": 949
},
{
"epoch": 33.92857142857143,
"grad_norm": 0.19006143510341644,
"learning_rate": 3.044981597142837e-08,
"loss": 0.0096,
"step": 950
},
{
"epoch": 33.964285714285715,
"grad_norm": 0.16587476432323456,
"learning_rate": 2.8518836829732332e-08,
"loss": 0.0081,
"step": 951
},
{
"epoch": 34.0,
"grad_norm": 0.16104213893413544,
"learning_rate": 2.6650931330823305e-08,
"loss": 0.0083,
"step": 952
},
{
"epoch": 34.035714285714285,
"grad_norm": 0.1580321341753006,
"learning_rate": 2.4846123172992953e-08,
"loss": 0.0086,
"step": 953
},
{
"epoch": 34.07142857142857,
"grad_norm": 0.1464652419090271,
"learning_rate": 2.3104435254008852e-08,
"loss": 0.0073,
"step": 954
},
{
"epoch": 34.107142857142854,
"grad_norm": 0.12108955532312393,
"learning_rate": 2.1425889670827483e-08,
"loss": 0.0053,
"step": 955
},
{
"epoch": 34.142857142857146,
"grad_norm": 0.1350407600402832,
"learning_rate": 1.981050771931281e-08,
"loss": 0.007,
"step": 956
},
{
"epoch": 34.17857142857143,
"grad_norm": 0.16024035215377808,
"learning_rate": 1.8258309893965375e-08,
"loss": 0.0087,
"step": 957
},
{
"epoch": 34.214285714285715,
"grad_norm": 0.1773962676525116,
"learning_rate": 1.6769315887662508e-08,
"loss": 0.0082,
"step": 958
},
{
"epoch": 34.25,
"grad_norm": 0.17347432672977448,
"learning_rate": 1.5343544591409632e-08,
"loss": 0.0088,
"step": 959
},
{
"epoch": 34.285714285714285,
"grad_norm": 0.17487084865570068,
"learning_rate": 1.3981014094099354e-08,
"loss": 0.0097,
"step": 960
},
{
"epoch": 34.32142857142857,
"grad_norm": 0.18157510459423065,
"learning_rate": 1.2681741682282755e-08,
"loss": 0.01,
"step": 961
},
{
"epoch": 34.357142857142854,
"grad_norm": 0.177931547164917,
"learning_rate": 1.1445743839949008e-08,
"loss": 0.0096,
"step": 962
},
{
"epoch": 34.392857142857146,
"grad_norm": 0.17463018000125885,
"learning_rate": 1.0273036248318325e-08,
"loss": 0.0098,
"step": 963
},
{
"epoch": 34.42857142857143,
"grad_norm": 0.17054671049118042,
"learning_rate": 9.163633785639892e-09,
"loss": 0.0084,
"step": 964
},
{
"epoch": 34.464285714285715,
"grad_norm": 0.17844708263874054,
"learning_rate": 8.117550527005913e-09,
"loss": 0.0089,
"step": 965
},
{
"epoch": 34.5,
"grad_norm": 0.14375606179237366,
"learning_rate": 7.13479974417175e-09,
"loss": 0.0073,
"step": 966
},
{
"epoch": 34.535714285714285,
"grad_norm": 0.1640142798423767,
"learning_rate": 6.215393905388278e-09,
"loss": 0.0083,
"step": 967
},
{
"epoch": 34.57142857142857,
"grad_norm": 0.13746048510074615,
"learning_rate": 5.359344675242018e-09,
"loss": 0.007,
"step": 968
},
{
"epoch": 34.607142857142854,
"grad_norm": 0.18680772185325623,
"learning_rate": 4.56666291450858e-09,
"loss": 0.0104,
"step": 969
},
{
"epoch": 34.642857142857146,
"grad_norm": 0.17095445096492767,
"learning_rate": 3.837358680016112e-09,
"loss": 0.0087,
"step": 970
},
{
"epoch": 34.67857142857143,
"grad_norm": 0.13912023603916168,
"learning_rate": 3.1714412245148486e-09,
"loss": 0.0065,
"step": 971
},
{
"epoch": 34.714285714285715,
"grad_norm": 0.16117946803569794,
"learning_rate": 2.568918996560532e-09,
"loss": 0.0088,
"step": 972
},
{
"epoch": 34.75,
"grad_norm": 0.15318843722343445,
"learning_rate": 2.029799640409502e-09,
"loss": 0.009,
"step": 973
},
{
"epoch": 34.785714285714285,
"grad_norm": 0.15008534491062164,
"learning_rate": 1.5540899959187727e-09,
"loss": 0.0072,
"step": 974
},
{
"epoch": 34.82142857142857,
"grad_norm": 0.13134929537773132,
"learning_rate": 1.1417960984605459e-09,
"loss": 0.0056,
"step": 975
},
{
"epoch": 34.857142857142854,
"grad_norm": 0.14197276532649994,
"learning_rate": 7.92923178845606e-10,
"loss": 0.0073,
"step": 976
},
{
"epoch": 34.892857142857146,
"grad_norm": 0.15681394934654236,
"learning_rate": 5.07475663257262e-10,
"loss": 0.0084,
"step": 977
},
{
"epoch": 34.92857142857143,
"grad_norm": 0.13825960457324982,
"learning_rate": 2.854571731947253e-10,
"loss": 0.0059,
"step": 978
},
{
"epoch": 34.964285714285715,
"grad_norm": 0.14714203774929047,
"learning_rate": 1.2687052542759148e-10,
"loss": 0.008,
"step": 979
},
{
"epoch": 35.0,
"grad_norm": 0.14494210481643677,
"learning_rate": 3.171773195809191e-11,
"loss": 0.0075,
"step": 980
},
{
"epoch": 35.0,
"step": 980,
"total_flos": 1.12061156850611e+18,
"train_loss": 0.2908882966189056,
"train_runtime": 4060.664,
"train_samples_per_second": 119.463,
"train_steps_per_second": 0.241
}
],
"logging_steps": 1,
"max_steps": 980,
"num_input_tokens_seen": 0,
"num_train_epochs": 35,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.12061156850611e+18,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}