{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 230,
"global_step": 918,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0010893246187363835,
"grad_norm": 497.369384765625,
"learning_rate": 2e-05,
"loss": 12.1306,
"step": 1
},
{
"epoch": 0.0010893246187363835,
"eval_loss": 3.1589934825897217,
"eval_runtime": 4.1717,
"eval_samples_per_second": 92.767,
"eval_steps_per_second": 46.503,
"step": 1
},
{
"epoch": 0.002178649237472767,
"grad_norm": 384.06939697265625,
"learning_rate": 4e-05,
"loss": 11.9955,
"step": 2
},
{
"epoch": 0.0032679738562091504,
"grad_norm": 418.12237548828125,
"learning_rate": 6e-05,
"loss": 11.2344,
"step": 3
},
{
"epoch": 0.004357298474945534,
"grad_norm": 319.05267333984375,
"learning_rate": 8e-05,
"loss": 13.0403,
"step": 4
},
{
"epoch": 0.0054466230936819175,
"grad_norm": 275.94830322265625,
"learning_rate": 0.0001,
"loss": 12.7821,
"step": 5
},
{
"epoch": 0.006535947712418301,
"grad_norm": 446.70166015625,
"learning_rate": 0.00012,
"loss": 12.3682,
"step": 6
},
{
"epoch": 0.007625272331154684,
"grad_norm": 403.9425964355469,
"learning_rate": 0.00014,
"loss": 12.164,
"step": 7
},
{
"epoch": 0.008714596949891068,
"grad_norm": 370.84893798828125,
"learning_rate": 0.00016,
"loss": 12.4891,
"step": 8
},
{
"epoch": 0.00980392156862745,
"grad_norm": 357.8050537109375,
"learning_rate": 0.00018,
"loss": 12.3877,
"step": 9
},
{
"epoch": 0.010893246187363835,
"grad_norm": 409.2673645019531,
"learning_rate": 0.0002,
"loss": 12.547,
"step": 10
},
{
"epoch": 0.011982570806100218,
"grad_norm": 395.81939697265625,
"learning_rate": 0.00019999940145388063,
"loss": 13.2017,
"step": 11
},
{
"epoch": 0.013071895424836602,
"grad_norm": 440.0821228027344,
"learning_rate": 0.00019999760582268763,
"loss": 12.3833,
"step": 12
},
{
"epoch": 0.014161220043572984,
"grad_norm": 394.557373046875,
"learning_rate": 0.00019999461312791638,
"loss": 11.9402,
"step": 13
},
{
"epoch": 0.015250544662309368,
"grad_norm": 332.89373779296875,
"learning_rate": 0.0001999904234053922,
"loss": 11.9442,
"step": 14
},
{
"epoch": 0.016339869281045753,
"grad_norm": 312.7531433105469,
"learning_rate": 0.00019998503670526994,
"loss": 11.8069,
"step": 15
},
{
"epoch": 0.017429193899782137,
"grad_norm": 472.9444580078125,
"learning_rate": 0.00019997845309203334,
"loss": 12.1533,
"step": 16
},
{
"epoch": 0.018518518518518517,
"grad_norm": 397.2251892089844,
"learning_rate": 0.00019997067264449433,
"loss": 11.0714,
"step": 17
},
{
"epoch": 0.0196078431372549,
"grad_norm": 369.2944641113281,
"learning_rate": 0.00019996169545579207,
"loss": 12.2346,
"step": 18
},
{
"epoch": 0.020697167755991286,
"grad_norm": 574.8084106445312,
"learning_rate": 0.00019995152163339178,
"loss": 11.162,
"step": 19
},
{
"epoch": 0.02178649237472767,
"grad_norm": 611.6285400390625,
"learning_rate": 0.00019994015129908346,
"loss": 12.0132,
"step": 20
},
{
"epoch": 0.02287581699346405,
"grad_norm": 425.55072021484375,
"learning_rate": 0.00019992758458898055,
"loss": 12.0108,
"step": 21
},
{
"epoch": 0.023965141612200435,
"grad_norm": 498.50439453125,
"learning_rate": 0.00019991382165351814,
"loss": 11.8223,
"step": 22
},
{
"epoch": 0.02505446623093682,
"grad_norm": 454.2897644042969,
"learning_rate": 0.00019989886265745128,
"loss": 12.0506,
"step": 23
},
{
"epoch": 0.026143790849673203,
"grad_norm": 557.8632202148438,
"learning_rate": 0.00019988270777985292,
"loss": 11.8529,
"step": 24
},
{
"epoch": 0.027233115468409588,
"grad_norm": 412.888916015625,
"learning_rate": 0.00019986535721411186,
"loss": 11.4545,
"step": 25
},
{
"epoch": 0.02832244008714597,
"grad_norm": 375.8723449707031,
"learning_rate": 0.00019984681116793038,
"loss": 10.8877,
"step": 26
},
{
"epoch": 0.029411764705882353,
"grad_norm": 445.2213134765625,
"learning_rate": 0.00019982706986332175,
"loss": 11.8088,
"step": 27
},
{
"epoch": 0.030501089324618737,
"grad_norm": 390.9215087890625,
"learning_rate": 0.00019980613353660763,
"loss": 12.8907,
"step": 28
},
{
"epoch": 0.03159041394335512,
"grad_norm": 876.1612548828125,
"learning_rate": 0.00019978400243841508,
"loss": 12.3549,
"step": 29
},
{
"epoch": 0.032679738562091505,
"grad_norm": 573.1425170898438,
"learning_rate": 0.00019976067683367385,
"loss": 12.5321,
"step": 30
},
{
"epoch": 0.03376906318082789,
"grad_norm": 470.3711853027344,
"learning_rate": 0.0001997361570016129,
"loss": 12.0172,
"step": 31
},
{
"epoch": 0.034858387799564274,
"grad_norm": 679.3441772460938,
"learning_rate": 0.00019971044323575728,
"loss": 10.8628,
"step": 32
},
{
"epoch": 0.03594771241830065,
"grad_norm": 626.5648193359375,
"learning_rate": 0.0001996835358439244,
"loss": 11.3385,
"step": 33
},
{
"epoch": 0.037037037037037035,
"grad_norm": 733.734375,
"learning_rate": 0.00019965543514822062,
"loss": 10.4336,
"step": 34
},
{
"epoch": 0.03812636165577342,
"grad_norm": 587.365966796875,
"learning_rate": 0.00019962614148503718,
"loss": 12.0594,
"step": 35
},
{
"epoch": 0.0392156862745098,
"grad_norm": 405.00531005859375,
"learning_rate": 0.00019959565520504623,
"loss": 11.8103,
"step": 36
},
{
"epoch": 0.04030501089324619,
"grad_norm": 398.32415771484375,
"learning_rate": 0.00019956397667319668,
"loss": 11.0375,
"step": 37
},
{
"epoch": 0.04139433551198257,
"grad_norm": 422.72808837890625,
"learning_rate": 0.00019953110626870979,
"loss": 12.0295,
"step": 38
},
{
"epoch": 0.042483660130718956,
"grad_norm": 482.6229553222656,
"learning_rate": 0.00019949704438507459,
"loss": 10.7632,
"step": 39
},
{
"epoch": 0.04357298474945534,
"grad_norm": 526.7586059570312,
"learning_rate": 0.00019946179143004325,
"loss": 11.2995,
"step": 40
},
{
"epoch": 0.044662309368191724,
"grad_norm": 503.21221923828125,
"learning_rate": 0.0001994253478256262,
"loss": 12.1067,
"step": 41
},
{
"epoch": 0.0457516339869281,
"grad_norm": 655.2205810546875,
"learning_rate": 0.0001993877140080869,
"loss": 11.5295,
"step": 42
},
{
"epoch": 0.046840958605664486,
"grad_norm": 440.0908203125,
"learning_rate": 0.000199348890427937,
"loss": 11.6821,
"step": 43
},
{
"epoch": 0.04793028322440087,
"grad_norm": 456.47406005859375,
"learning_rate": 0.00019930887754993044,
"loss": 11.7873,
"step": 44
},
{
"epoch": 0.049019607843137254,
"grad_norm": 841.0929565429688,
"learning_rate": 0.00019926767585305835,
"loss": 11.6674,
"step": 45
},
{
"epoch": 0.05010893246187364,
"grad_norm": 581.7565307617188,
"learning_rate": 0.000199225285830543,
"loss": 11.7123,
"step": 46
},
{
"epoch": 0.05119825708061002,
"grad_norm": 547.7841796875,
"learning_rate": 0.00019918170798983211,
"loss": 10.9854,
"step": 47
},
{
"epoch": 0.05228758169934641,
"grad_norm": 685.59814453125,
"learning_rate": 0.00019913694285259256,
"loss": 11.8064,
"step": 48
},
{
"epoch": 0.05337690631808279,
"grad_norm": 636.0125122070312,
"learning_rate": 0.00019909099095470444,
"loss": 11.1934,
"step": 49
},
{
"epoch": 0.054466230936819175,
"grad_norm": 528.2201538085938,
"learning_rate": 0.00019904385284625424,
"loss": 12.0213,
"step": 50
},
{
"epoch": 0.05555555555555555,
"grad_norm": 490.21875,
"learning_rate": 0.00019899552909152866,
"loss": 10.6632,
"step": 51
},
{
"epoch": 0.05664488017429194,
"grad_norm": 500.16021728515625,
"learning_rate": 0.00019894602026900758,
"loss": 11.65,
"step": 52
},
{
"epoch": 0.05773420479302832,
"grad_norm": 477.1259765625,
"learning_rate": 0.00019889532697135734,
"loss": 11.6934,
"step": 53
},
{
"epoch": 0.058823529411764705,
"grad_norm": 1109.7750244140625,
"learning_rate": 0.00019884344980542338,
"loss": 10.7624,
"step": 54
},
{
"epoch": 0.05991285403050109,
"grad_norm": 555.9796142578125,
"learning_rate": 0.00019879038939222329,
"loss": 12.7329,
"step": 55
},
{
"epoch": 0.06100217864923747,
"grad_norm": 557.53271484375,
"learning_rate": 0.0001987361463669392,
"loss": 11.0998,
"step": 56
},
{
"epoch": 0.06209150326797386,
"grad_norm": 483.9259948730469,
"learning_rate": 0.00019868072137891002,
"loss": 11.8222,
"step": 57
},
{
"epoch": 0.06318082788671024,
"grad_norm": 740.6929321289062,
"learning_rate": 0.00019862411509162406,
"loss": 10.6816,
"step": 58
},
{
"epoch": 0.06427015250544663,
"grad_norm": 873.5588989257812,
"learning_rate": 0.0001985663281827108,
"loss": 10.5937,
"step": 59
},
{
"epoch": 0.06535947712418301,
"grad_norm": 617.0968627929688,
"learning_rate": 0.00019850736134393286,
"loss": 11.6752,
"step": 60
},
{
"epoch": 0.0664488017429194,
"grad_norm": 652.836669921875,
"learning_rate": 0.00019844721528117766,
"loss": 10.4189,
"step": 61
},
{
"epoch": 0.06753812636165578,
"grad_norm": 681.41064453125,
"learning_rate": 0.00019838589071444903,
"loss": 11.4517,
"step": 62
},
{
"epoch": 0.06862745098039216,
"grad_norm": 610.8583984375,
"learning_rate": 0.00019832338837785863,
"loss": 11.4514,
"step": 63
},
{
"epoch": 0.06971677559912855,
"grad_norm": 703.3029174804688,
"learning_rate": 0.00019825970901961705,
"loss": 11.0479,
"step": 64
},
{
"epoch": 0.07080610021786492,
"grad_norm": 558.1321411132812,
"learning_rate": 0.000198194853402025,
"loss": 10.8034,
"step": 65
},
{
"epoch": 0.0718954248366013,
"grad_norm": 729.005126953125,
"learning_rate": 0.00019812882230146398,
"loss": 11.2189,
"step": 66
},
{
"epoch": 0.07298474945533769,
"grad_norm": 563.0946044921875,
"learning_rate": 0.00019806161650838723,
"loss": 11.8089,
"step": 67
},
{
"epoch": 0.07407407407407407,
"grad_norm": 774.91796875,
"learning_rate": 0.00019799323682731,
"loss": 10.8409,
"step": 68
},
{
"epoch": 0.07516339869281045,
"grad_norm": 636.3812866210938,
"learning_rate": 0.00019792368407680025,
"loss": 11.4898,
"step": 69
},
{
"epoch": 0.07625272331154684,
"grad_norm": 741.6439208984375,
"learning_rate": 0.00019785295908946848,
"loss": 11.1101,
"step": 70
},
{
"epoch": 0.07734204793028322,
"grad_norm": 902.0681762695312,
"learning_rate": 0.00019778106271195806,
"loss": 10.9148,
"step": 71
},
{
"epoch": 0.0784313725490196,
"grad_norm": 608.00634765625,
"learning_rate": 0.00019770799580493494,
"loss": 12.5593,
"step": 72
},
{
"epoch": 0.07952069716775599,
"grad_norm": 645.3396606445312,
"learning_rate": 0.00019763375924307735,
"loss": 11.0296,
"step": 73
},
{
"epoch": 0.08061002178649238,
"grad_norm": 532.682861328125,
"learning_rate": 0.0001975583539150655,
"loss": 10.9512,
"step": 74
},
{
"epoch": 0.08169934640522876,
"grad_norm": 606.407470703125,
"learning_rate": 0.00019748178072357065,
"loss": 10.0461,
"step": 75
},
{
"epoch": 0.08278867102396514,
"grad_norm": 538.4356079101562,
"learning_rate": 0.00019740404058524457,
"loss": 10.923,
"step": 76
},
{
"epoch": 0.08387799564270153,
"grad_norm": 593.4065551757812,
"learning_rate": 0.00019732513443070836,
"loss": 11.2116,
"step": 77
},
{
"epoch": 0.08496732026143791,
"grad_norm": 667.9852905273438,
"learning_rate": 0.00019724506320454153,
"loss": 10.7769,
"step": 78
},
{
"epoch": 0.0860566448801743,
"grad_norm": 667.6882934570312,
"learning_rate": 0.0001971638278652705,
"loss": 11.6724,
"step": 79
},
{
"epoch": 0.08714596949891068,
"grad_norm": 664.25244140625,
"learning_rate": 0.0001970814293853572,
"loss": 11.0462,
"step": 80
},
{
"epoch": 0.08823529411764706,
"grad_norm": 713.4217529296875,
"learning_rate": 0.00019699786875118747,
"loss": 11.3038,
"step": 81
},
{
"epoch": 0.08932461873638345,
"grad_norm": 636.1452026367188,
"learning_rate": 0.00019691314696305913,
"loss": 11.0193,
"step": 82
},
{
"epoch": 0.09041394335511982,
"grad_norm": 536.7321166992188,
"learning_rate": 0.00019682726503517017,
"loss": 11.3826,
"step": 83
},
{
"epoch": 0.0915032679738562,
"grad_norm": 599.102783203125,
"learning_rate": 0.00019674022399560648,
"loss": 10.964,
"step": 84
},
{
"epoch": 0.09259259259259259,
"grad_norm": 1010.7560424804688,
"learning_rate": 0.00019665202488632956,
"loss": 11.0961,
"step": 85
},
{
"epoch": 0.09368191721132897,
"grad_norm": 651.3975219726562,
"learning_rate": 0.0001965626687631641,
"loss": 10.7751,
"step": 86
},
{
"epoch": 0.09477124183006536,
"grad_norm": 691.282958984375,
"learning_rate": 0.00019647215669578536,
"loss": 11.463,
"step": 87
},
{
"epoch": 0.09586056644880174,
"grad_norm": 780.7763061523438,
"learning_rate": 0.00019638048976770628,
"loss": 11.4905,
"step": 88
},
{
"epoch": 0.09694989106753812,
"grad_norm": 731.2135620117188,
"learning_rate": 0.00019628766907626446,
"loss": 11.0423,
"step": 89
},
{
"epoch": 0.09803921568627451,
"grad_norm": 606.6620483398438,
"learning_rate": 0.00019619369573260924,
"loss": 10.0837,
"step": 90
},
{
"epoch": 0.09912854030501089,
"grad_norm": 954.3007202148438,
"learning_rate": 0.00019609857086168823,
"loss": 10.4564,
"step": 91
},
{
"epoch": 0.10021786492374728,
"grad_norm": 489.27496337890625,
"learning_rate": 0.00019600229560223388,
"loss": 11.548,
"step": 92
},
{
"epoch": 0.10130718954248366,
"grad_norm": 563.7119140625,
"learning_rate": 0.00019590487110674983,
"loss": 10.8069,
"step": 93
},
{
"epoch": 0.10239651416122005,
"grad_norm": 460.32440185546875,
"learning_rate": 0.0001958062985414972,
"loss": 10.7325,
"step": 94
},
{
"epoch": 0.10348583877995643,
"grad_norm": 642.6327514648438,
"learning_rate": 0.00019570657908648048,
"loss": 12.1817,
"step": 95
},
{
"epoch": 0.10457516339869281,
"grad_norm": 686.3006591796875,
"learning_rate": 0.0001956057139354335,
"loss": 10.7484,
"step": 96
},
{
"epoch": 0.1056644880174292,
"grad_norm": 1625.68359375,
"learning_rate": 0.0001955037042958052,
"loss": 9.9405,
"step": 97
},
{
"epoch": 0.10675381263616558,
"grad_norm": 661.0103759765625,
"learning_rate": 0.00019540055138874505,
"loss": 10.6417,
"step": 98
},
{
"epoch": 0.10784313725490197,
"grad_norm": 754.760009765625,
"learning_rate": 0.00019529625644908847,
"loss": 10.5583,
"step": 99
},
{
"epoch": 0.10893246187363835,
"grad_norm": 638.9000854492188,
"learning_rate": 0.0001951908207253421,
"loss": 11.0231,
"step": 100
},
{
"epoch": 0.11002178649237472,
"grad_norm": 553.3854370117188,
"learning_rate": 0.00019508424547966884,
"loss": 10.9961,
"step": 101
},
{
"epoch": 0.1111111111111111,
"grad_norm": 587.3681030273438,
"learning_rate": 0.00019497653198787264,
"loss": 10.701,
"step": 102
},
{
"epoch": 0.11220043572984749,
"grad_norm": 751.8494262695312,
"learning_rate": 0.00019486768153938338,
"loss": 11.4205,
"step": 103
},
{
"epoch": 0.11328976034858387,
"grad_norm": 620.5661010742188,
"learning_rate": 0.0001947576954372413,
"loss": 11.6781,
"step": 104
},
{
"epoch": 0.11437908496732026,
"grad_norm": 592.9255981445312,
"learning_rate": 0.00019464657499808152,
"loss": 10.2305,
"step": 105
},
{
"epoch": 0.11546840958605664,
"grad_norm": 504.8605651855469,
"learning_rate": 0.0001945343215521182,
"loss": 12.4109,
"step": 106
},
{
"epoch": 0.11655773420479303,
"grad_norm": 567.1307983398438,
"learning_rate": 0.0001944209364431286,
"loss": 10.948,
"step": 107
},
{
"epoch": 0.11764705882352941,
"grad_norm": 802.73046875,
"learning_rate": 0.00019430642102843707,
"loss": 11.1612,
"step": 108
},
{
"epoch": 0.1187363834422658,
"grad_norm": 692.0497436523438,
"learning_rate": 0.00019419077667889872,
"loss": 10.6958,
"step": 109
},
{
"epoch": 0.11982570806100218,
"grad_norm": 678.1286010742188,
"learning_rate": 0.00019407400477888315,
"loss": 11.3934,
"step": 110
},
{
"epoch": 0.12091503267973856,
"grad_norm": 545.8530883789062,
"learning_rate": 0.00019395610672625767,
"loss": 11.7126,
"step": 111
},
{
"epoch": 0.12200435729847495,
"grad_norm": 551.471435546875,
"learning_rate": 0.00019383708393237075,
"loss": 11.3177,
"step": 112
},
{
"epoch": 0.12309368191721133,
"grad_norm": 747.8993530273438,
"learning_rate": 0.00019371693782203498,
"loss": 10.9254,
"step": 113
},
{
"epoch": 0.12418300653594772,
"grad_norm": 814.4208374023438,
"learning_rate": 0.00019359566983351013,
"loss": 9.821,
"step": 114
},
{
"epoch": 0.12527233115468409,
"grad_norm": 862.31494140625,
"learning_rate": 0.0001934732814184859,
"loss": 11.2622,
"step": 115
},
{
"epoch": 0.12636165577342048,
"grad_norm": 636.8887939453125,
"learning_rate": 0.00019334977404206443,
"loss": 10.6286,
"step": 116
},
{
"epoch": 0.12745098039215685,
"grad_norm": 670.830322265625,
"learning_rate": 0.00019322514918274308,
"loss": 11.1403,
"step": 117
},
{
"epoch": 0.12854030501089325,
"grad_norm": 545.7217407226562,
"learning_rate": 0.00019309940833239626,
"loss": 11.752,
"step": 118
},
{
"epoch": 0.12962962962962962,
"grad_norm": 587.8623046875,
"learning_rate": 0.00019297255299625797,
"loss": 11.7527,
"step": 119
},
{
"epoch": 0.13071895424836602,
"grad_norm": 1006.7312622070312,
"learning_rate": 0.00019284458469290354,
"loss": 10.5548,
"step": 120
},
{
"epoch": 0.1318082788671024,
"grad_norm": 590.1889038085938,
"learning_rate": 0.00019271550495423168,
"loss": 10.2359,
"step": 121
},
{
"epoch": 0.1328976034858388,
"grad_norm": 605.3558959960938,
"learning_rate": 0.00019258531532544585,
"loss": 10.7942,
"step": 122
},
{
"epoch": 0.13398692810457516,
"grad_norm": 605.3388061523438,
"learning_rate": 0.00019245401736503608,
"loss": 10.9458,
"step": 123
},
{
"epoch": 0.13507625272331156,
"grad_norm": 672.321533203125,
"learning_rate": 0.00019232161264475997,
"loss": 12.0993,
"step": 124
},
{
"epoch": 0.13616557734204793,
"grad_norm": 554.8330688476562,
"learning_rate": 0.00019218810274962417,
"loss": 11.2272,
"step": 125
},
{
"epoch": 0.13725490196078433,
"grad_norm": 865.9181518554688,
"learning_rate": 0.00019205348927786532,
"loss": 10.1409,
"step": 126
},
{
"epoch": 0.1383442265795207,
"grad_norm": 788.566162109375,
"learning_rate": 0.00019191777384093081,
"loss": 12.0527,
"step": 127
},
{
"epoch": 0.1394335511982571,
"grad_norm": 699.4249267578125,
"learning_rate": 0.0001917809580634596,
"loss": 10.8955,
"step": 128
},
{
"epoch": 0.14052287581699346,
"grad_norm": 726.8323974609375,
"learning_rate": 0.00019164304358326275,
"loss": 10.264,
"step": 129
},
{
"epoch": 0.14161220043572983,
"grad_norm": 878.0653686523438,
"learning_rate": 0.00019150403205130383,
"loss": 11.0059,
"step": 130
},
{
"epoch": 0.14270152505446623,
"grad_norm": 523.306884765625,
"learning_rate": 0.00019136392513167903,
"loss": 10.8748,
"step": 131
},
{
"epoch": 0.1437908496732026,
"grad_norm": 962.2786254882812,
"learning_rate": 0.00019122272450159745,
"loss": 11.4164,
"step": 132
},
{
"epoch": 0.144880174291939,
"grad_norm": 911.98193359375,
"learning_rate": 0.0001910804318513609,
"loss": 11.5345,
"step": 133
},
{
"epoch": 0.14596949891067537,
"grad_norm": 1103.3953857421875,
"learning_rate": 0.0001909370488843436,
"loss": 10.3984,
"step": 134
},
{
"epoch": 0.14705882352941177,
"grad_norm": 617.2359008789062,
"learning_rate": 0.00019079257731697196,
"loss": 11.3573,
"step": 135
},
{
"epoch": 0.14814814814814814,
"grad_norm": 704.6926879882812,
"learning_rate": 0.0001906470188787039,
"loss": 11.3981,
"step": 136
},
{
"epoch": 0.14923747276688454,
"grad_norm": 702.8963012695312,
"learning_rate": 0.00019050037531200814,
"loss": 11.738,
"step": 137
},
{
"epoch": 0.1503267973856209,
"grad_norm": 848.1795043945312,
"learning_rate": 0.00019035264837234347,
"loss": 11.4592,
"step": 138
},
{
"epoch": 0.1514161220043573,
"grad_norm": 863.1195068359375,
"learning_rate": 0.00019020383982813765,
"loss": 11.1281,
"step": 139
},
{
"epoch": 0.15250544662309368,
"grad_norm": 755.1558837890625,
"learning_rate": 0.00019005395146076616,
"loss": 11.8929,
"step": 140
},
{
"epoch": 0.15359477124183007,
"grad_norm": 1292.68359375,
"learning_rate": 0.00018990298506453104,
"loss": 11.1247,
"step": 141
},
{
"epoch": 0.15468409586056645,
"grad_norm": 623.200927734375,
"learning_rate": 0.0001897509424466393,
"loss": 10.7395,
"step": 142
},
{
"epoch": 0.15577342047930284,
"grad_norm": 628.1660766601562,
"learning_rate": 0.00018959782542718128,
"loss": 11.6995,
"step": 143
},
{
"epoch": 0.1568627450980392,
"grad_norm": 818.7709350585938,
"learning_rate": 0.000189443635839109,
"loss": 11.4782,
"step": 144
},
{
"epoch": 0.1579520697167756,
"grad_norm": 777.9385986328125,
"learning_rate": 0.00018928837552821404,
"loss": 9.1531,
"step": 145
},
{
"epoch": 0.15904139433551198,
"grad_norm": 632.1226196289062,
"learning_rate": 0.0001891320463531055,
"loss": 10.9672,
"step": 146
},
{
"epoch": 0.16013071895424835,
"grad_norm": 1045.4794921875,
"learning_rate": 0.00018897465018518782,
"loss": 12.5507,
"step": 147
},
{
"epoch": 0.16122004357298475,
"grad_norm": 1832.7476806640625,
"learning_rate": 0.0001888161889086383,
"loss": 10.2606,
"step": 148
},
{
"epoch": 0.16230936819172112,
"grad_norm": 1147.483154296875,
"learning_rate": 0.00018865666442038456,
"loss": 11.287,
"step": 149
},
{
"epoch": 0.16339869281045752,
"grad_norm": 1081.8319091796875,
"learning_rate": 0.00018849607863008193,
"loss": 10.9169,
"step": 150
},
{
"epoch": 0.1644880174291939,
"grad_norm": 896.3159790039062,
"learning_rate": 0.0001883344334600904,
"loss": 11.1309,
"step": 151
},
{
"epoch": 0.1655773420479303,
"grad_norm": 1027.65869140625,
"learning_rate": 0.00018817173084545176,
"loss": 10.6067,
"step": 152
},
{
"epoch": 0.16666666666666666,
"grad_norm": 792.0274047851562,
"learning_rate": 0.0001880079727338664,
"loss": 11.5252,
"step": 153
},
{
"epoch": 0.16775599128540306,
"grad_norm": 1679.2010498046875,
"learning_rate": 0.00018784316108566996,
"loss": 12.0362,
"step": 154
},
{
"epoch": 0.16884531590413943,
"grad_norm": 897.4620971679688,
"learning_rate": 0.00018767729787380985,
"loss": 11.3795,
"step": 155
},
{
"epoch": 0.16993464052287582,
"grad_norm": 1291.0208740234375,
"learning_rate": 0.00018751038508382176,
"loss": 11.0511,
"step": 156
},
{
"epoch": 0.1710239651416122,
"grad_norm": 1339.29345703125,
"learning_rate": 0.00018734242471380572,
"loss": 10.7022,
"step": 157
},
{
"epoch": 0.1721132897603486,
"grad_norm": 900.9859619140625,
"learning_rate": 0.00018717341877440226,
"loss": 11.0906,
"step": 158
},
{
"epoch": 0.17320261437908496,
"grad_norm": 811.28662109375,
"learning_rate": 0.0001870033692887684,
"loss": 10.1273,
"step": 159
},
{
"epoch": 0.17429193899782136,
"grad_norm": 1265.2435302734375,
"learning_rate": 0.00018683227829255334,
"loss": 12.1877,
"step": 160
},
{
"epoch": 0.17538126361655773,
"grad_norm": 1323.2113037109375,
"learning_rate": 0.00018666014783387408,
"loss": 11.6536,
"step": 161
},
{
"epoch": 0.17647058823529413,
"grad_norm": 1310.9921875,
"learning_rate": 0.000186486979973291,
"loss": 12.1153,
"step": 162
},
{
"epoch": 0.1775599128540305,
"grad_norm": 806.4589233398438,
"learning_rate": 0.0001863127767837831,
"loss": 11.7038,
"step": 163
},
{
"epoch": 0.1786492374727669,
"grad_norm": 845.1607666015625,
"learning_rate": 0.0001861375403507233,
"loss": 11.3316,
"step": 164
},
{
"epoch": 0.17973856209150327,
"grad_norm": 1319.9588623046875,
"learning_rate": 0.00018596127277185329,
"loss": 12.2223,
"step": 165
},
{
"epoch": 0.18082788671023964,
"grad_norm": 1126.65234375,
"learning_rate": 0.0001857839761572586,
"loss": 11.2331,
"step": 166
},
{
"epoch": 0.18191721132897604,
"grad_norm": 827.2431640625,
"learning_rate": 0.00018560565262934318,
"loss": 11.2991,
"step": 167
},
{
"epoch": 0.1830065359477124,
"grad_norm": 804.45166015625,
"learning_rate": 0.00018542630432280422,
"loss": 11.1516,
"step": 168
},
{
"epoch": 0.1840958605664488,
"grad_norm": 868.1654052734375,
"learning_rate": 0.00018524593338460635,
"loss": 12.0214,
"step": 169
},
{
"epoch": 0.18518518518518517,
"grad_norm": 1219.4190673828125,
"learning_rate": 0.00018506454197395606,
"loss": 12.0915,
"step": 170
},
{
"epoch": 0.18627450980392157,
"grad_norm": 1242.7969970703125,
"learning_rate": 0.00018488213226227588,
"loss": 11.2175,
"step": 171
},
{
"epoch": 0.18736383442265794,
"grad_norm": 984.5662231445312,
"learning_rate": 0.0001846987064331783,
"loss": 11.5393,
"step": 172
},
{
"epoch": 0.18845315904139434,
"grad_norm": 1010.8961181640625,
"learning_rate": 0.00018451426668243963,
"loss": 11.7417,
"step": 173
},
{
"epoch": 0.1895424836601307,
"grad_norm": 784.3214721679688,
"learning_rate": 0.0001843288152179739,
"loss": 11.8699,
"step": 174
},
{
"epoch": 0.1906318082788671,
"grad_norm": 750.0485229492188,
"learning_rate": 0.00018414235425980616,
"loss": 10.6044,
"step": 175
},
{
"epoch": 0.19172113289760348,
"grad_norm": 1271.091064453125,
"learning_rate": 0.00018395488604004603,
"loss": 10.5923,
"step": 176
},
{
"epoch": 0.19281045751633988,
"grad_norm": 778.1675415039062,
"learning_rate": 0.00018376641280286107,
"loss": 11.4075,
"step": 177
},
{
"epoch": 0.19389978213507625,
"grad_norm": 999.4036254882812,
"learning_rate": 0.00018357693680444976,
"loss": 11.6131,
"step": 178
},
{
"epoch": 0.19498910675381265,
"grad_norm": 905.7958374023438,
"learning_rate": 0.00018338646031301458,
"loss": 10.323,
"step": 179
},
{
"epoch": 0.19607843137254902,
"grad_norm": 872.347412109375,
"learning_rate": 0.00018319498560873476,
"loss": 11.0557,
"step": 180
},
{
"epoch": 0.19716775599128541,
"grad_norm": 756.1710815429688,
"learning_rate": 0.00018300251498373923,
"loss": 10.2497,
"step": 181
},
{
"epoch": 0.19825708061002179,
"grad_norm": 845.9336547851562,
"learning_rate": 0.00018280905074207884,
"loss": 10.9945,
"step": 182
},
{
"epoch": 0.19934640522875818,
"grad_norm": 694.05224609375,
"learning_rate": 0.000182614595199699,
"loss": 10.545,
"step": 183
},
{
"epoch": 0.20043572984749455,
"grad_norm": 714.1306762695312,
"learning_rate": 0.00018241915068441196,
"loss": 11.1389,
"step": 184
},
{
"epoch": 0.20152505446623092,
"grad_norm": 722.7552490234375,
"learning_rate": 0.00018222271953586883,
"loss": 11.0365,
"step": 185
},
{
"epoch": 0.20261437908496732,
"grad_norm": 708.6851196289062,
"learning_rate": 0.00018202530410553163,
"loss": 11.2533,
"step": 186
},
{
"epoch": 0.2037037037037037,
"grad_norm": 1047.9068603515625,
"learning_rate": 0.00018182690675664514,
"loss": 10.9609,
"step": 187
},
{
"epoch": 0.2047930283224401,
"grad_norm": 562.74072265625,
"learning_rate": 0.00018162752986420868,
"loss": 11.5543,
"step": 188
},
{
"epoch": 0.20588235294117646,
"grad_norm": 690.9712524414062,
"learning_rate": 0.0001814271758149475,
"loss": 10.2702,
"step": 189
},
{
"epoch": 0.20697167755991286,
"grad_norm": 1054.808349609375,
"learning_rate": 0.00018122584700728443,
"loss": 11.0848,
"step": 190
},
{
"epoch": 0.20806100217864923,
"grad_norm": 633.4119262695312,
"learning_rate": 0.00018102354585131092,
"loss": 11.1002,
"step": 191
},
{
"epoch": 0.20915032679738563,
"grad_norm": 508.4339294433594,
"learning_rate": 0.00018082027476875847,
"loss": 10.9004,
"step": 192
},
{
"epoch": 0.210239651416122,
"grad_norm": 616.0364379882812,
"learning_rate": 0.00018061603619296942,
"loss": 10.9211,
"step": 193
},
{
"epoch": 0.2113289760348584,
"grad_norm": 553.75,
"learning_rate": 0.0001804108325688679,
"loss": 10.5755,
"step": 194
},
{
"epoch": 0.21241830065359477,
"grad_norm": 554.069580078125,
"learning_rate": 0.00018020466635293057,
"loss": 11.7447,
"step": 195
},
{
"epoch": 0.21350762527233116,
"grad_norm": 681.5348510742188,
"learning_rate": 0.0001799975400131572,
"loss": 11.0779,
"step": 196
},
{
"epoch": 0.21459694989106753,
"grad_norm": 632.8909301757812,
"learning_rate": 0.00017978945602904116,
"loss": 11.4297,
"step": 197
},
{
"epoch": 0.21568627450980393,
"grad_norm": 485.4712829589844,
"learning_rate": 0.0001795804168915396,
"loss": 9.903,
"step": 198
},
{
"epoch": 0.2167755991285403,
"grad_norm": 668.1004638671875,
"learning_rate": 0.00017937042510304392,
"loss": 11.2272,
"step": 199
},
{
"epoch": 0.2178649237472767,
"grad_norm": 798.0782470703125,
"learning_rate": 0.00017915948317734942,
"loss": 10.4403,
"step": 200
},
{
"epoch": 0.21895424836601307,
"grad_norm": 484.9952697753906,
"learning_rate": 0.00017894759363962554,
"loss": 11.3431,
"step": 201
},
{
"epoch": 0.22004357298474944,
"grad_norm": 557.5073852539062,
"learning_rate": 0.00017873475902638553,
"loss": 10.0637,
"step": 202
},
{
"epoch": 0.22113289760348584,
"grad_norm": 457.8398742675781,
"learning_rate": 0.00017852098188545602,
"loss": 10.7078,
"step": 203
},
{
"epoch": 0.2222222222222222,
"grad_norm": 1345.8021240234375,
"learning_rate": 0.00017830626477594654,
"loss": 10.8073,
"step": 204
},
{
"epoch": 0.2233115468409586,
"grad_norm": 508.4226379394531,
"learning_rate": 0.00017809061026821896,
"loss": 11.756,
"step": 205
},
{
"epoch": 0.22440087145969498,
"grad_norm": 543.2045288085938,
"learning_rate": 0.00017787402094385666,
"loss": 11.3398,
"step": 206
},
{
"epoch": 0.22549019607843138,
"grad_norm": 600.871337890625,
"learning_rate": 0.00017765649939563365,
"loss": 10.9323,
"step": 207
},
{
"epoch": 0.22657952069716775,
"grad_norm": 491.7249450683594,
"learning_rate": 0.00017743804822748345,
"loss": 11.4056,
"step": 208
},
{
"epoch": 0.22766884531590414,
"grad_norm": 584.7307739257812,
"learning_rate": 0.00017721867005446806,
"loss": 10.6625,
"step": 209
},
{
"epoch": 0.22875816993464052,
"grad_norm": 781.3718872070312,
"learning_rate": 0.00017699836750274662,
"loss": 11.4948,
"step": 210
},
{
"epoch": 0.2298474945533769,
"grad_norm": 561.0650024414062,
"learning_rate": 0.00017677714320954378,
"loss": 11.5926,
"step": 211
},
{
"epoch": 0.23093681917211328,
"grad_norm": 662.3781127929688,
"learning_rate": 0.00017655499982311847,
"loss": 11.2635,
"step": 212
},
{
"epoch": 0.23202614379084968,
"grad_norm": 485.6428527832031,
"learning_rate": 0.00017633194000273188,
"loss": 11.3912,
"step": 213
},
{
"epoch": 0.23311546840958605,
"grad_norm": 1000.7589111328125,
"learning_rate": 0.00017610796641861581,
"loss": 11.0748,
"step": 214
},
{
"epoch": 0.23420479302832245,
"grad_norm": 546.7896728515625,
"learning_rate": 0.0001758830817519407,
"loss": 11.1538,
"step": 215
},
{
"epoch": 0.23529411764705882,
"grad_norm": 498.8612976074219,
"learning_rate": 0.00017565728869478337,
"loss": 10.7306,
"step": 216
},
{
"epoch": 0.23638344226579522,
"grad_norm": 954.06787109375,
"learning_rate": 0.00017543058995009503,
"loss": 9.8866,
"step": 217
},
{
"epoch": 0.2374727668845316,
"grad_norm": 474.430419921875,
"learning_rate": 0.00017520298823166873,
"loss": 11.1717,
"step": 218
},
{
"epoch": 0.238562091503268,
"grad_norm": 446.2909851074219,
"learning_rate": 0.000174974486264107,
"loss": 11.0507,
"step": 219
},
{
"epoch": 0.23965141612200436,
"grad_norm": 574.3330688476562,
"learning_rate": 0.00017474508678278915,
"loss": 10.7534,
"step": 220
},
{
"epoch": 0.24074074074074073,
"grad_norm": 751.9092407226562,
"learning_rate": 0.00017451479253383857,
"loss": 11.041,
"step": 221
},
{
"epoch": 0.24183006535947713,
"grad_norm": 969.604248046875,
"learning_rate": 0.00017428360627408978,
"loss": 10.7288,
"step": 222
},
{
"epoch": 0.2429193899782135,
"grad_norm": 716.7567138671875,
"learning_rate": 0.0001740515307710557,
"loss": 11.062,
"step": 223
},
{
"epoch": 0.2440087145969499,
"grad_norm": 580.2841796875,
"learning_rate": 0.000173818568802894,
"loss": 10.2888,
"step": 224
},
{
"epoch": 0.24509803921568626,
"grad_norm": 488.0480041503906,
"learning_rate": 0.00017358472315837447,
"loss": 10.0662,
"step": 225
},
{
"epoch": 0.24618736383442266,
"grad_norm": 805.732666015625,
"learning_rate": 0.00017334999663684504,
"loss": 10.4939,
"step": 226
},
{
"epoch": 0.24727668845315903,
"grad_norm": 610.5254516601562,
"learning_rate": 0.00017311439204819874,
"loss": 10.3649,
"step": 227
},
{
"epoch": 0.24836601307189543,
"grad_norm": 464.3783264160156,
"learning_rate": 0.00017287791221283984,
"loss": 10.8539,
"step": 228
},
{
"epoch": 0.2494553376906318,
"grad_norm": 645.302001953125,
"learning_rate": 0.00017264055996165007,
"loss": 10.8646,
"step": 229
},
{
"epoch": 0.25054466230936817,
"grad_norm": 466.3519287109375,
"learning_rate": 0.00017240233813595478,
"loss": 10.1154,
"step": 230
},
{
"epoch": 0.25054466230936817,
"eval_loss": 2.678065061569214,
"eval_runtime": 2.7886,
"eval_samples_per_second": 138.78,
"eval_steps_per_second": 69.569,
"step": 230
},
{
"epoch": 0.25163398692810457,
"grad_norm": 656.3611450195312,
"learning_rate": 0.000172163249587489,
"loss": 10.7119,
"step": 231
},
{
"epoch": 0.25272331154684097,
"grad_norm": 593.1845703125,
"learning_rate": 0.00017192329717836315,
"loss": 11.6847,
"step": 232
},
{
"epoch": 0.25381263616557737,
"grad_norm": 629.67578125,
"learning_rate": 0.00017168248378102892,
"loss": 10.5297,
"step": 233
},
{
"epoch": 0.2549019607843137,
"grad_norm": 763.1748657226562,
"learning_rate": 0.0001714408122782448,
"loss": 10.4083,
"step": 234
},
{
"epoch": 0.2559912854030501,
"grad_norm": 723.7062377929688,
"learning_rate": 0.0001711982855630416,
"loss": 11.5429,
"step": 235
},
{
"epoch": 0.2570806100217865,
"grad_norm": 814.066650390625,
"learning_rate": 0.00017095490653868778,
"loss": 9.9942,
"step": 236
},
{
"epoch": 0.2581699346405229,
"grad_norm": 593.72216796875,
"learning_rate": 0.00017071067811865476,
"loss": 9.9854,
"step": 237
},
{
"epoch": 0.25925925925925924,
"grad_norm": 915.5996704101562,
"learning_rate": 0.000170465603226582,
"loss": 10.0837,
"step": 238
},
{
"epoch": 0.26034858387799564,
"grad_norm": 775.1221923828125,
"learning_rate": 0.00017021968479624203,
"loss": 11.7044,
"step": 239
},
{
"epoch": 0.26143790849673204,
"grad_norm": 569.85791015625,
"learning_rate": 0.00016997292577150528,
"loss": 10.7066,
"step": 240
},
{
"epoch": 0.2625272331154684,
"grad_norm": 497.0193176269531,
"learning_rate": 0.0001697253291063049,
"loss": 10.1781,
"step": 241
},
{
"epoch": 0.2636165577342048,
"grad_norm": 784.2388305664062,
"learning_rate": 0.0001694768977646013,
"loss": 10.108,
"step": 242
},
{
"epoch": 0.2647058823529412,
"grad_norm": 553.2965698242188,
"learning_rate": 0.00016922763472034685,
"loss": 10.667,
"step": 243
},
{
"epoch": 0.2657952069716776,
"grad_norm": 534.915771484375,
"learning_rate": 0.00016897754295745008,
"loss": 11.2736,
"step": 244
},
{
"epoch": 0.2668845315904139,
"grad_norm": 593.677490234375,
"learning_rate": 0.00016872662546974008,
"loss": 10.4391,
"step": 245
},
{
"epoch": 0.2679738562091503,
"grad_norm": 647.2657470703125,
"learning_rate": 0.0001684748852609306,
"loss": 11.6138,
"step": 246
},
{
"epoch": 0.2690631808278867,
"grad_norm": 678.768798828125,
"learning_rate": 0.00016822232534458416,
"loss": 11.0187,
"step": 247
},
{
"epoch": 0.2701525054466231,
"grad_norm": 804.8411254882812,
"learning_rate": 0.00016796894874407595,
"loss": 11.2911,
"step": 248
},
{
"epoch": 0.27124183006535946,
"grad_norm": 440.8306579589844,
"learning_rate": 0.00016771475849255754,
"loss": 11.4534,
"step": 249
},
{
"epoch": 0.27233115468409586,
"grad_norm": 730.95166015625,
"learning_rate": 0.0001674597576329207,
"loss": 11.5775,
"step": 250
},
{
"epoch": 0.27342047930283225,
"grad_norm": 570.8353881835938,
"learning_rate": 0.00016720394921776097,
"loss": 11.6714,
"step": 251
},
{
"epoch": 0.27450980392156865,
"grad_norm": 532.7552490234375,
"learning_rate": 0.000166947336309341,
"loss": 10.7977,
"step": 252
},
{
"epoch": 0.275599128540305,
"grad_norm": 416.3326110839844,
"learning_rate": 0.00016668992197955398,
"loss": 11.0194,
"step": 253
},
{
"epoch": 0.2766884531590414,
"grad_norm": 458.32904052734375,
"learning_rate": 0.00016643170930988698,
"loss": 10.1537,
"step": 254
},
{
"epoch": 0.2777777777777778,
"grad_norm": 4145.63330078125,
"learning_rate": 0.00016617270139138371,
"loss": 10.5428,
"step": 255
},
{
"epoch": 0.2788671023965142,
"grad_norm": 435.748291015625,
"learning_rate": 0.0001659129013246079,
"loss": 12.5706,
"step": 256
},
{
"epoch": 0.27995642701525053,
"grad_norm": 448.35015869140625,
"learning_rate": 0.000165652312219606,
"loss": 10.5882,
"step": 257
},
{
"epoch": 0.28104575163398693,
"grad_norm": 541.2523193359375,
"learning_rate": 0.00016539093719586994,
"loss": 9.4986,
"step": 258
},
{
"epoch": 0.2821350762527233,
"grad_norm": 436.6683349609375,
"learning_rate": 0.00016512877938229986,
"loss": 9.6175,
"step": 259
},
{
"epoch": 0.28322440087145967,
"grad_norm": 732.4791259765625,
"learning_rate": 0.0001648658419171666,
"loss": 11.2249,
"step": 260
},
{
"epoch": 0.28431372549019607,
"grad_norm": 557.4008178710938,
"learning_rate": 0.00016460212794807414,
"loss": 11.2695,
"step": 261
},
{
"epoch": 0.28540305010893247,
"grad_norm": 441.1900939941406,
"learning_rate": 0.00016433764063192194,
"loss": 10.2486,
"step": 262
},
{
"epoch": 0.28649237472766886,
"grad_norm": 500.40753173828125,
"learning_rate": 0.00016407238313486712,
"loss": 10.4287,
"step": 263
},
{
"epoch": 0.2875816993464052,
"grad_norm": 517.90478515625,
"learning_rate": 0.0001638063586322866,
"loss": 10.9458,
"step": 264
},
{
"epoch": 0.2886710239651416,
"grad_norm": 477.6897888183594,
"learning_rate": 0.0001635395703087391,
"loss": 10.7061,
"step": 265
},
{
"epoch": 0.289760348583878,
"grad_norm": 577.167724609375,
"learning_rate": 0.00016327202135792685,
"loss": 10.5098,
"step": 266
},
{
"epoch": 0.2908496732026144,
"grad_norm": 502.87469482421875,
"learning_rate": 0.00016300371498265763,
"loss": 10.3983,
"step": 267
},
{
"epoch": 0.29193899782135074,
"grad_norm": 584.27197265625,
"learning_rate": 0.00016273465439480618,
"loss": 11.3498,
"step": 268
},
{
"epoch": 0.29302832244008714,
"grad_norm": 718.4893188476562,
"learning_rate": 0.000162464842815276,
"loss": 11.7879,
"step": 269
},
{
"epoch": 0.29411764705882354,
"grad_norm": 415.97320556640625,
"learning_rate": 0.00016219428347396053,
"loss": 11.5378,
"step": 270
},
{
"epoch": 0.29520697167755994,
"grad_norm": 965.6848754882812,
"learning_rate": 0.0001619229796097046,
"loss": 10.5206,
"step": 271
},
{
"epoch": 0.2962962962962963,
"grad_norm": 319.21905517578125,
"learning_rate": 0.0001616509344702658,
"loss": 9.4684,
"step": 272
},
{
"epoch": 0.2973856209150327,
"grad_norm": 399.8673095703125,
"learning_rate": 0.00016137815131227526,
"loss": 10.1383,
"step": 273
},
{
"epoch": 0.2984749455337691,
"grad_norm": 905.3511962890625,
"learning_rate": 0.00016110463340119913,
"loss": 10.4312,
"step": 274
},
{
"epoch": 0.2995642701525055,
"grad_norm": 561.3602294921875,
"learning_rate": 0.000160830384011299,
"loss": 10.7353,
"step": 275
},
{
"epoch": 0.3006535947712418,
"grad_norm": 664.7014770507812,
"learning_rate": 0.00016055540642559305,
"loss": 11.0443,
"step": 276
},
{
"epoch": 0.3017429193899782,
"grad_norm": 605.7849731445312,
"learning_rate": 0.00016027970393581666,
"loss": 10.5288,
"step": 277
},
{
"epoch": 0.3028322440087146,
"grad_norm": 745.9011840820312,
"learning_rate": 0.00016000327984238292,
"loss": 11.5525,
"step": 278
},
{
"epoch": 0.30392156862745096,
"grad_norm": 453.7929382324219,
"learning_rate": 0.00015972613745434314,
"loss": 10.4069,
"step": 279
},
{
"epoch": 0.30501089324618735,
"grad_norm": 679.0534057617188,
"learning_rate": 0.0001594482800893474,
"loss": 10.7409,
"step": 280
},
{
"epoch": 0.30610021786492375,
"grad_norm": 601.0300903320312,
"learning_rate": 0.00015916971107360461,
"loss": 10.1115,
"step": 281
},
{
"epoch": 0.30718954248366015,
"grad_norm": 559.4019165039062,
"learning_rate": 0.00015889043374184286,
"loss": 11.9414,
"step": 282
},
{
"epoch": 0.3082788671023965,
"grad_norm": 552.6649169921875,
"learning_rate": 0.00015861045143726946,
"loss": 11.0226,
"step": 283
},
{
"epoch": 0.3093681917211329,
"grad_norm": 488.8516540527344,
"learning_rate": 0.00015832976751153078,
"loss": 11.4205,
"step": 284
},
{
"epoch": 0.3104575163398693,
"grad_norm": 666.8306274414062,
"learning_rate": 0.0001580483853246723,
"loss": 11.0604,
"step": 285
},
{
"epoch": 0.3115468409586057,
"grad_norm": 710.2818603515625,
"learning_rate": 0.0001577663082450984,
"loss": 10.4147,
"step": 286
},
{
"epoch": 0.31263616557734203,
"grad_norm": 615.7288818359375,
"learning_rate": 0.00015748353964953186,
"loss": 11.2711,
"step": 287
},
{
"epoch": 0.3137254901960784,
"grad_norm": 613.173828125,
"learning_rate": 0.00015720008292297364,
"loss": 12.3367,
"step": 288
},
{
"epoch": 0.3148148148148148,
"grad_norm": 516.6195678710938,
"learning_rate": 0.00015691594145866215,
"loss": 11.0611,
"step": 289
},
{
"epoch": 0.3159041394335512,
"grad_norm": 572.4487915039062,
"learning_rate": 0.00015663111865803285,
"loss": 10.9705,
"step": 290
},
{
"epoch": 0.31699346405228757,
"grad_norm": 2503.04736328125,
"learning_rate": 0.00015634561793067737,
"loss": 11.5724,
"step": 291
},
{
"epoch": 0.31808278867102396,
"grad_norm": 945.3132934570312,
"learning_rate": 0.00015605944269430277,
"loss": 10.8974,
"step": 292
},
{
"epoch": 0.31917211328976036,
"grad_norm": 566.0651245117188,
"learning_rate": 0.00015577259637469058,
"loss": 11.5792,
"step": 293
},
{
"epoch": 0.3202614379084967,
"grad_norm": 736.163818359375,
"learning_rate": 0.00015548508240565583,
"loss": 11.0325,
"step": 294
},
{
"epoch": 0.3213507625272331,
"grad_norm": 757.3773803710938,
"learning_rate": 0.00015519690422900593,
"loss": 11.0976,
"step": 295
},
{
"epoch": 0.3224400871459695,
"grad_norm": 936.12890625,
"learning_rate": 0.00015490806529449945,
"loss": 11.1016,
"step": 296
},
{
"epoch": 0.3235294117647059,
"grad_norm": 720.9487915039062,
"learning_rate": 0.0001546185690598049,
"loss": 11.4778,
"step": 297
},
{
"epoch": 0.32461873638344224,
"grad_norm": 653.9827880859375,
"learning_rate": 0.0001543284189904592,
"loss": 12.2587,
"step": 298
},
{
"epoch": 0.32570806100217864,
"grad_norm": 722.68115234375,
"learning_rate": 0.00015403761855982631,
"loss": 10.4469,
"step": 299
},
{
"epoch": 0.32679738562091504,
"grad_norm": 1037.672607421875,
"learning_rate": 0.00015374617124905564,
"loss": 11.2673,
"step": 300
},
{
"epoch": 0.32788671023965144,
"grad_norm": 721.6505126953125,
"learning_rate": 0.0001534540805470403,
"loss": 11.505,
"step": 301
},
{
"epoch": 0.3289760348583878,
"grad_norm": 855.7329711914062,
"learning_rate": 0.00015316134995037545,
"loss": 10.711,
"step": 302
},
{
"epoch": 0.3300653594771242,
"grad_norm": 733.3320922851562,
"learning_rate": 0.00015286798296331632,
"loss": 11.2952,
"step": 303
},
{
"epoch": 0.3311546840958606,
"grad_norm": 763.12744140625,
"learning_rate": 0.00015257398309773633,
"loss": 11.3105,
"step": 304
},
{
"epoch": 0.332244008714597,
"grad_norm": 462.6761169433594,
"learning_rate": 0.00015227935387308511,
"loss": 11.7857,
"step": 305
},
{
"epoch": 0.3333333333333333,
"grad_norm": 793.5400390625,
"learning_rate": 0.00015198409881634617,
"loss": 10.805,
"step": 306
},
{
"epoch": 0.3344226579520697,
"grad_norm": 587.91455078125,
"learning_rate": 0.0001516882214619949,
"loss": 10.8593,
"step": 307
},
{
"epoch": 0.3355119825708061,
"grad_norm": 587.7955322265625,
"learning_rate": 0.00015139172535195617,
"loss": 11.7922,
"step": 308
},
{
"epoch": 0.3366013071895425,
"grad_norm": 534.8583374023438,
"learning_rate": 0.0001510946140355619,
"loss": 11.4238,
"step": 309
},
{
"epoch": 0.33769063180827885,
"grad_norm": 458.095458984375,
"learning_rate": 0.00015079689106950854,
"loss": 10.8279,
"step": 310
},
{
"epoch": 0.33877995642701525,
"grad_norm": 1109.0992431640625,
"learning_rate": 0.0001504985600178147,
"loss": 11.2836,
"step": 311
},
{
"epoch": 0.33986928104575165,
"grad_norm": 678.5516967773438,
"learning_rate": 0.00015019962445177819,
"loss": 11.7628,
"step": 312
},
{
"epoch": 0.340958605664488,
"grad_norm": 569.3182373046875,
"learning_rate": 0.00014990008794993345,
"loss": 11.4081,
"step": 313
},
{
"epoch": 0.3420479302832244,
"grad_norm": 545.4651489257812,
"learning_rate": 0.00014959995409800873,
"loss": 12.5765,
"step": 314
},
{
"epoch": 0.3431372549019608,
"grad_norm": 667.7208862304688,
"learning_rate": 0.00014929922648888308,
"loss": 11.5474,
"step": 315
},
{
"epoch": 0.3442265795206972,
"grad_norm": 621.5955810546875,
"learning_rate": 0.0001489979087225434,
"loss": 11.6176,
"step": 316
},
{
"epoch": 0.3453159041394335,
"grad_norm": 673.23828125,
"learning_rate": 0.00014869600440604118,
"loss": 10.7684,
"step": 317
},
{
"epoch": 0.3464052287581699,
"grad_norm": 874.149658203125,
"learning_rate": 0.00014839351715344968,
"loss": 11.6673,
"step": 318
},
{
"epoch": 0.3474945533769063,
"grad_norm": 869.7573852539062,
"learning_rate": 0.00014809045058582026,
"loss": 11.8444,
"step": 319
},
{
"epoch": 0.3485838779956427,
"grad_norm": 971.1417236328125,
"learning_rate": 0.00014778680833113926,
"loss": 11.0821,
"step": 320
},
{
"epoch": 0.34967320261437906,
"grad_norm": 1166.9410400390625,
"learning_rate": 0.00014748259402428462,
"loss": 11.7491,
"step": 321
},
{
"epoch": 0.35076252723311546,
"grad_norm": 1266.657470703125,
"learning_rate": 0.00014717781130698212,
"loss": 11.2385,
"step": 322
},
{
"epoch": 0.35185185185185186,
"grad_norm": 972.2568969726562,
"learning_rate": 0.00014687246382776205,
"loss": 10.9266,
"step": 323
},
{
"epoch": 0.35294117647058826,
"grad_norm": 1106.99951171875,
"learning_rate": 0.00014656655524191537,
"loss": 11.3394,
"step": 324
},
{
"epoch": 0.3540305010893246,
"grad_norm": 609.1690673828125,
"learning_rate": 0.0001462600892114501,
"loss": 11.5497,
"step": 325
},
{
"epoch": 0.355119825708061,
"grad_norm": 930.3221435546875,
"learning_rate": 0.00014595306940504716,
"loss": 10.7787,
"step": 326
},
{
"epoch": 0.3562091503267974,
"grad_norm": 902.9607543945312,
"learning_rate": 0.00014564549949801694,
"loss": 11.3869,
"step": 327
},
{
"epoch": 0.3572984749455338,
"grad_norm": 826.9057006835938,
"learning_rate": 0.00014533738317225485,
"loss": 11.0435,
"step": 328
},
{
"epoch": 0.35838779956427014,
"grad_norm": 802.20751953125,
"learning_rate": 0.00014502872411619757,
"loss": 11.4467,
"step": 329
},
{
"epoch": 0.35947712418300654,
"grad_norm": 1908.7032470703125,
"learning_rate": 0.00014471952602477866,
"loss": 11.7794,
"step": 330
},
{
"epoch": 0.36056644880174293,
"grad_norm": 1125.180908203125,
"learning_rate": 0.0001444097925993845,
"loss": 11.4379,
"step": 331
},
{
"epoch": 0.3616557734204793,
"grad_norm": 944.010986328125,
"learning_rate": 0.0001440995275478099,
"loss": 11.4291,
"step": 332
},
{
"epoch": 0.3627450980392157,
"grad_norm": 979.970703125,
"learning_rate": 0.0001437887345842137,
"loss": 12.0289,
"step": 333
},
{
"epoch": 0.3638344226579521,
"grad_norm": 1132.85205078125,
"learning_rate": 0.00014347741742907433,
"loss": 11.7372,
"step": 334
},
{
"epoch": 0.36492374727668847,
"grad_norm": 898.1516723632812,
"learning_rate": 0.00014316557980914528,
"loss": 10.979,
"step": 335
},
{
"epoch": 0.3660130718954248,
"grad_norm": 988.018310546875,
"learning_rate": 0.00014285322545741052,
"loss": 12.0688,
"step": 336
},
{
"epoch": 0.3671023965141612,
"grad_norm": 870.642822265625,
"learning_rate": 0.0001425403581130398,
"loss": 9.5806,
"step": 337
},
{
"epoch": 0.3681917211328976,
"grad_norm": 1094.388671875,
"learning_rate": 0.00014222698152134374,
"loss": 11.5287,
"step": 338
},
{
"epoch": 0.369281045751634,
"grad_norm": 758.43994140625,
"learning_rate": 0.0001419130994337292,
"loss": 10.2106,
"step": 339
},
{
"epoch": 0.37037037037037035,
"grad_norm": 994.099853515625,
"learning_rate": 0.00014159871560765432,
"loss": 13.7915,
"step": 340
},
{
"epoch": 0.37145969498910675,
"grad_norm": 1287.93115234375,
"learning_rate": 0.0001412838338065835,
"loss": 11.4081,
"step": 341
},
{
"epoch": 0.37254901960784315,
"grad_norm": 895.5205688476562,
"learning_rate": 0.0001409684577999423,
"loss": 10.9777,
"step": 342
},
{
"epoch": 0.37363834422657954,
"grad_norm": 843.1907348632812,
"learning_rate": 0.00014065259136307242,
"loss": 11.0484,
"step": 343
},
{
"epoch": 0.3747276688453159,
"grad_norm": 1058.655029296875,
"learning_rate": 0.0001403362382771865,
"loss": 10.9008,
"step": 344
},
{
"epoch": 0.3758169934640523,
"grad_norm": 1154.1737060546875,
"learning_rate": 0.0001400194023293228,
"loss": 11.1368,
"step": 345
},
{
"epoch": 0.3769063180827887,
"grad_norm": 866.00830078125,
"learning_rate": 0.00013970208731229974,
"loss": 11.8841,
"step": 346
},
{
"epoch": 0.3779956427015251,
"grad_norm": 740.2269287109375,
"learning_rate": 0.00013938429702467086,
"loss": 11.8118,
"step": 347
},
{
"epoch": 0.3790849673202614,
"grad_norm": 1207.28564453125,
"learning_rate": 0.000139066035270679,
"loss": 10.8065,
"step": 348
},
{
"epoch": 0.3801742919389978,
"grad_norm": 846.9767456054688,
"learning_rate": 0.00013874730586021093,
"loss": 10.8413,
"step": 349
},
{
"epoch": 0.3812636165577342,
"grad_norm": 835.8090209960938,
"learning_rate": 0.00013842811260875168,
"loss": 11.4149,
"step": 350
},
{
"epoch": 0.38235294117647056,
"grad_norm": 854.9374389648438,
"learning_rate": 0.0001381084593373389,
"loss": 11.2507,
"step": 351
},
{
"epoch": 0.38344226579520696,
"grad_norm": 1257.4901123046875,
"learning_rate": 0.00013778834987251707,
"loss": 10.6451,
"step": 352
},
{
"epoch": 0.38453159041394336,
"grad_norm": 1082.4285888671875,
"learning_rate": 0.00013746778804629177,
"loss": 11.6068,
"step": 353
},
{
"epoch": 0.38562091503267976,
"grad_norm": 989.6134033203125,
"learning_rate": 0.0001371467776960837,
"loss": 12.2527,
"step": 354
},
{
"epoch": 0.3867102396514161,
"grad_norm": 875.22021484375,
"learning_rate": 0.0001368253226646829,
"loss": 11.4817,
"step": 355
},
{
"epoch": 0.3877995642701525,
"grad_norm": 1244.9932861328125,
"learning_rate": 0.00013650342680020258,
"loss": 11.4714,
"step": 356
},
{
"epoch": 0.3888888888888889,
"grad_norm": 1482.9927978515625,
"learning_rate": 0.00013618109395603317,
"loss": 12.19,
"step": 357
},
{
"epoch": 0.3899782135076253,
"grad_norm": 1198.56298828125,
"learning_rate": 0.0001358583279907961,
"loss": 11.1256,
"step": 358
},
{
"epoch": 0.39106753812636164,
"grad_norm": 1113.496826171875,
"learning_rate": 0.0001355351327682977,
"loss": 10.5545,
"step": 359
},
{
"epoch": 0.39215686274509803,
"grad_norm": 1492.8271484375,
"learning_rate": 0.0001352115121574829,
"loss": 11.6428,
"step": 360
},
{
"epoch": 0.39324618736383443,
"grad_norm": 1067.7490234375,
"learning_rate": 0.00013488747003238892,
"loss": 11.2813,
"step": 361
},
{
"epoch": 0.39433551198257083,
"grad_norm": 1344.341064453125,
"learning_rate": 0.00013456301027209882,
"loss": 11.7236,
"step": 362
},
{
"epoch": 0.3954248366013072,
"grad_norm": 913.9457397460938,
"learning_rate": 0.00013423813676069534,
"loss": 11.2616,
"step": 363
},
{
"epoch": 0.39651416122004357,
"grad_norm": 1023.7560424804688,
"learning_rate": 0.000133912853387214,
"loss": 11.0282,
"step": 364
},
{
"epoch": 0.39760348583877997,
"grad_norm": 999.524658203125,
"learning_rate": 0.0001335871640455968,
"loss": 11.9099,
"step": 365
},
{
"epoch": 0.39869281045751637,
"grad_norm": 1470.6510009765625,
"learning_rate": 0.00013326107263464558,
"loss": 10.54,
"step": 366
},
{
"epoch": 0.3997821350762527,
"grad_norm": 1311.093017578125,
"learning_rate": 0.00013293458305797533,
"loss": 11.6321,
"step": 367
},
{
"epoch": 0.4008714596949891,
"grad_norm": 958.1239013671875,
"learning_rate": 0.0001326076992239674,
"loss": 12.1061,
"step": 368
},
{
"epoch": 0.4019607843137255,
"grad_norm": 1676.4158935546875,
"learning_rate": 0.00013228042504572285,
"loss": 12.0777,
"step": 369
},
{
"epoch": 0.40305010893246185,
"grad_norm": 1228.0052490234375,
"learning_rate": 0.00013195276444101547,
"loss": 11.9471,
"step": 370
},
{
"epoch": 0.40413943355119825,
"grad_norm": 2634.232666015625,
"learning_rate": 0.00013162472133224483,
"loss": 11.8953,
"step": 371
},
{
"epoch": 0.40522875816993464,
"grad_norm": 1066.639404296875,
"learning_rate": 0.0001312962996463896,
"loss": 11.6727,
"step": 372
},
{
"epoch": 0.40631808278867104,
"grad_norm": 2369.989990234375,
"learning_rate": 0.00013096750331496033,
"loss": 12.0725,
"step": 373
},
{
"epoch": 0.4074074074074074,
"grad_norm": 1213.9959716796875,
"learning_rate": 0.0001306383362739523,
"loss": 12.2408,
"step": 374
},
{
"epoch": 0.4084967320261438,
"grad_norm": 1465.7821044921875,
"learning_rate": 0.00013030880246379866,
"loss": 11.4288,
"step": 375
},
{
"epoch": 0.4095860566448802,
"grad_norm": 1068.891357421875,
"learning_rate": 0.00012997890582932303,
"loss": 11.5368,
"step": 376
},
{
"epoch": 0.4106753812636166,
"grad_norm": 1787.318115234375,
"learning_rate": 0.00012964865031969252,
"loss": 11.4801,
"step": 377
},
{
"epoch": 0.4117647058823529,
"grad_norm": 1149.781494140625,
"learning_rate": 0.0001293180398883701,
"loss": 12.9816,
"step": 378
},
{
"epoch": 0.4128540305010893,
"grad_norm": 2007.1617431640625,
"learning_rate": 0.00012898707849306763,
"loss": 11.9605,
"step": 379
},
{
"epoch": 0.4139433551198257,
"grad_norm": 2235.568603515625,
"learning_rate": 0.00012865577009569824,
"loss": 12.821,
"step": 380
},
{
"epoch": 0.4150326797385621,
"grad_norm": 1477.2247314453125,
"learning_rate": 0.0001283241186623291,
"loss": 11.7628,
"step": 381
},
{
"epoch": 0.41612200435729846,
"grad_norm": 1697.255615234375,
"learning_rate": 0.00012799212816313376,
"loss": 12.1798,
"step": 382
},
{
"epoch": 0.41721132897603486,
"grad_norm": 2948.6123046875,
"learning_rate": 0.00012765980257234473,
"loss": 12.3417,
"step": 383
},
{
"epoch": 0.41830065359477125,
"grad_norm": 1735.9176025390625,
"learning_rate": 0.00012732714586820583,
"loss": 12.736,
"step": 384
},
{
"epoch": 0.41938997821350765,
"grad_norm": 1974.7271728515625,
"learning_rate": 0.00012699416203292466,
"loss": 11.0415,
"step": 385
},
{
"epoch": 0.420479302832244,
"grad_norm": 1942.8695068359375,
"learning_rate": 0.00012666085505262485,
"loss": 11.3046,
"step": 386
},
{
"epoch": 0.4215686274509804,
"grad_norm": 2619.687744140625,
"learning_rate": 0.00012632722891729845,
"loss": 13.1539,
"step": 387
},
{
"epoch": 0.4226579520697168,
"grad_norm": 2373.228271484375,
"learning_rate": 0.000125993287620758,
"loss": 12.7629,
"step": 388
},
{
"epoch": 0.42374727668845313,
"grad_norm": 2018.0404052734375,
"learning_rate": 0.00012565903516058882,
"loss": 12.3341,
"step": 389
},
{
"epoch": 0.42483660130718953,
"grad_norm": 1830.7042236328125,
"learning_rate": 0.00012532447553810126,
"loss": 12.9322,
"step": 390
},
{
"epoch": 0.42592592592592593,
"grad_norm": 3418.471923828125,
"learning_rate": 0.00012498961275828247,
"loss": 12.1159,
"step": 391
},
{
"epoch": 0.42701525054466233,
"grad_norm": 1846.1995849609375,
"learning_rate": 0.00012465445082974886,
"loss": 10.8845,
"step": 392
},
{
"epoch": 0.42810457516339867,
"grad_norm": 2432.753173828125,
"learning_rate": 0.00012431899376469784,
"loss": 12.9188,
"step": 393
},
{
"epoch": 0.42919389978213507,
"grad_norm": 4167.3408203125,
"learning_rate": 0.00012398324557885994,
"loss": 13.8031,
"step": 394
},
{
"epoch": 0.43028322440087147,
"grad_norm": 2242.124267578125,
"learning_rate": 0.0001236472102914506,
"loss": 12.6964,
"step": 395
},
{
"epoch": 0.43137254901960786,
"grad_norm": 2378.07861328125,
"learning_rate": 0.00012331089192512218,
"loss": 12.1325,
"step": 396
},
{
"epoch": 0.4324618736383442,
"grad_norm": 1914.8848876953125,
"learning_rate": 0.00012297429450591575,
"loss": 11.3239,
"step": 397
},
{
"epoch": 0.4335511982570806,
"grad_norm": 5270.39697265625,
"learning_rate": 0.00012263742206321287,
"loss": 13.2649,
"step": 398
},
{
"epoch": 0.434640522875817,
"grad_norm": 4266.642578125,
"learning_rate": 0.00012230027862968743,
"loss": 13.9944,
"step": 399
},
{
"epoch": 0.4357298474945534,
"grad_norm": 2909.033203125,
"learning_rate": 0.00012196286824125726,
"loss": 12.5143,
"step": 400
},
{
"epoch": 0.43681917211328974,
"grad_norm": 2279.06494140625,
"learning_rate": 0.000121625194937036,
"loss": 13.4036,
"step": 401
},
{
"epoch": 0.43790849673202614,
"grad_norm": 3242.800048828125,
"learning_rate": 0.0001212872627592845,
"loss": 12.6792,
"step": 402
},
{
"epoch": 0.43899782135076254,
"grad_norm": 2338.9423828125,
"learning_rate": 0.00012094907575336267,
"loss": 12.0955,
"step": 403
},
{
"epoch": 0.4400871459694989,
"grad_norm": 2894.48046875,
"learning_rate": 0.0001206106379676809,
"loss": 13.7315,
"step": 404
},
{
"epoch": 0.4411764705882353,
"grad_norm": 2612.1435546875,
"learning_rate": 0.00012027195345365167,
"loss": 13.4882,
"step": 405
},
{
"epoch": 0.4422657952069717,
"grad_norm": 2377.11328125,
"learning_rate": 0.00011993302626564102,
"loss": 13.5782,
"step": 406
},
{
"epoch": 0.4433551198257081,
"grad_norm": 3777.18701171875,
"learning_rate": 0.00011959386046091998,
"loss": 13.3125,
"step": 407
},
{
"epoch": 0.4444444444444444,
"grad_norm": 2583.8447265625,
"learning_rate": 0.00011925446009961607,
"loss": 12.2817,
"step": 408
},
{
"epoch": 0.4455337690631808,
"grad_norm": 1550.3592529296875,
"learning_rate": 0.00011891482924466471,
"loss": 12.1185,
"step": 409
},
{
"epoch": 0.4466230936819172,
"grad_norm": 2206.906005859375,
"learning_rate": 0.00011857497196176049,
"loss": 13.4228,
"step": 410
},
{
"epoch": 0.4477124183006536,
"grad_norm": 2955.05419921875,
"learning_rate": 0.00011823489231930854,
"loss": 14.0393,
"step": 411
},
{
"epoch": 0.44880174291938996,
"grad_norm": 1879.666259765625,
"learning_rate": 0.00011789459438837589,
"loss": 12.6743,
"step": 412
},
{
"epoch": 0.44989106753812635,
"grad_norm": 3064.472900390625,
"learning_rate": 0.00011755408224264269,
"loss": 13.7009,
"step": 413
},
{
"epoch": 0.45098039215686275,
"grad_norm": 2606.11328125,
"learning_rate": 0.00011721335995835336,
"loss": 13.3532,
"step": 414
},
{
"epoch": 0.45206971677559915,
"grad_norm": 2937.61083984375,
"learning_rate": 0.00011687243161426793,
"loss": 13.8534,
"step": 415
},
{
"epoch": 0.4531590413943355,
"grad_norm": 2084.185791015625,
"learning_rate": 0.00011653130129161316,
"loss": 13.3512,
"step": 416
},
{
"epoch": 0.4542483660130719,
"grad_norm": 2084.17578125,
"learning_rate": 0.00011618997307403367,
"loss": 13.8318,
"step": 417
},
{
"epoch": 0.4553376906318083,
"grad_norm": 3513.52734375,
"learning_rate": 0.00011584845104754304,
"loss": 14.0658,
"step": 418
},
{
"epoch": 0.4564270152505447,
"grad_norm": 1889.5738525390625,
"learning_rate": 0.00011550673930047498,
"loss": 13.4544,
"step": 419
},
{
"epoch": 0.45751633986928103,
"grad_norm": 2762.25439453125,
"learning_rate": 0.00011516484192343425,
"loss": 13.4629,
"step": 420
},
{
"epoch": 0.45860566448801743,
"grad_norm": 2621.8154296875,
"learning_rate": 0.00011482276300924782,
"loss": 13.2982,
"step": 421
},
{
"epoch": 0.4596949891067538,
"grad_norm": 2016.842041015625,
"learning_rate": 0.00011448050665291587,
"loss": 12.8009,
"step": 422
},
{
"epoch": 0.46078431372549017,
"grad_norm": 2019.709228515625,
"learning_rate": 0.00011413807695156262,
"loss": 12.7923,
"step": 423
},
{
"epoch": 0.46187363834422657,
"grad_norm": 2043.737060546875,
"learning_rate": 0.00011379547800438747,
"loss": 13.6543,
"step": 424
},
{
"epoch": 0.46296296296296297,
"grad_norm": 1839.9501953125,
"learning_rate": 0.00011345271391261584,
"loss": 12.6804,
"step": 425
},
{
"epoch": 0.46405228758169936,
"grad_norm": 1405.8519287109375,
"learning_rate": 0.00011310978877945007,
"loss": 12.3567,
"step": 426
},
{
"epoch": 0.4651416122004357,
"grad_norm": 2460.86572265625,
"learning_rate": 0.00011276670671002028,
"loss": 12.8338,
"step": 427
},
{
"epoch": 0.4662309368191721,
"grad_norm": 1966.4986572265625,
"learning_rate": 0.00011242347181133533,
"loss": 13.6811,
"step": 428
},
{
"epoch": 0.4673202614379085,
"grad_norm": 2038.7354736328125,
"learning_rate": 0.00011208008819223354,
"loss": 13.591,
"step": 429
},
{
"epoch": 0.4684095860566449,
"grad_norm": 2427.309814453125,
"learning_rate": 0.00011173655996333357,
"loss": 13.6258,
"step": 430
},
{
"epoch": 0.46949891067538124,
"grad_norm": 3444.146484375,
"learning_rate": 0.00011139289123698518,
"loss": 12.7897,
"step": 431
},
{
"epoch": 0.47058823529411764,
"grad_norm": 1727.6273193359375,
"learning_rate": 0.00011104908612722001,
"loss": 13.4163,
"step": 432
},
{
"epoch": 0.47167755991285404,
"grad_norm": 2945.229736328125,
"learning_rate": 0.00011070514874970237,
"loss": 12.7424,
"step": 433
},
{
"epoch": 0.47276688453159044,
"grad_norm": 1918.266845703125,
"learning_rate": 0.00011036108322167988,
"loss": 12.9925,
"step": 434
},
{
"epoch": 0.4738562091503268,
"grad_norm": 1834.51123046875,
"learning_rate": 0.00011001689366193433,
"loss": 13.6747,
"step": 435
},
{
"epoch": 0.4749455337690632,
"grad_norm": 1388.4371337890625,
"learning_rate": 0.00010967258419073217,
"loss": 13.6131,
"step": 436
},
{
"epoch": 0.4760348583877996,
"grad_norm": 2213.042236328125,
"learning_rate": 0.00010932815892977535,
"loss": 13.7537,
"step": 437
},
{
"epoch": 0.477124183006536,
"grad_norm": 1625.793212890625,
"learning_rate": 0.00010898362200215197,
"loss": 13.7964,
"step": 438
},
{
"epoch": 0.4782135076252723,
"grad_norm": 1119.17919921875,
"learning_rate": 0.00010863897753228687,
"loss": 13.6206,
"step": 439
},
{
"epoch": 0.4793028322440087,
"grad_norm": 2075.09228515625,
"learning_rate": 0.0001082942296458922,
"loss": 13.5352,
"step": 440
},
{
"epoch": 0.4803921568627451,
"grad_norm": 2514.07958984375,
"learning_rate": 0.00010794938246991817,
"loss": 14.4007,
"step": 441
},
{
"epoch": 0.48148148148148145,
"grad_norm": 1654.1142578125,
"learning_rate": 0.0001076044401325036,
"loss": 14.1484,
"step": 442
},
{
"epoch": 0.48257080610021785,
"grad_norm": 1555.315185546875,
"learning_rate": 0.00010725940676292636,
"loss": 12.6995,
"step": 443
},
{
"epoch": 0.48366013071895425,
"grad_norm": 1430.394287109375,
"learning_rate": 0.0001069142864915542,
"loss": 12.6135,
"step": 444
},
{
"epoch": 0.48474945533769065,
"grad_norm": 1317.9832763671875,
"learning_rate": 0.00010656908344979506,
"loss": 13.5794,
"step": 445
},
{
"epoch": 0.485838779956427,
"grad_norm": 1727.5115966796875,
"learning_rate": 0.0001062238017700478,
"loss": 13.8786,
"step": 446
},
{
"epoch": 0.4869281045751634,
"grad_norm": 1529.5556640625,
"learning_rate": 0.00010587844558565261,
"loss": 13.5966,
"step": 447
},
{
"epoch": 0.4880174291938998,
"grad_norm": 1605.1805419921875,
"learning_rate": 0.00010553301903084157,
"loss": 14.9925,
"step": 448
},
{
"epoch": 0.4891067538126362,
"grad_norm": 1363.312255859375,
"learning_rate": 0.00010518752624068911,
"loss": 13.2978,
"step": 449
},
{
"epoch": 0.49019607843137253,
"grad_norm": 1511.412841796875,
"learning_rate": 0.00010484197135106263,
"loss": 15.2164,
"step": 450
},
{
"epoch": 0.4912854030501089,
"grad_norm": 1557.5146484375,
"learning_rate": 0.0001044963584985729,
"loss": 13.6396,
"step": 451
},
{
"epoch": 0.4923747276688453,
"grad_norm": 1717.6806640625,
"learning_rate": 0.0001041506918205246,
"loss": 13.6635,
"step": 452
},
{
"epoch": 0.4934640522875817,
"grad_norm": 1665.452880859375,
"learning_rate": 0.00010380497545486663,
"loss": 14.2642,
"step": 453
},
{
"epoch": 0.49455337690631807,
"grad_norm": 1279.9322509765625,
"learning_rate": 0.00010345921354014279,
"loss": 14.4201,
"step": 454
},
{
"epoch": 0.49564270152505446,
"grad_norm": 1384.8275146484375,
"learning_rate": 0.00010311341021544218,
"loss": 15.3935,
"step": 455
},
{
"epoch": 0.49673202614379086,
"grad_norm": 1815.6390380859375,
"learning_rate": 0.0001027675696203495,
"loss": 14.6728,
"step": 456
},
{
"epoch": 0.49782135076252726,
"grad_norm": 1295.0675048828125,
"learning_rate": 0.00010242169589489568,
"loss": 12.9198,
"step": 457
},
{
"epoch": 0.4989106753812636,
"grad_norm": 1431.1907958984375,
"learning_rate": 0.00010207579317950827,
"loss": 14.3134,
"step": 458
},
{
"epoch": 0.5,
"grad_norm": 1028.9598388671875,
"learning_rate": 0.0001017298656149618,
"loss": 13.316,
"step": 459
},
{
"epoch": 0.5010893246187363,
"grad_norm": 1440.857421875,
"learning_rate": 0.00010138391734232832,
"loss": 14.7119,
"step": 460
},
{
"epoch": 0.5010893246187363,
"eval_loss": 3.616103410720825,
"eval_runtime": 2.7823,
"eval_samples_per_second": 139.095,
"eval_steps_per_second": 69.727,
"step": 460
},
{
"epoch": 0.5021786492374728,
"grad_norm": 1697.1995849609375,
"learning_rate": 0.00010103795250292778,
"loss": 15.0099,
"step": 461
},
{
"epoch": 0.5032679738562091,
"grad_norm": 1438.0911865234375,
"learning_rate": 0.00010069197523827833,
"loss": 16.3135,
"step": 462
},
{
"epoch": 0.5043572984749455,
"grad_norm": 1578.0306396484375,
"learning_rate": 0.00010034598969004705,
"loss": 14.3354,
"step": 463
},
{
"epoch": 0.5054466230936819,
"grad_norm": 1145.7254638671875,
"learning_rate": 0.0001,
"loss": 15.1201,
"step": 464
},
{
"epoch": 0.5065359477124183,
"grad_norm": 1281.2977294921875,
"learning_rate": 9.965401030995301e-05,
"loss": 15.0423,
"step": 465
},
{
"epoch": 0.5076252723311547,
"grad_norm": 2497.565673828125,
"learning_rate": 9.930802476172169e-05,
"loss": 12.9232,
"step": 466
},
{
"epoch": 0.5087145969498911,
"grad_norm": 1575.5599365234375,
"learning_rate": 9.896204749707228e-05,
"loss": 14.6681,
"step": 467
},
{
"epoch": 0.5098039215686274,
"grad_norm": 941.0279541015625,
"learning_rate": 9.861608265767167e-05,
"loss": 14.7297,
"step": 468
},
{
"epoch": 0.5108932461873639,
"grad_norm": 1556.48388671875,
"learning_rate": 9.827013438503822e-05,
"loss": 14.6033,
"step": 469
},
{
"epoch": 0.5119825708061002,
"grad_norm": 1751.578125,
"learning_rate": 9.792420682049174e-05,
"loss": 15.0415,
"step": 470
},
{
"epoch": 0.5130718954248366,
"grad_norm": 1077.553955078125,
"learning_rate": 9.757830410510433e-05,
"loss": 13.5249,
"step": 471
},
{
"epoch": 0.514161220043573,
"grad_norm": 1023.6539916992188,
"learning_rate": 9.723243037965056e-05,
"loss": 13.8337,
"step": 472
},
{
"epoch": 0.5152505446623094,
"grad_norm": 1367.58984375,
"learning_rate": 9.688658978455784e-05,
"loss": 15.3791,
"step": 473
},
{
"epoch": 0.5163398692810458,
"grad_norm": 1138.333740234375,
"learning_rate": 9.654078645985722e-05,
"loss": 14.343,
"step": 474
},
{
"epoch": 0.5174291938997821,
"grad_norm": 958.8590698242188,
"learning_rate": 9.619502454513338e-05,
"loss": 14.1471,
"step": 475
},
{
"epoch": 0.5185185185185185,
"grad_norm": 1362.31689453125,
"learning_rate": 9.584930817947544e-05,
"loss": 14.2489,
"step": 476
},
{
"epoch": 0.5196078431372549,
"grad_norm": 1034.6534423828125,
"learning_rate": 9.550364150142713e-05,
"loss": 13.4652,
"step": 477
},
{
"epoch": 0.5206971677559913,
"grad_norm": 1429.5125732421875,
"learning_rate": 9.515802864893739e-05,
"loss": 14.1628,
"step": 478
},
{
"epoch": 0.5217864923747276,
"grad_norm": 1044.7430419921875,
"learning_rate": 9.481247375931094e-05,
"loss": 13.1049,
"step": 479
},
{
"epoch": 0.5228758169934641,
"grad_norm": 1164.5643310546875,
"learning_rate": 9.446698096915847e-05,
"loss": 13.8229,
"step": 480
},
{
"epoch": 0.5239651416122004,
"grad_norm": 1188.510986328125,
"learning_rate": 9.412155441434741e-05,
"loss": 13.4517,
"step": 481
},
{
"epoch": 0.5250544662309368,
"grad_norm": 1218.689208984375,
"learning_rate": 9.377619822995219e-05,
"loss": 15.032,
"step": 482
},
{
"epoch": 0.5261437908496732,
"grad_norm": 802.5537109375,
"learning_rate": 9.343091655020495e-05,
"loss": 13.9797,
"step": 483
},
{
"epoch": 0.5272331154684096,
"grad_norm": 938.1593627929688,
"learning_rate": 9.308571350844584e-05,
"loss": 13.8941,
"step": 484
},
{
"epoch": 0.528322440087146,
"grad_norm": 1472.3194580078125,
"learning_rate": 9.274059323707366e-05,
"loss": 13.2638,
"step": 485
},
{
"epoch": 0.5294117647058824,
"grad_norm": 1048.2354736328125,
"learning_rate": 9.239555986749645e-05,
"loss": 14.3531,
"step": 486
},
{
"epoch": 0.5305010893246187,
"grad_norm": 1296.9527587890625,
"learning_rate": 9.205061753008183e-05,
"loss": 14.203,
"step": 487
},
{
"epoch": 0.5315904139433552,
"grad_norm": 1657.743896484375,
"learning_rate": 9.170577035410783e-05,
"loss": 13.7112,
"step": 488
},
{
"epoch": 0.5326797385620915,
"grad_norm": 1212.4053955078125,
"learning_rate": 9.136102246771314e-05,
"loss": 13.2756,
"step": 489
},
{
"epoch": 0.5337690631808278,
"grad_norm": 1304.51220703125,
"learning_rate": 9.101637799784804e-05,
"loss": 14.4237,
"step": 490
},
{
"epoch": 0.5348583877995643,
"grad_norm": 1542.832275390625,
"learning_rate": 9.06718410702247e-05,
"loss": 14.4378,
"step": 491
},
{
"epoch": 0.5359477124183006,
"grad_norm": 1302.9571533203125,
"learning_rate": 9.032741580926787e-05,
"loss": 13.1055,
"step": 492
},
{
"epoch": 0.5370370370370371,
"grad_norm": 1939.5728759765625,
"learning_rate": 8.998310633806571e-05,
"loss": 12.4175,
"step": 493
},
{
"epoch": 0.5381263616557734,
"grad_norm": 1355.798095703125,
"learning_rate": 8.963891677832011e-05,
"loss": 12.9592,
"step": 494
},
{
"epoch": 0.5392156862745098,
"grad_norm": 2074.481201171875,
"learning_rate": 8.929485125029766e-05,
"loss": 13.6742,
"step": 495
},
{
"epoch": 0.5403050108932462,
"grad_norm": 1087.583740234375,
"learning_rate": 8.895091387277999e-05,
"loss": 14.9366,
"step": 496
},
{
"epoch": 0.5413943355119826,
"grad_norm": 1182.4432373046875,
"learning_rate": 8.860710876301484e-05,
"loss": 12.0956,
"step": 497
},
{
"epoch": 0.5424836601307189,
"grad_norm": 1062.96923828125,
"learning_rate": 8.826344003666647e-05,
"loss": 13.0663,
"step": 498
},
{
"epoch": 0.5435729847494554,
"grad_norm": 1257.4434814453125,
"learning_rate": 8.791991180776648e-05,
"loss": 14.0272,
"step": 499
},
{
"epoch": 0.5446623093681917,
"grad_norm": 1445.0548095703125,
"learning_rate": 8.757652818866471e-05,
"loss": 12.8504,
"step": 500
},
{
"epoch": 0.545751633986928,
"grad_norm": 1665.5859375,
"learning_rate": 8.723329328997973e-05,
"loss": 13.8356,
"step": 501
},
{
"epoch": 0.5468409586056645,
"grad_norm": 995.88427734375,
"learning_rate": 8.689021122054996e-05,
"loss": 12.8165,
"step": 502
},
{
"epoch": 0.5479302832244008,
"grad_norm": 1658.434326171875,
"learning_rate": 8.654728608738418e-05,
"loss": 14.196,
"step": 503
},
{
"epoch": 0.5490196078431373,
"grad_norm": 984.2662963867188,
"learning_rate": 8.620452199561254e-05,
"loss": 13.6334,
"step": 504
},
{
"epoch": 0.5501089324618736,
"grad_norm": 1411.3946533203125,
"learning_rate": 8.58619230484374e-05,
"loss": 13.7294,
"step": 505
},
{
"epoch": 0.55119825708061,
"grad_norm": 1083.3192138671875,
"learning_rate": 8.551949334708415e-05,
"loss": 12.9704,
"step": 506
},
{
"epoch": 0.5522875816993464,
"grad_norm": 1257.832275390625,
"learning_rate": 8.51772369907522e-05,
"loss": 12.5718,
"step": 507
},
{
"epoch": 0.5533769063180828,
"grad_norm": 871.9485473632812,
"learning_rate": 8.483515807656576e-05,
"loss": 13.1949,
"step": 508
},
{
"epoch": 0.5544662309368191,
"grad_norm": 1094.6038818359375,
"learning_rate": 8.449326069952506e-05,
"loss": 14.2022,
"step": 509
},
{
"epoch": 0.5555555555555556,
"grad_norm": 1254.0933837890625,
"learning_rate": 8.415154895245697e-05,
"loss": 13.1929,
"step": 510
},
{
"epoch": 0.5566448801742919,
"grad_norm": 819.5022583007812,
"learning_rate": 8.381002692596635e-05,
"loss": 13.739,
"step": 511
},
{
"epoch": 0.5577342047930284,
"grad_norm": 1080.34521484375,
"learning_rate": 8.346869870838685e-05,
"loss": 12.4172,
"step": 512
},
{
"epoch": 0.5588235294117647,
"grad_norm": 844.1549072265625,
"learning_rate": 8.312756838573208e-05,
"loss": 14.1843,
"step": 513
},
{
"epoch": 0.5599128540305011,
"grad_norm": 1664.24462890625,
"learning_rate": 8.278664004164665e-05,
"loss": 13.9817,
"step": 514
},
{
"epoch": 0.5610021786492375,
"grad_norm": 1837.83544921875,
"learning_rate": 8.244591775735732e-05,
"loss": 14.2748,
"step": 515
},
{
"epoch": 0.5620915032679739,
"grad_norm": 794.2677612304688,
"learning_rate": 8.210540561162412e-05,
"loss": 13.3806,
"step": 516
},
{
"epoch": 0.5631808278867102,
"grad_norm": 1501.6959228515625,
"learning_rate": 8.176510768069147e-05,
"loss": 13.5997,
"step": 517
},
{
"epoch": 0.5642701525054467,
"grad_norm": 1058.7418212890625,
"learning_rate": 8.142502803823955e-05,
"loss": 12.9171,
"step": 518
},
{
"epoch": 0.565359477124183,
"grad_norm": 1096.179443359375,
"learning_rate": 8.108517075533531e-05,
"loss": 13.1182,
"step": 519
},
{
"epoch": 0.5664488017429193,
"grad_norm": 1459.3277587890625,
"learning_rate": 8.074553990038395e-05,
"loss": 14.0618,
"step": 520
},
{
"epoch": 0.5675381263616558,
"grad_norm": 1117.51318359375,
"learning_rate": 8.040613953908005e-05,
"loss": 12.8453,
"step": 521
},
{
"epoch": 0.5686274509803921,
"grad_norm": 1164.1474609375,
"learning_rate": 8.0066973734359e-05,
"loss": 13.228,
"step": 522
},
{
"epoch": 0.5697167755991286,
"grad_norm": 1310.04296875,
"learning_rate": 7.972804654634834e-05,
"loss": 13.3479,
"step": 523
},
{
"epoch": 0.5708061002178649,
"grad_norm": 1271.54833984375,
"learning_rate": 7.938936203231912e-05,
"loss": 12.7188,
"step": 524
},
{
"epoch": 0.5718954248366013,
"grad_norm": 1231.713623046875,
"learning_rate": 7.905092424663735e-05,
"loss": 14.596,
"step": 525
},
{
"epoch": 0.5729847494553377,
"grad_norm": 2087.1767578125,
"learning_rate": 7.871273724071553e-05,
"loss": 12.5966,
"step": 526
},
{
"epoch": 0.5740740740740741,
"grad_norm": 1349.419189453125,
"learning_rate": 7.837480506296404e-05,
"loss": 12.99,
"step": 527
},
{
"epoch": 0.5751633986928104,
"grad_norm": 1333.2598876953125,
"learning_rate": 7.803713175874275e-05,
"loss": 12.8456,
"step": 528
},
{
"epoch": 0.5762527233115469,
"grad_norm": 1339.005615234375,
"learning_rate": 7.769972137031262e-05,
"loss": 14.4523,
"step": 529
},
{
"epoch": 0.5773420479302832,
"grad_norm": 1520.0718994140625,
"learning_rate": 7.736257793678714e-05,
"loss": 13.4148,
"step": 530
},
{
"epoch": 0.5784313725490197,
"grad_norm": 1333.5904541015625,
"learning_rate": 7.702570549408428e-05,
"loss": 13.4515,
"step": 531
},
{
"epoch": 0.579520697167756,
"grad_norm": 1032.6591796875,
"learning_rate": 7.668910807487783e-05,
"loss": 13.9835,
"step": 532
},
{
"epoch": 0.5806100217864923,
"grad_norm": 1167.9801025390625,
"learning_rate": 7.635278970854943e-05,
"loss": 13.1408,
"step": 533
},
{
"epoch": 0.5816993464052288,
"grad_norm": 1407.249267578125,
"learning_rate": 7.601675442114009e-05,
"loss": 13.9407,
"step": 534
},
{
"epoch": 0.5827886710239651,
"grad_norm": 2209.746826171875,
"learning_rate": 7.568100623530217e-05,
"loss": 12.4755,
"step": 535
},
{
"epoch": 0.5838779956427015,
"grad_norm": 1797.746826171875,
"learning_rate": 7.534554917025119e-05,
"loss": 12.8268,
"step": 536
},
{
"epoch": 0.5849673202614379,
"grad_norm": 1302.199462890625,
"learning_rate": 7.501038724171756e-05,
"loss": 13.197,
"step": 537
},
{
"epoch": 0.5860566448801743,
"grad_norm": 2067.767822265625,
"learning_rate": 7.46755244618988e-05,
"loss": 12.8685,
"step": 538
},
{
"epoch": 0.5871459694989106,
"grad_norm": 1580.0186767578125,
"learning_rate": 7.434096483941115e-05,
"loss": 13.4972,
"step": 539
},
{
"epoch": 0.5882352941176471,
"grad_norm": 894.298828125,
"learning_rate": 7.400671237924202e-05,
"loss": 13.1393,
"step": 540
},
{
"epoch": 0.5893246187363834,
"grad_norm": 1165.6744384765625,
"learning_rate": 7.367277108270156e-05,
"loss": 13.9111,
"step": 541
},
{
"epoch": 0.5904139433551199,
"grad_norm": 1790.1239013671875,
"learning_rate": 7.333914494737514e-05,
"loss": 13.351,
"step": 542
},
{
"epoch": 0.5915032679738562,
"grad_norm": 984.1708374023438,
"learning_rate": 7.300583796707539e-05,
"loss": 14.5447,
"step": 543
},
{
"epoch": 0.5925925925925926,
"grad_norm": 1341.8519287109375,
"learning_rate": 7.267285413179421e-05,
"loss": 14.1327,
"step": 544
},
{
"epoch": 0.593681917211329,
"grad_norm": 1000.2693481445312,
"learning_rate": 7.234019742765532e-05,
"loss": 13.3989,
"step": 545
},
{
"epoch": 0.5947712418300654,
"grad_norm": 1023.481689453125,
"learning_rate": 7.200787183686625e-05,
"loss": 13.069,
"step": 546
},
{
"epoch": 0.5958605664488017,
"grad_norm": 1108.57470703125,
"learning_rate": 7.167588133767091e-05,
"loss": 12.2698,
"step": 547
},
{
"epoch": 0.5969498910675382,
"grad_norm": 964.7139282226562,
"learning_rate": 7.134422990430176e-05,
"loss": 13.6585,
"step": 548
},
{
"epoch": 0.5980392156862745,
"grad_norm": 1239.9150390625,
"learning_rate": 7.101292150693241e-05,
"loss": 13.0273,
"step": 549
},
{
"epoch": 0.599128540305011,
"grad_norm": 846.2174072265625,
"learning_rate": 7.068196011162994e-05,
"loss": 13.0756,
"step": 550
},
{
"epoch": 0.6002178649237473,
"grad_norm": 987.310546875,
"learning_rate": 7.03513496803075e-05,
"loss": 14.1704,
"step": 551
},
{
"epoch": 0.6013071895424836,
"grad_norm": 5245.39697265625,
"learning_rate": 7.002109417067697e-05,
"loss": 14.4467,
"step": 552
},
{
"epoch": 0.6023965141612201,
"grad_norm": 1111.834228515625,
"learning_rate": 6.969119753620135e-05,
"loss": 13.517,
"step": 553
},
{
"epoch": 0.6034858387799564,
"grad_norm": 996.4967041015625,
"learning_rate": 6.936166372604773e-05,
"loss": 13.5025,
"step": 554
},
{
"epoch": 0.6045751633986928,
"grad_norm": 867.002685546875,
"learning_rate": 6.903249668503972e-05,
"loss": 12.8567,
"step": 555
},
{
"epoch": 0.6056644880174292,
"grad_norm": 1155.432373046875,
"learning_rate": 6.87037003536104e-05,
"loss": 13.0582,
"step": 556
},
{
"epoch": 0.6067538126361656,
"grad_norm": 1074.625732421875,
"learning_rate": 6.837527866775522e-05,
"loss": 13.5709,
"step": 557
},
{
"epoch": 0.6078431372549019,
"grad_norm": 1409.87841796875,
"learning_rate": 6.804723555898458e-05,
"loss": 13.7728,
"step": 558
},
{
"epoch": 0.6089324618736384,
"grad_norm": 987.9152221679688,
"learning_rate": 6.771957495427716e-05,
"loss": 13.0499,
"step": 559
},
{
"epoch": 0.6100217864923747,
"grad_norm": 1056.71240234375,
"learning_rate": 6.739230077603259e-05,
"loss": 13.4185,
"step": 560
},
{
"epoch": 0.6111111111111112,
"grad_norm": 965.7537841796875,
"learning_rate": 6.706541694202471e-05,
"loss": 13.3033,
"step": 561
},
{
"epoch": 0.6122004357298475,
"grad_norm": 950.843505859375,
"learning_rate": 6.673892736535448e-05,
"loss": 13.2638,
"step": 562
},
{
"epoch": 0.6132897603485838,
"grad_norm": 888.200439453125,
"learning_rate": 6.641283595440323e-05,
"loss": 14.2555,
"step": 563
},
{
"epoch": 0.6143790849673203,
"grad_norm": 1082.6995849609375,
"learning_rate": 6.608714661278606e-05,
"loss": 13.0727,
"step": 564
},
{
"epoch": 0.6154684095860566,
"grad_norm": 1750.720947265625,
"learning_rate": 6.576186323930466e-05,
"loss": 12.9111,
"step": 565
},
{
"epoch": 0.616557734204793,
"grad_norm": 1437.4244384765625,
"learning_rate": 6.543698972790117e-05,
"loss": 14.7961,
"step": 566
},
{
"epoch": 0.6176470588235294,
"grad_norm": 1005.5140380859375,
"learning_rate": 6.51125299676111e-05,
"loss": 14.3845,
"step": 567
},
{
"epoch": 0.6187363834422658,
"grad_norm": 802.7247924804688,
"learning_rate": 6.478848784251713e-05,
"loss": 14.1054,
"step": 568
},
{
"epoch": 0.6198257080610022,
"grad_norm": 780.930908203125,
"learning_rate": 6.446486723170236e-05,
"loss": 13.4999,
"step": 569
},
{
"epoch": 0.6209150326797386,
"grad_norm": 1568.1295166015625,
"learning_rate": 6.414167200920391e-05,
"loss": 14.3808,
"step": 570
},
{
"epoch": 0.6220043572984749,
"grad_norm": 649.1087036132812,
"learning_rate": 6.381890604396687e-05,
"loss": 14.4266,
"step": 571
},
{
"epoch": 0.6230936819172114,
"grad_norm": 1113.2542724609375,
"learning_rate": 6.349657319979742e-05,
"loss": 14.2873,
"step": 572
},
{
"epoch": 0.6241830065359477,
"grad_norm": 735.5198974609375,
"learning_rate": 6.317467733531712e-05,
"loss": 14.1431,
"step": 573
},
{
"epoch": 0.6252723311546841,
"grad_norm": 889.42822265625,
"learning_rate": 6.28532223039163e-05,
"loss": 14.5709,
"step": 574
},
{
"epoch": 0.6263616557734205,
"grad_norm": 1000.8912963867188,
"learning_rate": 6.253221195370826e-05,
"loss": 14.9789,
"step": 575
},
{
"epoch": 0.6274509803921569,
"grad_norm": 1058.2774658203125,
"learning_rate": 6.221165012748297e-05,
"loss": 13.2536,
"step": 576
},
{
"epoch": 0.6285403050108932,
"grad_norm": 752.534912109375,
"learning_rate": 6.189154066266112e-05,
"loss": 14.1625,
"step": 577
},
{
"epoch": 0.6296296296296297,
"grad_norm": 756.6798095703125,
"learning_rate": 6.157188739124834e-05,
"loss": 13.6298,
"step": 578
},
{
"epoch": 0.630718954248366,
"grad_norm": 1065.2354736328125,
"learning_rate": 6.125269413978907e-05,
"loss": 13.4351,
"step": 579
},
{
"epoch": 0.6318082788671024,
"grad_norm": 1031.8475341796875,
"learning_rate": 6.093396472932103e-05,
"loss": 12.9427,
"step": 580
},
{
"epoch": 0.6328976034858388,
"grad_norm": 545.2572631835938,
"learning_rate": 6.0615702975329194e-05,
"loss": 13.4271,
"step": 581
},
{
"epoch": 0.6339869281045751,
"grad_norm": 629.153076171875,
"learning_rate": 6.029791268770029e-05,
"loss": 13.7342,
"step": 582
},
{
"epoch": 0.6350762527233116,
"grad_norm": 808.3855590820312,
"learning_rate": 5.998059767067728e-05,
"loss": 12.8523,
"step": 583
},
{
"epoch": 0.6361655773420479,
"grad_norm": 966.9733276367188,
"learning_rate": 5.9663761722813495e-05,
"loss": 13.8446,
"step": 584
},
{
"epoch": 0.6372549019607843,
"grad_norm": 832.753173828125,
"learning_rate": 5.934740863692759e-05,
"loss": 14.0291,
"step": 585
},
{
"epoch": 0.6383442265795207,
"grad_norm": 870.2808837890625,
"learning_rate": 5.903154220005771e-05,
"loss": 14.7135,
"step": 586
},
{
"epoch": 0.6394335511982571,
"grad_norm": 888.448974609375,
"learning_rate": 5.871616619341653e-05,
"loss": 13.6237,
"step": 587
},
{
"epoch": 0.6405228758169934,
"grad_norm": 604.5468139648438,
"learning_rate": 5.840128439234571e-05,
"loss": 13.1112,
"step": 588
},
{
"epoch": 0.6416122004357299,
"grad_norm": 740.3869018554688,
"learning_rate": 5.80869005662708e-05,
"loss": 13.2927,
"step": 589
},
{
"epoch": 0.6427015250544662,
"grad_norm": 729.2908935546875,
"learning_rate": 5.777301847865629e-05,
"loss": 13.1883,
"step": 590
},
{
"epoch": 0.6437908496732027,
"grad_norm": 679.76171875,
"learning_rate": 5.7459641886960244e-05,
"loss": 12.7278,
"step": 591
},
{
"epoch": 0.644880174291939,
"grad_norm": 771.1044921875,
"learning_rate": 5.714677454258947e-05,
"loss": 14.2043,
"step": 592
},
{
"epoch": 0.6459694989106753,
"grad_norm": 525.4684448242188,
"learning_rate": 5.6834420190854745e-05,
"loss": 13.7949,
"step": 593
},
{
"epoch": 0.6470588235294118,
"grad_norm": 1367.8231201171875,
"learning_rate": 5.652258257092569e-05,
"loss": 13.5713,
"step": 594
},
{
"epoch": 0.6481481481481481,
"grad_norm": 1199.23876953125,
"learning_rate": 5.621126541578632e-05,
"loss": 13.6268,
"step": 595
},
{
"epoch": 0.6492374727668845,
"grad_norm": 995.0260009765625,
"learning_rate": 5.590047245219009e-05,
"loss": 14.3565,
"step": 596
},
{
"epoch": 0.6503267973856209,
"grad_norm": 805.2388916015625,
"learning_rate": 5.559020740061549e-05,
"loss": 14.2207,
"step": 597
},
{
"epoch": 0.6514161220043573,
"grad_norm": 779.6170654296875,
"learning_rate": 5.528047397522133e-05,
"loss": 13.4574,
"step": 598
},
{
"epoch": 0.6525054466230937,
"grad_norm": 974.6607055664062,
"learning_rate": 5.497127588380244e-05,
"loss": 13.5537,
"step": 599
},
{
"epoch": 0.6535947712418301,
"grad_norm": 957.2470092773438,
"learning_rate": 5.4662616827745185e-05,
"loss": 13.6918,
"step": 600
},
{
"epoch": 0.6546840958605664,
"grad_norm": 564.2131958007812,
"learning_rate": 5.4354500501983074e-05,
"loss": 14.5732,
"step": 601
},
{
"epoch": 0.6557734204793029,
"grad_norm": 970.4561157226562,
"learning_rate": 5.404693059495285e-05,
"loss": 14.259,
"step": 602
},
{
"epoch": 0.6568627450980392,
"grad_norm": 1475.91943359375,
"learning_rate": 5.373991078854992e-05,
"loss": 13.7511,
"step": 603
},
{
"epoch": 0.6579520697167756,
"grad_norm": 1206.491943359375,
"learning_rate": 5.3433444758084604e-05,
"loss": 13.9658,
"step": 604
},
{
"epoch": 0.659041394335512,
"grad_norm": 729.8560791015625,
"learning_rate": 5.312753617223794e-05,
"loss": 12.3194,
"step": 605
},
{
"epoch": 0.6601307189542484,
"grad_norm": 833.2518310546875,
"learning_rate": 5.282218869301788e-05,
"loss": 13.2262,
"step": 606
},
{
"epoch": 0.6612200435729847,
"grad_norm": 1121.98388671875,
"learning_rate": 5.251740597571542e-05,
"loss": 13.1887,
"step": 607
},
{
"epoch": 0.6623093681917211,
"grad_norm": 732.55810546875,
"learning_rate": 5.221319166886073e-05,
"loss": 11.9579,
"step": 608
},
{
"epoch": 0.6633986928104575,
"grad_norm": 795.4283447265625,
"learning_rate": 5.190954941417977e-05,
"loss": 13.6673,
"step": 609
},
{
"epoch": 0.664488017429194,
"grad_norm": 742.3599243164062,
"learning_rate": 5.160648284655032e-05,
"loss": 13.4396,
"step": 610
},
{
"epoch": 0.6655773420479303,
"grad_norm": 872.8551635742188,
"learning_rate": 5.1303995593958824e-05,
"loss": 14.0764,
"step": 611
},
{
"epoch": 0.6666666666666666,
"grad_norm": 865.8281860351562,
"learning_rate": 5.100209127745661e-05,
"loss": 13.2594,
"step": 612
},
{
"epoch": 0.6677559912854031,
"grad_norm": 553.1524658203125,
"learning_rate": 5.0700773511116906e-05,
"loss": 13.4783,
"step": 613
},
{
"epoch": 0.6688453159041394,
"grad_norm": 981.0257568359375,
"learning_rate": 5.040004590199128e-05,
"loss": 14.2846,
"step": 614
},
{
"epoch": 0.6699346405228758,
"grad_norm": 817.2470703125,
"learning_rate": 5.0099912050066556e-05,
"loss": 12.6324,
"step": 615
},
{
"epoch": 0.6710239651416122,
"grad_norm": 593.127197265625,
"learning_rate": 4.9800375548221845e-05,
"loss": 13.0678,
"step": 616
},
{
"epoch": 0.6721132897603486,
"grad_norm": 940.2001953125,
"learning_rate": 4.950143998218531e-05,
"loss": 13.5529,
"step": 617
},
{
"epoch": 0.673202614379085,
"grad_norm": 717.47021484375,
"learning_rate": 4.920310893049146e-05,
"loss": 12.7114,
"step": 618
},
{
"epoch": 0.6742919389978214,
"grad_norm": 874.2285766601562,
"learning_rate": 4.89053859644381e-05,
"loss": 14.4422,
"step": 619
},
{
"epoch": 0.6753812636165577,
"grad_norm": 847.6348876953125,
"learning_rate": 4.860827464804383e-05,
"loss": 12.8582,
"step": 620
},
{
"epoch": 0.6764705882352942,
"grad_norm": 552.3037719726562,
"learning_rate": 4.831177853800511e-05,
"loss": 13.5564,
"step": 621
},
{
"epoch": 0.6775599128540305,
"grad_norm": 704.1598510742188,
"learning_rate": 4.801590118365383e-05,
"loss": 14.2366,
"step": 622
},
{
"epoch": 0.6786492374727668,
"grad_norm": 535.1793823242188,
"learning_rate": 4.77206461269149e-05,
"loss": 13.5502,
"step": 623
},
{
"epoch": 0.6797385620915033,
"grad_norm": 1089.44775390625,
"learning_rate": 4.7426016902263636e-05,
"loss": 13.391,
"step": 624
},
{
"epoch": 0.6808278867102396,
"grad_norm": 793.9139404296875,
"learning_rate": 4.713201703668367e-05,
"loss": 13.9273,
"step": 625
},
{
"epoch": 0.681917211328976,
"grad_norm": 1263.728759765625,
"learning_rate": 4.683865004962452e-05,
"loss": 13.2224,
"step": 626
},
{
"epoch": 0.6830065359477124,
"grad_norm": 583.6502685546875,
"learning_rate": 4.654591945295969e-05,
"loss": 13.1198,
"step": 627
},
{
"epoch": 0.6840958605664488,
"grad_norm": 546.5349731445312,
"learning_rate": 4.6253828750944375e-05,
"loss": 14.2708,
"step": 628
},
{
"epoch": 0.6851851851851852,
"grad_norm": 573.7505493164062,
"learning_rate": 4.596238144017369e-05,
"loss": 14.4553,
"step": 629
},
{
"epoch": 0.6862745098039216,
"grad_norm": 485.56976318359375,
"learning_rate": 4.567158100954083e-05,
"loss": 13.9009,
"step": 630
},
{
"epoch": 0.6873638344226579,
"grad_norm": 859.7692260742188,
"learning_rate": 4.53814309401951e-05,
"loss": 13.4996,
"step": 631
},
{
"epoch": 0.6884531590413944,
"grad_norm": 806.7265014648438,
"learning_rate": 4.509193470550056e-05,
"loss": 13.8831,
"step": 632
},
{
"epoch": 0.6895424836601307,
"grad_norm": 678.3446044921875,
"learning_rate": 4.4803095770994106e-05,
"loss": 12.5697,
"step": 633
},
{
"epoch": 0.690631808278867,
"grad_norm": 760.7838745117188,
"learning_rate": 4.4514917594344184e-05,
"loss": 13.5064,
"step": 634
},
{
"epoch": 0.6917211328976035,
"grad_norm": 1103.8131103515625,
"learning_rate": 4.422740362530945e-05,
"loss": 14.8133,
"step": 635
},
{
"epoch": 0.6928104575163399,
"grad_norm": 737.2034912109375,
"learning_rate": 4.3940557305697226e-05,
"loss": 14.7038,
"step": 636
},
{
"epoch": 0.6938997821350763,
"grad_norm": 579.3258056640625,
"learning_rate": 4.3654382069322644e-05,
"loss": 14.0494,
"step": 637
},
{
"epoch": 0.6949891067538126,
"grad_norm": 680.4292602539062,
"learning_rate": 4.3368881341967135e-05,
"loss": 14.3115,
"step": 638
},
{
"epoch": 0.696078431372549,
"grad_norm": 617.1260986328125,
"learning_rate": 4.308405854133786e-05,
"loss": 13.788,
"step": 639
},
{
"epoch": 0.6971677559912854,
"grad_norm": 951.0760498046875,
"learning_rate": 4.2799917077026394e-05,
"loss": 13.1248,
"step": 640
},
{
"epoch": 0.6982570806100218,
"grad_norm": 606.4050903320312,
"learning_rate": 4.251646035046814e-05,
"loss": 14.0699,
"step": 641
},
{
"epoch": 0.6993464052287581,
"grad_norm": 596.975830078125,
"learning_rate": 4.223369175490162e-05,
"loss": 11.8161,
"step": 642
},
{
"epoch": 0.7004357298474946,
"grad_norm": 867.7359619140625,
"learning_rate": 4.195161467532769e-05,
"loss": 13.4987,
"step": 643
},
{
"epoch": 0.7015250544662309,
"grad_norm": 802.6876831054688,
"learning_rate": 4.167023248846925e-05,
"loss": 12.8087,
"step": 644
},
{
"epoch": 0.7026143790849673,
"grad_norm": 687.2286987304688,
"learning_rate": 4.138954856273054e-05,
"loss": 12.1043,
"step": 645
},
{
"epoch": 0.7037037037037037,
"grad_norm": 669.9190673828125,
"learning_rate": 4.110956625815713e-05,
"loss": 12.1478,
"step": 646
},
{
"epoch": 0.7047930283224401,
"grad_norm": 841.2921752929688,
"learning_rate": 4.083028892639541e-05,
"loss": 12.806,
"step": 647
},
{
"epoch": 0.7058823529411765,
"grad_norm": 808.080078125,
"learning_rate": 4.055171991065262e-05,
"loss": 13.3545,
"step": 648
},
{
"epoch": 0.7069716775599129,
"grad_norm": 919.4234619140625,
"learning_rate": 4.027386254565688e-05,
"loss": 13.9735,
"step": 649
},
{
"epoch": 0.7080610021786492,
"grad_norm": 769.6393432617188,
"learning_rate": 3.9996720157617094e-05,
"loss": 14.6133,
"step": 650
},
{
"epoch": 0.7091503267973857,
"grad_norm": 880.525146484375,
"learning_rate": 3.972029606418335e-05,
"loss": 13.0994,
"step": 651
},
{
"epoch": 0.710239651416122,
"grad_norm": 945.1436767578125,
"learning_rate": 3.9444593574406915e-05,
"loss": 14.2055,
"step": 652
},
{
"epoch": 0.7113289760348583,
"grad_norm": 690.2129516601562,
"learning_rate": 3.9169615988701e-05,
"loss": 13.2737,
"step": 653
},
{
"epoch": 0.7124183006535948,
"grad_norm": 563.0156860351562,
"learning_rate": 3.8895366598800896e-05,
"loss": 12.8401,
"step": 654
},
{
"epoch": 0.7135076252723311,
"grad_norm": 814.678466796875,
"learning_rate": 3.862184868772473e-05,
"loss": 12.2818,
"step": 655
},
{
"epoch": 0.7145969498910676,
"grad_norm": 712.4736938476562,
"learning_rate": 3.834906552973424e-05,
"loss": 12.5831,
"step": 656
},
{
"epoch": 0.7156862745098039,
"grad_norm": 1027.3265380859375,
"learning_rate": 3.807702039029539e-05,
"loss": 13.827,
"step": 657
},
{
"epoch": 0.7167755991285403,
"grad_norm": 1087.4530029296875,
"learning_rate": 3.780571652603949e-05,
"loss": 13.3193,
"step": 658
},
{
"epoch": 0.7178649237472767,
"grad_norm": 926.59326171875,
"learning_rate": 3.753515718472402e-05,
"loss": 13.3761,
"step": 659
},
{
"epoch": 0.7189542483660131,
"grad_norm": 758.7875366210938,
"learning_rate": 3.726534560519381e-05,
"loss": 12.2786,
"step": 660
},
{
"epoch": 0.7200435729847494,
"grad_norm": 693.2098999023438,
"learning_rate": 3.6996285017342406e-05,
"loss": 12.9392,
"step": 661
},
{
"epoch": 0.7211328976034859,
"grad_norm": 1132.9970703125,
"learning_rate": 3.672797864207316e-05,
"loss": 12.6866,
"step": 662
},
{
"epoch": 0.7222222222222222,
"grad_norm": 868.7568969726562,
"learning_rate": 3.646042969126093e-05,
"loss": 12.7426,
"step": 663
},
{
"epoch": 0.7233115468409586,
"grad_norm": 800.123291015625,
"learning_rate": 3.619364136771337e-05,
"loss": 12.4544,
"step": 664
},
{
"epoch": 0.724400871459695,
"grad_norm": 509.1244201660156,
"learning_rate": 3.5927616865132884e-05,
"loss": 13.2459,
"step": 665
},
{
"epoch": 0.7254901960784313,
"grad_norm": 568.7617797851562,
"learning_rate": 3.566235936807808e-05,
"loss": 13.3732,
"step": 666
},
{
"epoch": 0.7265795206971678,
"grad_norm": 839.1197509765625,
"learning_rate": 3.539787205192586e-05,
"loss": 12.6018,
"step": 667
},
{
"epoch": 0.7276688453159041,
"grad_norm": 750.1957397460938,
"learning_rate": 3.513415808283341e-05,
"loss": 13.1899,
"step": 668
},
{
"epoch": 0.7287581699346405,
"grad_norm": 604.1763305664062,
"learning_rate": 3.4871220617700126e-05,
"loss": 13.0681,
"step": 669
},
{
"epoch": 0.7298474945533769,
"grad_norm": 637.3112182617188,
"learning_rate": 3.460906280413007e-05,
"loss": 13.0931,
"step": 670
},
{
"epoch": 0.7309368191721133,
"grad_norm": 778.7605590820312,
"learning_rate": 3.4347687780394e-05,
"loss": 13.7031,
"step": 671
},
{
"epoch": 0.7320261437908496,
"grad_norm": 1044.1873779296875,
"learning_rate": 3.4087098675392104e-05,
"loss": 12.2163,
"step": 672
},
{
"epoch": 0.7331154684095861,
"grad_norm": 833.3889770507812,
"learning_rate": 3.382729860861632e-05,
"loss": 13.2927,
"step": 673
},
{
"epoch": 0.7342047930283224,
"grad_norm": 655.1200561523438,
"learning_rate": 3.3568290690113034e-05,
"loss": 12.0668,
"step": 674
},
{
"epoch": 0.7352941176470589,
"grad_norm": 702.258056640625,
"learning_rate": 3.331007802044601e-05,
"loss": 11.1181,
"step": 675
},
{
"epoch": 0.7363834422657952,
"grad_norm": 791.837890625,
"learning_rate": 3.305266369065901e-05,
"loss": 12.7031,
"step": 676
},
{
"epoch": 0.7374727668845316,
"grad_norm": 636.5592041015625,
"learning_rate": 3.279605078223906e-05,
"loss": 14.1468,
"step": 677
},
{
"epoch": 0.738562091503268,
"grad_norm": 1960.34228515625,
"learning_rate": 3.25402423670793e-05,
"loss": 12.7793,
"step": 678
},
{
"epoch": 0.7396514161220044,
"grad_norm": 987.132080078125,
"learning_rate": 3.228524150744249e-05,
"loss": 13.8105,
"step": 679
},
{
"epoch": 0.7407407407407407,
"grad_norm": 536.3349609375,
"learning_rate": 3.2031051255924085e-05,
"loss": 13.0451,
"step": 680
},
{
"epoch": 0.7418300653594772,
"grad_norm": 829.6603393554688,
"learning_rate": 3.1777674655415834e-05,
"loss": 13.352,
"step": 681
},
{
"epoch": 0.7429193899782135,
"grad_norm": 825.07275390625,
"learning_rate": 3.1525114739069415e-05,
"loss": 13.4622,
"step": 682
},
{
"epoch": 0.7440087145969498,
"grad_norm": 1563.3502197265625,
"learning_rate": 3.127337453025994e-05,
"loss": 13.9679,
"step": 683
},
{
"epoch": 0.7450980392156863,
"grad_norm": 955.0587158203125,
"learning_rate": 3.102245704254995e-05,
"loss": 12.3828,
"step": 684
},
{
"epoch": 0.7461873638344226,
"grad_norm": 784.834716796875,
"learning_rate": 3.077236527965318e-05,
"loss": 13.1804,
"step": 685
},
{
"epoch": 0.7472766884531591,
"grad_norm": 735.6173706054688,
"learning_rate": 3.0523102235398714e-05,
"loss": 13.0926,
"step": 686
},
{
"epoch": 0.7483660130718954,
"grad_norm": 1419.035888671875,
"learning_rate": 3.0274670893695147e-05,
"loss": 13.365,
"step": 687
},
{
"epoch": 0.7494553376906318,
"grad_norm": 1074.7969970703125,
"learning_rate": 3.002707422849472e-05,
"loss": 13.357,
"step": 688
},
{
"epoch": 0.7505446623093682,
"grad_norm": 1391.6951904296875,
"learning_rate": 2.978031520375798e-05,
"loss": 13.65,
"step": 689
},
{
"epoch": 0.7516339869281046,
"grad_norm": 789.4784545898438,
"learning_rate": 2.9534396773417994e-05,
"loss": 12.0752,
"step": 690
},
{
"epoch": 0.7516339869281046,
"eval_loss": 3.2088863849639893,
"eval_runtime": 2.6648,
"eval_samples_per_second": 145.229,
"eval_steps_per_second": 72.802,
"step": 690
},
{
"epoch": 0.7527233115468409,
"grad_norm": 880.0890502929688,
"learning_rate": 2.9289321881345254e-05,
"loss": 12.8765,
"step": 691
},
{
"epoch": 0.7538126361655774,
"grad_norm": 1300.8441162109375,
"learning_rate": 2.9045093461312258e-05,
"loss": 12.9364,
"step": 692
},
{
"epoch": 0.7549019607843137,
"grad_norm": 837.1845092773438,
"learning_rate": 2.8801714436958416e-05,
"loss": 11.9844,
"step": 693
},
{
"epoch": 0.7559912854030502,
"grad_norm": 837.4990844726562,
"learning_rate": 2.855918772175522e-05,
"loss": 12.274,
"step": 694
},
{
"epoch": 0.7570806100217865,
"grad_norm": 1010.6368408203125,
"learning_rate": 2.8317516218971073e-05,
"loss": 14.0263,
"step": 695
},
{
"epoch": 0.7581699346405228,
"grad_norm": 691.3543701171875,
"learning_rate": 2.8076702821636867e-05,
"loss": 12.8702,
"step": 696
},
{
"epoch": 0.7592592592592593,
"grad_norm": 884.868896484375,
"learning_rate": 2.7836750412511016e-05,
"loss": 13.9273,
"step": 697
},
{
"epoch": 0.7603485838779956,
"grad_norm": 1038.74462890625,
"learning_rate": 2.7597661864045233e-05,
"loss": 13.7905,
"step": 698
},
{
"epoch": 0.761437908496732,
"grad_norm": 861.4619140625,
"learning_rate": 2.735944003834997e-05,
"loss": 13.3323,
"step": 699
},
{
"epoch": 0.7625272331154684,
"grad_norm": 781.8521118164062,
"learning_rate": 2.7122087787160166e-05,
"loss": 13.0336,
"step": 700
},
{
"epoch": 0.7636165577342048,
"grad_norm": 1335.78076171875,
"learning_rate": 2.688560795180126e-05,
"loss": 12.7611,
"step": 701
},
{
"epoch": 0.7647058823529411,
"grad_norm": 704.1229248046875,
"learning_rate": 2.6650003363154963e-05,
"loss": 13.3398,
"step": 702
},
{
"epoch": 0.7657952069716776,
"grad_norm": 814.8448486328125,
"learning_rate": 2.641527684162556e-05,
"loss": 12.0726,
"step": 703
},
{
"epoch": 0.7668845315904139,
"grad_norm": 866.6876220703125,
"learning_rate": 2.6181431197105998e-05,
"loss": 12.4249,
"step": 704
},
{
"epoch": 0.7679738562091504,
"grad_norm": 820.3846435546875,
"learning_rate": 2.5948469228944318e-05,
"loss": 12.3404,
"step": 705
},
{
"epoch": 0.7690631808278867,
"grad_norm": 1044.1195068359375,
"learning_rate": 2.5716393725910215e-05,
"loss": 12.7415,
"step": 706
},
{
"epoch": 0.7701525054466231,
"grad_norm": 731.3047485351562,
"learning_rate": 2.5485207466161466e-05,
"loss": 12.301,
"step": 707
},
{
"epoch": 0.7712418300653595,
"grad_norm": 996.6677856445312,
"learning_rate": 2.5254913217210886e-05,
"loss": 12.2375,
"step": 708
},
{
"epoch": 0.7723311546840959,
"grad_norm": 1071.778076171875,
"learning_rate": 2.5025513735893014e-05,
"loss": 12.9786,
"step": 709
},
{
"epoch": 0.7734204793028322,
"grad_norm": 694.3305053710938,
"learning_rate": 2.47970117683313e-05,
"loss": 12.5304,
"step": 710
},
{
"epoch": 0.7745098039215687,
"grad_norm": 568.67626953125,
"learning_rate": 2.4569410049905016e-05,
"loss": 13.4328,
"step": 711
},
{
"epoch": 0.775599128540305,
"grad_norm": 781.4323120117188,
"learning_rate": 2.434271130521666e-05,
"loss": 13.7223,
"step": 712
},
{
"epoch": 0.7766884531590414,
"grad_norm": 707.454345703125,
"learning_rate": 2.411691824805934e-05,
"loss": 13.2585,
"step": 713
},
{
"epoch": 0.7777777777777778,
"grad_norm": 662.977783203125,
"learning_rate": 2.389203358138419e-05,
"loss": 13.1885,
"step": 714
},
{
"epoch": 0.7788671023965141,
"grad_norm": 1106.654296875,
"learning_rate": 2.3668059997268144e-05,
"loss": 12.8549,
"step": 715
},
{
"epoch": 0.7799564270152506,
"grad_norm": 809.3428955078125,
"learning_rate": 2.3445000176881537e-05,
"loss": 13.6922,
"step": 716
},
{
"epoch": 0.7810457516339869,
"grad_norm": 1407.9627685546875,
"learning_rate": 2.3222856790456226e-05,
"loss": 12.3901,
"step": 717
},
{
"epoch": 0.7821350762527233,
"grad_norm": 1036.4595947265625,
"learning_rate": 2.3001632497253424e-05,
"loss": 14.0636,
"step": 718
},
{
"epoch": 0.7832244008714597,
"grad_norm": 713.0249633789062,
"learning_rate": 2.2781329945531936e-05,
"loss": 13.3694,
"step": 719
},
{
"epoch": 0.7843137254901961,
"grad_norm": 946.2935180664062,
"learning_rate": 2.2561951772516587e-05,
"loss": 12.7461,
"step": 720
},
{
"epoch": 0.7854030501089324,
"grad_norm": 1000.2520751953125,
"learning_rate": 2.2343500604366374e-05,
"loss": 14.3916,
"step": 721
},
{
"epoch": 0.7864923747276689,
"grad_norm": 1008.6942749023438,
"learning_rate": 2.2125979056143364e-05,
"loss": 13.3091,
"step": 722
},
{
"epoch": 0.7875816993464052,
"grad_norm": 720.97314453125,
"learning_rate": 2.190938973178105e-05,
"loss": 12.963,
"step": 723
},
{
"epoch": 0.7886710239651417,
"grad_norm": 704.6624145507812,
"learning_rate": 2.169373522405349e-05,
"loss": 13.3057,
"step": 724
},
{
"epoch": 0.789760348583878,
"grad_norm": 944.1021728515625,
"learning_rate": 2.1479018114544026e-05,
"loss": 12.621,
"step": 725
},
{
"epoch": 0.7908496732026143,
"grad_norm": 552.0089111328125,
"learning_rate": 2.1265240973614486e-05,
"loss": 12.7888,
"step": 726
},
{
"epoch": 0.7919389978213508,
"grad_norm": 685.5350341796875,
"learning_rate": 2.105240636037449e-05,
"loss": 13.6543,
"step": 727
},
{
"epoch": 0.7930283224400871,
"grad_norm": 627.4608764648438,
"learning_rate": 2.0840516822650614e-05,
"loss": 12.6445,
"step": 728
},
{
"epoch": 0.7941176470588235,
"grad_norm": 645.2318725585938,
"learning_rate": 2.0629574896956126e-05,
"loss": 12.6309,
"step": 729
},
{
"epoch": 0.7952069716775599,
"grad_norm": 888.5296630859375,
"learning_rate": 2.0419583108460418e-05,
"loss": 11.886,
"step": 730
},
{
"epoch": 0.7962962962962963,
"grad_norm": 1192.0579833984375,
"learning_rate": 2.0210543970958872e-05,
"loss": 12.3713,
"step": 731
},
{
"epoch": 0.7973856209150327,
"grad_norm": 635.3908081054688,
"learning_rate": 2.0002459986842825e-05,
"loss": 13.2216,
"step": 732
},
{
"epoch": 0.7984749455337691,
"grad_norm": 657.1572265625,
"learning_rate": 1.9795333647069448e-05,
"loss": 12.6068,
"step": 733
},
{
"epoch": 0.7995642701525054,
"grad_norm": 1367.3870849609375,
"learning_rate": 1.958916743113214e-05,
"loss": 12.8324,
"step": 734
},
{
"epoch": 0.8006535947712419,
"grad_norm": 1032.5328369140625,
"learning_rate": 1.93839638070306e-05,
"loss": 12.6028,
"step": 735
},
{
"epoch": 0.8017429193899782,
"grad_norm": 762.1034545898438,
"learning_rate": 1.9179725231241564e-05,
"loss": 12.5233,
"step": 736
},
{
"epoch": 0.8028322440087146,
"grad_norm": 1431.1048583984375,
"learning_rate": 1.8976454148689127e-05,
"loss": 12.9953,
"step": 737
},
{
"epoch": 0.803921568627451,
"grad_norm": 1034.8011474609375,
"learning_rate": 1.877415299271561e-05,
"loss": 13.2373,
"step": 738
},
{
"epoch": 0.8050108932461874,
"grad_norm": 752.5025024414062,
"learning_rate": 1.857282418505253e-05,
"loss": 13.1138,
"step": 739
},
{
"epoch": 0.8061002178649237,
"grad_norm": 911.1949462890625,
"learning_rate": 1.8372470135791344e-05,
"loss": 12.7149,
"step": 740
},
{
"epoch": 0.8071895424836601,
"grad_norm": 1050.7984619140625,
"learning_rate": 1.8173093243354878e-05,
"loss": 12.862,
"step": 741
},
{
"epoch": 0.8082788671023965,
"grad_norm": 943.8112182617188,
"learning_rate": 1.7974695894468384e-05,
"loss": 12.6691,
"step": 742
},
{
"epoch": 0.809368191721133,
"grad_norm": 860.751708984375,
"learning_rate": 1.7777280464131197e-05,
"loss": 12.9838,
"step": 743
},
{
"epoch": 0.8104575163398693,
"grad_norm": 812.5382690429688,
"learning_rate": 1.7580849315588068e-05,
"loss": 12.5751,
"step": 744
},
{
"epoch": 0.8115468409586056,
"grad_norm": 684.0803833007812,
"learning_rate": 1.7385404800301007e-05,
"loss": 12.6821,
"step": 745
},
{
"epoch": 0.8126361655773421,
"grad_norm": 637.1691284179688,
"learning_rate": 1.7190949257921196e-05,
"loss": 12.704,
"step": 746
},
{
"epoch": 0.8137254901960784,
"grad_norm": 1279.00146484375,
"learning_rate": 1.6997485016260793e-05,
"loss": 13.6321,
"step": 747
},
{
"epoch": 0.8148148148148148,
"grad_norm": 1138.228515625,
"learning_rate": 1.680501439126525e-05,
"loss": 12.3677,
"step": 748
},
{
"epoch": 0.8159041394335512,
"grad_norm": 718.772705078125,
"learning_rate": 1.6613539686985458e-05,
"loss": 13.3372,
"step": 749
},
{
"epoch": 0.8169934640522876,
"grad_norm": 960.2557373046875,
"learning_rate": 1.642306319555027e-05,
"loss": 11.9433,
"step": 750
},
{
"epoch": 0.818082788671024,
"grad_norm": 745.1720581054688,
"learning_rate": 1.6233587197138968e-05,
"loss": 11.8539,
"step": 751
},
{
"epoch": 0.8191721132897604,
"grad_norm": 837.4060668945312,
"learning_rate": 1.6045113959953985e-05,
"loss": 12.3409,
"step": 752
},
{
"epoch": 0.8202614379084967,
"grad_norm": 854.7937622070312,
"learning_rate": 1.585764574019388e-05,
"loss": 13.3537,
"step": 753
},
{
"epoch": 0.8213507625272332,
"grad_norm": 879.7656860351562,
"learning_rate": 1.5671184782026106e-05,
"loss": 12.7348,
"step": 754
},
{
"epoch": 0.8224400871459695,
"grad_norm": 941.7662353515625,
"learning_rate": 1.548573331756038e-05,
"loss": 12.8492,
"step": 755
},
{
"epoch": 0.8235294117647058,
"grad_norm": 926.8936767578125,
"learning_rate": 1.530129356682175e-05,
"loss": 12.8661,
"step": 756
},
{
"epoch": 0.8246187363834423,
"grad_norm": 1937.478271484375,
"learning_rate": 1.5117867737724134e-05,
"loss": 12.9826,
"step": 757
},
{
"epoch": 0.8257080610021786,
"grad_norm": 909.1730346679688,
"learning_rate": 1.4935458026043959e-05,
"loss": 11.884,
"step": 758
},
{
"epoch": 0.826797385620915,
"grad_norm": 833.2159423828125,
"learning_rate": 1.4754066615393668e-05,
"loss": 13.1729,
"step": 759
},
{
"epoch": 0.8278867102396514,
"grad_norm": 1762.9833984375,
"learning_rate": 1.457369567719581e-05,
"loss": 12.5088,
"step": 760
},
{
"epoch": 0.8289760348583878,
"grad_norm": 774.0321044921875,
"learning_rate": 1.4394347370656836e-05,
"loss": 12.6099,
"step": 761
},
{
"epoch": 0.8300653594771242,
"grad_norm": 833.3447875976562,
"learning_rate": 1.4216023842741455e-05,
"loss": 11.8071,
"step": 762
},
{
"epoch": 0.8311546840958606,
"grad_norm": 659.9131469726562,
"learning_rate": 1.4038727228146753e-05,
"loss": 12.0735,
"step": 763
},
{
"epoch": 0.8322440087145969,
"grad_norm": 888.9996948242188,
"learning_rate": 1.3862459649276715e-05,
"loss": 12.47,
"step": 764
},
{
"epoch": 0.8333333333333334,
"grad_norm": 759.5191040039062,
"learning_rate": 1.3687223216216904e-05,
"loss": 12.5613,
"step": 765
},
{
"epoch": 0.8344226579520697,
"grad_norm": 805.9093627929688,
"learning_rate": 1.3513020026709023e-05,
"loss": 12.0561,
"step": 766
},
{
"epoch": 0.835511982570806,
"grad_norm": 763.7495727539062,
"learning_rate": 1.3339852166125954e-05,
"loss": 12.959,
"step": 767
},
{
"epoch": 0.8366013071895425,
"grad_norm": 610.1506958007812,
"learning_rate": 1.3167721707446678e-05,
"loss": 12.2513,
"step": 768
},
{
"epoch": 0.8376906318082789,
"grad_norm": 958.7525634765625,
"learning_rate": 1.2996630711231616e-05,
"loss": 14.1049,
"step": 769
},
{
"epoch": 0.8387799564270153,
"grad_norm": 1156.2576904296875,
"learning_rate": 1.2826581225597767e-05,
"loss": 12.8887,
"step": 770
},
{
"epoch": 0.8398692810457516,
"grad_norm": 1074.29345703125,
"learning_rate": 1.26575752861943e-05,
"loss": 13.7254,
"step": 771
},
{
"epoch": 0.840958605664488,
"grad_norm": 1287.4105224609375,
"learning_rate": 1.248961491617826e-05,
"loss": 13.033,
"step": 772
},
{
"epoch": 0.8420479302832244,
"grad_norm": 945.987548828125,
"learning_rate": 1.2322702126190156e-05,
"loss": 11.7015,
"step": 773
},
{
"epoch": 0.8431372549019608,
"grad_norm": 763.9169921875,
"learning_rate": 1.2156838914330072e-05,
"loss": 12.8775,
"step": 774
},
{
"epoch": 0.8442265795206971,
"grad_norm": 869.27880859375,
"learning_rate": 1.1992027266133598e-05,
"loss": 12.8051,
"step": 775
},
{
"epoch": 0.8453159041394336,
"grad_norm": 919.6107177734375,
"learning_rate": 1.1828269154548244e-05,
"loss": 12.7364,
"step": 776
},
{
"epoch": 0.8464052287581699,
"grad_norm": 840.4449462890625,
"learning_rate": 1.1665566539909623e-05,
"loss": 12.2462,
"step": 777
},
{
"epoch": 0.8474945533769063,
"grad_norm": 875.924072265625,
"learning_rate": 1.1503921369918091e-05,
"loss": 12.9855,
"step": 778
},
{
"epoch": 0.8485838779956427,
"grad_norm": 2267.00927734375,
"learning_rate": 1.1343335579615467e-05,
"loss": 12.9955,
"step": 779
},
{
"epoch": 0.8496732026143791,
"grad_norm": 934.5510864257812,
"learning_rate": 1.118381109136174e-05,
"loss": 12.6251,
"step": 780
},
{
"epoch": 0.8507625272331155,
"grad_norm": 599.7080078125,
"learning_rate": 1.1025349814812224e-05,
"loss": 12.7546,
"step": 781
},
{
"epoch": 0.8518518518518519,
"grad_norm": 1287.09912109375,
"learning_rate": 1.0867953646894525e-05,
"loss": 12.8024,
"step": 782
},
{
"epoch": 0.8529411764705882,
"grad_norm": 924.6251831054688,
"learning_rate": 1.0711624471785986e-05,
"loss": 12.9911,
"step": 783
},
{
"epoch": 0.8540305010893247,
"grad_norm": 904.363525390625,
"learning_rate": 1.055636416089102e-05,
"loss": 13.8365,
"step": 784
},
{
"epoch": 0.855119825708061,
"grad_norm": 998.8102416992188,
"learning_rate": 1.0402174572818723e-05,
"loss": 11.9683,
"step": 785
},
{
"epoch": 0.8562091503267973,
"grad_norm": 890.9308471679688,
"learning_rate": 1.0249057553360742e-05,
"loss": 12.8672,
"step": 786
},
{
"epoch": 0.8572984749455338,
"grad_norm": 944.108642578125,
"learning_rate": 1.0097014935468984e-05,
"loss": 13.2105,
"step": 787
},
{
"epoch": 0.8583877995642701,
"grad_norm": 956.6070556640625,
"learning_rate": 9.946048539233865e-06,
"loss": 12.8339,
"step": 788
},
{
"epoch": 0.8594771241830066,
"grad_norm": 783.9725341796875,
"learning_rate": 9.796160171862367e-06,
"loss": 12.0513,
"step": 789
},
{
"epoch": 0.8605664488017429,
"grad_norm": 678.079833984375,
"learning_rate": 9.647351627656543e-06,
"loss": 12.4566,
"step": 790
},
{
"epoch": 0.8616557734204793,
"grad_norm": 795.08154296875,
"learning_rate": 9.499624687991871e-06,
"loss": 13.0777,
"step": 791
},
{
"epoch": 0.8627450980392157,
"grad_norm": 1114.5704345703125,
"learning_rate": 9.352981121296134e-06,
"loss": 12.3807,
"step": 792
},
{
"epoch": 0.8638344226579521,
"grad_norm": 810.4409790039062,
"learning_rate": 9.207422683028066e-06,
"loss": 12.0099,
"step": 793
},
{
"epoch": 0.8649237472766884,
"grad_norm": 659.8233642578125,
"learning_rate": 9.062951115656403e-06,
"loss": 12.5349,
"step": 794
},
{
"epoch": 0.8660130718954249,
"grad_norm": 858.1024780273438,
"learning_rate": 8.919568148639123e-06,
"loss": 12.1748,
"step": 795
},
{
"epoch": 0.8671023965141612,
"grad_norm": 888.814453125,
"learning_rate": 8.777275498402548e-06,
"loss": 12.9867,
"step": 796
},
{
"epoch": 0.8681917211328976,
"grad_norm": 845.94140625,
"learning_rate": 8.636074868320987e-06,
"loss": 13.1836,
"step": 797
},
{
"epoch": 0.869281045751634,
"grad_norm": 970.3507080078125,
"learning_rate": 8.495967948696192e-06,
"loss": 11.6568,
"step": 798
},
{
"epoch": 0.8703703703703703,
"grad_norm": 865.5740966796875,
"learning_rate": 8.35695641673725e-06,
"loss": 12.0325,
"step": 799
},
{
"epoch": 0.8714596949891068,
"grad_norm": 916.9160766601562,
"learning_rate": 8.219041936540395e-06,
"loss": 13.141,
"step": 800
},
{
"epoch": 0.8725490196078431,
"grad_norm": 746.8594970703125,
"learning_rate": 8.082226159069196e-06,
"loss": 13.3162,
"step": 801
},
{
"epoch": 0.8736383442265795,
"grad_norm": 847.8311157226562,
"learning_rate": 7.946510722134692e-06,
"loss": 12.9385,
"step": 802
},
{
"epoch": 0.8747276688453159,
"grad_norm": 1479.700439453125,
"learning_rate": 7.811897250375833e-06,
"loss": 13.0996,
"step": 803
},
{
"epoch": 0.8758169934640523,
"grad_norm": 983.4318237304688,
"learning_rate": 7.678387355240057e-06,
"loss": 13.556,
"step": 804
},
{
"epoch": 0.8769063180827886,
"grad_norm": 1585.536376953125,
"learning_rate": 7.5459826349639436e-06,
"loss": 13.0711,
"step": 805
},
{
"epoch": 0.8779956427015251,
"grad_norm": 818.268310546875,
"learning_rate": 7.4146846745541506e-06,
"loss": 12.364,
"step": 806
},
{
"epoch": 0.8790849673202614,
"grad_norm": 732.67626953125,
"learning_rate": 7.284495045768325e-06,
"loss": 11.9299,
"step": 807
},
{
"epoch": 0.8801742919389978,
"grad_norm": 785.660888671875,
"learning_rate": 7.155415307096458e-06,
"loss": 13.0072,
"step": 808
},
{
"epoch": 0.8812636165577342,
"grad_norm": 1017.672607421875,
"learning_rate": 7.027447003742071e-06,
"loss": 11.7492,
"step": 809
},
{
"epoch": 0.8823529411764706,
"grad_norm": 961.2965087890625,
"learning_rate": 6.900591667603751e-06,
"loss": 12.7767,
"step": 810
},
{
"epoch": 0.883442265795207,
"grad_norm": 742.8497314453125,
"learning_rate": 6.774850817256939e-06,
"loss": 11.4043,
"step": 811
},
{
"epoch": 0.8845315904139434,
"grad_norm": 624.9981079101562,
"learning_rate": 6.650225957935552e-06,
"loss": 12.2672,
"step": 812
},
{
"epoch": 0.8856209150326797,
"grad_norm": 779.7870483398438,
"learning_rate": 6.5267185815141355e-06,
"loss": 11.9513,
"step": 813
},
{
"epoch": 0.8867102396514162,
"grad_norm": 1622.0540771484375,
"learning_rate": 6.40433016648988e-06,
"loss": 13.6973,
"step": 814
},
{
"epoch": 0.8877995642701525,
"grad_norm": 1286.1015625,
"learning_rate": 6.283062177965038e-06,
"loss": 11.6033,
"step": 815
},
{
"epoch": 0.8888888888888888,
"grad_norm": 1373.9736328125,
"learning_rate": 6.162916067629254e-06,
"loss": 13.2463,
"step": 816
},
{
"epoch": 0.8899782135076253,
"grad_norm": 1360.0540771484375,
"learning_rate": 6.043893273742329e-06,
"loss": 11.335,
"step": 817
},
{
"epoch": 0.8910675381263616,
"grad_norm": 1296.9820556640625,
"learning_rate": 5.925995221116853e-06,
"loss": 12.4607,
"step": 818
},
{
"epoch": 0.8921568627450981,
"grad_norm": 1495.9398193359375,
"learning_rate": 5.809223321101276e-06,
"loss": 11.7732,
"step": 819
},
{
"epoch": 0.8932461873638344,
"grad_norm": 880.9378662109375,
"learning_rate": 5.693578971562963e-06,
"loss": 12.8642,
"step": 820
},
{
"epoch": 0.8943355119825708,
"grad_norm": 1063.22216796875,
"learning_rate": 5.5790635568714224e-06,
"loss": 12.4356,
"step": 821
},
{
"epoch": 0.8954248366013072,
"grad_norm": 657.4075927734375,
"learning_rate": 5.465678447881828e-06,
"loss": 12.4044,
"step": 822
},
{
"epoch": 0.8965141612200436,
"grad_norm": 1261.831787109375,
"learning_rate": 5.3534250019184774e-06,
"loss": 13.5235,
"step": 823
},
{
"epoch": 0.8976034858387799,
"grad_norm": 601.0888061523438,
"learning_rate": 5.242304562758704e-06,
"loss": 11.9985,
"step": 824
},
{
"epoch": 0.8986928104575164,
"grad_norm": 781.6649780273438,
"learning_rate": 5.132318460616625e-06,
"loss": 11.9329,
"step": 825
},
{
"epoch": 0.8997821350762527,
"grad_norm": 1356.27294921875,
"learning_rate": 5.023468012127364e-06,
"loss": 12.6829,
"step": 826
},
{
"epoch": 0.900871459694989,
"grad_norm": 929.7411499023438,
"learning_rate": 4.915754520331173e-06,
"loss": 11.9567,
"step": 827
},
{
"epoch": 0.9019607843137255,
"grad_norm": 1020.9713745117188,
"learning_rate": 4.8091792746578935e-06,
"loss": 11.6725,
"step": 828
},
{
"epoch": 0.9030501089324618,
"grad_norm": 741.8558349609375,
"learning_rate": 4.703743550911543e-06,
"loss": 12.448,
"step": 829
},
{
"epoch": 0.9041394335511983,
"grad_norm": 637.9666137695312,
"learning_rate": 4.599448611254964e-06,
"loss": 12.7454,
"step": 830
},
{
"epoch": 0.9052287581699346,
"grad_norm": 1003.8346557617188,
"learning_rate": 4.496295704194819e-06,
"loss": 13.0511,
"step": 831
},
{
"epoch": 0.906318082788671,
"grad_norm": 819.62451171875,
"learning_rate": 4.394286064566511e-06,
"loss": 13.2029,
"step": 832
},
{
"epoch": 0.9074074074074074,
"grad_norm": 1023.1488647460938,
"learning_rate": 4.293420913519541e-06,
"loss": 12.9151,
"step": 833
},
{
"epoch": 0.9084967320261438,
"grad_norm": 788.2391967773438,
"learning_rate": 4.193701458502807e-06,
"loss": 12.7021,
"step": 834
},
{
"epoch": 0.9095860566448801,
"grad_norm": 837.283447265625,
"learning_rate": 4.095128893250156e-06,
"loss": 13.0428,
"step": 835
},
{
"epoch": 0.9106753812636166,
"grad_norm": 1206.6253662109375,
"learning_rate": 3.997704397766122e-06,
"loss": 12.3555,
"step": 836
},
{
"epoch": 0.9117647058823529,
"grad_norm": 856.0335083007812,
"learning_rate": 3.901429138311763e-06,
"loss": 12.2161,
"step": 837
},
{
"epoch": 0.9128540305010894,
"grad_norm": 840.4730834960938,
"learning_rate": 3.80630426739077e-06,
"loss": 12.3642,
"step": 838
},
{
"epoch": 0.9139433551198257,
"grad_norm": 767.2365112304688,
"learning_rate": 3.712330923735563e-06,
"loss": 12.338,
"step": 839
},
{
"epoch": 0.9150326797385621,
"grad_norm": 844.8609619140625,
"learning_rate": 3.6195102322937545e-06,
"loss": 12.4701,
"step": 840
},
{
"epoch": 0.9161220043572985,
"grad_norm": 808.1879272460938,
"learning_rate": 3.5278433042146397e-06,
"loss": 12.4012,
"step": 841
},
{
"epoch": 0.9172113289760349,
"grad_norm": 787.5524291992188,
"learning_rate": 3.4373312368358944e-06,
"loss": 12.1528,
"step": 842
},
{
"epoch": 0.9183006535947712,
"grad_norm": 861.0357666015625,
"learning_rate": 3.347975113670454e-06,
"loss": 13.0016,
"step": 843
},
{
"epoch": 0.9193899782135077,
"grad_norm": 1098.5736083984375,
"learning_rate": 3.259776004393533e-06,
"loss": 12.3925,
"step": 844
},
{
"epoch": 0.920479302832244,
"grad_norm": 1017.1472778320312,
"learning_rate": 3.1727349648298267e-06,
"loss": 13.2196,
"step": 845
},
{
"epoch": 0.9215686274509803,
"grad_norm": 729.2058715820312,
"learning_rate": 3.086853036940862e-06,
"loss": 11.8512,
"step": 846
},
{
"epoch": 0.9226579520697168,
"grad_norm": 809.0467529296875,
"learning_rate": 3.0021312488125454e-06,
"loss": 11.8526,
"step": 847
},
{
"epoch": 0.9237472766884531,
"grad_norm": 948.0686645507812,
"learning_rate": 2.9185706146428017e-06,
"loss": 12.4416,
"step": 848
},
{
"epoch": 0.9248366013071896,
"grad_norm": 631.7981567382812,
"learning_rate": 2.836172134729509e-06,
"loss": 12.836,
"step": 849
},
{
"epoch": 0.9259259259259259,
"grad_norm": 641.8875732421875,
"learning_rate": 2.754936795458485e-06,
"loss": 12.6921,
"step": 850
},
{
"epoch": 0.9270152505446623,
"grad_norm": 860.4398803710938,
"learning_rate": 2.674865569291651e-06,
"loss": 13.6532,
"step": 851
},
{
"epoch": 0.9281045751633987,
"grad_norm": 760.2114868164062,
"learning_rate": 2.5959594147554667e-06,
"loss": 13.178,
"step": 852
},
{
"epoch": 0.9291938997821351,
"grad_norm": 1154.3988037109375,
"learning_rate": 2.5182192764293567e-06,
"loss": 12.716,
"step": 853
},
{
"epoch": 0.9302832244008714,
"grad_norm": 762.8448486328125,
"learning_rate": 2.4416460849345123e-06,
"loss": 12.5605,
"step": 854
},
{
"epoch": 0.9313725490196079,
"grad_norm": 1097.0977783203125,
"learning_rate": 2.366240756922644e-06,
"loss": 12.0626,
"step": 855
},
{
"epoch": 0.9324618736383442,
"grad_norm": 989.987060546875,
"learning_rate": 2.2920041950650783e-06,
"loss": 12.1056,
"step": 856
},
{
"epoch": 0.9335511982570807,
"grad_norm": 761.228271484375,
"learning_rate": 2.218937288041956e-06,
"loss": 12.0653,
"step": 857
},
{
"epoch": 0.934640522875817,
"grad_norm": 698.48486328125,
"learning_rate": 2.1470409105315283e-06,
"loss": 12.689,
"step": 858
},
{
"epoch": 0.9357298474945533,
"grad_norm": 1059.8203125,
"learning_rate": 2.0763159231997674e-06,
"loss": 12.268,
"step": 859
},
{
"epoch": 0.9368191721132898,
"grad_norm": 1506.7076416015625,
"learning_rate": 2.0067631726899962e-06,
"loss": 13.0918,
"step": 860
},
{
"epoch": 0.9379084967320261,
"grad_norm": 686.2244262695312,
"learning_rate": 1.938383491612794e-06,
"loss": 13.2276,
"step": 861
},
{
"epoch": 0.9389978213507625,
"grad_norm": 1398.2288818359375,
"learning_rate": 1.8711776985360308e-06,
"loss": 12.4386,
"step": 862
},
{
"epoch": 0.9400871459694989,
"grad_norm": 787.7633666992188,
"learning_rate": 1.805146597975016e-06,
"loss": 12.0259,
"step": 863
},
{
"epoch": 0.9411764705882353,
"grad_norm": 1021.8810424804688,
"learning_rate": 1.7402909803829525e-06,
"loss": 13.2275,
"step": 864
},
{
"epoch": 0.9422657952069716,
"grad_norm": 724.1331176757812,
"learning_rate": 1.6766116221413774e-06,
"loss": 12.9674,
"step": 865
},
{
"epoch": 0.9433551198257081,
"grad_norm": 889.06201171875,
"learning_rate": 1.61410928555098e-06,
"loss": 12.9815,
"step": 866
},
{
"epoch": 0.9444444444444444,
"grad_norm": 694.7861938476562,
"learning_rate": 1.5527847188223644e-06,
"loss": 11.78,
"step": 867
},
{
"epoch": 0.9455337690631809,
"grad_norm": 1126.9815673828125,
"learning_rate": 1.4926386560671358e-06,
"loss": 13.2658,
"step": 868
},
{
"epoch": 0.9466230936819172,
"grad_norm": 1251.4708251953125,
"learning_rate": 1.433671817289184e-06,
"loss": 13.5727,
"step": 869
},
{
"epoch": 0.9477124183006536,
"grad_norm": 928.8302001953125,
"learning_rate": 1.3758849083759352e-06,
"loss": 12.5813,
"step": 870
},
{
"epoch": 0.94880174291939,
"grad_norm": 906.8268432617188,
"learning_rate": 1.3192786210900033e-06,
"loss": 12.3939,
"step": 871
},
{
"epoch": 0.9498910675381264,
"grad_norm": 1406.4046630859375,
"learning_rate": 1.2638536330608408e-06,
"loss": 12.5356,
"step": 872
},
{
"epoch": 0.9509803921568627,
"grad_norm": 734.1050415039062,
"learning_rate": 1.2096106077767011e-06,
"loss": 12.0204,
"step": 873
},
{
"epoch": 0.9520697167755992,
"grad_norm": 1201.427734375,
"learning_rate": 1.1565501945766222e-06,
"loss": 11.8001,
"step": 874
},
{
"epoch": 0.9531590413943355,
"grad_norm": 964.8887329101562,
"learning_rate": 1.1046730286426775e-06,
"loss": 13.3203,
"step": 875
},
{
"epoch": 0.954248366013072,
"grad_norm": 1066.5933837890625,
"learning_rate": 1.053979730992416e-06,
"loss": 12.9455,
"step": 876
},
{
"epoch": 0.9553376906318083,
"grad_norm": 867.9620361328125,
"learning_rate": 1.0044709084713554e-06,
"loss": 12.5005,
"step": 877
},
{
"epoch": 0.9564270152505446,
"grad_norm": 1209.2740478515625,
"learning_rate": 9.56147153745779e-07,
"loss": 12.6775,
"step": 878
},
{
"epoch": 0.9575163398692811,
"grad_norm": 920.9111938476562,
"learning_rate": 9.090090452955835e-07,
"loss": 12.656,
"step": 879
},
{
"epoch": 0.9586056644880174,
"grad_norm": 1178.7100830078125,
"learning_rate": 8.630571474074311e-07,
"loss": 11.8858,
"step": 880
},
{
"epoch": 0.9596949891067538,
"grad_norm": 827.2933349609375,
"learning_rate": 8.182920101679092e-07,
"loss": 12.7875,
"step": 881
},
{
"epoch": 0.9607843137254902,
"grad_norm": 970.2561645507812,
"learning_rate": 7.747141694570026e-07,
"loss": 12.1683,
"step": 882
},
{
"epoch": 0.9618736383442266,
"grad_norm": 961.2227783203125,
"learning_rate": 7.323241469416764e-07,
"loss": 13.4533,
"step": 883
},
{
"epoch": 0.9629629629629629,
"grad_norm": 1200.774658203125,
"learning_rate": 6.911224500695702e-07,
"loss": 13.4708,
"step": 884
},
{
"epoch": 0.9640522875816994,
"grad_norm": 961.2836303710938,
"learning_rate": 6.511095720630244e-07,
"loss": 12.1307,
"step": 885
},
{
"epoch": 0.9651416122004357,
"grad_norm": 949.4239501953125,
"learning_rate": 6.122859919130974e-07,
"loss": 11.6507,
"step": 886
},
{
"epoch": 0.9662309368191722,
"grad_norm": 1107.519287109375,
"learning_rate": 5.746521743738354e-07,
"loss": 13.5597,
"step": 887
},
{
"epoch": 0.9673202614379085,
"grad_norm": 713.34619140625,
"learning_rate": 5.382085699567552e-07,
"loss": 13.0859,
"step": 888
},
{
"epoch": 0.9684095860566448,
"grad_norm": 763.1091918945312,
"learning_rate": 5.029556149254266e-07,
"loss": 12.9483,
"step": 889
},
{
"epoch": 0.9694989106753813,
"grad_norm": 913.829345703125,
"learning_rate": 4.6889373129022085e-07,
"loss": 11.2009,
"step": 890
},
{
"epoch": 0.9705882352941176,
"grad_norm": 1170.449462890625,
"learning_rate": 4.3602332680331425e-07,
"loss": 12.748,
"step": 891
},
{
"epoch": 0.971677559912854,
"grad_norm": 877.0611572265625,
"learning_rate": 4.0434479495378155e-07,
"loss": 12.1814,
"step": 892
},
{
"epoch": 0.9727668845315904,
"grad_norm": 1181.8524169921875,
"learning_rate": 3.7385851496284374e-07,
"loss": 12.2686,
"step": 893
},
{
"epoch": 0.9738562091503268,
"grad_norm": 919.8130493164062,
"learning_rate": 3.445648517793942e-07,
"loss": 12.535,
"step": 894
},
{
"epoch": 0.9749455337690632,
"grad_norm": 736.0946044921875,
"learning_rate": 3.164641560756132e-07,
"loss": 13.5968,
"step": 895
},
{
"epoch": 0.9760348583877996,
"grad_norm": 1050.005126953125,
"learning_rate": 2.895567642427488e-07,
"loss": 13.5456,
"step": 896
},
{
"epoch": 0.9771241830065359,
"grad_norm": 1240.2838134765625,
"learning_rate": 2.638429983870983e-07,
"loss": 12.4089,
"step": 897
},
{
"epoch": 0.9782135076252724,
"grad_norm": 1174.1912841796875,
"learning_rate": 2.3932316632614416e-07,
"loss": 12.5838,
"step": 898
},
{
"epoch": 0.9793028322440087,
"grad_norm": 900.91845703125,
"learning_rate": 2.15997561584913e-07,
"loss": 12.2009,
"step": 899
},
{
"epoch": 0.9803921568627451,
"grad_norm": 1263.928466796875,
"learning_rate": 1.9386646339238924e-07,
"loss": 13.1211,
"step": 900
},
{
"epoch": 0.9814814814814815,
"grad_norm": 1037.0902099609375,
"learning_rate": 1.7293013667825098e-07,
"loss": 12.676,
"step": 901
},
{
"epoch": 0.9825708061002179,
"grad_norm": 1237.9251708984375,
"learning_rate": 1.5318883206962842e-07,
"loss": 12.2276,
"step": 902
},
{
"epoch": 0.9836601307189542,
"grad_norm": 630.1217041015625,
"learning_rate": 1.3464278588815048e-07,
"loss": 12.1347,
"step": 903
},
{
"epoch": 0.9847494553376906,
"grad_norm": 859.3702392578125,
"learning_rate": 1.1729222014709162e-07,
"loss": 12.4704,
"step": 904
},
{
"epoch": 0.985838779956427,
"grad_norm": 1073.7818603515625,
"learning_rate": 1.0113734254872942e-07,
"loss": 12.5898,
"step": 905
},
{
"epoch": 0.9869281045751634,
"grad_norm": 1251.8004150390625,
"learning_rate": 8.617834648185774e-08,
"loss": 13.251,
"step": 906
},
{
"epoch": 0.9880174291938998,
"grad_norm": 1012.1276245117188,
"learning_rate": 7.241541101945526e-08,
"loss": 11.1789,
"step": 907
},
{
"epoch": 0.9891067538126361,
"grad_norm": 662.820556640625,
"learning_rate": 5.984870091654271e-08,
"loss": 13.627,
"step": 908
},
{
"epoch": 0.9901960784313726,
"grad_norm": 771.7218627929688,
"learning_rate": 4.847836660824001e-08,
"loss": 12.1929,
"step": 909
},
{
"epoch": 0.9912854030501089,
"grad_norm": 654.5511474609375,
"learning_rate": 3.8304544207945495e-08,
"loss": 11.9869,
"step": 910
},
{
"epoch": 0.9923747276688453,
"grad_norm": 1049.4482421875,
"learning_rate": 2.9327355505681663e-08,
"loss": 12.3512,
"step": 911
},
{
"epoch": 0.9934640522875817,
"grad_norm": 861.9254150390625,
"learning_rate": 2.1546907966685236e-08,
"loss": 12.2443,
"step": 912
},
{
"epoch": 0.9945533769063181,
"grad_norm": 1027.2105712890625,
"learning_rate": 1.496329473008595e-08,
"loss": 12.2233,
"step": 913
},
{
"epoch": 0.9956427015250545,
"grad_norm": 783.8712158203125,
"learning_rate": 9.576594607807465e-09,
"loss": 13.0014,
"step": 914
},
{
"epoch": 0.9967320261437909,
"grad_norm": 1203.1295166015625,
"learning_rate": 5.3868720836236506e-09,
"loss": 13.0101,
"step": 915
},
{
"epoch": 0.9978213507625272,
"grad_norm": 702.3920288085938,
"learning_rate": 2.3941773123814516e-09,
"loss": 13.0705,
"step": 916
},
{
"epoch": 0.9989106753812637,
"grad_norm": 858.6944580078125,
"learning_rate": 5.985461193791509e-10,
"loss": 12.3027,
"step": 917
},
{
"epoch": 1.0,
"grad_norm": 1008.3307495117188,
"learning_rate": 0.0,
"loss": 12.5451,
"step": 918
}
],
"logging_steps": 1,
"max_steps": 918,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 230,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3120952812503040.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}