bloom-7b1-random-sw / trainer_state.json
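The log_history entries below record the training loss, learning rate, and gradient norm every 5 optimizer steps. As a minimal sketch (not part of the original file; it assumes the JSON is saved locally as "trainer_state.json" and that matplotlib is available), the loss curve can be inspected like this:

    # Hypothetical usage sketch, not part of trainer_state.json itself.
    # Loads the trainer state with the standard json module and plots
    # training loss against global step.
    import json

    import matplotlib.pyplot as plt

    with open("trainer_state.json") as f:
        state = json.load(f)

    # Keep only entries that actually carry a training loss value.
    entries = [e for e in state["log_history"] if "loss" in e]
    steps = [e["step"] for e in entries]
    losses = [e["loss"] for e in entries]

    plt.plot(steps, losses)
    plt.xlabel("global step")
    plt.ylabel("training loss")
    plt.title("bloom-7b1-random-sw training loss")
    plt.show()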
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8313847752663029,
"eval_steps": 500,
"global_step": 4000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 45.15835189819336,
"learning_rate": 5e-06,
"loss": 12.2785,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 44.153160095214844,
"learning_rate": 1e-05,
"loss": 11.9319,
"step": 10
},
{
"epoch": 0.0,
"grad_norm": 44.100494384765625,
"learning_rate": 1.5e-05,
"loss": 11.2211,
"step": 15
},
{
"epoch": 0.0,
"grad_norm": 33.43196105957031,
"learning_rate": 2e-05,
"loss": 10.14,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 37.416526794433594,
"learning_rate": 2.5e-05,
"loss": 9.0966,
"step": 25
},
{
"epoch": 0.01,
"grad_norm": 26.642860412597656,
"learning_rate": 3e-05,
"loss": 8.3694,
"step": 30
},
{
"epoch": 0.01,
"grad_norm": 27.94455337524414,
"learning_rate": 3.5e-05,
"loss": 7.8602,
"step": 35
},
{
"epoch": 0.01,
"grad_norm": 15.280680656433105,
"learning_rate": 4e-05,
"loss": 7.5713,
"step": 40
},
{
"epoch": 0.01,
"grad_norm": 14.044206619262695,
"learning_rate": 4.5e-05,
"loss": 7.388,
"step": 45
},
{
"epoch": 0.01,
"grad_norm": 8.312722206115723,
"learning_rate": 5e-05,
"loss": 7.2791,
"step": 50
},
{
"epoch": 0.01,
"grad_norm": 9.701332092285156,
"learning_rate": 5.500000000000001e-05,
"loss": 7.2155,
"step": 55
},
{
"epoch": 0.01,
"grad_norm": 5.983935356140137,
"learning_rate": 6e-05,
"loss": 7.0335,
"step": 60
},
{
"epoch": 0.01,
"grad_norm": 5.551008224487305,
"learning_rate": 6.500000000000001e-05,
"loss": 6.9453,
"step": 65
},
{
"epoch": 0.01,
"grad_norm": 6.666751384735107,
"learning_rate": 7e-05,
"loss": 6.7626,
"step": 70
},
{
"epoch": 0.02,
"grad_norm": 6.8435444831848145,
"learning_rate": 7.500000000000001e-05,
"loss": 6.6872,
"step": 75
},
{
"epoch": 0.02,
"grad_norm": 11.13158893585205,
"learning_rate": 8e-05,
"loss": 6.4701,
"step": 80
},
{
"epoch": 0.02,
"grad_norm": 23.48008155822754,
"learning_rate": 8.5e-05,
"loss": 6.397,
"step": 85
},
{
"epoch": 0.02,
"grad_norm": 26.83634376525879,
"learning_rate": 9e-05,
"loss": 6.2756,
"step": 90
},
{
"epoch": 0.02,
"grad_norm": 13.045366287231445,
"learning_rate": 9.5e-05,
"loss": 6.2447,
"step": 95
},
{
"epoch": 0.02,
"grad_norm": 16.109786987304688,
"learning_rate": 0.0001,
"loss": 6.1367,
"step": 100
},
{
"epoch": 0.02,
"grad_norm": 12.689685821533203,
"learning_rate": 9.999972205865686e-05,
"loss": 6.0392,
"step": 105
},
{
"epoch": 0.02,
"grad_norm": 13.829069137573242,
"learning_rate": 9.999888823771751e-05,
"loss": 5.9718,
"step": 110
},
{
"epoch": 0.02,
"grad_norm": 10.059715270996094,
"learning_rate": 9.999749854645204e-05,
"loss": 5.8929,
"step": 115
},
{
"epoch": 0.02,
"grad_norm": 11.05320930480957,
"learning_rate": 9.99955530003106e-05,
"loss": 5.6988,
"step": 120
},
{
"epoch": 0.03,
"grad_norm": 7.849647045135498,
"learning_rate": 9.99930516209231e-05,
"loss": 5.7041,
"step": 125
},
{
"epoch": 0.03,
"grad_norm": 14.663564682006836,
"learning_rate": 9.998999443609897e-05,
"loss": 5.5709,
"step": 130
},
{
"epoch": 0.03,
"grad_norm": 20.06233787536621,
"learning_rate": 9.998638147982696e-05,
"loss": 5.5674,
"step": 135
},
{
"epoch": 0.03,
"grad_norm": 13.702874183654785,
"learning_rate": 9.998221279227467e-05,
"loss": 5.4292,
"step": 140
},
{
"epoch": 0.03,
"grad_norm": 9.77934455871582,
"learning_rate": 9.997748841978812e-05,
"loss": 5.3657,
"step": 145
},
{
"epoch": 0.03,
"grad_norm": 11.119073867797852,
"learning_rate": 9.997220841489122e-05,
"loss": 5.393,
"step": 150
},
{
"epoch": 0.03,
"grad_norm": 11.486383438110352,
"learning_rate": 9.996637283628528e-05,
"loss": 5.2044,
"step": 155
},
{
"epoch": 0.03,
"grad_norm": 11.729193687438965,
"learning_rate": 9.995998174884821e-05,
"loss": 5.2556,
"step": 160
},
{
"epoch": 0.03,
"grad_norm": 10.201554298400879,
"learning_rate": 9.995303522363394e-05,
"loss": 5.2252,
"step": 165
},
{
"epoch": 0.04,
"grad_norm": 17.915985107421875,
"learning_rate": 9.99455333378715e-05,
"loss": 5.2607,
"step": 170
},
{
"epoch": 0.04,
"grad_norm": 10.455181121826172,
"learning_rate": 9.993747617496428e-05,
"loss": 5.1905,
"step": 175
},
{
"epoch": 0.04,
"grad_norm": 8.33234691619873,
"learning_rate": 9.9928863824489e-05,
"loss": 5.0516,
"step": 180
},
{
"epoch": 0.04,
"grad_norm": 10.21626091003418,
"learning_rate": 9.99196963821948e-05,
"loss": 5.0574,
"step": 185
},
{
"epoch": 0.04,
"grad_norm": 12.287332534790039,
"learning_rate": 9.990997395000217e-05,
"loss": 4.9587,
"step": 190
},
{
"epoch": 0.04,
"grad_norm": 12.515643119812012,
"learning_rate": 9.989969663600169e-05,
"loss": 4.9398,
"step": 195
},
{
"epoch": 0.04,
"grad_norm": 11.348413467407227,
"learning_rate": 9.9888864554453e-05,
"loss": 4.9797,
"step": 200
},
{
"epoch": 0.04,
"grad_norm": 13.060918807983398,
"learning_rate": 9.987747782578342e-05,
"loss": 4.8117,
"step": 205
},
{
"epoch": 0.04,
"grad_norm": 6.7433295249938965,
"learning_rate": 9.986553657658668e-05,
"loss": 4.884,
"step": 210
},
{
"epoch": 0.04,
"grad_norm": 15.980123519897461,
"learning_rate": 9.985304093962145e-05,
"loss": 4.8085,
"step": 215
},
{
"epoch": 0.05,
"grad_norm": 13.768548965454102,
"learning_rate": 9.983999105380988e-05,
"loss": 4.789,
"step": 220
},
{
"epoch": 0.05,
"grad_norm": 8.351860046386719,
"learning_rate": 9.982638706423608e-05,
"loss": 4.6209,
"step": 225
},
{
"epoch": 0.05,
"grad_norm": 11.581040382385254,
"learning_rate": 9.98122291221445e-05,
"loss": 4.6771,
"step": 230
},
{
"epoch": 0.05,
"grad_norm": 6.505850791931152,
"learning_rate": 9.979751738493826e-05,
"loss": 4.7031,
"step": 235
},
{
"epoch": 0.05,
"grad_norm": 11.426262855529785,
"learning_rate": 9.978225201617732e-05,
"loss": 4.741,
"step": 240
},
{
"epoch": 0.05,
"grad_norm": 9.727092742919922,
"learning_rate": 9.976643318557678e-05,
"loss": 4.6727,
"step": 245
},
{
"epoch": 0.05,
"grad_norm": 11.064685821533203,
"learning_rate": 9.975006106900495e-05,
"loss": 4.689,
"step": 250
},
{
"epoch": 0.05,
"grad_norm": 8.95312213897705,
"learning_rate": 9.973313584848132e-05,
"loss": 4.6522,
"step": 255
},
{
"epoch": 0.05,
"grad_norm": 7.535710334777832,
"learning_rate": 9.971565771217464e-05,
"loss": 4.5432,
"step": 260
},
{
"epoch": 0.06,
"grad_norm": 15.665597915649414,
"learning_rate": 9.969762685440076e-05,
"loss": 4.5918,
"step": 265
},
{
"epoch": 0.06,
"grad_norm": 6.6664228439331055,
"learning_rate": 9.967904347562054e-05,
"loss": 4.5906,
"step": 270
},
{
"epoch": 0.06,
"grad_norm": 11.473183631896973,
"learning_rate": 9.965990778243755e-05,
"loss": 4.5051,
"step": 275
},
{
"epoch": 0.06,
"grad_norm": 10.400932312011719,
"learning_rate": 9.964021998759577e-05,
"loss": 4.48,
"step": 280
},
{
"epoch": 0.06,
"grad_norm": 15.379740715026855,
"learning_rate": 9.961998030997733e-05,
"loss": 4.4611,
"step": 285
},
{
"epoch": 0.06,
"grad_norm": 11.486093521118164,
"learning_rate": 9.95991889745999e-05,
"loss": 4.4911,
"step": 290
},
{
"epoch": 0.06,
"grad_norm": 9.706180572509766,
"learning_rate": 9.957784621261441e-05,
"loss": 4.4621,
"step": 295
},
{
"epoch": 0.06,
"grad_norm": 9.328032493591309,
"learning_rate": 9.955595226130226e-05,
"loss": 4.5372,
"step": 300
},
{
"epoch": 0.06,
"grad_norm": 9.358675956726074,
"learning_rate": 9.953350736407282e-05,
"loss": 4.4059,
"step": 305
},
{
"epoch": 0.06,
"grad_norm": 6.445903301239014,
"learning_rate": 9.951051177046069e-05,
"loss": 4.469,
"step": 310
},
{
"epoch": 0.07,
"grad_norm": 7.891039848327637,
"learning_rate": 9.948696573612292e-05,
"loss": 4.4455,
"step": 315
},
{
"epoch": 0.07,
"grad_norm": 11.032697677612305,
"learning_rate": 9.946286952283618e-05,
"loss": 4.465,
"step": 320
},
{
"epoch": 0.07,
"grad_norm": 9.988504409790039,
"learning_rate": 9.943822339849381e-05,
"loss": 4.4225,
"step": 325
},
{
"epoch": 0.07,
"grad_norm": 9.232673645019531,
"learning_rate": 9.941302763710288e-05,
"loss": 4.333,
"step": 330
},
{
"epoch": 0.07,
"grad_norm": 12.38031005859375,
"learning_rate": 9.938728251878116e-05,
"loss": 4.4608,
"step": 335
},
{
"epoch": 0.07,
"grad_norm": 11.748014450073242,
"learning_rate": 9.936098832975393e-05,
"loss": 4.3169,
"step": 340
},
{
"epoch": 0.07,
"grad_norm": 3.538158416748047,
"learning_rate": 9.933414536235091e-05,
"loss": 4.2799,
"step": 345
},
{
"epoch": 0.07,
"grad_norm": 4.4387125968933105,
"learning_rate": 9.93067539150029e-05,
"loss": 4.2298,
"step": 350
},
{
"epoch": 0.07,
"grad_norm": 5.749505043029785,
"learning_rate": 9.927881429223853e-05,
"loss": 4.2515,
"step": 355
},
{
"epoch": 0.07,
"grad_norm": 10.459473609924316,
"learning_rate": 9.925032680468085e-05,
"loss": 4.3408,
"step": 360
},
{
"epoch": 0.08,
"grad_norm": 6.9239912033081055,
"learning_rate": 9.922129176904388e-05,
"loss": 4.263,
"step": 365
},
{
"epoch": 0.08,
"grad_norm": 14.129467964172363,
"learning_rate": 9.919170950812911e-05,
"loss": 4.3073,
"step": 370
},
{
"epoch": 0.08,
"grad_norm": 8.374786376953125,
"learning_rate": 9.916158035082184e-05,
"loss": 4.2453,
"step": 375
},
{
"epoch": 0.08,
"grad_norm": 12.091270446777344,
"learning_rate": 9.913090463208763e-05,
"loss": 4.2371,
"step": 380
},
{
"epoch": 0.08,
"grad_norm": 6.911503791809082,
"learning_rate": 9.90996826929685e-05,
"loss": 4.2742,
"step": 385
},
{
"epoch": 0.08,
"grad_norm": 5.651716709136963,
"learning_rate": 9.906791488057916e-05,
"loss": 4.2749,
"step": 390
},
{
"epoch": 0.08,
"grad_norm": 5.230156898498535,
"learning_rate": 9.903560154810313e-05,
"loss": 4.2694,
"step": 395
},
{
"epoch": 0.08,
"grad_norm": 8.92658519744873,
"learning_rate": 9.900274305478887e-05,
"loss": 4.1819,
"step": 400
},
{
"epoch": 0.08,
"grad_norm": 8.834922790527344,
"learning_rate": 9.896933976594572e-05,
"loss": 4.1367,
"step": 405
},
{
"epoch": 0.09,
"grad_norm": 9.098694801330566,
"learning_rate": 9.893539205293989e-05,
"loss": 4.206,
"step": 410
},
{
"epoch": 0.09,
"grad_norm": 12.133086204528809,
"learning_rate": 9.890090029319028e-05,
"loss": 4.1405,
"step": 415
},
{
"epoch": 0.09,
"grad_norm": 10.43943977355957,
"learning_rate": 9.886586487016433e-05,
"loss": 4.0637,
"step": 420
},
{
"epoch": 0.09,
"grad_norm": 6.3572096824646,
"learning_rate": 9.883028617337378e-05,
"loss": 4.1157,
"step": 425
},
{
"epoch": 0.09,
"grad_norm": 6.29457426071167,
"learning_rate": 9.879416459837022e-05,
"loss": 4.0842,
"step": 430
},
{
"epoch": 0.09,
"grad_norm": 3.725757598876953,
"learning_rate": 9.875750054674082e-05,
"loss": 4.0978,
"step": 435
},
{
"epoch": 0.09,
"grad_norm": 8.156859397888184,
"learning_rate": 9.872029442610382e-05,
"loss": 4.1218,
"step": 440
},
{
"epoch": 0.09,
"grad_norm": 9.0325288772583,
"learning_rate": 9.8682546650104e-05,
"loss": 4.1634,
"step": 445
},
{
"epoch": 0.09,
"grad_norm": 11.150066375732422,
"learning_rate": 9.864425763840802e-05,
"loss": 4.1573,
"step": 450
},
{
"epoch": 0.09,
"grad_norm": 6.580535411834717,
"learning_rate": 9.860542781669988e-05,
"loss": 4.0366,
"step": 455
},
{
"epoch": 0.1,
"grad_norm": 8.785696983337402,
"learning_rate": 9.85660576166761e-05,
"loss": 4.1392,
"step": 460
},
{
"epoch": 0.1,
"grad_norm": 5.839633464813232,
"learning_rate": 9.852614747604093e-05,
"loss": 3.9329,
"step": 465
},
{
"epoch": 0.1,
"grad_norm": 12.390026092529297,
"learning_rate": 9.848569783850145e-05,
"loss": 4.0082,
"step": 470
},
{
"epoch": 0.1,
"grad_norm": 7.401693344116211,
"learning_rate": 9.844470915376278e-05,
"loss": 4.0658,
"step": 475
},
{
"epoch": 0.1,
"grad_norm": 8.135747909545898,
"learning_rate": 9.840318187752292e-05,
"loss": 4.0223,
"step": 480
},
{
"epoch": 0.1,
"grad_norm": 10.107956886291504,
"learning_rate": 9.836111647146771e-05,
"loss": 4.0712,
"step": 485
},
{
"epoch": 0.1,
"grad_norm": 6.994045257568359,
"learning_rate": 9.831851340326577e-05,
"loss": 3.9983,
"step": 490
},
{
"epoch": 0.1,
"grad_norm": 4.492868900299072,
"learning_rate": 9.82753731465633e-05,
"loss": 4.0257,
"step": 495
},
{
"epoch": 0.1,
"grad_norm": 3.540200710296631,
"learning_rate": 9.823169618097871e-05,
"loss": 4.0559,
"step": 500
},
{
"epoch": 0.1,
"grad_norm": 7.264139652252197,
"learning_rate": 9.81874829920974e-05,
"loss": 4.0612,
"step": 505
},
{
"epoch": 0.11,
"grad_norm": 4.541455268859863,
"learning_rate": 9.814273407146623e-05,
"loss": 3.9361,
"step": 510
},
{
"epoch": 0.11,
"grad_norm": 7.387721538543701,
"learning_rate": 9.809744991658829e-05,
"loss": 3.9606,
"step": 515
},
{
"epoch": 0.11,
"grad_norm": 5.94922399520874,
"learning_rate": 9.805163103091708e-05,
"loss": 4.0654,
"step": 520
},
{
"epoch": 0.11,
"grad_norm": 8.019112586975098,
"learning_rate": 9.800527792385112e-05,
"loss": 4.0555,
"step": 525
},
{
"epoch": 0.11,
"grad_norm": 6.78863000869751,
"learning_rate": 9.79583911107282e-05,
"loss": 3.8795,
"step": 530
},
{
"epoch": 0.11,
"grad_norm": 6.733406066894531,
"learning_rate": 9.791097111281968e-05,
"loss": 3.9381,
"step": 535
},
{
"epoch": 0.11,
"grad_norm": 5.3538289070129395,
"learning_rate": 9.786301845732467e-05,
"loss": 3.9979,
"step": 540
},
{
"epoch": 0.11,
"grad_norm": 7.134853363037109,
"learning_rate": 9.781453367736418e-05,
"loss": 3.9878,
"step": 545
},
{
"epoch": 0.11,
"grad_norm": 7.910807132720947,
"learning_rate": 9.776551731197524e-05,
"loss": 3.8739,
"step": 550
},
{
"epoch": 0.12,
"grad_norm": 4.527065277099609,
"learning_rate": 9.771596990610478e-05,
"loss": 3.948,
"step": 555
},
{
"epoch": 0.12,
"grad_norm": 7.935023784637451,
"learning_rate": 9.766589201060372e-05,
"loss": 3.8215,
"step": 560
},
{
"epoch": 0.12,
"grad_norm": 5.733000755310059,
"learning_rate": 9.761528418222077e-05,
"loss": 3.8948,
"step": 565
},
{
"epoch": 0.12,
"grad_norm": 5.0525102615356445,
"learning_rate": 9.756414698359624e-05,
"loss": 3.8821,
"step": 570
},
{
"epoch": 0.12,
"grad_norm": 6.695211887359619,
"learning_rate": 9.75124809832558e-05,
"loss": 3.92,
"step": 575
},
{
"epoch": 0.12,
"grad_norm": 7.148219108581543,
"learning_rate": 9.746028675560413e-05,
"loss": 3.9897,
"step": 580
},
{
"epoch": 0.12,
"grad_norm": 5.714667320251465,
"learning_rate": 9.740756488091861e-05,
"loss": 3.871,
"step": 585
},
{
"epoch": 0.12,
"grad_norm": 5.185215473175049,
"learning_rate": 9.735431594534277e-05,
"loss": 3.808,
"step": 590
},
{
"epoch": 0.12,
"grad_norm": 4.752777099609375,
"learning_rate": 9.730054054087983e-05,
"loss": 3.8886,
"step": 595
},
{
"epoch": 0.12,
"grad_norm": 7.76281213760376,
"learning_rate": 9.724623926538612e-05,
"loss": 3.8922,
"step": 600
},
{
"epoch": 0.13,
"grad_norm": 5.730591773986816,
"learning_rate": 9.719141272256443e-05,
"loss": 3.7937,
"step": 605
},
{
"epoch": 0.13,
"grad_norm": 6.3743062019348145,
"learning_rate": 9.713606152195726e-05,
"loss": 3.7718,
"step": 610
},
{
"epoch": 0.13,
"grad_norm": 6.650794506072998,
"learning_rate": 9.708018627894011e-05,
"loss": 3.8133,
"step": 615
},
{
"epoch": 0.13,
"grad_norm": 6.830786228179932,
"learning_rate": 9.702378761471456e-05,
"loss": 3.8316,
"step": 620
},
{
"epoch": 0.13,
"grad_norm": 5.104033470153809,
"learning_rate": 9.696686615630146e-05,
"loss": 3.8143,
"step": 625
},
{
"epoch": 0.13,
"grad_norm": 3.255120038986206,
"learning_rate": 9.690942253653385e-05,
"loss": 3.8662,
"step": 630
},
{
"epoch": 0.13,
"grad_norm": 7.252981662750244,
"learning_rate": 9.685145739405002e-05,
"loss": 3.7507,
"step": 635
},
{
"epoch": 0.13,
"grad_norm": 6.434908866882324,
"learning_rate": 9.679297137328634e-05,
"loss": 3.847,
"step": 640
},
{
"epoch": 0.13,
"grad_norm": 5.378491401672363,
"learning_rate": 9.673396512447013e-05,
"loss": 3.7711,
"step": 645
},
{
"epoch": 0.14,
"grad_norm": 6.469566345214844,
"learning_rate": 9.667443930361247e-05,
"loss": 3.8045,
"step": 650
},
{
"epoch": 0.14,
"grad_norm": 4.375204086303711,
"learning_rate": 9.661439457250076e-05,
"loss": 3.8626,
"step": 655
},
{
"epoch": 0.14,
"grad_norm": 5.769386291503906,
"learning_rate": 9.655383159869158e-05,
"loss": 3.7856,
"step": 660
},
{
"epoch": 0.14,
"grad_norm": 5.417258262634277,
"learning_rate": 9.649275105550309e-05,
"loss": 3.7855,
"step": 665
},
{
"epoch": 0.14,
"grad_norm": 6.028948783874512,
"learning_rate": 9.643115362200762e-05,
"loss": 3.782,
"step": 670
},
{
"epoch": 0.14,
"grad_norm": 6.621837615966797,
"learning_rate": 9.636903998302409e-05,
"loss": 3.8037,
"step": 675
},
{
"epoch": 0.14,
"grad_norm": 5.0748982429504395,
"learning_rate": 9.630641082911045e-05,
"loss": 3.8076,
"step": 680
},
{
"epoch": 0.14,
"grad_norm": 4.059738636016846,
"learning_rate": 9.624326685655593e-05,
"loss": 3.8499,
"step": 685
},
{
"epoch": 0.14,
"grad_norm": 2.88065767288208,
"learning_rate": 9.617960876737337e-05,
"loss": 3.7676,
"step": 690
},
{
"epoch": 0.14,
"grad_norm": 5.104539394378662,
"learning_rate": 9.611543726929134e-05,
"loss": 3.897,
"step": 695
},
{
"epoch": 0.15,
"grad_norm": 4.849435806274414,
"learning_rate": 9.605075307574635e-05,
"loss": 3.7714,
"step": 700
},
{
"epoch": 0.15,
"grad_norm": 4.45396614074707,
"learning_rate": 9.598555690587487e-05,
"loss": 3.8802,
"step": 705
},
{
"epoch": 0.15,
"grad_norm": 4.763771057128906,
"learning_rate": 9.591984948450532e-05,
"loss": 3.7847,
"step": 710
},
{
"epoch": 0.15,
"grad_norm": 5.5793657302856445,
"learning_rate": 9.585363154215008e-05,
"loss": 3.7768,
"step": 715
},
{
"epoch": 0.15,
"grad_norm": 7.476240634918213,
"learning_rate": 9.578690381499728e-05,
"loss": 3.7294,
"step": 720
},
{
"epoch": 0.15,
"grad_norm": 5.490994930267334,
"learning_rate": 9.571966704490271e-05,
"loss": 3.6943,
"step": 725
},
{
"epoch": 0.15,
"grad_norm": 5.781400680541992,
"learning_rate": 9.565192197938148e-05,
"loss": 3.7737,
"step": 730
},
{
"epoch": 0.15,
"grad_norm": 4.797996997833252,
"learning_rate": 9.558366937159977e-05,
"loss": 3.7199,
"step": 735
},
{
"epoch": 0.15,
"grad_norm": 2.7132320404052734,
"learning_rate": 9.551490998036646e-05,
"loss": 3.7265,
"step": 740
},
{
"epoch": 0.15,
"grad_norm": 3.6285693645477295,
"learning_rate": 9.544564457012463e-05,
"loss": 3.8012,
"step": 745
},
{
"epoch": 0.16,
"grad_norm": 4.180876731872559,
"learning_rate": 9.537587391094314e-05,
"loss": 3.7283,
"step": 750
},
{
"epoch": 0.16,
"grad_norm": 3.4972259998321533,
"learning_rate": 9.5305598778508e-05,
"loss": 3.7145,
"step": 755
},
{
"epoch": 0.16,
"grad_norm": 2.9972047805786133,
"learning_rate": 9.52348199541138e-05,
"loss": 3.8331,
"step": 760
},
{
"epoch": 0.16,
"grad_norm": 3.239570379257202,
"learning_rate": 9.516353822465504e-05,
"loss": 3.7066,
"step": 765
},
{
"epoch": 0.16,
"grad_norm": 2.9122016429901123,
"learning_rate": 9.509175438261726e-05,
"loss": 3.7587,
"step": 770
},
{
"epoch": 0.16,
"grad_norm": 4.641622066497803,
"learning_rate": 9.501946922606838e-05,
"loss": 3.6635,
"step": 775
},
{
"epoch": 0.16,
"grad_norm": 3.8465282917022705,
"learning_rate": 9.494668355864973e-05,
"loss": 3.6535,
"step": 780
},
{
"epoch": 0.16,
"grad_norm": 4.916113376617432,
"learning_rate": 9.487339818956716e-05,
"loss": 3.6955,
"step": 785
},
{
"epoch": 0.16,
"grad_norm": 3.606034517288208,
"learning_rate": 9.479961393358203e-05,
"loss": 3.6696,
"step": 790
},
{
"epoch": 0.17,
"grad_norm": 2.406766891479492,
"learning_rate": 9.472533161100215e-05,
"loss": 3.7049,
"step": 795
},
{
"epoch": 0.17,
"grad_norm": 5.433980464935303,
"learning_rate": 9.465055204767265e-05,
"loss": 3.6382,
"step": 800
},
{
"epoch": 0.17,
"grad_norm": 4.613250732421875,
"learning_rate": 9.457527607496685e-05,
"loss": 3.6635,
"step": 805
},
{
"epoch": 0.17,
"grad_norm": 4.022452354431152,
"learning_rate": 9.44995045297769e-05,
"loss": 3.6506,
"step": 810
},
{
"epoch": 0.17,
"grad_norm": 5.174694538116455,
"learning_rate": 9.442323825450464e-05,
"loss": 3.6564,
"step": 815
},
{
"epoch": 0.17,
"grad_norm": 4.0063934326171875,
"learning_rate": 9.43464780970521e-05,
"loss": 3.6727,
"step": 820
},
{
"epoch": 0.17,
"grad_norm": 3.9014084339141846,
"learning_rate": 9.426922491081212e-05,
"loss": 3.7083,
"step": 825
},
{
"epoch": 0.17,
"grad_norm": 3.8488593101501465,
"learning_rate": 9.419147955465888e-05,
"loss": 3.6637,
"step": 830
},
{
"epoch": 0.17,
"grad_norm": 4.321128845214844,
"learning_rate": 9.411324289293832e-05,
"loss": 3.6481,
"step": 835
},
{
"epoch": 0.17,
"grad_norm": 3.240539789199829,
"learning_rate": 9.403451579545859e-05,
"loss": 3.6122,
"step": 840
},
{
"epoch": 0.18,
"grad_norm": 2.557753086090088,
"learning_rate": 9.395529913748025e-05,
"loss": 3.726,
"step": 845
},
{
"epoch": 0.18,
"grad_norm": 4.094338893890381,
"learning_rate": 9.387559379970672e-05,
"loss": 3.6109,
"step": 850
},
{
"epoch": 0.18,
"grad_norm": 3.2747020721435547,
"learning_rate": 9.379540066827431e-05,
"loss": 3.6669,
"step": 855
},
{
"epoch": 0.18,
"grad_norm": 3.9448740482330322,
"learning_rate": 9.371472063474248e-05,
"loss": 3.6874,
"step": 860
},
{
"epoch": 0.18,
"grad_norm": 3.8462798595428467,
"learning_rate": 9.363355459608394e-05,
"loss": 3.6582,
"step": 865
},
{
"epoch": 0.18,
"grad_norm": 3.1983115673065186,
"learning_rate": 9.355190345467457e-05,
"loss": 3.6203,
"step": 870
},
{
"epoch": 0.18,
"grad_norm": 3.503183603286743,
"learning_rate": 9.346976811828352e-05,
"loss": 3.6604,
"step": 875
},
{
"epoch": 0.18,
"grad_norm": 2.7498409748077393,
"learning_rate": 9.338714950006297e-05,
"loss": 3.6151,
"step": 880
},
{
"epoch": 0.18,
"grad_norm": 2.8823697566986084,
"learning_rate": 9.330404851853817e-05,
"loss": 3.6467,
"step": 885
},
{
"epoch": 0.18,
"grad_norm": 3.0663647651672363,
"learning_rate": 9.3220466097597e-05,
"loss": 3.5761,
"step": 890
},
{
"epoch": 0.19,
"grad_norm": 3.702836751937866,
"learning_rate": 9.313640316647991e-05,
"loss": 3.5919,
"step": 895
},
{
"epoch": 0.19,
"grad_norm": 3.544537305831909,
"learning_rate": 9.305186065976945e-05,
"loss": 3.5981,
"step": 900
},
{
"epoch": 0.19,
"grad_norm": 3.6887552738189697,
"learning_rate": 9.296683951737993e-05,
"loss": 3.6059,
"step": 905
},
{
"epoch": 0.19,
"grad_norm": 2.9412875175476074,
"learning_rate": 9.288134068454697e-05,
"loss": 3.5848,
"step": 910
},
{
"epoch": 0.19,
"grad_norm": 3.413482427597046,
"learning_rate": 9.2795365111817e-05,
"loss": 3.6291,
"step": 915
},
{
"epoch": 0.19,
"grad_norm": 2.6420347690582275,
"learning_rate": 9.270891375503665e-05,
"loss": 3.5178,
"step": 920
},
{
"epoch": 0.19,
"grad_norm": 3.8070802688598633,
"learning_rate": 9.262198757534218e-05,
"loss": 3.637,
"step": 925
},
{
"epoch": 0.19,
"grad_norm": 3.3861169815063477,
"learning_rate": 9.253458753914874e-05,
"loss": 3.5248,
"step": 930
},
{
"epoch": 0.19,
"grad_norm": 2.7866058349609375,
"learning_rate": 9.244671461813969e-05,
"loss": 3.6434,
"step": 935
},
{
"epoch": 0.2,
"grad_norm": 10.094674110412598,
"learning_rate": 9.235836978925572e-05,
"loss": 3.6617,
"step": 940
},
{
"epoch": 0.2,
"grad_norm": 4.435075283050537,
"learning_rate": 9.226955403468406e-05,
"loss": 3.5412,
"step": 945
},
{
"epoch": 0.2,
"grad_norm": 3.978870153427124,
"learning_rate": 9.21802683418475e-05,
"loss": 3.5819,
"step": 950
},
{
"epoch": 0.2,
"grad_norm": 4.426916122436523,
"learning_rate": 9.209051370339347e-05,
"loss": 3.6204,
"step": 955
},
{
"epoch": 0.2,
"grad_norm": 3.019714117050171,
"learning_rate": 9.200029111718295e-05,
"loss": 3.5788,
"step": 960
},
{
"epoch": 0.2,
"grad_norm": 3.524825096130371,
"learning_rate": 9.190960158627941e-05,
"loss": 3.4758,
"step": 965
},
{
"epoch": 0.2,
"grad_norm": 2.2843191623687744,
"learning_rate": 9.181844611893766e-05,
"loss": 3.4947,
"step": 970
},
{
"epoch": 0.2,
"grad_norm": 4.7173967361450195,
"learning_rate": 9.172682572859261e-05,
"loss": 3.5474,
"step": 975
},
{
"epoch": 0.2,
"grad_norm": 3.3184432983398438,
"learning_rate": 9.163474143384806e-05,
"loss": 3.6003,
"step": 980
},
{
"epoch": 0.2,
"grad_norm": 3.0249927043914795,
"learning_rate": 9.154219425846528e-05,
"loss": 3.6028,
"step": 985
},
{
"epoch": 0.21,
"grad_norm": 2.8559343814849854,
"learning_rate": 9.144918523135175e-05,
"loss": 3.5176,
"step": 990
},
{
"epoch": 0.21,
"grad_norm": 2.727180242538452,
"learning_rate": 9.13557153865496e-05,
"loss": 3.6066,
"step": 995
},
{
"epoch": 0.21,
"grad_norm": 2.655278444290161,
"learning_rate": 9.12617857632242e-05,
"loss": 3.4884,
"step": 1000
},
{
"epoch": 0.21,
"grad_norm": 3.3753433227539062,
"learning_rate": 9.116739740565259e-05,
"loss": 3.5703,
"step": 1005
},
{
"epoch": 0.21,
"grad_norm": 3.563138008117676,
"learning_rate": 9.107255136321184e-05,
"loss": 3.581,
"step": 1010
},
{
"epoch": 0.21,
"grad_norm": 3.620206832885742,
"learning_rate": 9.09772486903674e-05,
"loss": 3.5337,
"step": 1015
},
{
"epoch": 0.21,
"grad_norm": 2.891718864440918,
"learning_rate": 9.08814904466614e-05,
"loss": 3.5792,
"step": 1020
},
{
"epoch": 0.21,
"grad_norm": 2.634927272796631,
"learning_rate": 9.078527769670085e-05,
"loss": 3.5217,
"step": 1025
},
{
"epoch": 0.21,
"grad_norm": 2.790894031524658,
"learning_rate": 9.068861151014575e-05,
"loss": 3.4304,
"step": 1030
},
{
"epoch": 0.22,
"grad_norm": 2.6864757537841797,
"learning_rate": 9.05914929616973e-05,
"loss": 3.5357,
"step": 1035
},
{
"epoch": 0.22,
"grad_norm": 3.1982781887054443,
"learning_rate": 9.04939231310859e-05,
"loss": 3.5325,
"step": 1040
},
{
"epoch": 0.22,
"grad_norm": 2.6033880710601807,
"learning_rate": 9.039590310305914e-05,
"loss": 3.5909,
"step": 1045
},
{
"epoch": 0.22,
"grad_norm": 3.043409824371338,
"learning_rate": 9.029743396736974e-05,
"loss": 3.5386,
"step": 1050
},
{
"epoch": 0.22,
"grad_norm": 2.8006138801574707,
"learning_rate": 9.019851681876348e-05,
"loss": 3.5106,
"step": 1055
},
{
"epoch": 0.22,
"grad_norm": 3.0996017456054688,
"learning_rate": 9.009915275696693e-05,
"loss": 3.5617,
"step": 1060
},
{
"epoch": 0.22,
"grad_norm": 3.0199506282806396,
"learning_rate": 8.999934288667534e-05,
"loss": 3.4709,
"step": 1065
},
{
"epoch": 0.22,
"grad_norm": 3.6407251358032227,
"learning_rate": 8.989908831754028e-05,
"loss": 3.3929,
"step": 1070
},
{
"epoch": 0.22,
"grad_norm": 2.5755767822265625,
"learning_rate": 8.979839016415735e-05,
"loss": 3.4449,
"step": 1075
},
{
"epoch": 0.22,
"grad_norm": 2.531965732574463,
"learning_rate": 8.969724954605373e-05,
"loss": 3.4978,
"step": 1080
},
{
"epoch": 0.23,
"grad_norm": 2.9987034797668457,
"learning_rate": 8.959566758767581e-05,
"loss": 3.4814,
"step": 1085
},
{
"epoch": 0.23,
"grad_norm": 2.9496028423309326,
"learning_rate": 8.949364541837661e-05,
"loss": 3.4422,
"step": 1090
},
{
"epoch": 0.23,
"grad_norm": 2.7287490367889404,
"learning_rate": 8.939118417240329e-05,
"loss": 3.4412,
"step": 1095
},
{
"epoch": 0.23,
"grad_norm": 2.131716251373291,
"learning_rate": 8.92882849888845e-05,
"loss": 3.4986,
"step": 1100
},
{
"epoch": 0.23,
"grad_norm": 3.9027628898620605,
"learning_rate": 8.918494901181773e-05,
"loss": 3.3911,
"step": 1105
},
{
"epoch": 0.23,
"grad_norm": 3.365780830383301,
"learning_rate": 8.908117739005659e-05,
"loss": 3.4237,
"step": 1110
},
{
"epoch": 0.23,
"grad_norm": 2.260707139968872,
"learning_rate": 8.897697127729805e-05,
"loss": 3.478,
"step": 1115
},
{
"epoch": 0.23,
"grad_norm": 2.2363173961639404,
"learning_rate": 8.887233183206957e-05,
"loss": 3.467,
"step": 1120
},
{
"epoch": 0.23,
"grad_norm": 2.151614189147949,
"learning_rate": 8.876726021771627e-05,
"loss": 3.3874,
"step": 1125
},
{
"epoch": 0.23,
"grad_norm": 2.1778979301452637,
"learning_rate": 8.866175760238798e-05,
"loss": 3.5221,
"step": 1130
},
{
"epoch": 0.24,
"grad_norm": 2.537234306335449,
"learning_rate": 8.855582515902625e-05,
"loss": 3.3864,
"step": 1135
},
{
"epoch": 0.24,
"grad_norm": 2.1032111644744873,
"learning_rate": 8.844946406535131e-05,
"loss": 3.366,
"step": 1140
},
{
"epoch": 0.24,
"grad_norm": 2.383251428604126,
"learning_rate": 8.834267550384893e-05,
"loss": 3.4624,
"step": 1145
},
{
"epoch": 0.24,
"grad_norm": 1.833835482597351,
"learning_rate": 8.823546066175741e-05,
"loss": 3.3866,
"step": 1150
},
{
"epoch": 0.24,
"grad_norm": 3.101527452468872,
"learning_rate": 8.81278207310542e-05,
"loss": 3.4582,
"step": 1155
},
{
"epoch": 0.24,
"grad_norm": 3.125770092010498,
"learning_rate": 8.801975690844278e-05,
"loss": 3.4038,
"step": 1160
},
{
"epoch": 0.24,
"grad_norm": 2.553493022918701,
"learning_rate": 8.791127039533934e-05,
"loss": 3.3217,
"step": 1165
},
{
"epoch": 0.24,
"grad_norm": 1.8054865598678589,
"learning_rate": 8.780236239785935e-05,
"loss": 3.4609,
"step": 1170
},
{
"epoch": 0.24,
"grad_norm": 2.3070950508117676,
"learning_rate": 8.76930341268042e-05,
"loss": 3.4181,
"step": 1175
},
{
"epoch": 0.25,
"grad_norm": 2.2507147789001465,
"learning_rate": 8.758328679764776e-05,
"loss": 3.3341,
"step": 1180
},
{
"epoch": 0.25,
"grad_norm": 2.1136460304260254,
"learning_rate": 8.747312163052284e-05,
"loss": 3.3292,
"step": 1185
},
{
"epoch": 0.25,
"grad_norm": 2.121624231338501,
"learning_rate": 8.736253985020761e-05,
"loss": 3.389,
"step": 1190
},
{
"epoch": 0.25,
"grad_norm": 2.446190357208252,
"learning_rate": 8.725154268611203e-05,
"loss": 3.4031,
"step": 1195
},
{
"epoch": 0.25,
"grad_norm": 2.6911745071411133,
"learning_rate": 8.714013137226411e-05,
"loss": 3.3836,
"step": 1200
},
{
"epoch": 0.25,
"grad_norm": 2.276662826538086,
"learning_rate": 8.702830714729628e-05,
"loss": 3.4623,
"step": 1205
},
{
"epoch": 0.25,
"grad_norm": 2.2812204360961914,
"learning_rate": 8.691607125443153e-05,
"loss": 3.3339,
"step": 1210
},
{
"epoch": 0.25,
"grad_norm": 2.8849267959594727,
"learning_rate": 8.680342494146967e-05,
"loss": 3.4548,
"step": 1215
},
{
"epoch": 0.25,
"grad_norm": 2.444772958755493,
"learning_rate": 8.66903694607734e-05,
"loss": 3.3958,
"step": 1220
},
{
"epoch": 0.25,
"grad_norm": 2.928335189819336,
"learning_rate": 8.65769060692544e-05,
"loss": 3.4274,
"step": 1225
},
{
"epoch": 0.26,
"grad_norm": 2.9063730239868164,
"learning_rate": 8.646303602835936e-05,
"loss": 3.4068,
"step": 1230
},
{
"epoch": 0.26,
"grad_norm": 1.9862269163131714,
"learning_rate": 8.634876060405597e-05,
"loss": 3.4543,
"step": 1235
},
{
"epoch": 0.26,
"grad_norm": 1.9190977811813354,
"learning_rate": 8.623408106681884e-05,
"loss": 3.413,
"step": 1240
},
{
"epoch": 0.26,
"grad_norm": 3.001549005508423,
"learning_rate": 8.611899869161535e-05,
"loss": 3.3807,
"step": 1245
},
{
"epoch": 0.26,
"grad_norm": 2.739975690841675,
"learning_rate": 8.600351475789147e-05,
"loss": 3.3515,
"step": 1250
},
{
"epoch": 0.26,
"grad_norm": 3.1891651153564453,
"learning_rate": 8.588763054955764e-05,
"loss": 3.3546,
"step": 1255
},
{
"epoch": 0.26,
"grad_norm": 2.7442617416381836,
"learning_rate": 8.57713473549743e-05,
"loss": 3.3774,
"step": 1260
},
{
"epoch": 0.26,
"grad_norm": 2.693535327911377,
"learning_rate": 8.565466646693778e-05,
"loss": 3.3192,
"step": 1265
},
{
"epoch": 0.26,
"grad_norm": 2.1455957889556885,
"learning_rate": 8.553758918266578e-05,
"loss": 3.3477,
"step": 1270
},
{
"epoch": 0.27,
"grad_norm": 1.6235907077789307,
"learning_rate": 8.5420116803783e-05,
"loss": 3.2856,
"step": 1275
},
{
"epoch": 0.27,
"grad_norm": 2.1475882530212402,
"learning_rate": 8.530225063630668e-05,
"loss": 3.4148,
"step": 1280
},
{
"epoch": 0.27,
"grad_norm": 2.143613815307617,
"learning_rate": 8.518399199063205e-05,
"loss": 3.298,
"step": 1285
},
{
"epoch": 0.27,
"grad_norm": 2.105386734008789,
"learning_rate": 8.50653421815178e-05,
"loss": 3.3529,
"step": 1290
},
{
"epoch": 0.27,
"grad_norm": 2.3892133235931396,
"learning_rate": 8.494630252807138e-05,
"loss": 3.3511,
"step": 1295
},
{
"epoch": 0.27,
"grad_norm": 1.9199190139770508,
"learning_rate": 8.482687435373449e-05,
"loss": 3.3989,
"step": 1300
},
{
"epoch": 0.27,
"grad_norm": 1.6064597368240356,
"learning_rate": 8.470705898626817e-05,
"loss": 3.4317,
"step": 1305
},
{
"epoch": 0.27,
"grad_norm": 2.0932230949401855,
"learning_rate": 8.458685775773822e-05,
"loss": 3.3051,
"step": 1310
},
{
"epoch": 0.27,
"grad_norm": 2.630427598953247,
"learning_rate": 8.446627200450025e-05,
"loss": 3.3613,
"step": 1315
},
{
"epoch": 0.27,
"grad_norm": 1.485223650932312,
"learning_rate": 8.434530306718493e-05,
"loss": 3.3795,
"step": 1320
},
{
"epoch": 0.28,
"grad_norm": 1.779046654701233,
"learning_rate": 8.4223952290683e-05,
"loss": 3.3346,
"step": 1325
},
{
"epoch": 0.28,
"grad_norm": 2.5689127445220947,
"learning_rate": 8.41022210241304e-05,
"loss": 3.3057,
"step": 1330
},
{
"epoch": 0.28,
"grad_norm": 2.011467695236206,
"learning_rate": 8.398011062089316e-05,
"loss": 3.2323,
"step": 1335
},
{
"epoch": 0.28,
"grad_norm": 2.030775547027588,
"learning_rate": 8.385762243855249e-05,
"loss": 3.2741,
"step": 1340
},
{
"epoch": 0.28,
"grad_norm": 2.6618270874023438,
"learning_rate": 8.373475783888958e-05,
"loss": 3.3317,
"step": 1345
},
{
"epoch": 0.28,
"grad_norm": 2.1619679927825928,
"learning_rate": 8.36115181878705e-05,
"loss": 3.3239,
"step": 1350
},
{
"epoch": 0.28,
"grad_norm": 2.1114470958709717,
"learning_rate": 8.348790485563101e-05,
"loss": 3.2401,
"step": 1355
},
{
"epoch": 0.28,
"grad_norm": 2.219550609588623,
"learning_rate": 8.336391921646134e-05,
"loss": 3.328,
"step": 1360
},
{
"epoch": 0.28,
"grad_norm": 1.565345287322998,
"learning_rate": 8.323956264879089e-05,
"loss": 3.3306,
"step": 1365
},
{
"epoch": 0.28,
"grad_norm": 2.1180624961853027,
"learning_rate": 8.311483653517294e-05,
"loss": 3.295,
"step": 1370
},
{
"epoch": 0.29,
"grad_norm": 2.0208921432495117,
"learning_rate": 8.298974226226919e-05,
"loss": 3.2154,
"step": 1375
},
{
"epoch": 0.29,
"grad_norm": 2.022071599960327,
"learning_rate": 8.28642812208345e-05,
"loss": 3.3424,
"step": 1380
},
{
"epoch": 0.29,
"grad_norm": 1.7539397478103638,
"learning_rate": 8.273845480570123e-05,
"loss": 3.3704,
"step": 1385
},
{
"epoch": 0.29,
"grad_norm": 2.1509974002838135,
"learning_rate": 8.26122644157639e-05,
"loss": 3.339,
"step": 1390
},
{
"epoch": 0.29,
"grad_norm": 1.7231310606002808,
"learning_rate": 8.248571145396362e-05,
"loss": 3.2693,
"step": 1395
},
{
"epoch": 0.29,
"grad_norm": 1.7255983352661133,
"learning_rate": 8.235879732727236e-05,
"loss": 3.2817,
"step": 1400
},
{
"epoch": 0.29,
"grad_norm": 2.0972177982330322,
"learning_rate": 8.223152344667745e-05,
"loss": 3.2651,
"step": 1405
},
{
"epoch": 0.29,
"grad_norm": 1.8408994674682617,
"learning_rate": 8.21038912271658e-05,
"loss": 3.2287,
"step": 1410
},
{
"epoch": 0.29,
"grad_norm": 2.0348258018493652,
"learning_rate": 8.197590208770824e-05,
"loss": 3.2466,
"step": 1415
},
{
"epoch": 0.3,
"grad_norm": 1.989776849746704,
"learning_rate": 8.184755745124371e-05,
"loss": 3.187,
"step": 1420
},
{
"epoch": 0.3,
"grad_norm": 1.5390502214431763,
"learning_rate": 8.171885874466342e-05,
"loss": 3.3073,
"step": 1425
},
{
"epoch": 0.3,
"grad_norm": 1.9833441972732544,
"learning_rate": 8.158980739879507e-05,
"loss": 3.2203,
"step": 1430
},
{
"epoch": 0.3,
"grad_norm": 1.7567650079727173,
"learning_rate": 8.146040484838677e-05,
"loss": 3.213,
"step": 1435
},
{
"epoch": 0.3,
"grad_norm": 1.4360686540603638,
"learning_rate": 8.133065253209132e-05,
"loss": 3.1741,
"step": 1440
},
{
"epoch": 0.3,
"grad_norm": 1.384132981300354,
"learning_rate": 8.120055189245e-05,
"loss": 3.278,
"step": 1445
},
{
"epoch": 0.3,
"grad_norm": 2.004535436630249,
"learning_rate": 8.10701043758767e-05,
"loss": 3.2525,
"step": 1450
},
{
"epoch": 0.3,
"grad_norm": 1.5177205801010132,
"learning_rate": 8.093931143264174e-05,
"loss": 3.3334,
"step": 1455
},
{
"epoch": 0.3,
"grad_norm": 1.905267596244812,
"learning_rate": 8.080817451685576e-05,
"loss": 3.3221,
"step": 1460
},
{
"epoch": 0.3,
"grad_norm": 1.639228343963623,
"learning_rate": 8.067669508645356e-05,
"loss": 3.1484,
"step": 1465
},
{
"epoch": 0.31,
"grad_norm": 2.198517084121704,
"learning_rate": 8.054487460317797e-05,
"loss": 3.2279,
"step": 1470
},
{
"epoch": 0.31,
"grad_norm": 2.1966092586517334,
"learning_rate": 8.041271453256345e-05,
"loss": 3.3066,
"step": 1475
},
{
"epoch": 0.31,
"grad_norm": 2.3027963638305664,
"learning_rate": 8.02802163439199e-05,
"loss": 3.274,
"step": 1480
},
{
"epoch": 0.31,
"grad_norm": 1.8363163471221924,
"learning_rate": 8.01473815103163e-05,
"loss": 3.1949,
"step": 1485
},
{
"epoch": 0.31,
"grad_norm": 1.8598748445510864,
"learning_rate": 8.001421150856434e-05,
"loss": 3.1604,
"step": 1490
},
{
"epoch": 0.31,
"grad_norm": 1.483190655708313,
"learning_rate": 7.988070781920197e-05,
"loss": 3.1812,
"step": 1495
},
{
"epoch": 0.31,
"grad_norm": 2.142030954360962,
"learning_rate": 7.9746871926477e-05,
"loss": 3.1776,
"step": 1500
},
{
"epoch": 0.31,
"grad_norm": 2.048579216003418,
"learning_rate": 7.961270531833052e-05,
"loss": 3.2503,
"step": 1505
},
{
"epoch": 0.31,
"grad_norm": 1.7624013423919678,
"learning_rate": 7.947820948638045e-05,
"loss": 3.2101,
"step": 1510
},
{
"epoch": 0.31,
"grad_norm": 1.821346640586853,
"learning_rate": 7.934338592590486e-05,
"loss": 3.2156,
"step": 1515
},
{
"epoch": 0.32,
"grad_norm": 1.756419062614441,
"learning_rate": 7.92082361358254e-05,
"loss": 3.1873,
"step": 1520
},
{
"epoch": 0.32,
"grad_norm": 1.715536117553711,
"learning_rate": 7.907276161869065e-05,
"loss": 3.1996,
"step": 1525
},
{
"epoch": 0.32,
"grad_norm": 1.5711299180984497,
"learning_rate": 7.893696388065936e-05,
"loss": 3.2353,
"step": 1530
},
{
"epoch": 0.32,
"grad_norm": 1.925856113433838,
"learning_rate": 7.88008444314838e-05,
"loss": 3.1132,
"step": 1535
},
{
"epoch": 0.32,
"grad_norm": 1.7971457242965698,
"learning_rate": 7.866440478449283e-05,
"loss": 3.1944,
"step": 1540
},
{
"epoch": 0.32,
"grad_norm": 1.6164358854293823,
"learning_rate": 7.852764645657522e-05,
"loss": 3.1835,
"step": 1545
},
{
"epoch": 0.32,
"grad_norm": 1.7398253679275513,
"learning_rate": 7.839057096816271e-05,
"loss": 3.1242,
"step": 1550
},
{
"epoch": 0.32,
"grad_norm": 1.995582103729248,
"learning_rate": 7.82531798432131e-05,
"loss": 3.2223,
"step": 1555
},
{
"epoch": 0.32,
"grad_norm": 1.7915050983428955,
"learning_rate": 7.811547460919333e-05,
"loss": 3.1267,
"step": 1560
},
{
"epoch": 0.33,
"grad_norm": 5.1382155418396,
"learning_rate": 7.797745679706254e-05,
"loss": 3.1373,
"step": 1565
},
{
"epoch": 0.33,
"grad_norm": 1.6057478189468384,
"learning_rate": 7.783912794125496e-05,
"loss": 3.2183,
"step": 1570
},
{
"epoch": 0.33,
"grad_norm": 1.506475806236267,
"learning_rate": 7.770048957966291e-05,
"loss": 3.1581,
"step": 1575
},
{
"epoch": 0.33,
"grad_norm": 1.6283711194992065,
"learning_rate": 7.756154325361967e-05,
"loss": 3.1624,
"step": 1580
},
{
"epoch": 0.33,
"grad_norm": 1.6346111297607422,
"learning_rate": 7.74222905078824e-05,
"loss": 3.1722,
"step": 1585
},
{
"epoch": 0.33,
"grad_norm": 1.559010624885559,
"learning_rate": 7.728273289061489e-05,
"loss": 3.2168,
"step": 1590
},
{
"epoch": 0.33,
"grad_norm": 1.784799575805664,
"learning_rate": 7.714287195337044e-05,
"loss": 3.1516,
"step": 1595
},
{
"epoch": 0.33,
"grad_norm": 1.5602563619613647,
"learning_rate": 7.700270925107448e-05,
"loss": 3.1419,
"step": 1600
},
{
"epoch": 0.33,
"grad_norm": 1.801659107208252,
"learning_rate": 7.686224634200742e-05,
"loss": 3.1521,
"step": 1605
},
{
"epoch": 0.33,
"grad_norm": 1.999028205871582,
"learning_rate": 7.672148478778722e-05,
"loss": 3.1908,
"step": 1610
},
{
"epoch": 0.34,
"grad_norm": 2.2125654220581055,
"learning_rate": 7.658042615335212e-05,
"loss": 3.0955,
"step": 1615
},
{
"epoch": 0.34,
"grad_norm": 1.6908187866210938,
"learning_rate": 7.643907200694318e-05,
"loss": 3.148,
"step": 1620
},
{
"epoch": 0.34,
"grad_norm": 1.4700616598129272,
"learning_rate": 7.629742392008684e-05,
"loss": 3.1571,
"step": 1625
},
{
"epoch": 0.34,
"grad_norm": 1.218325138092041,
"learning_rate": 7.615548346757749e-05,
"loss": 3.1362,
"step": 1630
},
{
"epoch": 0.34,
"grad_norm": 1.709141492843628,
"learning_rate": 7.60132522274599e-05,
"loss": 3.2075,
"step": 1635
},
{
"epoch": 0.34,
"grad_norm": 1.396227478981018,
"learning_rate": 7.587073178101178e-05,
"loss": 3.1015,
"step": 1640
},
{
"epoch": 0.34,
"grad_norm": 1.469170331954956,
"learning_rate": 7.572792371272609e-05,
"loss": 3.1109,
"step": 1645
},
{
"epoch": 0.34,
"grad_norm": 1.9720960855484009,
"learning_rate": 7.55848296102935e-05,
"loss": 3.1616,
"step": 1650
},
{
"epoch": 0.34,
"grad_norm": 1.8008573055267334,
"learning_rate": 7.544145106458465e-05,
"loss": 3.1182,
"step": 1655
},
{
"epoch": 0.35,
"grad_norm": 1.9511905908584595,
"learning_rate": 7.529778966963259e-05,
"loss": 3.1679,
"step": 1660
},
{
"epoch": 0.35,
"grad_norm": 1.545742154121399,
"learning_rate": 7.515384702261496e-05,
"loss": 3.1304,
"step": 1665
},
{
"epoch": 0.35,
"grad_norm": 1.5802106857299805,
"learning_rate": 7.500962472383627e-05,
"loss": 3.1801,
"step": 1670
},
{
"epoch": 0.35,
"grad_norm": 1.6114333868026733,
"learning_rate": 7.486512437671011e-05,
"loss": 3.1302,
"step": 1675
},
{
"epoch": 0.35,
"grad_norm": 2.028850793838501,
"learning_rate": 7.472034758774128e-05,
"loss": 3.0923,
"step": 1680
},
{
"epoch": 0.35,
"grad_norm": 2.0958070755004883,
"learning_rate": 7.457529596650797e-05,
"loss": 3.1854,
"step": 1685
},
{
"epoch": 0.35,
"grad_norm": 1.353832483291626,
"learning_rate": 7.442997112564392e-05,
"loss": 3.0975,
"step": 1690
},
{
"epoch": 0.35,
"grad_norm": 1.8185460567474365,
"learning_rate": 7.428437468082037e-05,
"loss": 3.1207,
"step": 1695
},
{
"epoch": 0.35,
"grad_norm": 1.7965680360794067,
"learning_rate": 7.413850825072817e-05,
"loss": 3.0909,
"step": 1700
},
{
"epoch": 0.35,
"grad_norm": 1.2237658500671387,
"learning_rate": 7.39923734570598e-05,
"loss": 3.1064,
"step": 1705
},
{
"epoch": 0.36,
"grad_norm": 6.977996826171875,
"learning_rate": 7.384597192449126e-05,
"loss": 3.0698,
"step": 1710
},
{
"epoch": 0.36,
"grad_norm": 1.5268551111221313,
"learning_rate": 7.369930528066412e-05,
"loss": 3.0779,
"step": 1715
},
{
"epoch": 0.36,
"grad_norm": 1.4090296030044556,
"learning_rate": 7.355237515616732e-05,
"loss": 3.1395,
"step": 1720
},
{
"epoch": 0.36,
"grad_norm": 1.3859626054763794,
"learning_rate": 7.340518318451914e-05,
"loss": 3.2119,
"step": 1725
},
{
"epoch": 0.36,
"grad_norm": 1.6663029193878174,
"learning_rate": 7.325773100214893e-05,
"loss": 3.1041,
"step": 1730
},
{
"epoch": 0.36,
"grad_norm": 1.9529266357421875,
"learning_rate": 7.311002024837899e-05,
"loss": 3.1092,
"step": 1735
},
{
"epoch": 0.36,
"grad_norm": 1.6223665475845337,
"learning_rate": 7.296205256540633e-05,
"loss": 3.087,
"step": 1740
},
{
"epoch": 0.36,
"grad_norm": 1.23577082157135,
"learning_rate": 7.281382959828443e-05,
"loss": 3.0383,
"step": 1745
},
{
"epoch": 0.36,
"grad_norm": 1.4756420850753784,
"learning_rate": 7.26653529949049e-05,
"loss": 3.1373,
"step": 1750
},
{
"epoch": 0.36,
"grad_norm": 1.4079620838165283,
"learning_rate": 7.25166244059792e-05,
"loss": 3.0391,
"step": 1755
},
{
"epoch": 0.37,
"grad_norm": 1.2635889053344727,
"learning_rate": 7.236764548502029e-05,
"loss": 3.11,
"step": 1760
},
{
"epoch": 0.37,
"grad_norm": 1.4718565940856934,
"learning_rate": 7.221841788832421e-05,
"loss": 3.0451,
"step": 1765
},
{
"epoch": 0.37,
"grad_norm": 1.4071534872055054,
"learning_rate": 7.206894327495173e-05,
"loss": 3.0645,
"step": 1770
},
{
"epoch": 0.37,
"grad_norm": 1.4154398441314697,
"learning_rate": 7.191922330670982e-05,
"loss": 3.0744,
"step": 1775
},
{
"epoch": 0.37,
"grad_norm": 1.7999234199523926,
"learning_rate": 7.176925964813326e-05,
"loss": 3.0662,
"step": 1780
},
{
"epoch": 0.37,
"grad_norm": 1.6632797718048096,
"learning_rate": 7.161905396646607e-05,
"loss": 2.9983,
"step": 1785
},
{
"epoch": 0.37,
"grad_norm": 1.9087226390838623,
"learning_rate": 7.146860793164299e-05,
"loss": 3.1008,
"step": 1790
},
{
"epoch": 0.37,
"grad_norm": 1.2791259288787842,
"learning_rate": 7.131792321627098e-05,
"loss": 3.0951,
"step": 1795
},
{
"epoch": 0.37,
"grad_norm": 1.3382861614227295,
"learning_rate": 7.116700149561048e-05,
"loss": 3.1035,
"step": 1800
},
{
"epoch": 0.38,
"grad_norm": 1.7184237241744995,
"learning_rate": 7.101584444755696e-05,
"loss": 3.0621,
"step": 1805
},
{
"epoch": 0.38,
"grad_norm": 1.2195987701416016,
"learning_rate": 7.086445375262212e-05,
"loss": 3.1115,
"step": 1810
},
{
"epoch": 0.38,
"grad_norm": 1.5488882064819336,
"learning_rate": 7.071283109391528e-05,
"loss": 3.0731,
"step": 1815
},
{
"epoch": 0.38,
"grad_norm": 1.2592626810073853,
"learning_rate": 7.056097815712466e-05,
"loss": 3.0906,
"step": 1820
},
{
"epoch": 0.38,
"grad_norm": 1.2330182790756226,
"learning_rate": 7.040889663049862e-05,
"loss": 3.0828,
"step": 1825
},
{
"epoch": 0.38,
"grad_norm": 1.5080633163452148,
"learning_rate": 7.025658820482693e-05,
"loss": 3.0266,
"step": 1830
},
{
"epoch": 0.38,
"grad_norm": 1.3223737478256226,
"learning_rate": 7.010405457342192e-05,
"loss": 3.0154,
"step": 1835
},
{
"epoch": 0.38,
"grad_norm": 1.2206939458847046,
"learning_rate": 6.995129743209967e-05,
"loss": 2.9597,
"step": 1840
},
{
"epoch": 0.38,
"grad_norm": 1.3476722240447998,
"learning_rate": 6.97983184791612e-05,
"loss": 2.9717,
"step": 1845
},
{
"epoch": 0.38,
"grad_norm": 1.6109601259231567,
"learning_rate": 6.964511941537355e-05,
"loss": 3.0545,
"step": 1850
},
{
"epoch": 0.39,
"grad_norm": 1.4546648263931274,
"learning_rate": 6.949170194395083e-05,
"loss": 3.0534,
"step": 1855
},
{
"epoch": 0.39,
"grad_norm": 1.3800135850906372,
"learning_rate": 6.933806777053536e-05,
"loss": 3.0916,
"step": 1860
},
{
"epoch": 0.39,
"grad_norm": 1.378899335861206,
"learning_rate": 6.918421860317872e-05,
"loss": 3.0112,
"step": 1865
},
{
"epoch": 0.39,
"grad_norm": 1.182407259941101,
"learning_rate": 6.903015615232263e-05,
"loss": 3.0438,
"step": 1870
},
{
"epoch": 0.39,
"grad_norm": 1.2282214164733887,
"learning_rate": 6.887588213078012e-05,
"loss": 3.0656,
"step": 1875
},
{
"epoch": 0.39,
"grad_norm": 1.3863184452056885,
"learning_rate": 6.87213982537163e-05,
"loss": 3.0419,
"step": 1880
},
{
"epoch": 0.39,
"grad_norm": 1.415648341178894,
"learning_rate": 6.856670623862943e-05,
"loss": 3.0497,
"step": 1885
},
{
"epoch": 0.39,
"grad_norm": 1.4791895151138306,
"learning_rate": 6.841180780533179e-05,
"loss": 2.9501,
"step": 1890
},
{
"epoch": 0.39,
"grad_norm": 1.6115890741348267,
"learning_rate": 6.82567046759305e-05,
"loss": 3.0009,
"step": 1895
},
{
"epoch": 0.39,
"grad_norm": 1.4150872230529785,
"learning_rate": 6.810139857480844e-05,
"loss": 3.0068,
"step": 1900
},
{
"epoch": 0.4,
"grad_norm": 1.2133641242980957,
"learning_rate": 6.794589122860509e-05,
"loss": 3.0179,
"step": 1905
},
{
"epoch": 0.4,
"grad_norm": 1.4702268838882446,
"learning_rate": 6.779018436619725e-05,
"loss": 3.0392,
"step": 1910
},
{
"epoch": 0.4,
"grad_norm": 1.7092185020446777,
"learning_rate": 6.763427971867992e-05,
"loss": 3.0275,
"step": 1915
},
{
"epoch": 0.4,
"grad_norm": 1.224449872970581,
"learning_rate": 6.747817901934699e-05,
"loss": 3.0802,
"step": 1920
},
{
"epoch": 0.4,
"grad_norm": 1.2760062217712402,
"learning_rate": 6.732188400367197e-05,
"loss": 3.051,
"step": 1925
},
{
"epoch": 0.4,
"grad_norm": 1.5386216640472412,
"learning_rate": 6.716539640928871e-05,
"loss": 3.1051,
"step": 1930
},
{
"epoch": 0.4,
"grad_norm": 1.8112972974777222,
"learning_rate": 6.70087179759721e-05,
"loss": 3.0359,
"step": 1935
},
{
"epoch": 0.4,
"grad_norm": 1.6193982362747192,
"learning_rate": 6.685185044561874e-05,
"loss": 3.0989,
"step": 1940
},
{
"epoch": 0.4,
"grad_norm": 1.8458354473114014,
"learning_rate": 6.669479556222747e-05,
"loss": 3.0253,
"step": 1945
},
{
"epoch": 0.41,
"grad_norm": 2.0341317653656006,
"learning_rate": 6.653755507188013e-05,
"loss": 2.9737,
"step": 1950
},
{
"epoch": 0.41,
"grad_norm": 2.0165843963623047,
"learning_rate": 6.638013072272205e-05,
"loss": 3.1014,
"step": 1955
},
{
"epoch": 0.41,
"grad_norm": 1.5567164421081543,
"learning_rate": 6.622252426494259e-05,
"loss": 3.1072,
"step": 1960
},
{
"epoch": 0.41,
"grad_norm": 1.5388187170028687,
"learning_rate": 6.606473745075581e-05,
"loss": 3.0384,
"step": 1965
},
{
"epoch": 0.41,
"grad_norm": 1.5866913795471191,
"learning_rate": 6.590677203438084e-05,
"loss": 3.0324,
"step": 1970
},
{
"epoch": 0.41,
"grad_norm": 1.5576002597808838,
"learning_rate": 6.574862977202252e-05,
"loss": 2.9736,
"step": 1975
},
{
"epoch": 0.41,
"grad_norm": 1.793018102645874,
"learning_rate": 6.559031242185174e-05,
"loss": 2.9072,
"step": 1980
},
{
"epoch": 0.41,
"grad_norm": 1.2499622106552124,
"learning_rate": 6.543182174398597e-05,
"loss": 2.9612,
"step": 1985
},
{
"epoch": 0.41,
"grad_norm": 1.5662245750427246,
"learning_rate": 6.52731595004697e-05,
"loss": 3.0329,
"step": 1990
},
{
"epoch": 0.41,
"grad_norm": 1.3436709642410278,
"learning_rate": 6.51143274552548e-05,
"loss": 3.029,
"step": 1995
},
{
"epoch": 0.42,
"grad_norm": 1.1587468385696411,
"learning_rate": 6.495532737418098e-05,
"loss": 2.9982,
"step": 2000
},
{
"epoch": 0.42,
"grad_norm": 1.47077476978302,
"learning_rate": 6.479616102495605e-05,
"loss": 2.9831,
"step": 2005
},
{
"epoch": 0.42,
"grad_norm": 1.634659767150879,
"learning_rate": 6.463683017713638e-05,
"loss": 2.9842,
"step": 2010
},
{
"epoch": 0.42,
"grad_norm": 1.625710368156433,
"learning_rate": 6.447733660210715e-05,
"loss": 2.96,
"step": 2015
},
{
"epoch": 0.42,
"grad_norm": 1.3179887533187866,
"learning_rate": 6.431768207306272e-05,
"loss": 3.0163,
"step": 2020
},
{
"epoch": 0.42,
"grad_norm": 1.2866541147232056,
"learning_rate": 6.415786836498684e-05,
"loss": 3.0861,
"step": 2025
},
{
"epoch": 0.42,
"grad_norm": 2.325894594192505,
"learning_rate": 6.399789725463298e-05,
"loss": 3.0184,
"step": 2030
},
{
"epoch": 0.42,
"grad_norm": 1.7194280624389648,
"learning_rate": 6.383777052050458e-05,
"loss": 3.0344,
"step": 2035
},
{
"epoch": 0.42,
"grad_norm": 1.282314658164978,
"learning_rate": 6.367748994283518e-05,
"loss": 2.9811,
"step": 2040
},
{
"epoch": 0.43,
"grad_norm": 1.6111512184143066,
"learning_rate": 6.351705730356877e-05,
"loss": 2.9877,
"step": 2045
},
{
"epoch": 0.43,
"grad_norm": 1.410537838935852,
"learning_rate": 6.335647438633987e-05,
"loss": 3.0168,
"step": 2050
},
{
"epoch": 0.43,
"grad_norm": 1.1654980182647705,
"learning_rate": 6.319574297645374e-05,
"loss": 2.9711,
"step": 2055
},
{
"epoch": 0.43,
"grad_norm": 1.1389983892440796,
"learning_rate": 6.303486486086654e-05,
"loss": 2.9657,
"step": 2060
},
{
"epoch": 0.43,
"grad_norm": 1.5499783754348755,
"learning_rate": 6.287384182816546e-05,
"loss": 2.9502,
"step": 2065
},
{
"epoch": 0.43,
"grad_norm": 1.4282326698303223,
"learning_rate": 6.271267566854883e-05,
"loss": 3.0063,
"step": 2070
},
{
"epoch": 0.43,
"grad_norm": 1.6107735633850098,
"learning_rate": 6.255136817380618e-05,
"loss": 3.0268,
"step": 2075
},
{
"epoch": 0.43,
"grad_norm": 1.4487731456756592,
"learning_rate": 6.23899211372984e-05,
"loss": 2.9921,
"step": 2080
},
{
"epoch": 0.43,
"grad_norm": 1.4361457824707031,
"learning_rate": 6.222833635393772e-05,
"loss": 2.9553,
"step": 2085
},
{
"epoch": 0.43,
"grad_norm": 1.3371776342391968,
"learning_rate": 6.206661562016782e-05,
"loss": 3.0008,
"step": 2090
},
{
"epoch": 0.44,
"grad_norm": 1.2041832208633423,
"learning_rate": 6.190476073394382e-05,
"loss": 2.9138,
"step": 2095
},
{
"epoch": 0.44,
"grad_norm": 1.4377648830413818,
"learning_rate": 6.17427734947123e-05,
"loss": 2.9988,
"step": 2100
},
{
"epoch": 0.44,
"grad_norm": 1.478954792022705,
"learning_rate": 6.158065570339127e-05,
"loss": 2.9776,
"step": 2105
},
{
"epoch": 0.44,
"grad_norm": 1.3606313467025757,
"learning_rate": 6.141840916235021e-05,
"loss": 2.9472,
"step": 2110
},
{
"epoch": 0.44,
"grad_norm": 1.109960913658142,
"learning_rate": 6.125603567539001e-05,
"loss": 2.9937,
"step": 2115
},
{
"epoch": 0.44,
"grad_norm": 1.068223237991333,
"learning_rate": 6.109353704772284e-05,
"loss": 3.0124,
"step": 2120
},
{
"epoch": 0.44,
"grad_norm": 1.039057970046997,
"learning_rate": 6.0930915085952164e-05,
"loss": 2.9654,
"step": 2125
},
{
"epoch": 0.44,
"grad_norm": 1.2973130941390991,
"learning_rate": 6.076817159805267e-05,
"loss": 2.9683,
"step": 2130
},
{
"epoch": 0.44,
"grad_norm": 1.2471184730529785,
"learning_rate": 6.06053083933501e-05,
"loss": 2.9678,
"step": 2135
},
{
"epoch": 0.44,
"grad_norm": 1.2223358154296875,
"learning_rate": 6.044232728250116e-05,
"loss": 2.9847,
"step": 2140
},
{
"epoch": 0.45,
"grad_norm": 1.2057719230651855,
"learning_rate": 6.027923007747339e-05,
"loss": 2.881,
"step": 2145
},
{
"epoch": 0.45,
"grad_norm": 1.2965127229690552,
"learning_rate": 6.011601859152506e-05,
"loss": 2.9938,
"step": 2150
},
{
"epoch": 0.45,
"grad_norm": 1.5717276334762573,
"learning_rate": 5.995269463918495e-05,
"loss": 2.9392,
"step": 2155
},
{
"epoch": 0.45,
"grad_norm": 1.22340989112854,
"learning_rate": 5.97892600362322e-05,
"loss": 2.9455,
"step": 2160
},
{
"epoch": 0.45,
"grad_norm": 1.1999691724777222,
"learning_rate": 5.962571659967614e-05,
"loss": 2.9027,
"step": 2165
},
{
"epoch": 0.45,
"grad_norm": 1.224232792854309,
"learning_rate": 5.946206614773606e-05,
"loss": 2.9907,
"step": 2170
},
{
"epoch": 0.45,
"grad_norm": 1.1148897409439087,
"learning_rate": 5.929831049982103e-05,
"loss": 2.8995,
"step": 2175
},
{
"epoch": 0.45,
"grad_norm": 1.052234411239624,
"learning_rate": 5.9134451476509633e-05,
"loss": 2.9015,
"step": 2180
},
{
"epoch": 0.45,
"grad_norm": 1.2264165878295898,
"learning_rate": 5.897049089952974e-05,
"loss": 2.9542,
"step": 2185
},
{
"epoch": 0.46,
"grad_norm": 1.274595022201538,
"learning_rate": 5.880643059173826e-05,
"loss": 2.9146,
"step": 2190
},
{
"epoch": 0.46,
"grad_norm": 1.2384543418884277,
"learning_rate": 5.864227237710093e-05,
"loss": 2.9492,
"step": 2195
},
{
"epoch": 0.46,
"grad_norm": 1.0442498922348022,
"learning_rate": 5.847801808067189e-05,
"loss": 2.9415,
"step": 2200
},
{
"epoch": 0.46,
"grad_norm": 1.0750223398208618,
"learning_rate": 5.831366952857357e-05,
"loss": 2.9172,
"step": 2205
},
{
"epoch": 0.46,
"grad_norm": 1.2851649522781372,
"learning_rate": 5.814922854797622e-05,
"loss": 2.993,
"step": 2210
},
{
"epoch": 0.46,
"grad_norm": 1.3160637617111206,
"learning_rate": 5.798469696707775e-05,
"loss": 2.918,
"step": 2215
},
{
"epoch": 0.46,
"grad_norm": 1.1648153066635132,
"learning_rate": 5.782007661508331e-05,
"loss": 2.8707,
"step": 2220
},
{
"epoch": 0.46,
"grad_norm": 1.0308823585510254,
"learning_rate": 5.765536932218495e-05,
"loss": 2.8793,
"step": 2225
},
{
"epoch": 0.46,
"grad_norm": 1.2259533405303955,
"learning_rate": 5.7490576919541315e-05,
"loss": 2.9111,
"step": 2230
},
{
"epoch": 0.46,
"grad_norm": 1.1080760955810547,
"learning_rate": 5.732570123925729e-05,
"loss": 2.9137,
"step": 2235
},
{
"epoch": 0.47,
"grad_norm": 1.0767158269882202,
"learning_rate": 5.7160744114363593e-05,
"loss": 2.8627,
"step": 2240
},
{
"epoch": 0.47,
"grad_norm": 1.3223313093185425,
"learning_rate": 5.699570737879641e-05,
"loss": 2.9822,
"step": 2245
},
{
"epoch": 0.47,
"grad_norm": 1.3568925857543945,
"learning_rate": 5.683059286737702e-05,
"loss": 2.8968,
"step": 2250
},
{
"epoch": 0.47,
"grad_norm": 1.4644973278045654,
"learning_rate": 5.666540241579139e-05,
"loss": 2.9537,
"step": 2255
},
{
"epoch": 0.47,
"grad_norm": 1.2684613466262817,
"learning_rate": 5.6500137860569766e-05,
"loss": 2.9711,
"step": 2260
},
{
"epoch": 0.47,
"grad_norm": 1.2298182249069214,
"learning_rate": 5.633480103906624e-05,
"loss": 2.8662,
"step": 2265
},
{
"epoch": 0.47,
"grad_norm": 0.9774234294891357,
"learning_rate": 5.616939378943834e-05,
"loss": 2.9355,
"step": 2270
},
{
"epoch": 0.47,
"grad_norm": 1.2512295246124268,
"learning_rate": 5.6003917950626595e-05,
"loss": 2.9363,
"step": 2275
},
{
"epoch": 0.47,
"grad_norm": 1.165020227432251,
"learning_rate": 5.583837536233407e-05,
"loss": 2.9422,
"step": 2280
},
{
"epoch": 0.47,
"grad_norm": 1.0103294849395752,
"learning_rate": 5.567276786500596e-05,
"loss": 2.9219,
"step": 2285
},
{
"epoch": 0.48,
"grad_norm": 1.2318215370178223,
"learning_rate": 5.5507097299809054e-05,
"loss": 2.8888,
"step": 2290
},
{
"epoch": 0.48,
"grad_norm": 1.1302927732467651,
"learning_rate": 5.534136550861133e-05,
"loss": 2.9357,
"step": 2295
},
{
"epoch": 0.48,
"grad_norm": 1.1041792631149292,
"learning_rate": 5.5175574333961465e-05,
"loss": 3.0032,
"step": 2300
},
{
"epoch": 0.48,
"grad_norm": 0.9667074084281921,
"learning_rate": 5.500972561906832e-05,
"loss": 2.8891,
"step": 2305
},
{
"epoch": 0.48,
"grad_norm": 1.0719056129455566,
"learning_rate": 5.484382120778048e-05,
"loss": 2.855,
"step": 2310
},
{
"epoch": 0.48,
"grad_norm": 1.2497295141220093,
"learning_rate": 5.467786294456575e-05,
"loss": 2.956,
"step": 2315
},
{
"epoch": 0.48,
"grad_norm": 1.1342341899871826,
"learning_rate": 5.451185267449061e-05,
"loss": 2.8867,
"step": 2320
},
{
"epoch": 0.48,
"grad_norm": 1.2442474365234375,
"learning_rate": 5.43457922431998e-05,
"loss": 2.8761,
"step": 2325
},
{
"epoch": 0.48,
"grad_norm": 1.3404911756515503,
"learning_rate": 5.417968349689566e-05,
"loss": 2.9858,
"step": 2330
},
{
"epoch": 0.49,
"grad_norm": 1.429955005645752,
"learning_rate": 5.401352828231772e-05,
"loss": 2.9297,
"step": 2335
},
{
"epoch": 0.49,
"grad_norm": 1.5020805597305298,
"learning_rate": 5.384732844672211e-05,
"loss": 2.8893,
"step": 2340
},
{
"epoch": 0.49,
"grad_norm": 1.2861146926879883,
"learning_rate": 5.368108583786107e-05,
"loss": 2.9291,
"step": 2345
},
{
"epoch": 0.49,
"grad_norm": 1.3175119161605835,
"learning_rate": 5.3514802303962344e-05,
"loss": 2.8986,
"step": 2350
},
{
"epoch": 0.49,
"grad_norm": 1.0609021186828613,
"learning_rate": 5.334847969370868e-05,
"loss": 2.92,
"step": 2355
},
{
"epoch": 0.49,
"grad_norm": 1.2425000667572021,
"learning_rate": 5.3182119856217284e-05,
"loss": 2.8235,
"step": 2360
},
{
"epoch": 0.49,
"grad_norm": 1.1978639364242554,
"learning_rate": 5.3015724641019214e-05,
"loss": 2.9003,
"step": 2365
},
{
"epoch": 0.49,
"grad_norm": 1.2987258434295654,
"learning_rate": 5.284929589803884e-05,
"loss": 3.0109,
"step": 2370
},
{
"epoch": 0.49,
"grad_norm": 1.2826788425445557,
"learning_rate": 5.2682835477573336e-05,
"loss": 2.8281,
"step": 2375
},
{
"epoch": 0.49,
"grad_norm": 1.383651614189148,
"learning_rate": 5.2516345230271965e-05,
"loss": 2.9016,
"step": 2380
},
{
"epoch": 0.5,
"grad_norm": 1.6830869913101196,
"learning_rate": 5.234982700711569e-05,
"loss": 2.8548,
"step": 2385
},
{
"epoch": 0.5,
"grad_norm": 1.0755319595336914,
"learning_rate": 5.218328265939643e-05,
"loss": 2.9005,
"step": 2390
},
{
"epoch": 0.5,
"grad_norm": 1.1948031187057495,
"learning_rate": 5.201671403869657e-05,
"loss": 2.9149,
"step": 2395
},
{
"epoch": 0.5,
"grad_norm": 1.0630375146865845,
"learning_rate": 5.1850122996868366e-05,
"loss": 2.9354,
"step": 2400
},
{
"epoch": 0.5,
"grad_norm": 1.0797032117843628,
"learning_rate": 5.168351138601334e-05,
"loss": 2.9349,
"step": 2405
},
{
"epoch": 0.5,
"grad_norm": 1.2862792015075684,
"learning_rate": 5.1516881058461675e-05,
"loss": 2.7419,
"step": 2410
},
{
"epoch": 0.5,
"grad_norm": 1.197309970855713,
"learning_rate": 5.135023386675166e-05,
"loss": 2.8489,
"step": 2415
},
{
"epoch": 0.5,
"grad_norm": 1.0417031049728394,
"learning_rate": 5.118357166360906e-05,
"loss": 2.9111,
"step": 2420
},
{
"epoch": 0.5,
"grad_norm": 0.9647775888442993,
"learning_rate": 5.101689630192655e-05,
"loss": 2.8957,
"step": 2425
},
{
"epoch": 0.51,
"grad_norm": 1.1916242837905884,
"learning_rate": 5.085020963474307e-05,
"loss": 2.8144,
"step": 2430
},
{
"epoch": 0.51,
"grad_norm": 1.1009804010391235,
"learning_rate": 5.068351351522329e-05,
"loss": 2.8566,
"step": 2435
},
{
"epoch": 0.51,
"grad_norm": 0.9932246804237366,
"learning_rate": 5.0516809796636935e-05,
"loss": 2.8918,
"step": 2440
},
{
"epoch": 0.51,
"grad_norm": 1.2352042198181152,
"learning_rate": 5.035010033233821e-05,
"loss": 2.8594,
"step": 2445
},
{
"epoch": 0.51,
"grad_norm": 1.2683615684509277,
"learning_rate": 5.018338697574523e-05,
"loss": 2.871,
"step": 2450
},
{
"epoch": 0.51,
"grad_norm": 1.3254119157791138,
"learning_rate": 5.0016671580319354e-05,
"loss": 2.8575,
"step": 2455
},
{
"epoch": 0.51,
"grad_norm": 1.0678043365478516,
"learning_rate": 4.984995599954461e-05,
"loss": 2.8365,
"step": 2460
},
{
"epoch": 0.51,
"grad_norm": 1.051201581954956,
"learning_rate": 4.968324208690712e-05,
"loss": 2.889,
"step": 2465
},
{
"epoch": 0.51,
"grad_norm": 1.1683032512664795,
"learning_rate": 4.951653169587441e-05,
"loss": 2.9055,
"step": 2470
},
{
"epoch": 0.51,
"grad_norm": 1.0889207124710083,
"learning_rate": 4.93498266798749e-05,
"loss": 2.8431,
"step": 2475
},
{
"epoch": 0.52,
"grad_norm": 1.4402130842208862,
"learning_rate": 4.918312889227722e-05,
"loss": 2.9308,
"step": 2480
},
{
"epoch": 0.52,
"grad_norm": 1.2184454202651978,
"learning_rate": 4.901644018636966e-05,
"loss": 2.8436,
"step": 2485
},
{
"epoch": 0.52,
"grad_norm": 1.219307780265808,
"learning_rate": 4.8849762415339526e-05,
"loss": 2.908,
"step": 2490
},
{
"epoch": 0.52,
"grad_norm": 1.3211146593093872,
"learning_rate": 4.868309743225256e-05,
"loss": 2.9059,
"step": 2495
},
{
"epoch": 0.52,
"grad_norm": 1.0055177211761475,
"learning_rate": 4.851644709003233e-05,
"loss": 2.8685,
"step": 2500
},
{
"epoch": 0.52,
"grad_norm": 1.1313613653182983,
"learning_rate": 4.834981324143964e-05,
"loss": 2.8603,
"step": 2505
},
{
"epoch": 0.52,
"grad_norm": 1.2178055047988892,
"learning_rate": 4.818319773905191e-05,
"loss": 2.9292,
"step": 2510
},
{
"epoch": 0.52,
"grad_norm": 1.0852621793746948,
"learning_rate": 4.801660243524261e-05,
"loss": 2.8944,
"step": 2515
},
{
"epoch": 0.52,
"grad_norm": 1.24472975730896,
"learning_rate": 4.7850029182160626e-05,
"loss": 2.8892,
"step": 2520
},
{
"epoch": 0.52,
"grad_norm": 1.4635546207427979,
"learning_rate": 4.768347983170973e-05,
"loss": 2.8763,
"step": 2525
},
{
"epoch": 0.53,
"grad_norm": 1.0947743654251099,
"learning_rate": 4.7516956235527884e-05,
"loss": 2.8366,
"step": 2530
},
{
"epoch": 0.53,
"grad_norm": 1.0728180408477783,
"learning_rate": 4.735046024496682e-05,
"loss": 2.8518,
"step": 2535
},
{
"epoch": 0.53,
"grad_norm": 1.076548457145691,
"learning_rate": 4.7183993711071286e-05,
"loss": 2.7974,
"step": 2540
},
{
"epoch": 0.53,
"grad_norm": 1.3811664581298828,
"learning_rate": 4.7017558484558554e-05,
"loss": 2.869,
"step": 2545
},
{
"epoch": 0.53,
"grad_norm": 1.032314419746399,
"learning_rate": 4.6851156415797844e-05,
"loss": 2.8689,
"step": 2550
},
{
"epoch": 0.53,
"grad_norm": 1.1875766515731812,
"learning_rate": 4.6684789354789746e-05,
"loss": 2.8882,
"step": 2555
},
{
"epoch": 0.53,
"grad_norm": 1.1097415685653687,
"learning_rate": 4.651845915114563e-05,
"loss": 2.8113,
"step": 2560
},
{
"epoch": 0.53,
"grad_norm": 0.9626017212867737,
"learning_rate": 4.6352167654067095e-05,
"loss": 2.899,
"step": 2565
},
{
"epoch": 0.53,
"grad_norm": 1.1476669311523438,
"learning_rate": 4.618591671232544e-05,
"loss": 2.7812,
"step": 2570
},
{
"epoch": 0.54,
"grad_norm": 1.076156497001648,
"learning_rate": 4.601970817424106e-05,
"loss": 2.8227,
"step": 2575
},
{
"epoch": 0.54,
"grad_norm": 1.175553798675537,
"learning_rate": 4.585354388766292e-05,
"loss": 2.8536,
"step": 2580
},
{
"epoch": 0.54,
"grad_norm": 1.021243929862976,
"learning_rate": 4.568742569994802e-05,
"loss": 2.8325,
"step": 2585
},
{
"epoch": 0.54,
"grad_norm": 1.0142565965652466,
"learning_rate": 4.552135545794086e-05,
"loss": 2.8271,
"step": 2590
},
{
"epoch": 0.54,
"grad_norm": 0.9687764048576355,
"learning_rate": 4.535533500795288e-05,
"loss": 2.8684,
"step": 2595
},
{
"epoch": 0.54,
"grad_norm": 1.0715291500091553,
"learning_rate": 4.5189366195741953e-05,
"loss": 2.8642,
"step": 2600
},
{
"epoch": 0.54,
"grad_norm": 1.011589765548706,
"learning_rate": 4.502345086649186e-05,
"loss": 2.8648,
"step": 2605
},
{
"epoch": 0.54,
"grad_norm": 1.1587084531784058,
"learning_rate": 4.485759086479179e-05,
"loss": 2.8911,
"step": 2610
},
{
"epoch": 0.54,
"grad_norm": 1.2862259149551392,
"learning_rate": 4.469178803461579e-05,
"loss": 2.7923,
"step": 2615
},
{
"epoch": 0.54,
"grad_norm": 1.1106417179107666,
"learning_rate": 4.4526044219302326e-05,
"loss": 2.9325,
"step": 2620
},
{
"epoch": 0.55,
"grad_norm": 1.2699544429779053,
"learning_rate": 4.4360361261533745e-05,
"loss": 2.8068,
"step": 2625
},
{
"epoch": 0.55,
"grad_norm": 0.9380654692649841,
"learning_rate": 4.419474100331579e-05,
"loss": 2.8514,
"step": 2630
},
{
"epoch": 0.55,
"grad_norm": 1.0732371807098389,
"learning_rate": 4.402918528595715e-05,
"loss": 2.935,
"step": 2635
},
{
"epoch": 0.55,
"grad_norm": 1.129291296005249,
"learning_rate": 4.386369595004896e-05,
"loss": 2.8586,
"step": 2640
},
{
"epoch": 0.55,
"grad_norm": 0.9377846121788025,
"learning_rate": 4.3698274835444354e-05,
"loss": 2.7929,
"step": 2645
},
{
"epoch": 0.55,
"grad_norm": 1.2489937543869019,
"learning_rate": 4.3532923781238e-05,
"loss": 2.856,
"step": 2650
},
{
"epoch": 0.55,
"grad_norm": 0.982698380947113,
"learning_rate": 4.336764462574566e-05,
"loss": 2.8094,
"step": 2655
},
{
"epoch": 0.55,
"grad_norm": 0.9571766257286072,
"learning_rate": 4.320243920648376e-05,
"loss": 2.7977,
"step": 2660
},
{
"epoch": 0.55,
"grad_norm": 1.0526105165481567,
"learning_rate": 4.303730936014894e-05,
"loss": 2.8465,
"step": 2665
},
{
"epoch": 0.55,
"grad_norm": 1.2619898319244385,
"learning_rate": 4.287225692259765e-05,
"loss": 2.8379,
"step": 2670
},
{
"epoch": 0.56,
"grad_norm": 1.2104145288467407,
"learning_rate": 4.270728372882575e-05,
"loss": 2.7873,
"step": 2675
},
{
"epoch": 0.56,
"grad_norm": 0.989600419998169,
"learning_rate": 4.254239161294804e-05,
"loss": 2.8688,
"step": 2680
},
{
"epoch": 0.56,
"grad_norm": 1.0730929374694824,
"learning_rate": 4.237758240817802e-05,
"loss": 2.7977,
"step": 2685
},
{
"epoch": 0.56,
"grad_norm": 1.262408971786499,
"learning_rate": 4.2212857946807336e-05,
"loss": 2.7764,
"step": 2690
},
{
"epoch": 0.56,
"grad_norm": 0.973852276802063,
"learning_rate": 4.2048220060185516e-05,
"loss": 2.7779,
"step": 2695
},
{
"epoch": 0.56,
"grad_norm": 1.1079379320144653,
"learning_rate": 4.188367057869957e-05,
"loss": 2.8528,
"step": 2700
},
{
"epoch": 0.56,
"grad_norm": 1.005570650100708,
"learning_rate": 4.171921133175365e-05,
"loss": 2.825,
"step": 2705
},
{
"epoch": 0.56,
"grad_norm": 1.1702814102172852,
"learning_rate": 4.155484414774872e-05,
"loss": 2.7989,
"step": 2710
},
{
"epoch": 0.56,
"grad_norm": 0.8843361139297485,
"learning_rate": 4.139057085406221e-05,
"loss": 2.8916,
"step": 2715
},
{
"epoch": 0.57,
"grad_norm": 1.055151343345642,
"learning_rate": 4.1226393277027726e-05,
"loss": 2.826,
"step": 2720
},
{
"epoch": 0.57,
"grad_norm": 1.195949673652649,
"learning_rate": 4.106231324191471e-05,
"loss": 2.8527,
"step": 2725
},
{
"epoch": 0.57,
"grad_norm": 1.177303671836853,
"learning_rate": 4.089833257290817e-05,
"loss": 2.824,
"step": 2730
},
{
"epoch": 0.57,
"grad_norm": 1.0224922895431519,
"learning_rate": 4.073445309308842e-05,
"loss": 2.8755,
"step": 2735
},
{
"epoch": 0.57,
"grad_norm": 1.0592306852340698,
"learning_rate": 4.0570676624410756e-05,
"loss": 2.8691,
"step": 2740
},
{
"epoch": 0.57,
"grad_norm": 1.0377992391586304,
"learning_rate": 4.040700498768525e-05,
"loss": 2.8475,
"step": 2745
},
{
"epoch": 0.57,
"grad_norm": 1.0048409700393677,
"learning_rate": 4.024344000255648e-05,
"loss": 2.8497,
"step": 2750
},
{
"epoch": 0.57,
"grad_norm": 1.2291332483291626,
"learning_rate": 4.0079983487483313e-05,
"loss": 2.8426,
"step": 2755
},
{
"epoch": 0.57,
"grad_norm": 1.0328052043914795,
"learning_rate": 3.9916637259718683e-05,
"loss": 2.7501,
"step": 2760
},
{
"epoch": 0.57,
"grad_norm": 1.0020463466644287,
"learning_rate": 3.9753403135289396e-05,
"loss": 2.773,
"step": 2765
},
{
"epoch": 0.58,
"grad_norm": 0.9875773787498474,
"learning_rate": 3.9590282928975914e-05,
"loss": 2.8225,
"step": 2770
},
{
"epoch": 0.58,
"grad_norm": 1.0233545303344727,
"learning_rate": 3.942727845429221e-05,
"loss": 2.8097,
"step": 2775
},
{
"epoch": 0.58,
"grad_norm": 1.029152512550354,
"learning_rate": 3.926439152346558e-05,
"loss": 2.8186,
"step": 2780
},
{
"epoch": 0.58,
"grad_norm": 0.9377911686897278,
"learning_rate": 3.910162394741653e-05,
"loss": 2.8391,
"step": 2785
},
{
"epoch": 0.58,
"grad_norm": 0.9996848106384277,
"learning_rate": 3.893897753573861e-05,
"loss": 2.799,
"step": 2790
},
{
"epoch": 0.58,
"grad_norm": 1.0269639492034912,
"learning_rate": 3.877645409667829e-05,
"loss": 2.8077,
"step": 2795
},
{
"epoch": 0.58,
"grad_norm": 0.9148347973823547,
"learning_rate": 3.861405543711491e-05,
"loss": 2.9267,
"step": 2800
},
{
"epoch": 0.58,
"grad_norm": 1.0124460458755493,
"learning_rate": 3.8451783362540507e-05,
"loss": 2.8121,
"step": 2805
},
{
"epoch": 0.58,
"grad_norm": 1.1326009035110474,
"learning_rate": 3.828963967703983e-05,
"loss": 2.8329,
"step": 2810
},
{
"epoch": 0.59,
"grad_norm": 0.9656223654747009,
"learning_rate": 3.8127626183270223e-05,
"loss": 2.8702,
"step": 2815
},
{
"epoch": 0.59,
"grad_norm": 1.0153850317001343,
"learning_rate": 3.796574468244161e-05,
"loss": 2.7927,
"step": 2820
},
{
"epoch": 0.59,
"grad_norm": 1.0111325979232788,
"learning_rate": 3.7803996974296444e-05,
"loss": 2.7946,
"step": 2825
},
{
"epoch": 0.59,
"grad_norm": 1.0902135372161865,
"learning_rate": 3.7642384857089776e-05,
"loss": 2.7915,
"step": 2830
},
{
"epoch": 0.59,
"grad_norm": 0.8529213666915894,
"learning_rate": 3.748091012756915e-05,
"loss": 2.8387,
"step": 2835
},
{
"epoch": 0.59,
"grad_norm": 1.1113386154174805,
"learning_rate": 3.731957458095467e-05,
"loss": 2.8166,
"step": 2840
},
{
"epoch": 0.59,
"grad_norm": 1.0866583585739136,
"learning_rate": 3.71583800109191e-05,
"loss": 2.766,
"step": 2845
},
{
"epoch": 0.59,
"grad_norm": 0.9482641816139221,
"learning_rate": 3.699732820956784e-05,
"loss": 2.76,
"step": 2850
},
{
"epoch": 0.59,
"grad_norm": 1.1516281366348267,
"learning_rate": 3.6836420967419057e-05,
"loss": 2.7488,
"step": 2855
},
{
"epoch": 0.59,
"grad_norm": 1.2093509435653687,
"learning_rate": 3.6675660073383745e-05,
"loss": 2.7954,
"step": 2860
},
{
"epoch": 0.6,
"grad_norm": 1.0246303081512451,
"learning_rate": 3.6515047314745856e-05,
"loss": 2.7416,
"step": 2865
},
{
"epoch": 0.6,
"grad_norm": 1.0894135236740112,
"learning_rate": 3.6354584477142437e-05,
"loss": 2.8021,
"step": 2870
},
{
"epoch": 0.6,
"grad_norm": 1.1291885375976562,
"learning_rate": 3.6194273344543736e-05,
"loss": 2.8135,
"step": 2875
},
{
"epoch": 0.6,
"grad_norm": 1.0989508628845215,
"learning_rate": 3.6034115699233425e-05,
"loss": 2.843,
"step": 2880
},
{
"epoch": 0.6,
"grad_norm": 1.0321098566055298,
"learning_rate": 3.5874113321788736e-05,
"loss": 2.8295,
"step": 2885
},
{
"epoch": 0.6,
"grad_norm": 1.10627019405365,
"learning_rate": 3.571426799106071e-05,
"loss": 2.7902,
"step": 2890
},
{
"epoch": 0.6,
"grad_norm": 1.0204766988754272,
"learning_rate": 3.555458148415437e-05,
"loss": 2.8336,
"step": 2895
},
{
"epoch": 0.6,
"grad_norm": 1.0012155771255493,
"learning_rate": 3.539505557640901e-05,
"loss": 2.8176,
"step": 2900
},
{
"epoch": 0.6,
"grad_norm": 1.0569708347320557,
"learning_rate": 3.523569204137843e-05,
"loss": 2.8896,
"step": 2905
},
{
"epoch": 0.6,
"grad_norm": 1.0835829973220825,
"learning_rate": 3.5076492650811246e-05,
"loss": 2.7657,
"step": 2910
},
{
"epoch": 0.61,
"grad_norm": 1.1443898677825928,
"learning_rate": 3.491745917463113e-05,
"loss": 2.8314,
"step": 2915
},
{
"epoch": 0.61,
"grad_norm": 1.5082751512527466,
"learning_rate": 3.475859338091721e-05,
"loss": 2.9325,
"step": 2920
},
{
"epoch": 0.61,
"grad_norm": 1.0768505334854126,
"learning_rate": 3.4599897035884374e-05,
"loss": 2.821,
"step": 2925
},
{
"epoch": 0.61,
"grad_norm": 1.054617166519165,
"learning_rate": 3.444137190386363e-05,
"loss": 2.8665,
"step": 2930
},
{
"epoch": 0.61,
"grad_norm": 0.9536219239234924,
"learning_rate": 3.4283019747282514e-05,
"loss": 2.7844,
"step": 2935
},
{
"epoch": 0.61,
"grad_norm": 1.4797545671463013,
"learning_rate": 3.412484232664545e-05,
"loss": 2.8089,
"step": 2940
},
{
"epoch": 0.61,
"grad_norm": 1.0040301084518433,
"learning_rate": 3.396684140051424e-05,
"loss": 2.8695,
"step": 2945
},
{
"epoch": 0.61,
"grad_norm": 0.9701451659202576,
"learning_rate": 3.3809018725488466e-05,
"loss": 2.8051,
"step": 2950
},
{
"epoch": 0.61,
"grad_norm": 0.9382186532020569,
"learning_rate": 3.365137605618598e-05,
"loss": 2.8002,
"step": 2955
},
{
"epoch": 0.62,
"grad_norm": 1.0254327058792114,
"learning_rate": 3.3493915145223395e-05,
"loss": 2.7694,
"step": 2960
},
{
"epoch": 0.62,
"grad_norm": 1.1165177822113037,
"learning_rate": 3.3336637743196584e-05,
"loss": 2.8476,
"step": 2965
},
{
"epoch": 0.62,
"grad_norm": 0.9309197068214417,
"learning_rate": 3.317954559866126e-05,
"loss": 2.7724,
"step": 2970
},
{
"epoch": 0.62,
"grad_norm": 1.0240439176559448,
"learning_rate": 3.302264045811344e-05,
"loss": 2.7741,
"step": 2975
},
{
"epoch": 0.62,
"grad_norm": 1.0252265930175781,
"learning_rate": 3.286592406597021e-05,
"loss": 2.7953,
"step": 2980
},
{
"epoch": 0.62,
"grad_norm": 0.9758617877960205,
"learning_rate": 3.270939816455012e-05,
"loss": 2.7917,
"step": 2985
},
{
"epoch": 0.62,
"grad_norm": 1.271776795387268,
"learning_rate": 3.255306449405395e-05,
"loss": 2.7827,
"step": 2990
},
{
"epoch": 0.62,
"grad_norm": 1.053177833557129,
"learning_rate": 3.2396924792545304e-05,
"loss": 2.7507,
"step": 2995
},
{
"epoch": 0.62,
"grad_norm": 0.8852807879447937,
"learning_rate": 3.224098079593132e-05,
"loss": 2.8368,
"step": 3000
},
{
"epoch": 0.62,
"grad_norm": 0.9832589626312256,
"learning_rate": 3.2085234237943354e-05,
"loss": 2.7474,
"step": 3005
},
{
"epoch": 0.63,
"grad_norm": 1.572548747062683,
"learning_rate": 3.19296868501177e-05,
"loss": 2.811,
"step": 3010
},
{
"epoch": 0.63,
"grad_norm": 0.9211801290512085,
"learning_rate": 3.177434036177636e-05,
"loss": 2.8129,
"step": 3015
},
{
"epoch": 0.63,
"grad_norm": 1.1787726879119873,
"learning_rate": 3.1619196500007804e-05,
"loss": 2.8369,
"step": 3020
},
{
"epoch": 0.63,
"grad_norm": 1.4393579959869385,
"learning_rate": 3.146425698964776e-05,
"loss": 2.7433,
"step": 3025
},
{
"epoch": 0.63,
"grad_norm": 1.048122525215149,
"learning_rate": 3.1309523553260046e-05,
"loss": 2.811,
"step": 3030
},
{
"epoch": 0.63,
"grad_norm": 1.1251912117004395,
"learning_rate": 3.115499791111743e-05,
"loss": 2.8316,
"step": 3035
},
{
"epoch": 0.63,
"grad_norm": 1.230319857597351,
"learning_rate": 3.10006817811825e-05,
"loss": 2.8845,
"step": 3040
},
{
"epoch": 0.63,
"grad_norm": 0.9341143369674683,
"learning_rate": 3.084657687908855e-05,
"loss": 2.7453,
"step": 3045
},
{
"epoch": 0.63,
"grad_norm": 1.014804482460022,
"learning_rate": 3.069268491812052e-05,
"loss": 2.8087,
"step": 3050
},
{
"epoch": 0.63,
"grad_norm": 1.120583176612854,
"learning_rate": 3.0539007609195934e-05,
"loss": 2.7081,
"step": 3055
},
{
"epoch": 0.64,
"grad_norm": 1.0007081031799316,
"learning_rate": 3.0385546660845908e-05,
"loss": 2.7554,
"step": 3060
},
{
"epoch": 0.64,
"grad_norm": 1.0974400043487549,
"learning_rate": 3.0232303779196132e-05,
"loss": 2.7851,
"step": 3065
},
{
"epoch": 0.64,
"grad_norm": 0.9404457211494446,
"learning_rate": 3.0079280667947885e-05,
"loss": 2.794,
"step": 3070
},
{
"epoch": 0.64,
"grad_norm": 1.1209532022476196,
"learning_rate": 2.9926479028359132e-05,
"loss": 2.8168,
"step": 3075
},
{
"epoch": 0.64,
"grad_norm": 0.9819439053535461,
"learning_rate": 2.97739005592256e-05,
"loss": 2.8053,
"step": 3080
},
{
"epoch": 0.64,
"grad_norm": 0.9886718392372131,
"learning_rate": 2.962154695686187e-05,
"loss": 2.6517,
"step": 3085
},
{
"epoch": 0.64,
"grad_norm": 1.1918424367904663,
"learning_rate": 2.9469419915082536e-05,
"loss": 2.7005,
"step": 3090
},
{
"epoch": 0.64,
"grad_norm": 1.053731918334961,
"learning_rate": 2.9317521125183368e-05,
"loss": 2.7077,
"step": 3095
},
{
"epoch": 0.64,
"grad_norm": 0.9552854299545288,
"learning_rate": 2.9165852275922524e-05,
"loss": 2.759,
"step": 3100
},
{
"epoch": 0.65,
"grad_norm": 0.9677866697311401,
"learning_rate": 2.901441505350174e-05,
"loss": 2.7825,
"step": 3105
},
{
"epoch": 0.65,
"grad_norm": 1.091630458831787,
"learning_rate": 2.886321114154762e-05,
"loss": 2.8271,
"step": 3110
},
{
"epoch": 0.65,
"grad_norm": 1.372812271118164,
"learning_rate": 2.87122422210929e-05,
"loss": 2.7847,
"step": 3115
},
{
"epoch": 0.65,
"grad_norm": 0.9907299876213074,
"learning_rate": 2.8561509970557736e-05,
"loss": 2.8052,
"step": 3120
},
{
"epoch": 0.65,
"grad_norm": 1.0163778066635132,
"learning_rate": 2.8411016065731146e-05,
"loss": 2.817,
"step": 3125
},
{
"epoch": 0.65,
"grad_norm": 1.6438254117965698,
"learning_rate": 2.826076217975222e-05,
"loss": 2.738,
"step": 3130
},
{
"epoch": 0.65,
"grad_norm": 1.0988104343414307,
"learning_rate": 2.8110749983091632e-05,
"loss": 2.7959,
"step": 3135
},
{
"epoch": 0.65,
"grad_norm": 0.9554868936538696,
"learning_rate": 2.7960981143533053e-05,
"loss": 2.7101,
"step": 3140
},
{
"epoch": 0.65,
"grad_norm": 1.1096340417861938,
"learning_rate": 2.781145732615457e-05,
"loss": 2.7961,
"step": 3145
},
{
"epoch": 0.65,
"grad_norm": 0.933692991733551,
"learning_rate": 2.7662180193310218e-05,
"loss": 2.8189,
"step": 3150
},
{
"epoch": 0.66,
"grad_norm": 0.9508568644523621,
"learning_rate": 2.751315140461145e-05,
"loss": 2.7655,
"step": 3155
},
{
"epoch": 0.66,
"grad_norm": 0.9287638068199158,
"learning_rate": 2.7364372616908744e-05,
"loss": 2.8199,
"step": 3160
},
{
"epoch": 0.66,
"grad_norm": 0.9375027418136597,
"learning_rate": 2.7215845484273152e-05,
"loss": 2.7537,
"step": 3165
},
{
"epoch": 0.66,
"grad_norm": 0.977455198764801,
"learning_rate": 2.7067571657977893e-05,
"loss": 2.7686,
"step": 3170
},
{
"epoch": 0.66,
"grad_norm": 0.968980610370636,
"learning_rate": 2.691955278648003e-05,
"loss": 2.7598,
"step": 3175
},
{
"epoch": 0.66,
"grad_norm": 0.9863821268081665,
"learning_rate": 2.6771790515402112e-05,
"loss": 2.7697,
"step": 3180
},
{
"epoch": 0.66,
"grad_norm": 0.9942926168441772,
"learning_rate": 2.6624286487513916e-05,
"loss": 2.7545,
"step": 3185
},
{
"epoch": 0.66,
"grad_norm": 0.9893491864204407,
"learning_rate": 2.6477042342714137e-05,
"loss": 2.7436,
"step": 3190
},
{
"epoch": 0.66,
"grad_norm": 1.0433694124221802,
"learning_rate": 2.633005971801219e-05,
"loss": 2.7581,
"step": 3195
},
{
"epoch": 0.67,
"grad_norm": 0.9553687572479248,
"learning_rate": 2.6183340247510013e-05,
"loss": 2.7687,
"step": 3200
},
{
"epoch": 0.67,
"grad_norm": 0.9639105796813965,
"learning_rate": 2.6036885562383856e-05,
"loss": 2.7352,
"step": 3205
},
{
"epoch": 0.67,
"grad_norm": 0.9263445138931274,
"learning_rate": 2.5890697290866206e-05,
"loss": 2.7418,
"step": 3210
},
{
"epoch": 0.67,
"grad_norm": 0.9819431900978088,
"learning_rate": 2.5744777058227642e-05,
"loss": 2.7402,
"step": 3215
},
{
"epoch": 0.67,
"grad_norm": 0.9983292818069458,
"learning_rate": 2.5599126486758777e-05,
"loss": 2.7718,
"step": 3220
},
{
"epoch": 0.67,
"grad_norm": 0.8828099370002747,
"learning_rate": 2.5453747195752243e-05,
"loss": 2.6951,
"step": 3225
},
{
"epoch": 0.67,
"grad_norm": 0.9636290073394775,
"learning_rate": 2.530864080148464e-05,
"loss": 2.8062,
"step": 3230
},
{
"epoch": 0.67,
"grad_norm": 0.8762787580490112,
"learning_rate": 2.5163808917198615e-05,
"loss": 2.7136,
"step": 3235
},
{
"epoch": 0.67,
"grad_norm": 0.9096724390983582,
"learning_rate": 2.501925315308492e-05,
"loss": 2.7559,
"step": 3240
},
{
"epoch": 0.67,
"grad_norm": 1.0705084800720215,
"learning_rate": 2.4874975116264477e-05,
"loss": 2.7401,
"step": 3245
},
{
"epoch": 0.68,
"grad_norm": 0.9851915240287781,
"learning_rate": 2.4730976410770534e-05,
"loss": 2.7734,
"step": 3250
},
{
"epoch": 0.68,
"grad_norm": 1.0259265899658203,
"learning_rate": 2.458725863753084e-05,
"loss": 2.801,
"step": 3255
},
{
"epoch": 0.68,
"grad_norm": 0.8898816108703613,
"learning_rate": 2.4443823394349834e-05,
"loss": 2.7415,
"step": 3260
},
{
"epoch": 0.68,
"grad_norm": 1.0195220708847046,
"learning_rate": 2.430067227589088e-05,
"loss": 2.7858,
"step": 3265
},
{
"epoch": 0.68,
"grad_norm": 0.942822277545929,
"learning_rate": 2.4157806873658517e-05,
"loss": 2.7681,
"step": 3270
},
{
"epoch": 0.68,
"grad_norm": 0.9511753916740417,
"learning_rate": 2.401522877598087e-05,
"loss": 2.8133,
"step": 3275
},
{
"epoch": 0.68,
"grad_norm": 1.0596057176589966,
"learning_rate": 2.3872939567991827e-05,
"loss": 2.7838,
"step": 3280
},
{
"epoch": 0.68,
"grad_norm": 0.9920269846916199,
"learning_rate": 2.373094083161353e-05,
"loss": 2.802,
"step": 3285
},
{
"epoch": 0.68,
"grad_norm": 0.9092423915863037,
"learning_rate": 2.358923414553877e-05,
"loss": 2.7503,
"step": 3290
},
{
"epoch": 0.68,
"grad_norm": 0.9407171607017517,
"learning_rate": 2.3447821085213405e-05,
"loss": 2.6917,
"step": 3295
},
{
"epoch": 0.69,
"grad_norm": 0.9970971941947937,
"learning_rate": 2.3306703222818878e-05,
"loss": 2.8288,
"step": 3300
},
{
"epoch": 0.69,
"grad_norm": 0.985424816608429,
"learning_rate": 2.3165882127254705e-05,
"loss": 2.7971,
"step": 3305
},
{
"epoch": 0.69,
"grad_norm": 1.1382272243499756,
"learning_rate": 2.302535936412108e-05,
"loss": 2.7703,
"step": 3310
},
{
"epoch": 0.69,
"grad_norm": 1.1554280519485474,
"learning_rate": 2.2885136495701415e-05,
"loss": 2.7654,
"step": 3315
},
{
"epoch": 0.69,
"grad_norm": 0.9137274026870728,
"learning_rate": 2.274521508094501e-05,
"loss": 2.7842,
"step": 3320
},
{
"epoch": 0.69,
"grad_norm": 0.9484663605690002,
"learning_rate": 2.2605596675449698e-05,
"loss": 2.6593,
"step": 3325
},
{
"epoch": 0.69,
"grad_norm": 0.993303120136261,
"learning_rate": 2.246628283144457e-05,
"loss": 2.7118,
"step": 3330
},
{
"epoch": 0.69,
"grad_norm": 1.0425090789794922,
"learning_rate": 2.232727509777269e-05,
"loss": 2.7676,
"step": 3335
},
{
"epoch": 0.69,
"grad_norm": 0.9166971445083618,
"learning_rate": 2.2188575019873932e-05,
"loss": 2.7957,
"step": 3340
},
{
"epoch": 0.7,
"grad_norm": 1.023172378540039,
"learning_rate": 2.2050184139767704e-05,
"loss": 2.7887,
"step": 3345
},
{
"epoch": 0.7,
"grad_norm": 1.1221779584884644,
"learning_rate": 2.191210399603591e-05,
"loss": 2.7889,
"step": 3350
},
{
"epoch": 0.7,
"grad_norm": 0.9279941320419312,
"learning_rate": 2.1774336123805772e-05,
"loss": 2.7972,
"step": 3355
},
{
"epoch": 0.7,
"grad_norm": 0.9173780679702759,
"learning_rate": 2.1636882054732776e-05,
"loss": 2.7187,
"step": 3360
},
{
"epoch": 0.7,
"grad_norm": 1.0177903175354004,
"learning_rate": 2.1499743316983684e-05,
"loss": 2.7618,
"step": 3365
},
{
"epoch": 0.7,
"grad_norm": 2.8043572902679443,
"learning_rate": 2.1362921435219473e-05,
"loss": 2.7489,
"step": 3370
},
{
"epoch": 0.7,
"grad_norm": 0.9986701607704163,
"learning_rate": 2.1226417930578464e-05,
"loss": 2.7217,
"step": 3375
},
{
"epoch": 0.7,
"grad_norm": 1.0279260873794556,
"learning_rate": 2.109023432065935e-05,
"loss": 2.7698,
"step": 3380
},
{
"epoch": 0.7,
"grad_norm": 0.9698307514190674,
"learning_rate": 2.095437211950434e-05,
"loss": 2.8272,
"step": 3385
},
{
"epoch": 0.7,
"grad_norm": 0.9361196756362915,
"learning_rate": 2.0818832837582352e-05,
"loss": 2.7065,
"step": 3390
},
{
"epoch": 0.71,
"grad_norm": 0.9639699459075928,
"learning_rate": 2.068361798177218e-05,
"loss": 2.6764,
"step": 3395
},
{
"epoch": 0.71,
"grad_norm": 0.9683962464332581,
"learning_rate": 2.0548729055345778e-05,
"loss": 2.7494,
"step": 3400
},
{
"epoch": 0.71,
"grad_norm": 1.086588740348816,
"learning_rate": 2.0414167557951514e-05,
"loss": 2.6964,
"step": 3405
},
{
"epoch": 0.71,
"grad_norm": 1.027772068977356,
"learning_rate": 2.0279934985597527e-05,
"loss": 2.6967,
"step": 3410
},
{
"epoch": 0.71,
"grad_norm": 0.9064968824386597,
"learning_rate": 2.0146032830635054e-05,
"loss": 2.7529,
"step": 3415
},
{
"epoch": 0.71,
"grad_norm": 0.970786988735199,
"learning_rate": 2.001246258174192e-05,
"loss": 2.7721,
"step": 3420
},
{
"epoch": 0.71,
"grad_norm": 0.918952226638794,
"learning_rate": 1.9879225723905886e-05,
"loss": 2.75,
"step": 3425
},
{
"epoch": 0.71,
"grad_norm": 1.07177734375,
"learning_rate": 1.9746323738408203e-05,
"loss": 2.6795,
"step": 3430
},
{
"epoch": 0.71,
"grad_norm": 0.918933093547821,
"learning_rate": 1.9613758102807117e-05,
"loss": 2.7175,
"step": 3435
},
{
"epoch": 0.71,
"grad_norm": 0.8986356854438782,
"learning_rate": 1.9481530290921474e-05,
"loss": 2.7303,
"step": 3440
},
{
"epoch": 0.72,
"grad_norm": 0.9149788618087769,
"learning_rate": 1.934964177281428e-05,
"loss": 2.7732,
"step": 3445
},
{
"epoch": 0.72,
"grad_norm": 0.8481215238571167,
"learning_rate": 1.9218094014776434e-05,
"loss": 2.7881,
"step": 3450
},
{
"epoch": 0.72,
"grad_norm": 0.9517055749893188,
"learning_rate": 1.9086888479310333e-05,
"loss": 2.7432,
"step": 3455
},
{
"epoch": 0.72,
"grad_norm": 0.953904926776886,
"learning_rate": 1.895602662511371e-05,
"loss": 2.7674,
"step": 3460
},
{
"epoch": 0.72,
"grad_norm": 0.8726545572280884,
"learning_rate": 1.8825509907063327e-05,
"loss": 2.8083,
"step": 3465
},
{
"epoch": 0.72,
"grad_norm": 1.0478657484054565,
"learning_rate": 1.8695339776198872e-05,
"loss": 2.7283,
"step": 3470
},
{
"epoch": 0.72,
"grad_norm": 0.8761024475097656,
"learning_rate": 1.8565517679706783e-05,
"loss": 2.7714,
"step": 3475
},
{
"epoch": 0.72,
"grad_norm": 0.8935072422027588,
"learning_rate": 1.8436045060904174e-05,
"loss": 2.73,
"step": 3480
},
{
"epoch": 0.72,
"grad_norm": 0.869350254535675,
"learning_rate": 1.830692335922279e-05,
"loss": 2.7326,
"step": 3485
},
{
"epoch": 0.73,
"grad_norm": 1.0825552940368652,
"learning_rate": 1.8178154010192994e-05,
"loss": 2.7706,
"step": 3490
},
{
"epoch": 0.73,
"grad_norm": 0.9553731679916382,
"learning_rate": 1.8049738445427822e-05,
"loss": 2.7842,
"step": 3495
},
{
"epoch": 0.73,
"grad_norm": 0.9072303175926208,
"learning_rate": 1.7921678092607052e-05,
"loss": 2.7633,
"step": 3500
},
{
"epoch": 0.73,
"grad_norm": 0.8938944339752197,
"learning_rate": 1.7793974375461352e-05,
"loss": 2.7009,
"step": 3505
},
{
"epoch": 0.73,
"grad_norm": 1.0158241987228394,
"learning_rate": 1.7666628713756417e-05,
"loss": 2.7546,
"step": 3510
},
{
"epoch": 0.73,
"grad_norm": 0.9167622327804565,
"learning_rate": 1.7539642523277228e-05,
"loss": 2.7117,
"step": 3515
},
{
"epoch": 0.73,
"grad_norm": 0.8952285647392273,
"learning_rate": 1.7413017215812273e-05,
"loss": 2.7248,
"step": 3520
},
{
"epoch": 0.73,
"grad_norm": 1.1755582094192505,
"learning_rate": 1.728675419913788e-05,
"loss": 2.7318,
"step": 3525
},
{
"epoch": 0.73,
"grad_norm": 0.9154558777809143,
"learning_rate": 1.716085487700253e-05,
"loss": 2.7286,
"step": 3530
},
{
"epoch": 0.73,
"grad_norm": 0.9891672134399414,
"learning_rate": 1.703532064911131e-05,
"loss": 2.8045,
"step": 3535
},
{
"epoch": 0.74,
"grad_norm": 1.0414669513702393,
"learning_rate": 1.6910152911110283e-05,
"loss": 2.757,
"step": 3540
},
{
"epoch": 0.74,
"grad_norm": 1.0534803867340088,
"learning_rate": 1.6785353054571024e-05,
"loss": 2.6921,
"step": 3545
},
{
"epoch": 0.74,
"grad_norm": 1.0417871475219727,
"learning_rate": 1.666092246697512e-05,
"loss": 2.732,
"step": 3550
},
{
"epoch": 0.74,
"grad_norm": 1.0258711576461792,
"learning_rate": 1.6536862531698766e-05,
"loss": 2.7282,
"step": 3555
},
{
"epoch": 0.74,
"grad_norm": 1.0320672988891602,
"learning_rate": 1.6413174627997328e-05,
"loss": 2.7214,
"step": 3560
},
{
"epoch": 0.74,
"grad_norm": 1.0548003911972046,
"learning_rate": 1.6289860130990147e-05,
"loss": 2.6546,
"step": 3565
},
{
"epoch": 0.74,
"grad_norm": 0.9389927387237549,
"learning_rate": 1.6166920411645064e-05,
"loss": 2.7618,
"step": 3570
},
{
"epoch": 0.74,
"grad_norm": 1.114715814590454,
"learning_rate": 1.6044356836763315e-05,
"loss": 2.7682,
"step": 3575
},
{
"epoch": 0.74,
"grad_norm": 0.8785065412521362,
"learning_rate": 1.5922170768964285e-05,
"loss": 2.7817,
"step": 3580
},
{
"epoch": 0.75,
"grad_norm": 0.9799104928970337,
"learning_rate": 1.5800363566670362e-05,
"loss": 2.7193,
"step": 3585
},
{
"epoch": 0.75,
"grad_norm": 0.9984278082847595,
"learning_rate": 1.5678936584091852e-05,
"loss": 2.7363,
"step": 3590
},
{
"epoch": 0.75,
"grad_norm": 1.0623939037322998,
"learning_rate": 1.5557891171211892e-05,
"loss": 2.7631,
"step": 3595
},
{
"epoch": 0.75,
"grad_norm": 0.9091553688049316,
"learning_rate": 1.5437228673771465e-05,
"loss": 2.7623,
"step": 3600
},
{
"epoch": 0.75,
"grad_norm": 0.9698584675788879,
"learning_rate": 1.5316950433254445e-05,
"loss": 2.7451,
"step": 3605
},
{
"epoch": 0.75,
"grad_norm": 1.0113762617111206,
"learning_rate": 1.5197057786872649e-05,
"loss": 2.793,
"step": 3610
},
{
"epoch": 0.75,
"grad_norm": 1.0598238706588745,
"learning_rate": 1.5077552067551015e-05,
"loss": 2.8041,
"step": 3615
},
{
"epoch": 0.75,
"grad_norm": 0.9812130928039551,
"learning_rate": 1.4958434603912747e-05,
"loss": 2.7428,
"step": 3620
},
{
"epoch": 0.75,
"grad_norm": 0.8799479007720947,
"learning_rate": 1.4839706720264546e-05,
"loss": 2.7042,
"step": 3625
},
{
"epoch": 0.75,
"grad_norm": 0.9822835326194763,
"learning_rate": 1.4721369736581924e-05,
"loss": 2.8608,
"step": 3630
},
{
"epoch": 0.76,
"grad_norm": 0.9571648836135864,
"learning_rate": 1.4603424968494484e-05,
"loss": 2.8043,
"step": 3635
},
{
"epoch": 0.76,
"grad_norm": 0.9217616319656372,
"learning_rate": 1.448587372727132e-05,
"loss": 2.702,
"step": 3640
},
{
"epoch": 0.76,
"grad_norm": 0.8726725578308105,
"learning_rate": 1.4368717319806419e-05,
"loss": 2.7558,
"step": 3645
},
{
"epoch": 0.76,
"grad_norm": 0.9233871102333069,
"learning_rate": 1.4251957048604152e-05,
"loss": 2.7073,
"step": 3650
},
{
"epoch": 0.76,
"grad_norm": 0.8997598886489868,
"learning_rate": 1.413559421176479e-05,
"loss": 2.7383,
"step": 3655
},
{
"epoch": 0.76,
"grad_norm": 0.9536339640617371,
"learning_rate": 1.4019630102970056e-05,
"loss": 2.7303,
"step": 3660
},
{
"epoch": 0.76,
"grad_norm": 0.9330580830574036,
"learning_rate": 1.3904066011468753e-05,
"loss": 2.7246,
"step": 3665
},
{
"epoch": 0.76,
"grad_norm": 0.973706841468811,
"learning_rate": 1.3788903222062433e-05,
"loss": 2.6325,
"step": 3670
},
{
"epoch": 0.76,
"grad_norm": 0.8440922498703003,
"learning_rate": 1.3674143015091118e-05,
"loss": 2.7135,
"step": 3675
},
{
"epoch": 0.76,
"grad_norm": 0.9048754572868347,
"learning_rate": 1.355978666641905e-05,
"loss": 2.7216,
"step": 3680
},
{
"epoch": 0.77,
"grad_norm": 0.9642623066902161,
"learning_rate": 1.3445835447420507e-05,
"loss": 2.6994,
"step": 3685
},
{
"epoch": 0.77,
"grad_norm": 0.8947427868843079,
"learning_rate": 1.3332290624965688e-05,
"loss": 2.7183,
"step": 3690
},
{
"epoch": 0.77,
"grad_norm": 0.9548408389091492,
"learning_rate": 1.3219153461406609e-05,
"loss": 2.7828,
"step": 3695
},
{
"epoch": 0.77,
"grad_norm": 1.012382984161377,
"learning_rate": 1.3106425214563078e-05,
"loss": 2.6968,
"step": 3700
},
{
"epoch": 0.77,
"grad_norm": 0.8872066140174866,
"learning_rate": 1.2994107137708716e-05,
"loss": 2.6785,
"step": 3705
},
{
"epoch": 0.77,
"grad_norm": 1.0340611934661865,
"learning_rate": 1.2882200479556988e-05,
"loss": 2.7056,
"step": 3710
},
{
"epoch": 0.77,
"grad_norm": 0.9304479956626892,
"learning_rate": 1.2770706484247397e-05,
"loss": 2.715,
"step": 3715
},
{
"epoch": 0.77,
"grad_norm": 0.8831409215927124,
"learning_rate": 1.2659626391331564e-05,
"loss": 2.654,
"step": 3720
},
{
"epoch": 0.77,
"grad_norm": 0.912469744682312,
"learning_rate": 1.2548961435759493e-05,
"loss": 2.6885,
"step": 3725
},
{
"epoch": 0.78,
"grad_norm": 0.9259523153305054,
"learning_rate": 1.2438712847865846e-05,
"loss": 2.7894,
"step": 3730
},
{
"epoch": 0.78,
"grad_norm": 0.9369043111801147,
"learning_rate": 1.2328881853356244e-05,
"loss": 2.6822,
"step": 3735
},
{
"epoch": 0.78,
"grad_norm": 0.8766859173774719,
"learning_rate": 1.221946967329365e-05,
"loss": 2.7094,
"step": 3740
},
{
"epoch": 0.78,
"grad_norm": 0.9146238565444946,
"learning_rate": 1.2110477524084796e-05,
"loss": 2.6764,
"step": 3745
},
{
"epoch": 0.78,
"grad_norm": 0.8871788382530212,
"learning_rate": 1.2001906617466657e-05,
"loss": 2.6778,
"step": 3750
},
{
"epoch": 0.78,
"grad_norm": 0.9674075841903687,
"learning_rate": 1.1893758160492978e-05,
"loss": 2.7803,
"step": 3755
},
{
"epoch": 0.78,
"grad_norm": 1.0255939960479736,
"learning_rate": 1.1786033355520859e-05,
"loss": 2.741,
"step": 3760
},
{
"epoch": 0.78,
"grad_norm": 1.0135067701339722,
"learning_rate": 1.1678733400197373e-05,
"loss": 2.6789,
"step": 3765
},
{
"epoch": 0.78,
"grad_norm": 0.9260480999946594,
"learning_rate": 1.1571859487446263e-05,
"loss": 2.6915,
"step": 3770
},
{
"epoch": 0.78,
"grad_norm": 0.9849831461906433,
"learning_rate": 1.1465412805454695e-05,
"loss": 2.7304,
"step": 3775
},
{
"epoch": 0.79,
"grad_norm": 0.9340236186981201,
"learning_rate": 1.1359394537660011e-05,
"loss": 2.7698,
"step": 3780
},
{
"epoch": 0.79,
"grad_norm": 1.023711919784546,
"learning_rate": 1.125380586273661e-05,
"loss": 2.7383,
"step": 3785
},
{
"epoch": 0.79,
"grad_norm": 0.8625822067260742,
"learning_rate": 1.1148647954582808e-05,
"loss": 2.673,
"step": 3790
},
{
"epoch": 0.79,
"grad_norm": 0.8937363624572754,
"learning_rate": 1.1043921982307819e-05,
"loss": 2.7359,
"step": 3795
},
{
"epoch": 0.79,
"grad_norm": 0.8716992139816284,
"learning_rate": 1.0939629110218735e-05,
"loss": 2.698,
"step": 3800
},
{
"epoch": 0.79,
"grad_norm": 1.0017049312591553,
"learning_rate": 1.0835770497807596e-05,
"loss": 2.7153,
"step": 3805
},
{
"epoch": 0.79,
"grad_norm": 0.9061327576637268,
"learning_rate": 1.0732347299738493e-05,
"loss": 2.7116,
"step": 3810
},
{
"epoch": 0.79,
"grad_norm": 0.8214864134788513,
"learning_rate": 1.0629360665834732e-05,
"loss": 2.7203,
"step": 3815
},
{
"epoch": 0.79,
"grad_norm": 2.124131917953491,
"learning_rate": 1.052681174106604e-05,
"loss": 2.6305,
"step": 3820
},
{
"epoch": 0.8,
"grad_norm": 0.9771587252616882,
"learning_rate": 1.0424701665535852e-05,
"loss": 2.6817,
"step": 3825
},
{
"epoch": 0.8,
"grad_norm": 0.8942197561264038,
"learning_rate": 1.0323031574468638e-05,
"loss": 2.6845,
"step": 3830
},
{
"epoch": 0.8,
"grad_norm": 0.9050390124320984,
"learning_rate": 1.0221802598197261e-05,
"loss": 2.7471,
"step": 3835
},
{
"epoch": 0.8,
"grad_norm": 0.8933614492416382,
"learning_rate": 1.0121015862150423e-05,
"loss": 2.7714,
"step": 3840
},
{
"epoch": 0.8,
"grad_norm": 0.9024760127067566,
"learning_rate": 1.0020672486840154e-05,
"loss": 2.7094,
"step": 3845
},
{
"epoch": 0.8,
"grad_norm": 0.9106290936470032,
"learning_rate": 9.920773587849364e-06,
"loss": 2.7646,
"step": 3850
},
{
"epoch": 0.8,
"grad_norm": 0.8813436031341553,
"learning_rate": 9.821320275819401e-06,
"loss": 2.7159,
"step": 3855
},
{
"epoch": 0.8,
"grad_norm": 0.96685391664505,
"learning_rate": 9.72231365643777e-06,
"loss": 2.7413,
"step": 3860
},
{
"epoch": 0.8,
"grad_norm": 0.9142242670059204,
"learning_rate": 9.623754830425779e-06,
"loss": 2.6952,
"step": 3865
},
{
"epoch": 0.8,
"grad_norm": 0.9112724661827087,
"learning_rate": 9.52564489352632e-06,
"loss": 2.7158,
"step": 3870
},
{
"epoch": 0.81,
"grad_norm": 0.9589788913726807,
"learning_rate": 9.427984936491702e-06,
"loss": 2.6881,
"step": 3875
},
{
"epoch": 0.81,
"grad_norm": 1.027599573135376,
"learning_rate": 9.330776045071509e-06,
"loss": 2.7046,
"step": 3880
},
{
"epoch": 0.81,
"grad_norm": 2.11747670173645,
"learning_rate": 9.23401930000054e-06,
"loss": 2.7807,
"step": 3885
},
{
"epoch": 0.81,
"grad_norm": 0.9211429953575134,
"learning_rate": 9.137715776986772e-06,
"loss": 2.8296,
"step": 3890
},
{
"epoch": 0.81,
"grad_norm": 1.0487579107284546,
"learning_rate": 9.041866546699434e-06,
"loss": 2.749,
"step": 3895
},
{
"epoch": 0.81,
"grad_norm": 0.8894890546798706,
"learning_rate": 8.946472674757078e-06,
"loss": 2.6835,
"step": 3900
},
{
"epoch": 0.81,
"grad_norm": 0.9346022605895996,
"learning_rate": 8.851535221715735e-06,
"loss": 2.6934,
"step": 3905
},
{
"epoch": 0.81,
"grad_norm": 0.932664155960083,
"learning_rate": 8.757055243057132e-06,
"loss": 2.6916,
"step": 3910
},
{
"epoch": 0.81,
"grad_norm": 0.9112188220024109,
"learning_rate": 8.663033789176967e-06,
"loss": 2.7246,
"step": 3915
},
{
"epoch": 0.81,
"grad_norm": 0.9287726283073425,
"learning_rate": 8.5694719053732e-06,
"loss": 2.7032,
"step": 3920
},
{
"epoch": 0.82,
"grad_norm": 0.8490238785743713,
"learning_rate": 8.476370631834458e-06,
"loss": 2.7178,
"step": 3925
},
{
"epoch": 0.82,
"grad_norm": 0.8654835224151611,
"learning_rate": 8.383731003628452e-06,
"loss": 2.7395,
"step": 3930
},
{
"epoch": 0.82,
"grad_norm": 0.914492666721344,
"learning_rate": 8.291554050690508e-06,
"loss": 2.665,
"step": 3935
},
{
"epoch": 0.82,
"grad_norm": 0.9744204878807068,
"learning_rate": 8.199840797812058e-06,
"loss": 2.692,
"step": 3940
},
{
"epoch": 0.82,
"grad_norm": 0.8835412859916687,
"learning_rate": 8.108592264629295e-06,
"loss": 2.7157,
"step": 3945
},
{
"epoch": 0.82,
"grad_norm": 1.0571808815002441,
"learning_rate": 8.017809465611803e-06,
"loss": 2.7412,
"step": 3950
},
{
"epoch": 0.82,
"grad_norm": 0.8671538829803467,
"learning_rate": 7.927493410051324e-06,
"loss": 2.7418,
"step": 3955
},
{
"epoch": 0.82,
"grad_norm": 0.8907613754272461,
"learning_rate": 7.837645102050473e-06,
"loss": 2.6535,
"step": 3960
},
{
"epoch": 0.82,
"grad_norm": 0.9619112014770508,
"learning_rate": 7.748265540511635e-06,
"loss": 2.7056,
"step": 3965
},
{
"epoch": 0.83,
"grad_norm": 0.9052380323410034,
"learning_rate": 7.65935571912582e-06,
"loss": 2.7376,
"step": 3970
},
{
"epoch": 0.83,
"grad_norm": 0.8572185635566711,
"learning_rate": 7.5709166263616405e-06,
"loss": 2.685,
"step": 3975
},
{
"epoch": 0.83,
"grad_norm": 0.8871633410453796,
"learning_rate": 7.482949245454302e-06,
"loss": 2.7844,
"step": 3980
},
{
"epoch": 0.83,
"grad_norm": 0.9142546653747559,
"learning_rate": 7.3954545543946876e-06,
"loss": 2.7152,
"step": 3985
},
{
"epoch": 0.83,
"grad_norm": 0.9594203233718872,
"learning_rate": 7.308433525918468e-06,
"loss": 2.6962,
"step": 3990
},
{
"epoch": 0.83,
"grad_norm": 0.8935016393661499,
"learning_rate": 7.221887127495313e-06,
"loss": 2.7212,
"step": 3995
},
{
"epoch": 0.83,
"grad_norm": 0.9266638159751892,
"learning_rate": 7.1358163213181114e-06,
"loss": 2.7401,
"step": 4000
}
],
"logging_steps": 5,
"max_steps": 4811,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"total_flos": 9.850881155530752e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}