diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,41837 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 4.0, + "eval_steps": 500, + "global_step": 5972, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0006697923643670462, + "grad_norm": 69.30303128653149, + "learning_rate": 8.361204013377928e-09, + "loss": 3.6371, + "step": 1 + }, + { + "epoch": 0.0013395847287340924, + "grad_norm": 63.39863825719709, + "learning_rate": 1.6722408026755855e-08, + "loss": 3.3042, + "step": 2 + }, + { + "epoch": 0.0020093770931011385, + "grad_norm": 58.888811983797105, + "learning_rate": 2.508361204013378e-08, + "loss": 3.2394, + "step": 3 + }, + { + "epoch": 0.0026791694574681848, + "grad_norm": 67.62224942340907, + "learning_rate": 3.344481605351171e-08, + "loss": 3.4757, + "step": 4 + }, + { + "epoch": 0.003348961821835231, + "grad_norm": 72.70647320187784, + "learning_rate": 4.180602006688963e-08, + "loss": 3.6744, + "step": 5 + }, + { + "epoch": 0.004018754186202277, + "grad_norm": 57.70937755694992, + "learning_rate": 5.016722408026756e-08, + "loss": 3.0436, + "step": 6 + }, + { + "epoch": 0.004688546550569324, + "grad_norm": 66.5560313914662, + "learning_rate": 5.8528428093645485e-08, + "loss": 3.7171, + "step": 7 + }, + { + "epoch": 0.0053583389149363695, + "grad_norm": 61.05755516377674, + "learning_rate": 6.688963210702342e-08, + "loss": 3.5177, + "step": 8 + }, + { + "epoch": 0.006028131279303416, + "grad_norm": 66.33627882957263, + "learning_rate": 7.525083612040134e-08, + "loss": 3.3774, + "step": 9 + }, + { + "epoch": 0.006697923643670462, + "grad_norm": 63.74285312179843, + "learning_rate": 8.361204013377927e-08, + "loss": 3.4429, + "step": 10 + }, + { + "epoch": 0.007367716008037508, + "grad_norm": 59.137198833224794, + "learning_rate": 9.19732441471572e-08, + "loss": 3.2817, + "step": 11 + }, + { + "epoch": 0.008037508372404554, + "grad_norm": 66.88324223009751, + "learning_rate": 1.0033444816053512e-07, + "loss": 3.4574, + "step": 12 + }, + { + "epoch": 0.008707300736771601, + "grad_norm": 72.67353715205135, + "learning_rate": 1.0869565217391305e-07, + "loss": 3.7756, + "step": 13 + }, + { + "epoch": 0.009377093101138647, + "grad_norm": 61.064835590430754, + "learning_rate": 1.1705685618729097e-07, + "loss": 3.4527, + "step": 14 + }, + { + "epoch": 0.010046885465505693, + "grad_norm": 66.75985781039162, + "learning_rate": 1.254180602006689e-07, + "loss": 3.4581, + "step": 15 + }, + { + "epoch": 0.010716677829872739, + "grad_norm": 58.78268823573887, + "learning_rate": 1.3377926421404684e-07, + "loss": 3.279, + "step": 16 + }, + { + "epoch": 0.011386470194239785, + "grad_norm": 57.22804658169749, + "learning_rate": 1.4214046822742475e-07, + "loss": 3.214, + "step": 17 + }, + { + "epoch": 0.012056262558606833, + "grad_norm": 59.7761497320737, + "learning_rate": 1.505016722408027e-07, + "loss": 3.4552, + "step": 18 + }, + { + "epoch": 0.012726054922973878, + "grad_norm": 63.24321160529608, + "learning_rate": 1.5886287625418062e-07, + "loss": 3.571, + "step": 19 + }, + { + "epoch": 0.013395847287340924, + "grad_norm": 48.27002829904877, + "learning_rate": 1.6722408026755853e-07, + "loss": 3.1955, + "step": 20 + }, + { + "epoch": 0.01406563965170797, + "grad_norm": 72.82899064512456, + "learning_rate": 1.7558528428093647e-07, + "loss": 3.5783, + "step": 21 + }, + { + "epoch": 0.014735432016075016, + 
"grad_norm": 58.134278367258034, + "learning_rate": 1.839464882943144e-07, + "loss": 3.3614, + "step": 22 + }, + { + "epoch": 0.015405224380442064, + "grad_norm": 54.89598424751139, + "learning_rate": 1.9230769230769234e-07, + "loss": 3.1378, + "step": 23 + }, + { + "epoch": 0.016075016744809108, + "grad_norm": 49.25166219042795, + "learning_rate": 2.0066889632107025e-07, + "loss": 3.3452, + "step": 24 + }, + { + "epoch": 0.016744809109176157, + "grad_norm": 52.51568144746892, + "learning_rate": 2.0903010033444818e-07, + "loss": 3.1901, + "step": 25 + }, + { + "epoch": 0.017414601473543203, + "grad_norm": 47.97657554473152, + "learning_rate": 2.173913043478261e-07, + "loss": 3.3074, + "step": 26 + }, + { + "epoch": 0.01808439383791025, + "grad_norm": 45.718497621515624, + "learning_rate": 2.2575250836120403e-07, + "loss": 3.3471, + "step": 27 + }, + { + "epoch": 0.018754186202277295, + "grad_norm": 56.55403088977122, + "learning_rate": 2.3411371237458194e-07, + "loss": 3.6429, + "step": 28 + }, + { + "epoch": 0.01942397856664434, + "grad_norm": 44.49589424653418, + "learning_rate": 2.424749163879599e-07, + "loss": 3.4836, + "step": 29 + }, + { + "epoch": 0.020093770931011386, + "grad_norm": 38.02375248655162, + "learning_rate": 2.508361204013378e-07, + "loss": 3.0077, + "step": 30 + }, + { + "epoch": 0.020763563295378432, + "grad_norm": 40.96671685568642, + "learning_rate": 2.591973244147157e-07, + "loss": 3.0784, + "step": 31 + }, + { + "epoch": 0.021433355659745478, + "grad_norm": 26.007039807372717, + "learning_rate": 2.675585284280937e-07, + "loss": 2.7787, + "step": 32 + }, + { + "epoch": 0.022103148024112524, + "grad_norm": 28.146196766076315, + "learning_rate": 2.759197324414716e-07, + "loss": 2.9716, + "step": 33 + }, + { + "epoch": 0.02277294038847957, + "grad_norm": 22.57265607684001, + "learning_rate": 2.842809364548495e-07, + "loss": 3.1303, + "step": 34 + }, + { + "epoch": 0.02344273275284662, + "grad_norm": 24.632459904400648, + "learning_rate": 2.9264214046822746e-07, + "loss": 3.0677, + "step": 35 + }, + { + "epoch": 0.024112525117213665, + "grad_norm": 20.988141131233203, + "learning_rate": 3.010033444816054e-07, + "loss": 2.7798, + "step": 36 + }, + { + "epoch": 0.02478231748158071, + "grad_norm": 30.05195973004854, + "learning_rate": 3.093645484949833e-07, + "loss": 2.9268, + "step": 37 + }, + { + "epoch": 0.025452109845947757, + "grad_norm": 18.616099828083474, + "learning_rate": 3.1772575250836125e-07, + "loss": 2.7558, + "step": 38 + }, + { + "epoch": 0.026121902210314803, + "grad_norm": 21.201027428688857, + "learning_rate": 3.2608695652173915e-07, + "loss": 3.1688, + "step": 39 + }, + { + "epoch": 0.02679169457468185, + "grad_norm": 18.824859169980016, + "learning_rate": 3.3444816053511706e-07, + "loss": 2.8944, + "step": 40 + }, + { + "epoch": 0.027461486939048894, + "grad_norm": 20.98581731338723, + "learning_rate": 3.42809364548495e-07, + "loss": 2.8649, + "step": 41 + }, + { + "epoch": 0.02813127930341594, + "grad_norm": 17.24724456661999, + "learning_rate": 3.5117056856187294e-07, + "loss": 2.8495, + "step": 42 + }, + { + "epoch": 0.028801071667782986, + "grad_norm": 27.732868222970886, + "learning_rate": 3.5953177257525085e-07, + "loss": 3.5849, + "step": 43 + }, + { + "epoch": 0.029470864032150032, + "grad_norm": 11.96171501892785, + "learning_rate": 3.678929765886288e-07, + "loss": 2.7068, + "step": 44 + }, + { + "epoch": 0.03014065639651708, + "grad_norm": 43.99613235694454, + "learning_rate": 3.7625418060200677e-07, + "loss": 2.8726, + "step": 45 + }, + { 
+ "epoch": 0.030810448760884127, + "grad_norm": 32.6887697083864, + "learning_rate": 3.846153846153847e-07, + "loss": 3.1299, + "step": 46 + }, + { + "epoch": 0.03148024112525117, + "grad_norm": 12.504579716492144, + "learning_rate": 3.929765886287626e-07, + "loss": 2.9368, + "step": 47 + }, + { + "epoch": 0.032150033489618215, + "grad_norm": 10.473647544256366, + "learning_rate": 4.013377926421405e-07, + "loss": 2.727, + "step": 48 + }, + { + "epoch": 0.03281982585398526, + "grad_norm": 9.420283015267394, + "learning_rate": 4.096989966555184e-07, + "loss": 2.685, + "step": 49 + }, + { + "epoch": 0.033489618218352314, + "grad_norm": 10.092431047618733, + "learning_rate": 4.1806020066889637e-07, + "loss": 2.4654, + "step": 50 + }, + { + "epoch": 0.03415941058271936, + "grad_norm": 10.955157481579574, + "learning_rate": 4.264214046822743e-07, + "loss": 2.7869, + "step": 51 + }, + { + "epoch": 0.034829202947086406, + "grad_norm": 8.732526999245882, + "learning_rate": 4.347826086956522e-07, + "loss": 2.5413, + "step": 52 + }, + { + "epoch": 0.03549899531145345, + "grad_norm": 11.928435245688288, + "learning_rate": 4.431438127090301e-07, + "loss": 2.868, + "step": 53 + }, + { + "epoch": 0.0361687876758205, + "grad_norm": 8.655905124903459, + "learning_rate": 4.5150501672240806e-07, + "loss": 2.7676, + "step": 54 + }, + { + "epoch": 0.036838580040187544, + "grad_norm": 19.3954407135807, + "learning_rate": 4.5986622073578597e-07, + "loss": 2.8676, + "step": 55 + }, + { + "epoch": 0.03750837240455459, + "grad_norm": 10.628484338076767, + "learning_rate": 4.682274247491639e-07, + "loss": 2.3241, + "step": 56 + }, + { + "epoch": 0.038178164768921635, + "grad_norm": 7.327065253773348, + "learning_rate": 4.765886287625419e-07, + "loss": 2.3553, + "step": 57 + }, + { + "epoch": 0.03884795713328868, + "grad_norm": 13.870257684803839, + "learning_rate": 4.849498327759198e-07, + "loss": 2.9324, + "step": 58 + }, + { + "epoch": 0.03951774949765573, + "grad_norm": 9.723924181183948, + "learning_rate": 4.933110367892977e-07, + "loss": 2.6405, + "step": 59 + }, + { + "epoch": 0.04018754186202277, + "grad_norm": 10.609426007468016, + "learning_rate": 5.016722408026756e-07, + "loss": 2.691, + "step": 60 + }, + { + "epoch": 0.04085733422638982, + "grad_norm": 7.567443916769537, + "learning_rate": 5.100334448160535e-07, + "loss": 2.3113, + "step": 61 + }, + { + "epoch": 0.041527126590756865, + "grad_norm": 12.45841990524486, + "learning_rate": 5.183946488294314e-07, + "loss": 2.4698, + "step": 62 + }, + { + "epoch": 0.04219691895512391, + "grad_norm": 11.466171439921254, + "learning_rate": 5.267558528428094e-07, + "loss": 2.6358, + "step": 63 + }, + { + "epoch": 0.042866711319490956, + "grad_norm": 8.569633051690637, + "learning_rate": 5.351170568561874e-07, + "loss": 2.5333, + "step": 64 + }, + { + "epoch": 0.043536503683858, + "grad_norm": 7.553371133244344, + "learning_rate": 5.434782608695653e-07, + "loss": 2.3734, + "step": 65 + }, + { + "epoch": 0.04420629604822505, + "grad_norm": 5.47708702125804, + "learning_rate": 5.518394648829432e-07, + "loss": 2.3204, + "step": 66 + }, + { + "epoch": 0.044876088412592094, + "grad_norm": 5.759040866948895, + "learning_rate": 5.602006688963211e-07, + "loss": 2.6482, + "step": 67 + }, + { + "epoch": 0.04554588077695914, + "grad_norm": 8.433793229519353, + "learning_rate": 5.68561872909699e-07, + "loss": 2.1965, + "step": 68 + }, + { + "epoch": 0.046215673141326186, + "grad_norm": 5.553830053257806, + "learning_rate": 5.76923076923077e-07, + "loss": 2.6791, + "step": 69 + 
}, + { + "epoch": 0.04688546550569324, + "grad_norm": 6.106510946325645, + "learning_rate": 5.852842809364549e-07, + "loss": 2.3641, + "step": 70 + }, + { + "epoch": 0.047555257870060284, + "grad_norm": 5.2428294310525665, + "learning_rate": 5.936454849498328e-07, + "loss": 2.5735, + "step": 71 + }, + { + "epoch": 0.04822505023442733, + "grad_norm": 4.123523325248075, + "learning_rate": 6.020066889632107e-07, + "loss": 2.5473, + "step": 72 + }, + { + "epoch": 0.048894842598794376, + "grad_norm": 4.604061151178795, + "learning_rate": 6.103678929765887e-07, + "loss": 2.5999, + "step": 73 + }, + { + "epoch": 0.04956463496316142, + "grad_norm": 4.66275665966332, + "learning_rate": 6.187290969899666e-07, + "loss": 2.335, + "step": 74 + }, + { + "epoch": 0.05023442732752847, + "grad_norm": 6.6833439747250605, + "learning_rate": 6.270903010033446e-07, + "loss": 2.526, + "step": 75 + }, + { + "epoch": 0.050904219691895514, + "grad_norm": 4.726793101547953, + "learning_rate": 6.354515050167225e-07, + "loss": 2.445, + "step": 76 + }, + { + "epoch": 0.05157401205626256, + "grad_norm": 3.474443701982591, + "learning_rate": 6.438127090301004e-07, + "loss": 2.2251, + "step": 77 + }, + { + "epoch": 0.052243804420629605, + "grad_norm": 3.422419294115465, + "learning_rate": 6.521739130434783e-07, + "loss": 2.3318, + "step": 78 + }, + { + "epoch": 0.05291359678499665, + "grad_norm": 4.416814682137442, + "learning_rate": 6.605351170568562e-07, + "loss": 2.5774, + "step": 79 + }, + { + "epoch": 0.0535833891493637, + "grad_norm": 3.6444076968269496, + "learning_rate": 6.688963210702341e-07, + "loss": 2.4835, + "step": 80 + }, + { + "epoch": 0.05425318151373074, + "grad_norm": 4.977208190105457, + "learning_rate": 6.77257525083612e-07, + "loss": 2.4455, + "step": 81 + }, + { + "epoch": 0.05492297387809779, + "grad_norm": 3.3143215723429518, + "learning_rate": 6.8561872909699e-07, + "loss": 2.0597, + "step": 82 + }, + { + "epoch": 0.055592766242464835, + "grad_norm": 3.4903035307114343, + "learning_rate": 6.939799331103679e-07, + "loss": 2.4378, + "step": 83 + }, + { + "epoch": 0.05626255860683188, + "grad_norm": 3.4304194591248027, + "learning_rate": 7.023411371237459e-07, + "loss": 2.2895, + "step": 84 + }, + { + "epoch": 0.056932350971198926, + "grad_norm": 4.12249045089643, + "learning_rate": 7.107023411371238e-07, + "loss": 2.4969, + "step": 85 + }, + { + "epoch": 0.05760214333556597, + "grad_norm": 3.334930248863227, + "learning_rate": 7.190635451505017e-07, + "loss": 2.4193, + "step": 86 + }, + { + "epoch": 0.05827193569993302, + "grad_norm": 5.152152562809818, + "learning_rate": 7.274247491638796e-07, + "loss": 2.3668, + "step": 87 + }, + { + "epoch": 0.058941728064300064, + "grad_norm": 2.9867115187138995, + "learning_rate": 7.357859531772576e-07, + "loss": 1.9934, + "step": 88 + }, + { + "epoch": 0.05961152042866711, + "grad_norm": 3.267463285983656, + "learning_rate": 7.441471571906355e-07, + "loss": 2.5118, + "step": 89 + }, + { + "epoch": 0.06028131279303416, + "grad_norm": 3.0825317224319173, + "learning_rate": 7.525083612040135e-07, + "loss": 2.5295, + "step": 90 + }, + { + "epoch": 0.06095110515740121, + "grad_norm": 3.6208368339355084, + "learning_rate": 7.608695652173914e-07, + "loss": 2.2986, + "step": 91 + }, + { + "epoch": 0.061620897521768254, + "grad_norm": 3.205268879456299, + "learning_rate": 7.692307692307694e-07, + "loss": 2.6256, + "step": 92 + }, + { + "epoch": 0.0622906898861353, + "grad_norm": 3.060586574787455, + "learning_rate": 7.775919732441473e-07, + "loss": 2.2133, + "step": 
93 + }, + { + "epoch": 0.06296048225050234, + "grad_norm": 2.8794206363523736, + "learning_rate": 7.859531772575252e-07, + "loss": 2.5352, + "step": 94 + }, + { + "epoch": 0.06363027461486939, + "grad_norm": 4.19200742517357, + "learning_rate": 7.943143812709031e-07, + "loss": 2.2648, + "step": 95 + }, + { + "epoch": 0.06430006697923643, + "grad_norm": 2.8368004151811745, + "learning_rate": 8.02675585284281e-07, + "loss": 2.4997, + "step": 96 + }, + { + "epoch": 0.06496985934360348, + "grad_norm": 2.647626542077999, + "learning_rate": 8.110367892976589e-07, + "loss": 2.2919, + "step": 97 + }, + { + "epoch": 0.06563965170797052, + "grad_norm": 2.793052101928018, + "learning_rate": 8.193979933110368e-07, + "loss": 2.5411, + "step": 98 + }, + { + "epoch": 0.06630944407233758, + "grad_norm": 2.580177031868139, + "learning_rate": 8.277591973244148e-07, + "loss": 2.2843, + "step": 99 + }, + { + "epoch": 0.06697923643670463, + "grad_norm": 4.476043724034834, + "learning_rate": 8.361204013377927e-07, + "loss": 2.1411, + "step": 100 + }, + { + "epoch": 0.06764902880107167, + "grad_norm": 5.076174111137021, + "learning_rate": 8.444816053511706e-07, + "loss": 2.2659, + "step": 101 + }, + { + "epoch": 0.06831882116543872, + "grad_norm": 3.978698635531036, + "learning_rate": 8.528428093645486e-07, + "loss": 2.1249, + "step": 102 + }, + { + "epoch": 0.06898861352980576, + "grad_norm": 3.2111797267790005, + "learning_rate": 8.612040133779265e-07, + "loss": 2.3341, + "step": 103 + }, + { + "epoch": 0.06965840589417281, + "grad_norm": 3.098937782130139, + "learning_rate": 8.695652173913044e-07, + "loss": 2.2682, + "step": 104 + }, + { + "epoch": 0.07032819825853985, + "grad_norm": 7.044120588564895, + "learning_rate": 8.779264214046823e-07, + "loss": 2.2125, + "step": 105 + }, + { + "epoch": 0.0709979906229069, + "grad_norm": 2.7272777471893317, + "learning_rate": 8.862876254180602e-07, + "loss": 2.1693, + "step": 106 + }, + { + "epoch": 0.07166778298727394, + "grad_norm": 2.754918257337873, + "learning_rate": 8.946488294314382e-07, + "loss": 2.3726, + "step": 107 + }, + { + "epoch": 0.072337575351641, + "grad_norm": 2.8361589674469694, + "learning_rate": 9.030100334448161e-07, + "loss": 2.2293, + "step": 108 + }, + { + "epoch": 0.07300736771600803, + "grad_norm": 3.4090314667410686, + "learning_rate": 9.11371237458194e-07, + "loss": 1.9068, + "step": 109 + }, + { + "epoch": 0.07367716008037509, + "grad_norm": 5.3331938881178695, + "learning_rate": 9.197324414715719e-07, + "loss": 2.2791, + "step": 110 + }, + { + "epoch": 0.07434695244474213, + "grad_norm": 8.161039927118571, + "learning_rate": 9.280936454849498e-07, + "loss": 2.1592, + "step": 111 + }, + { + "epoch": 0.07501674480910918, + "grad_norm": 4.555056991145756, + "learning_rate": 9.364548494983278e-07, + "loss": 2.2154, + "step": 112 + }, + { + "epoch": 0.07568653717347622, + "grad_norm": 7.307263439292584, + "learning_rate": 9.448160535117059e-07, + "loss": 2.191, + "step": 113 + }, + { + "epoch": 0.07635632953784327, + "grad_norm": 2.455880181140146, + "learning_rate": 9.531772575250838e-07, + "loss": 2.079, + "step": 114 + }, + { + "epoch": 0.07702612190221031, + "grad_norm": 4.313396033496189, + "learning_rate": 9.615384615384617e-07, + "loss": 2.3504, + "step": 115 + }, + { + "epoch": 0.07769591426657736, + "grad_norm": 3.3945048152638604, + "learning_rate": 9.698996655518396e-07, + "loss": 2.2421, + "step": 116 + }, + { + "epoch": 0.0783657066309444, + "grad_norm": 4.309574915796638, + "learning_rate": 9.782608695652175e-07, + "loss": 
2.2163, + "step": 117 + }, + { + "epoch": 0.07903549899531145, + "grad_norm": 3.4123937015680657, + "learning_rate": 9.866220735785954e-07, + "loss": 2.1856, + "step": 118 + }, + { + "epoch": 0.07970529135967849, + "grad_norm": 11.794166779043291, + "learning_rate": 9.949832775919733e-07, + "loss": 2.3167, + "step": 119 + }, + { + "epoch": 0.08037508372404555, + "grad_norm": 3.698659141989465, + "learning_rate": 1.0033444816053512e-06, + "loss": 2.3164, + "step": 120 + }, + { + "epoch": 0.0810448760884126, + "grad_norm": 3.177793946021012, + "learning_rate": 1.0117056856187292e-06, + "loss": 2.2774, + "step": 121 + }, + { + "epoch": 0.08171466845277964, + "grad_norm": 3.1461731875826078, + "learning_rate": 1.020066889632107e-06, + "loss": 2.2714, + "step": 122 + }, + { + "epoch": 0.08238446081714669, + "grad_norm": 3.201946800152565, + "learning_rate": 1.028428093645485e-06, + "loss": 1.9828, + "step": 123 + }, + { + "epoch": 0.08305425318151373, + "grad_norm": 3.3693593612468855, + "learning_rate": 1.0367892976588629e-06, + "loss": 2.3195, + "step": 124 + }, + { + "epoch": 0.08372404554588078, + "grad_norm": 7.023525903351072, + "learning_rate": 1.0451505016722408e-06, + "loss": 2.5293, + "step": 125 + }, + { + "epoch": 0.08439383791024782, + "grad_norm": 2.5457963082695154, + "learning_rate": 1.0535117056856187e-06, + "loss": 2.0417, + "step": 126 + }, + { + "epoch": 0.08506363027461487, + "grad_norm": 2.935350705600814, + "learning_rate": 1.0618729096989968e-06, + "loss": 2.2513, + "step": 127 + }, + { + "epoch": 0.08573342263898191, + "grad_norm": 2.6143292074825992, + "learning_rate": 1.0702341137123747e-06, + "loss": 2.2259, + "step": 128 + }, + { + "epoch": 0.08640321500334897, + "grad_norm": 4.284481084698038, + "learning_rate": 1.0785953177257526e-06, + "loss": 2.127, + "step": 129 + }, + { + "epoch": 0.087073007367716, + "grad_norm": 3.8491915566237025, + "learning_rate": 1.0869565217391306e-06, + "loss": 2.1477, + "step": 130 + }, + { + "epoch": 0.08774279973208306, + "grad_norm": 7.164348805772128, + "learning_rate": 1.0953177257525085e-06, + "loss": 2.3903, + "step": 131 + }, + { + "epoch": 0.0884125920964501, + "grad_norm": 11.793038574235803, + "learning_rate": 1.1036789297658864e-06, + "loss": 2.2334, + "step": 132 + }, + { + "epoch": 0.08908238446081715, + "grad_norm": 5.476735327458481, + "learning_rate": 1.1120401337792643e-06, + "loss": 1.9697, + "step": 133 + }, + { + "epoch": 0.08975217682518419, + "grad_norm": 4.783635542014902, + "learning_rate": 1.1204013377926422e-06, + "loss": 2.4196, + "step": 134 + }, + { + "epoch": 0.09042196918955124, + "grad_norm": 6.499570892373274, + "learning_rate": 1.12876254180602e-06, + "loss": 2.185, + "step": 135 + }, + { + "epoch": 0.09109176155391828, + "grad_norm": 11.044717457267213, + "learning_rate": 1.137123745819398e-06, + "loss": 2.1634, + "step": 136 + }, + { + "epoch": 0.09176155391828533, + "grad_norm": 13.063713312328028, + "learning_rate": 1.145484949832776e-06, + "loss": 2.1894, + "step": 137 + }, + { + "epoch": 0.09243134628265237, + "grad_norm": 4.082016214902324, + "learning_rate": 1.153846153846154e-06, + "loss": 2.1897, + "step": 138 + }, + { + "epoch": 0.09310113864701942, + "grad_norm": 2.7147969081509307, + "learning_rate": 1.162207357859532e-06, + "loss": 2.0926, + "step": 139 + }, + { + "epoch": 0.09377093101138648, + "grad_norm": 2.963861909792279, + "learning_rate": 1.1705685618729099e-06, + "loss": 2.159, + "step": 140 + }, + { + "epoch": 0.09444072337575352, + "grad_norm": 4.304168433198634, + 
"learning_rate": 1.1789297658862878e-06, + "loss": 2.0876, + "step": 141 + }, + { + "epoch": 0.09511051574012057, + "grad_norm": 3.1086759434691085, + "learning_rate": 1.1872909698996657e-06, + "loss": 2.2891, + "step": 142 + }, + { + "epoch": 0.09578030810448761, + "grad_norm": 3.058286093849596, + "learning_rate": 1.1956521739130436e-06, + "loss": 2.3467, + "step": 143 + }, + { + "epoch": 0.09645010046885466, + "grad_norm": 5.974465663460153, + "learning_rate": 1.2040133779264215e-06, + "loss": 2.0553, + "step": 144 + }, + { + "epoch": 0.0971198928332217, + "grad_norm": 3.0345247703614677, + "learning_rate": 1.2123745819397994e-06, + "loss": 2.1872, + "step": 145 + }, + { + "epoch": 0.09778968519758875, + "grad_norm": 3.2928033798767866, + "learning_rate": 1.2207357859531773e-06, + "loss": 2.1292, + "step": 146 + }, + { + "epoch": 0.09845947756195579, + "grad_norm": 3.3596405668174865, + "learning_rate": 1.2290969899665552e-06, + "loss": 2.1302, + "step": 147 + }, + { + "epoch": 0.09912926992632284, + "grad_norm": 4.309417295630066, + "learning_rate": 1.2374581939799331e-06, + "loss": 2.1588, + "step": 148 + }, + { + "epoch": 0.09979906229068988, + "grad_norm": 5.012070494002269, + "learning_rate": 1.245819397993311e-06, + "loss": 2.2011, + "step": 149 + }, + { + "epoch": 0.10046885465505694, + "grad_norm": 3.19331276942955, + "learning_rate": 1.2541806020066892e-06, + "loss": 2.2609, + "step": 150 + }, + { + "epoch": 0.10113864701942397, + "grad_norm": 3.3480218167927536, + "learning_rate": 1.262541806020067e-06, + "loss": 2.2915, + "step": 151 + }, + { + "epoch": 0.10180843938379103, + "grad_norm": 4.4107305027578585, + "learning_rate": 1.270903010033445e-06, + "loss": 2.4247, + "step": 152 + }, + { + "epoch": 0.10247823174815807, + "grad_norm": 3.6965699225212116, + "learning_rate": 1.2792642140468229e-06, + "loss": 2.1016, + "step": 153 + }, + { + "epoch": 0.10314802411252512, + "grad_norm": 3.2048350872849007, + "learning_rate": 1.2876254180602008e-06, + "loss": 2.2344, + "step": 154 + }, + { + "epoch": 0.10381781647689216, + "grad_norm": 3.2310955443046985, + "learning_rate": 1.2959866220735787e-06, + "loss": 2.2081, + "step": 155 + }, + { + "epoch": 0.10448760884125921, + "grad_norm": 4.3367008130585, + "learning_rate": 1.3043478260869566e-06, + "loss": 2.1236, + "step": 156 + }, + { + "epoch": 0.10515740120562625, + "grad_norm": 3.3852428292881123, + "learning_rate": 1.3127090301003345e-06, + "loss": 2.2975, + "step": 157 + }, + { + "epoch": 0.1058271935699933, + "grad_norm": 4.732600455755357, + "learning_rate": 1.3210702341137124e-06, + "loss": 2.0121, + "step": 158 + }, + { + "epoch": 0.10649698593436034, + "grad_norm": 3.21918042700378, + "learning_rate": 1.3294314381270903e-06, + "loss": 2.0695, + "step": 159 + }, + { + "epoch": 0.1071667782987274, + "grad_norm": 4.451498752190507, + "learning_rate": 1.3377926421404683e-06, + "loss": 2.0987, + "step": 160 + }, + { + "epoch": 0.10783657066309445, + "grad_norm": 3.255555577032961, + "learning_rate": 1.3461538461538462e-06, + "loss": 2.0604, + "step": 161 + }, + { + "epoch": 0.10850636302746149, + "grad_norm": 3.1047626455586927, + "learning_rate": 1.354515050167224e-06, + "loss": 2.058, + "step": 162 + }, + { + "epoch": 0.10917615539182854, + "grad_norm": 4.148954516005423, + "learning_rate": 1.362876254180602e-06, + "loss": 2.3055, + "step": 163 + }, + { + "epoch": 0.10984594775619558, + "grad_norm": 3.3038854518367087, + "learning_rate": 1.37123745819398e-06, + "loss": 2.065, + "step": 164 + }, + { + "epoch": 
0.11051574012056263, + "grad_norm": 3.3075769416224343, + "learning_rate": 1.3795986622073578e-06, + "loss": 2.1644, + "step": 165 + }, + { + "epoch": 0.11118553248492967, + "grad_norm": 4.65064266048092, + "learning_rate": 1.3879598662207357e-06, + "loss": 2.0432, + "step": 166 + }, + { + "epoch": 0.11185532484929672, + "grad_norm": 4.82722475969538, + "learning_rate": 1.3963210702341138e-06, + "loss": 1.9268, + "step": 167 + }, + { + "epoch": 0.11252511721366376, + "grad_norm": 4.073563868907187, + "learning_rate": 1.4046822742474917e-06, + "loss": 2.2505, + "step": 168 + }, + { + "epoch": 0.11319490957803081, + "grad_norm": 4.980276998439862, + "learning_rate": 1.4130434782608697e-06, + "loss": 2.167, + "step": 169 + }, + { + "epoch": 0.11386470194239785, + "grad_norm": 3.5020258324913747, + "learning_rate": 1.4214046822742476e-06, + "loss": 1.9906, + "step": 170 + }, + { + "epoch": 0.1145344943067649, + "grad_norm": 4.5251553470029515, + "learning_rate": 1.4297658862876255e-06, + "loss": 1.7986, + "step": 171 + }, + { + "epoch": 0.11520428667113194, + "grad_norm": 4.059564164089794, + "learning_rate": 1.4381270903010034e-06, + "loss": 2.0959, + "step": 172 + }, + { + "epoch": 0.115874079035499, + "grad_norm": 3.3761782192447227, + "learning_rate": 1.4464882943143813e-06, + "loss": 1.9052, + "step": 173 + }, + { + "epoch": 0.11654387139986604, + "grad_norm": 3.77581388490209, + "learning_rate": 1.4548494983277592e-06, + "loss": 1.7514, + "step": 174 + }, + { + "epoch": 0.11721366376423309, + "grad_norm": 4.2415200291037, + "learning_rate": 1.4632107023411373e-06, + "loss": 2.0212, + "step": 175 + }, + { + "epoch": 0.11788345612860013, + "grad_norm": 4.606257452752132, + "learning_rate": 1.4715719063545152e-06, + "loss": 2.0941, + "step": 176 + }, + { + "epoch": 0.11855324849296718, + "grad_norm": 3.8884955886124413, + "learning_rate": 1.4799331103678931e-06, + "loss": 2.0005, + "step": 177 + }, + { + "epoch": 0.11922304085733422, + "grad_norm": 7.676545314731305, + "learning_rate": 1.488294314381271e-06, + "loss": 2.038, + "step": 178 + }, + { + "epoch": 0.11989283322170127, + "grad_norm": 7.004151865748798, + "learning_rate": 1.4966555183946492e-06, + "loss": 2.3287, + "step": 179 + }, + { + "epoch": 0.12056262558606833, + "grad_norm": 3.436470980866342, + "learning_rate": 1.505016722408027e-06, + "loss": 2.149, + "step": 180 + }, + { + "epoch": 0.12123241795043536, + "grad_norm": 3.642461497267327, + "learning_rate": 1.513377926421405e-06, + "loss": 2.1821, + "step": 181 + }, + { + "epoch": 0.12190221031480242, + "grad_norm": 3.3554843772901974, + "learning_rate": 1.521739130434783e-06, + "loss": 1.9573, + "step": 182 + }, + { + "epoch": 0.12257200267916946, + "grad_norm": 4.061079844212496, + "learning_rate": 1.5301003344481608e-06, + "loss": 2.1069, + "step": 183 + }, + { + "epoch": 0.12324179504353651, + "grad_norm": 3.3420803815257187, + "learning_rate": 1.5384615384615387e-06, + "loss": 2.1264, + "step": 184 + }, + { + "epoch": 0.12391158740790355, + "grad_norm": 8.436285582271946, + "learning_rate": 1.5468227424749166e-06, + "loss": 2.0233, + "step": 185 + }, + { + "epoch": 0.1245813797722706, + "grad_norm": 3.0608059658782016, + "learning_rate": 1.5551839464882945e-06, + "loss": 1.8426, + "step": 186 + }, + { + "epoch": 0.12525117213663764, + "grad_norm": 3.4590385423945764, + "learning_rate": 1.5635451505016724e-06, + "loss": 1.9833, + "step": 187 + }, + { + "epoch": 0.12592096450100468, + "grad_norm": 4.707712577297673, + "learning_rate": 1.5719063545150504e-06, + "loss": 
1.7976, + "step": 188 + }, + { + "epoch": 0.12659075686537175, + "grad_norm": 3.382263160191859, + "learning_rate": 1.5802675585284283e-06, + "loss": 1.8379, + "step": 189 + }, + { + "epoch": 0.12726054922973878, + "grad_norm": 3.9405507291603015, + "learning_rate": 1.5886287625418062e-06, + "loss": 2.2475, + "step": 190 + }, + { + "epoch": 0.12793034159410582, + "grad_norm": 3.6318436771970384, + "learning_rate": 1.596989966555184e-06, + "loss": 2.2273, + "step": 191 + }, + { + "epoch": 0.12860013395847286, + "grad_norm": 4.302085284660499, + "learning_rate": 1.605351170568562e-06, + "loss": 1.9925, + "step": 192 + }, + { + "epoch": 0.12926992632283993, + "grad_norm": 3.5373985454707753, + "learning_rate": 1.61371237458194e-06, + "loss": 2.0925, + "step": 193 + }, + { + "epoch": 0.12993971868720697, + "grad_norm": 3.488582150966482, + "learning_rate": 1.6220735785953178e-06, + "loss": 2.1153, + "step": 194 + }, + { + "epoch": 0.130609511051574, + "grad_norm": 3.5514181366670754, + "learning_rate": 1.6304347826086957e-06, + "loss": 2.0722, + "step": 195 + }, + { + "epoch": 0.13127930341594105, + "grad_norm": 3.1715945121988267, + "learning_rate": 1.6387959866220736e-06, + "loss": 1.9732, + "step": 196 + }, + { + "epoch": 0.1319490957803081, + "grad_norm": 3.518416969340009, + "learning_rate": 1.6471571906354518e-06, + "loss": 1.8059, + "step": 197 + }, + { + "epoch": 0.13261888814467515, + "grad_norm": 3.5092332154910526, + "learning_rate": 1.6555183946488297e-06, + "loss": 1.9881, + "step": 198 + }, + { + "epoch": 0.1332886805090422, + "grad_norm": 4.131464282030278, + "learning_rate": 1.6638795986622076e-06, + "loss": 1.9627, + "step": 199 + }, + { + "epoch": 0.13395847287340926, + "grad_norm": 3.3202850733198175, + "learning_rate": 1.6722408026755855e-06, + "loss": 1.9685, + "step": 200 + }, + { + "epoch": 0.1346282652377763, + "grad_norm": 3.4965339447068295, + "learning_rate": 1.6806020066889634e-06, + "loss": 2.1129, + "step": 201 + }, + { + "epoch": 0.13529805760214333, + "grad_norm": 3.1967536306193223, + "learning_rate": 1.6889632107023413e-06, + "loss": 1.8925, + "step": 202 + }, + { + "epoch": 0.13596784996651037, + "grad_norm": 4.069151724967296, + "learning_rate": 1.6973244147157192e-06, + "loss": 2.0566, + "step": 203 + }, + { + "epoch": 0.13663764233087744, + "grad_norm": 3.4263121787412114, + "learning_rate": 1.7056856187290971e-06, + "loss": 1.8686, + "step": 204 + }, + { + "epoch": 0.13730743469524448, + "grad_norm": 3.3362540662549063, + "learning_rate": 1.714046822742475e-06, + "loss": 2.1656, + "step": 205 + }, + { + "epoch": 0.13797722705961152, + "grad_norm": 3.1464633213434383, + "learning_rate": 1.722408026755853e-06, + "loss": 2.1096, + "step": 206 + }, + { + "epoch": 0.13864701942397856, + "grad_norm": 8.025475292501426, + "learning_rate": 1.7307692307692308e-06, + "loss": 1.8972, + "step": 207 + }, + { + "epoch": 0.13931681178834562, + "grad_norm": 3.1719221140424545, + "learning_rate": 1.7391304347826088e-06, + "loss": 2.1083, + "step": 208 + }, + { + "epoch": 0.13998660415271266, + "grad_norm": 3.0628471229796905, + "learning_rate": 1.7474916387959867e-06, + "loss": 1.7911, + "step": 209 + }, + { + "epoch": 0.1406563965170797, + "grad_norm": 3.203682339311749, + "learning_rate": 1.7558528428093646e-06, + "loss": 2.0126, + "step": 210 + }, + { + "epoch": 0.14132618888144674, + "grad_norm": 3.193843290699495, + "learning_rate": 1.7642140468227425e-06, + "loss": 2.0138, + "step": 211 + }, + { + "epoch": 0.1419959812458138, + "grad_norm": 4.206265212962417, + 
"learning_rate": 1.7725752508361204e-06, + "loss": 1.9765, + "step": 212 + }, + { + "epoch": 0.14266577361018085, + "grad_norm": 2.776884034315761, + "learning_rate": 1.7809364548494983e-06, + "loss": 1.9542, + "step": 213 + }, + { + "epoch": 0.14333556597454788, + "grad_norm": 3.2137185845080087, + "learning_rate": 1.7892976588628764e-06, + "loss": 2.0452, + "step": 214 + }, + { + "epoch": 0.14400535833891492, + "grad_norm": 4.586638562003805, + "learning_rate": 1.7976588628762543e-06, + "loss": 2.0934, + "step": 215 + }, + { + "epoch": 0.144675150703282, + "grad_norm": 2.878407315220466, + "learning_rate": 1.8060200668896322e-06, + "loss": 2.1379, + "step": 216 + }, + { + "epoch": 0.14534494306764903, + "grad_norm": 2.7835482834228014, + "learning_rate": 1.8143812709030102e-06, + "loss": 2.081, + "step": 217 + }, + { + "epoch": 0.14601473543201607, + "grad_norm": 3.137659760474874, + "learning_rate": 1.822742474916388e-06, + "loss": 2.1303, + "step": 218 + }, + { + "epoch": 0.14668452779638314, + "grad_norm": 3.1682939421073764, + "learning_rate": 1.831103678929766e-06, + "loss": 2.153, + "step": 219 + }, + { + "epoch": 0.14735432016075017, + "grad_norm": 3.009922519675137, + "learning_rate": 1.8394648829431439e-06, + "loss": 1.8708, + "step": 220 + }, + { + "epoch": 0.1480241125251172, + "grad_norm": 2.758349744706276, + "learning_rate": 1.8478260869565218e-06, + "loss": 2.0419, + "step": 221 + }, + { + "epoch": 0.14869390488948425, + "grad_norm": 2.5533153095492924, + "learning_rate": 1.8561872909698997e-06, + "loss": 1.9501, + "step": 222 + }, + { + "epoch": 0.14936369725385132, + "grad_norm": 8.868874596398358, + "learning_rate": 1.8645484949832776e-06, + "loss": 1.911, + "step": 223 + }, + { + "epoch": 0.15003348961821836, + "grad_norm": 2.8642600285536397, + "learning_rate": 1.8729096989966555e-06, + "loss": 2.0204, + "step": 224 + }, + { + "epoch": 0.1507032819825854, + "grad_norm": 2.729672988073045, + "learning_rate": 1.8812709030100336e-06, + "loss": 2.0782, + "step": 225 + }, + { + "epoch": 0.15137307434695244, + "grad_norm": 2.3759567382191373, + "learning_rate": 1.8896321070234118e-06, + "loss": 1.7147, + "step": 226 + }, + { + "epoch": 0.1520428667113195, + "grad_norm": 10.509959992876281, + "learning_rate": 1.8979933110367897e-06, + "loss": 1.7351, + "step": 227 + }, + { + "epoch": 0.15271265907568654, + "grad_norm": 2.8514018390794145, + "learning_rate": 1.9063545150501676e-06, + "loss": 1.9389, + "step": 228 + }, + { + "epoch": 0.15338245144005358, + "grad_norm": 4.4832042051059, + "learning_rate": 1.9147157190635453e-06, + "loss": 1.6621, + "step": 229 + }, + { + "epoch": 0.15405224380442062, + "grad_norm": 2.580799604371309, + "learning_rate": 1.9230769230769234e-06, + "loss": 1.9717, + "step": 230 + }, + { + "epoch": 0.15472203616878769, + "grad_norm": 2.444444116856193, + "learning_rate": 1.931438127090301e-06, + "loss": 1.7277, + "step": 231 + }, + { + "epoch": 0.15539182853315472, + "grad_norm": 2.405338803148918, + "learning_rate": 1.9397993311036792e-06, + "loss": 1.8347, + "step": 232 + }, + { + "epoch": 0.15606162089752176, + "grad_norm": 3.26938661563197, + "learning_rate": 1.948160535117057e-06, + "loss": 1.9485, + "step": 233 + }, + { + "epoch": 0.1567314132618888, + "grad_norm": 2.776644200405492, + "learning_rate": 1.956521739130435e-06, + "loss": 1.7192, + "step": 234 + }, + { + "epoch": 0.15740120562625587, + "grad_norm": 2.2673063831761175, + "learning_rate": 1.964882943143813e-06, + "loss": 1.8752, + "step": 235 + }, + { + "epoch": 0.1580709979906229, 
+ "grad_norm": 3.0950657214935426, + "learning_rate": 1.973244147157191e-06, + "loss": 1.958, + "step": 236 + }, + { + "epoch": 0.15874079035498995, + "grad_norm": 5.811694884959375, + "learning_rate": 1.981605351170569e-06, + "loss": 1.903, + "step": 237 + }, + { + "epoch": 0.15941058271935699, + "grad_norm": 2.3558592985933586, + "learning_rate": 1.9899665551839467e-06, + "loss": 1.9661, + "step": 238 + }, + { + "epoch": 0.16008037508372405, + "grad_norm": 2.3104732431641333, + "learning_rate": 1.998327759197325e-06, + "loss": 1.8798, + "step": 239 + }, + { + "epoch": 0.1607501674480911, + "grad_norm": 3.7698886495940034, + "learning_rate": 2.0066889632107025e-06, + "loss": 1.8961, + "step": 240 + }, + { + "epoch": 0.16141995981245813, + "grad_norm": 6.563630452179998, + "learning_rate": 2.0150501672240806e-06, + "loss": 1.9682, + "step": 241 + }, + { + "epoch": 0.1620897521768252, + "grad_norm": 12.065084435056384, + "learning_rate": 2.0234113712374583e-06, + "loss": 1.894, + "step": 242 + }, + { + "epoch": 0.16275954454119224, + "grad_norm": 2.7450681563094586, + "learning_rate": 2.0317725752508364e-06, + "loss": 2.0255, + "step": 243 + }, + { + "epoch": 0.16342933690555927, + "grad_norm": 7.921035653346702, + "learning_rate": 2.040133779264214e-06, + "loss": 1.9235, + "step": 244 + }, + { + "epoch": 0.1640991292699263, + "grad_norm": 2.6200038678054036, + "learning_rate": 2.0484949832775922e-06, + "loss": 1.5655, + "step": 245 + }, + { + "epoch": 0.16476892163429338, + "grad_norm": 3.0955178755005552, + "learning_rate": 2.05685618729097e-06, + "loss": 1.767, + "step": 246 + }, + { + "epoch": 0.16543871399866042, + "grad_norm": 5.732282828412601, + "learning_rate": 2.065217391304348e-06, + "loss": 1.7083, + "step": 247 + }, + { + "epoch": 0.16610850636302746, + "grad_norm": 3.082359942753026, + "learning_rate": 2.0735785953177258e-06, + "loss": 1.9471, + "step": 248 + }, + { + "epoch": 0.1667782987273945, + "grad_norm": 2.288000073934078, + "learning_rate": 2.081939799331104e-06, + "loss": 2.0094, + "step": 249 + }, + { + "epoch": 0.16744809109176156, + "grad_norm": 2.3238372058322554, + "learning_rate": 2.0903010033444816e-06, + "loss": 1.8703, + "step": 250 + }, + { + "epoch": 0.1681178834561286, + "grad_norm": 3.6117048171858306, + "learning_rate": 2.0986622073578597e-06, + "loss": 1.9327, + "step": 251 + }, + { + "epoch": 0.16878767582049564, + "grad_norm": 2.1640325452027174, + "learning_rate": 2.1070234113712374e-06, + "loss": 1.8861, + "step": 252 + }, + { + "epoch": 0.16945746818486268, + "grad_norm": 6.290918829666269, + "learning_rate": 2.1153846153846155e-06, + "loss": 1.9709, + "step": 253 + }, + { + "epoch": 0.17012726054922975, + "grad_norm": 4.98312214748408, + "learning_rate": 2.1237458193979936e-06, + "loss": 1.7658, + "step": 254 + }, + { + "epoch": 0.1707970529135968, + "grad_norm": 1.9405345079078287, + "learning_rate": 2.1321070234113713e-06, + "loss": 1.7623, + "step": 255 + }, + { + "epoch": 0.17146684527796383, + "grad_norm": 2.0285193298735655, + "learning_rate": 2.1404682274247495e-06, + "loss": 1.9351, + "step": 256 + }, + { + "epoch": 0.17213663764233086, + "grad_norm": 2.6868484127112437, + "learning_rate": 2.148829431438127e-06, + "loss": 1.802, + "step": 257 + }, + { + "epoch": 0.17280643000669793, + "grad_norm": 2.6704082126149733, + "learning_rate": 2.1571906354515053e-06, + "loss": 2.0481, + "step": 258 + }, + { + "epoch": 0.17347622237106497, + "grad_norm": 2.1798339252440306, + "learning_rate": 2.165551839464883e-06, + "loss": 1.7975, + "step": 259 + 
}, + { + "epoch": 0.174146014735432, + "grad_norm": 3.811449962011576, + "learning_rate": 2.173913043478261e-06, + "loss": 1.7894, + "step": 260 + }, + { + "epoch": 0.17481580709979908, + "grad_norm": 4.238918587318307, + "learning_rate": 2.182274247491639e-06, + "loss": 1.7453, + "step": 261 + }, + { + "epoch": 0.17548559946416611, + "grad_norm": 3.584674349205926, + "learning_rate": 2.190635451505017e-06, + "loss": 2.0157, + "step": 262 + }, + { + "epoch": 0.17615539182853315, + "grad_norm": 3.414315781154583, + "learning_rate": 2.1989966555183946e-06, + "loss": 1.8505, + "step": 263 + }, + { + "epoch": 0.1768251841929002, + "grad_norm": 2.8921805409049597, + "learning_rate": 2.2073578595317727e-06, + "loss": 1.9347, + "step": 264 + }, + { + "epoch": 0.17749497655726726, + "grad_norm": 2.2322789943397874, + "learning_rate": 2.2157190635451504e-06, + "loss": 1.8619, + "step": 265 + }, + { + "epoch": 0.1781647689216343, + "grad_norm": 2.4006877067716825, + "learning_rate": 2.2240802675585286e-06, + "loss": 1.8664, + "step": 266 + }, + { + "epoch": 0.17883456128600134, + "grad_norm": 2.063315814526604, + "learning_rate": 2.2324414715719063e-06, + "loss": 2.0105, + "step": 267 + }, + { + "epoch": 0.17950435365036838, + "grad_norm": 2.2734100645992186, + "learning_rate": 2.2408026755852844e-06, + "loss": 1.6536, + "step": 268 + }, + { + "epoch": 0.18017414601473544, + "grad_norm": 3.177890942042914, + "learning_rate": 2.249163879598662e-06, + "loss": 1.8837, + "step": 269 + }, + { + "epoch": 0.18084393837910248, + "grad_norm": 2.004549936698657, + "learning_rate": 2.25752508361204e-06, + "loss": 2.0643, + "step": 270 + }, + { + "epoch": 0.18151373074346952, + "grad_norm": 4.6325676732573875, + "learning_rate": 2.2658862876254183e-06, + "loss": 1.8228, + "step": 271 + }, + { + "epoch": 0.18218352310783656, + "grad_norm": 1.9160883064325698, + "learning_rate": 2.274247491638796e-06, + "loss": 1.9863, + "step": 272 + }, + { + "epoch": 0.18285331547220363, + "grad_norm": 2.5211054000931683, + "learning_rate": 2.282608695652174e-06, + "loss": 1.6476, + "step": 273 + }, + { + "epoch": 0.18352310783657066, + "grad_norm": 1.9747490108473735, + "learning_rate": 2.290969899665552e-06, + "loss": 1.9271, + "step": 274 + }, + { + "epoch": 0.1841929002009377, + "grad_norm": 1.964894300646358, + "learning_rate": 2.29933110367893e-06, + "loss": 2.0611, + "step": 275 + }, + { + "epoch": 0.18486269256530474, + "grad_norm": 2.4138891249540855, + "learning_rate": 2.307692307692308e-06, + "loss": 1.8455, + "step": 276 + }, + { + "epoch": 0.1855324849296718, + "grad_norm": 3.264415478932968, + "learning_rate": 2.3160535117056858e-06, + "loss": 1.6501, + "step": 277 + }, + { + "epoch": 0.18620227729403885, + "grad_norm": 1.977164321512977, + "learning_rate": 2.324414715719064e-06, + "loss": 2.0806, + "step": 278 + }, + { + "epoch": 0.1868720696584059, + "grad_norm": 1.8429671324452241, + "learning_rate": 2.3327759197324416e-06, + "loss": 1.7916, + "step": 279 + }, + { + "epoch": 0.18754186202277295, + "grad_norm": 3.1027105387966842, + "learning_rate": 2.3411371237458197e-06, + "loss": 1.5992, + "step": 280 + }, + { + "epoch": 0.18821165438714, + "grad_norm": 4.169022604348427, + "learning_rate": 2.3494983277591974e-06, + "loss": 1.8428, + "step": 281 + }, + { + "epoch": 0.18888144675150703, + "grad_norm": 2.2146980708448125, + "learning_rate": 2.3578595317725755e-06, + "loss": 1.6593, + "step": 282 + }, + { + "epoch": 0.18955123911587407, + "grad_norm": 1.8127022388825764, + "learning_rate": 
2.3662207357859537e-06, + "loss": 1.8122, + "step": 283 + }, + { + "epoch": 0.19022103148024114, + "grad_norm": 2.6320394440524364, + "learning_rate": 2.3745819397993314e-06, + "loss": 1.793, + "step": 284 + }, + { + "epoch": 0.19089082384460818, + "grad_norm": 2.151122671668974, + "learning_rate": 2.3829431438127095e-06, + "loss": 1.9432, + "step": 285 + }, + { + "epoch": 0.19156061620897522, + "grad_norm": 2.858262320000492, + "learning_rate": 2.391304347826087e-06, + "loss": 2.0409, + "step": 286 + }, + { + "epoch": 0.19223040857334225, + "grad_norm": 2.6799830044218607, + "learning_rate": 2.3996655518394653e-06, + "loss": 1.7319, + "step": 287 + }, + { + "epoch": 0.19290020093770932, + "grad_norm": 3.353653611670843, + "learning_rate": 2.408026755852843e-06, + "loss": 1.9225, + "step": 288 + }, + { + "epoch": 0.19356999330207636, + "grad_norm": 3.256991801847376, + "learning_rate": 2.416387959866221e-06, + "loss": 1.8909, + "step": 289 + }, + { + "epoch": 0.1942397856664434, + "grad_norm": 1.9261488761141956, + "learning_rate": 2.424749163879599e-06, + "loss": 1.7396, + "step": 290 + }, + { + "epoch": 0.19490957803081044, + "grad_norm": 3.353580499308848, + "learning_rate": 2.433110367892977e-06, + "loss": 1.9002, + "step": 291 + }, + { + "epoch": 0.1955793703951775, + "grad_norm": 1.9666492053556095, + "learning_rate": 2.4414715719063546e-06, + "loss": 1.9615, + "step": 292 + }, + { + "epoch": 0.19624916275954454, + "grad_norm": 3.000449125377943, + "learning_rate": 2.4498327759197327e-06, + "loss": 1.884, + "step": 293 + }, + { + "epoch": 0.19691895512391158, + "grad_norm": 3.6228192581125747, + "learning_rate": 2.4581939799331104e-06, + "loss": 1.9558, + "step": 294 + }, + { + "epoch": 0.19758874748827862, + "grad_norm": 4.987205927521508, + "learning_rate": 2.4665551839464886e-06, + "loss": 1.7899, + "step": 295 + }, + { + "epoch": 0.1982585398526457, + "grad_norm": 1.947787198428038, + "learning_rate": 2.4749163879598663e-06, + "loss": 1.7946, + "step": 296 + }, + { + "epoch": 0.19892833221701273, + "grad_norm": 2.7895426268191588, + "learning_rate": 2.4832775919732444e-06, + "loss": 1.8097, + "step": 297 + }, + { + "epoch": 0.19959812458137977, + "grad_norm": 2.7073235050101765, + "learning_rate": 2.491638795986622e-06, + "loss": 1.8227, + "step": 298 + }, + { + "epoch": 0.20026791694574683, + "grad_norm": 2.095718504018715, + "learning_rate": 2.5e-06, + "loss": 1.6255, + "step": 299 + }, + { + "epoch": 0.20093770931011387, + "grad_norm": 4.729433180190761, + "learning_rate": 2.5083612040133783e-06, + "loss": 1.8542, + "step": 300 + }, + { + "epoch": 0.2016075016744809, + "grad_norm": 2.5485804479437784, + "learning_rate": 2.516722408026756e-06, + "loss": 1.7442, + "step": 301 + }, + { + "epoch": 0.20227729403884795, + "grad_norm": 2.078181395452582, + "learning_rate": 2.525083612040134e-06, + "loss": 1.9933, + "step": 302 + }, + { + "epoch": 0.20294708640321502, + "grad_norm": 1.8539336615106785, + "learning_rate": 2.533444816053512e-06, + "loss": 2.114, + "step": 303 + }, + { + "epoch": 0.20361687876758205, + "grad_norm": 3.6208143717392622, + "learning_rate": 2.54180602006689e-06, + "loss": 1.6794, + "step": 304 + }, + { + "epoch": 0.2042866711319491, + "grad_norm": 2.0912371565758883, + "learning_rate": 2.5501672240802677e-06, + "loss": 1.8093, + "step": 305 + }, + { + "epoch": 0.20495646349631613, + "grad_norm": 2.1108928941253553, + "learning_rate": 2.5585284280936458e-06, + "loss": 1.7678, + "step": 306 + }, + { + "epoch": 0.2056262558606832, + "grad_norm": 
1.7548881076167067, + "learning_rate": 2.5668896321070235e-06, + "loss": 1.9306, + "step": 307 + }, + { + "epoch": 0.20629604822505024, + "grad_norm": 2.975448859725833, + "learning_rate": 2.5752508361204016e-06, + "loss": 1.8555, + "step": 308 + }, + { + "epoch": 0.20696584058941728, + "grad_norm": 2.3196087067529296, + "learning_rate": 2.5836120401337793e-06, + "loss": 1.7486, + "step": 309 + }, + { + "epoch": 0.20763563295378432, + "grad_norm": 1.841975366419062, + "learning_rate": 2.5919732441471574e-06, + "loss": 1.9089, + "step": 310 + }, + { + "epoch": 0.20830542531815138, + "grad_norm": 1.9183041574571633, + "learning_rate": 2.600334448160535e-06, + "loss": 1.9754, + "step": 311 + }, + { + "epoch": 0.20897521768251842, + "grad_norm": 1.9156823225214141, + "learning_rate": 2.6086956521739132e-06, + "loss": 1.7109, + "step": 312 + }, + { + "epoch": 0.20964501004688546, + "grad_norm": 1.798921140629132, + "learning_rate": 2.617056856187291e-06, + "loss": 1.9663, + "step": 313 + }, + { + "epoch": 0.2103148024112525, + "grad_norm": 1.8521744363864094, + "learning_rate": 2.625418060200669e-06, + "loss": 1.8411, + "step": 314 + }, + { + "epoch": 0.21098459477561957, + "grad_norm": 1.8801354216019828, + "learning_rate": 2.6337792642140468e-06, + "loss": 1.9034, + "step": 315 + }, + { + "epoch": 0.2116543871399866, + "grad_norm": 1.6612492107845123, + "learning_rate": 2.642140468227425e-06, + "loss": 2.0102, + "step": 316 + }, + { + "epoch": 0.21232417950435364, + "grad_norm": 2.8600825431158547, + "learning_rate": 2.650501672240803e-06, + "loss": 1.6456, + "step": 317 + }, + { + "epoch": 0.21299397186872068, + "grad_norm": 2.0837032285390036, + "learning_rate": 2.6588628762541807e-06, + "loss": 1.9283, + "step": 318 + }, + { + "epoch": 0.21366376423308775, + "grad_norm": 2.215816641855323, + "learning_rate": 2.667224080267559e-06, + "loss": 1.8729, + "step": 319 + }, + { + "epoch": 0.2143335565974548, + "grad_norm": 1.8412323415309464, + "learning_rate": 2.6755852842809365e-06, + "loss": 1.9543, + "step": 320 + }, + { + "epoch": 0.21500334896182183, + "grad_norm": 1.862212732425416, + "learning_rate": 2.6839464882943146e-06, + "loss": 1.7631, + "step": 321 + }, + { + "epoch": 0.2156731413261889, + "grad_norm": 1.8677287385910695, + "learning_rate": 2.6923076923076923e-06, + "loss": 1.9194, + "step": 322 + }, + { + "epoch": 0.21634293369055593, + "grad_norm": 1.9216041705862603, + "learning_rate": 2.7006688963210705e-06, + "loss": 1.8508, + "step": 323 + }, + { + "epoch": 0.21701272605492297, + "grad_norm": 2.0995190731615243, + "learning_rate": 2.709030100334448e-06, + "loss": 1.803, + "step": 324 + }, + { + "epoch": 0.21768251841929, + "grad_norm": 1.7365949493943054, + "learning_rate": 2.7173913043478263e-06, + "loss": 1.9315, + "step": 325 + }, + { + "epoch": 0.21835231078365708, + "grad_norm": 4.761989456898099, + "learning_rate": 2.725752508361204e-06, + "loss": 1.9345, + "step": 326 + }, + { + "epoch": 0.21902210314802412, + "grad_norm": 2.911723476406704, + "learning_rate": 2.734113712374582e-06, + "loss": 1.7071, + "step": 327 + }, + { + "epoch": 0.21969189551239116, + "grad_norm": 2.746194950642097, + "learning_rate": 2.74247491638796e-06, + "loss": 1.9962, + "step": 328 + }, + { + "epoch": 0.2203616878767582, + "grad_norm": 1.836104801838498, + "learning_rate": 2.750836120401338e-06, + "loss": 1.8632, + "step": 329 + }, + { + "epoch": 0.22103148024112526, + "grad_norm": 2.0106207161881406, + "learning_rate": 2.7591973244147156e-06, + "loss": 2.0596, + "step": 330 + }, + { + 
"epoch": 0.2217012726054923, + "grad_norm": 1.6872820002296687, + "learning_rate": 2.7675585284280937e-06, + "loss": 2.0197, + "step": 331 + }, + { + "epoch": 0.22237106496985934, + "grad_norm": 3.334599091695983, + "learning_rate": 2.7759197324414714e-06, + "loss": 1.7177, + "step": 332 + }, + { + "epoch": 0.22304085733422638, + "grad_norm": 2.826992914542087, + "learning_rate": 2.7842809364548495e-06, + "loss": 1.9366, + "step": 333 + }, + { + "epoch": 0.22371064969859344, + "grad_norm": 2.7872343812785116, + "learning_rate": 2.7926421404682277e-06, + "loss": 1.797, + "step": 334 + }, + { + "epoch": 0.22438044206296048, + "grad_norm": 2.8031958073935086, + "learning_rate": 2.8010033444816054e-06, + "loss": 1.7572, + "step": 335 + }, + { + "epoch": 0.22505023442732752, + "grad_norm": 2.490174555531096, + "learning_rate": 2.8093645484949835e-06, + "loss": 1.8697, + "step": 336 + }, + { + "epoch": 0.22572002679169456, + "grad_norm": 3.1652941635530305, + "learning_rate": 2.817725752508361e-06, + "loss": 1.7522, + "step": 337 + }, + { + "epoch": 0.22638981915606163, + "grad_norm": 2.3866296581110866, + "learning_rate": 2.8260869565217393e-06, + "loss": 1.6968, + "step": 338 + }, + { + "epoch": 0.22705961152042867, + "grad_norm": 2.999860168137359, + "learning_rate": 2.834448160535117e-06, + "loss": 1.8118, + "step": 339 + }, + { + "epoch": 0.2277294038847957, + "grad_norm": 1.8687474362480911, + "learning_rate": 2.842809364548495e-06, + "loss": 1.7847, + "step": 340 + }, + { + "epoch": 0.22839919624916277, + "grad_norm": 2.391512707051508, + "learning_rate": 2.851170568561873e-06, + "loss": 1.8938, + "step": 341 + }, + { + "epoch": 0.2290689886135298, + "grad_norm": 1.8519721321353546, + "learning_rate": 2.859531772575251e-06, + "loss": 1.9002, + "step": 342 + }, + { + "epoch": 0.22973878097789685, + "grad_norm": 1.964629934829367, + "learning_rate": 2.8678929765886286e-06, + "loss": 2.056, + "step": 343 + }, + { + "epoch": 0.2304085733422639, + "grad_norm": 4.155715106556499, + "learning_rate": 2.8762541806020068e-06, + "loss": 1.6318, + "step": 344 + }, + { + "epoch": 0.23107836570663096, + "grad_norm": 1.8665581393527892, + "learning_rate": 2.8846153846153845e-06, + "loss": 2.0759, + "step": 345 + }, + { + "epoch": 0.231748158070998, + "grad_norm": 2.2198781819216844, + "learning_rate": 2.8929765886287626e-06, + "loss": 1.509, + "step": 346 + }, + { + "epoch": 0.23241795043536503, + "grad_norm": 1.768043879075564, + "learning_rate": 2.9013377926421403e-06, + "loss": 1.6995, + "step": 347 + }, + { + "epoch": 0.23308774279973207, + "grad_norm": 2.6113047685230444, + "learning_rate": 2.9096989966555184e-06, + "loss": 1.7911, + "step": 348 + }, + { + "epoch": 0.23375753516409914, + "grad_norm": 1.7080501989962582, + "learning_rate": 2.918060200668897e-06, + "loss": 1.8129, + "step": 349 + }, + { + "epoch": 0.23442732752846618, + "grad_norm": 2.1180651211284163, + "learning_rate": 2.9264214046822746e-06, + "loss": 1.9824, + "step": 350 + }, + { + "epoch": 0.23509711989283322, + "grad_norm": 2.9896863478204203, + "learning_rate": 2.9347826086956528e-06, + "loss": 1.8428, + "step": 351 + }, + { + "epoch": 0.23576691225720026, + "grad_norm": 1.8145543438937126, + "learning_rate": 2.9431438127090305e-06, + "loss": 1.8387, + "step": 352 + }, + { + "epoch": 0.23643670462156732, + "grad_norm": 1.7159115858270948, + "learning_rate": 2.9515050167224086e-06, + "loss": 1.7644, + "step": 353 + }, + { + "epoch": 0.23710649698593436, + "grad_norm": 1.6984159818510645, + "learning_rate": 
2.9598662207357863e-06, + "loss": 1.8435, + "step": 354 + }, + { + "epoch": 0.2377762893503014, + "grad_norm": 2.054663667535852, + "learning_rate": 2.9682274247491644e-06, + "loss": 1.847, + "step": 355 + }, + { + "epoch": 0.23844608171466844, + "grad_norm": 2.667552514575222, + "learning_rate": 2.976588628762542e-06, + "loss": 1.8058, + "step": 356 + }, + { + "epoch": 0.2391158740790355, + "grad_norm": 3.1268034225036505, + "learning_rate": 2.9849498327759202e-06, + "loss": 1.8631, + "step": 357 + }, + { + "epoch": 0.23978566644340255, + "grad_norm": 3.570147229726924, + "learning_rate": 2.9933110367892983e-06, + "loss": 1.851, + "step": 358 + }, + { + "epoch": 0.24045545880776958, + "grad_norm": 3.7971118438958524, + "learning_rate": 3.001672240802676e-06, + "loss": 2.1826, + "step": 359 + }, + { + "epoch": 0.24112525117213665, + "grad_norm": 2.2220903035763317, + "learning_rate": 3.010033444816054e-06, + "loss": 1.7269, + "step": 360 + }, + { + "epoch": 0.2417950435365037, + "grad_norm": 2.025702081627396, + "learning_rate": 3.018394648829432e-06, + "loss": 1.6372, + "step": 361 + }, + { + "epoch": 0.24246483590087073, + "grad_norm": 4.390335693717886, + "learning_rate": 3.02675585284281e-06, + "loss": 1.6099, + "step": 362 + }, + { + "epoch": 0.24313462826523777, + "grad_norm": 2.7932753026908843, + "learning_rate": 3.0351170568561877e-06, + "loss": 1.9937, + "step": 363 + }, + { + "epoch": 0.24380442062960483, + "grad_norm": 5.124558072451642, + "learning_rate": 3.043478260869566e-06, + "loss": 1.7736, + "step": 364 + }, + { + "epoch": 0.24447421299397187, + "grad_norm": 3.30451163122862, + "learning_rate": 3.0518394648829435e-06, + "loss": 1.7612, + "step": 365 + }, + { + "epoch": 0.2451440053583389, + "grad_norm": 2.1013704757657106, + "learning_rate": 3.0602006688963216e-06, + "loss": 1.8228, + "step": 366 + }, + { + "epoch": 0.24581379772270595, + "grad_norm": 8.220155476216618, + "learning_rate": 3.0685618729096993e-06, + "loss": 1.6224, + "step": 367 + }, + { + "epoch": 0.24648359008707302, + "grad_norm": 2.5541987373732, + "learning_rate": 3.0769230769230774e-06, + "loss": 1.9031, + "step": 368 + }, + { + "epoch": 0.24715338245144006, + "grad_norm": 2.465441080159187, + "learning_rate": 3.085284280936455e-06, + "loss": 1.7098, + "step": 369 + }, + { + "epoch": 0.2478231748158071, + "grad_norm": 1.8083358177860613, + "learning_rate": 3.0936454849498333e-06, + "loss": 1.8874, + "step": 370 + }, + { + "epoch": 0.24849296718017413, + "grad_norm": 1.5973148230312335, + "learning_rate": 3.102006688963211e-06, + "loss": 1.9978, + "step": 371 + }, + { + "epoch": 0.2491627595445412, + "grad_norm": 3.4295752892666007, + "learning_rate": 3.110367892976589e-06, + "loss": 1.7308, + "step": 372 + }, + { + "epoch": 0.24983255190890824, + "grad_norm": 5.028572895073708, + "learning_rate": 3.1187290969899668e-06, + "loss": 1.7995, + "step": 373 + }, + { + "epoch": 0.2505023442732753, + "grad_norm": 3.4219973321889445, + "learning_rate": 3.127090301003345e-06, + "loss": 1.9022, + "step": 374 + }, + { + "epoch": 0.2511721366376423, + "grad_norm": 2.9673889353982714, + "learning_rate": 3.1354515050167226e-06, + "loss": 1.3981, + "step": 375 + }, + { + "epoch": 0.25184192900200936, + "grad_norm": 1.7764701249516304, + "learning_rate": 3.1438127090301007e-06, + "loss": 1.773, + "step": 376 + }, + { + "epoch": 0.2525117213663764, + "grad_norm": 1.8096400780811566, + "learning_rate": 3.152173913043479e-06, + "loss": 1.9883, + "step": 377 + }, + { + "epoch": 0.2531815137307435, + "grad_norm": 
4.391994449089869, + "learning_rate": 3.1605351170568565e-06, + "loss": 1.6766, + "step": 378 + }, + { + "epoch": 0.25385130609511053, + "grad_norm": 5.583694601327485, + "learning_rate": 3.1688963210702347e-06, + "loss": 1.7861, + "step": 379 + }, + { + "epoch": 0.25452109845947757, + "grad_norm": 2.7943273973269998, + "learning_rate": 3.1772575250836123e-06, + "loss": 1.865, + "step": 380 + }, + { + "epoch": 0.2551908908238446, + "grad_norm": 2.7661226444441773, + "learning_rate": 3.1856187290969905e-06, + "loss": 1.8637, + "step": 381 + }, + { + "epoch": 0.25586068318821165, + "grad_norm": 2.048973792043512, + "learning_rate": 3.193979933110368e-06, + "loss": 1.6824, + "step": 382 + }, + { + "epoch": 0.2565304755525787, + "grad_norm": 1.7090609978298485, + "learning_rate": 3.2023411371237463e-06, + "loss": 1.8707, + "step": 383 + }, + { + "epoch": 0.2572002679169457, + "grad_norm": 1.714114293431212, + "learning_rate": 3.210702341137124e-06, + "loss": 1.7126, + "step": 384 + }, + { + "epoch": 0.2578700602813128, + "grad_norm": 1.6223101977107537, + "learning_rate": 3.219063545150502e-06, + "loss": 1.7293, + "step": 385 + }, + { + "epoch": 0.25853985264567986, + "grad_norm": 1.7297387882840767, + "learning_rate": 3.22742474916388e-06, + "loss": 1.7231, + "step": 386 + }, + { + "epoch": 0.2592096450100469, + "grad_norm": 1.8052363672027805, + "learning_rate": 3.235785953177258e-06, + "loss": 1.9772, + "step": 387 + }, + { + "epoch": 0.25987943737441394, + "grad_norm": 1.8200513381073047, + "learning_rate": 3.2441471571906356e-06, + "loss": 1.7789, + "step": 388 + }, + { + "epoch": 0.260549229738781, + "grad_norm": 2.431482778992783, + "learning_rate": 3.2525083612040137e-06, + "loss": 1.739, + "step": 389 + }, + { + "epoch": 0.261219022103148, + "grad_norm": 2.6045427704151396, + "learning_rate": 3.2608695652173914e-06, + "loss": 1.6385, + "step": 390 + }, + { + "epoch": 0.26188881446751505, + "grad_norm": 1.6484565776061637, + "learning_rate": 3.2692307692307696e-06, + "loss": 1.7669, + "step": 391 + }, + { + "epoch": 0.2625586068318821, + "grad_norm": 2.3649172529877407, + "learning_rate": 3.2775919732441473e-06, + "loss": 2.0444, + "step": 392 + }, + { + "epoch": 0.2632283991962492, + "grad_norm": 2.7515396408862074, + "learning_rate": 3.2859531772575254e-06, + "loss": 1.5652, + "step": 393 + }, + { + "epoch": 0.2638981915606162, + "grad_norm": 2.596258200552931, + "learning_rate": 3.2943143812709035e-06, + "loss": 1.8603, + "step": 394 + }, + { + "epoch": 0.26456798392498326, + "grad_norm": 2.835035471342379, + "learning_rate": 3.302675585284281e-06, + "loss": 1.6411, + "step": 395 + }, + { + "epoch": 0.2652377762893503, + "grad_norm": 2.217095876996061, + "learning_rate": 3.3110367892976593e-06, + "loss": 1.6673, + "step": 396 + }, + { + "epoch": 0.26590756865371734, + "grad_norm": 2.516733407396638, + "learning_rate": 3.319397993311037e-06, + "loss": 1.8779, + "step": 397 + }, + { + "epoch": 0.2665773610180844, + "grad_norm": 5.075486168885235, + "learning_rate": 3.327759197324415e-06, + "loss": 1.8173, + "step": 398 + }, + { + "epoch": 0.2672471533824514, + "grad_norm": 2.3652225796294304, + "learning_rate": 3.336120401337793e-06, + "loss": 1.8807, + "step": 399 + }, + { + "epoch": 0.2679169457468185, + "grad_norm": 1.7354348789721252, + "learning_rate": 3.344481605351171e-06, + "loss": 1.9431, + "step": 400 + }, + { + "epoch": 0.26858673811118555, + "grad_norm": 1.7053050534024181, + "learning_rate": 3.3528428093645487e-06, + "loss": 1.7898, + "step": 401 + }, + { + "epoch": 
0.2692565304755526, + "grad_norm": 2.2712881418684563, + "learning_rate": 3.3612040133779268e-06, + "loss": 1.8618, + "step": 402 + }, + { + "epoch": 0.26992632283991963, + "grad_norm": 1.7435323656013615, + "learning_rate": 3.3695652173913045e-06, + "loss": 1.6362, + "step": 403 + }, + { + "epoch": 0.27059611520428667, + "grad_norm": 3.8256977738240323, + "learning_rate": 3.3779264214046826e-06, + "loss": 1.8707, + "step": 404 + }, + { + "epoch": 0.2712659075686537, + "grad_norm": 3.4266046725560657, + "learning_rate": 3.3862876254180603e-06, + "loss": 1.8192, + "step": 405 + }, + { + "epoch": 0.27193569993302075, + "grad_norm": 2.6440788080324853, + "learning_rate": 3.3946488294314384e-06, + "loss": 1.8251, + "step": 406 + }, + { + "epoch": 0.2726054922973878, + "grad_norm": 3.304668874591189, + "learning_rate": 3.403010033444816e-06, + "loss": 1.986, + "step": 407 + }, + { + "epoch": 0.2732752846617549, + "grad_norm": 1.8432292884446164, + "learning_rate": 3.4113712374581942e-06, + "loss": 2.004, + "step": 408 + }, + { + "epoch": 0.2739450770261219, + "grad_norm": 3.2353393170361753, + "learning_rate": 3.419732441471572e-06, + "loss": 1.7189, + "step": 409 + }, + { + "epoch": 0.27461486939048896, + "grad_norm": 2.0702215771006256, + "learning_rate": 3.42809364548495e-06, + "loss": 1.8867, + "step": 410 + }, + { + "epoch": 0.275284661754856, + "grad_norm": 1.8191702300841133, + "learning_rate": 3.436454849498328e-06, + "loss": 1.6781, + "step": 411 + }, + { + "epoch": 0.27595445411922304, + "grad_norm": 1.9025610711274095, + "learning_rate": 3.444816053511706e-06, + "loss": 1.7711, + "step": 412 + }, + { + "epoch": 0.2766242464835901, + "grad_norm": 3.036607853407895, + "learning_rate": 3.453177257525084e-06, + "loss": 1.8276, + "step": 413 + }, + { + "epoch": 0.2772940388479571, + "grad_norm": 2.487943553467819, + "learning_rate": 3.4615384615384617e-06, + "loss": 1.8379, + "step": 414 + }, + { + "epoch": 0.27796383121232415, + "grad_norm": 2.6577495709762022, + "learning_rate": 3.46989966555184e-06, + "loss": 1.7669, + "step": 415 + }, + { + "epoch": 0.27863362357669125, + "grad_norm": 1.8318652075142634, + "learning_rate": 3.4782608695652175e-06, + "loss": 1.9805, + "step": 416 + }, + { + "epoch": 0.2793034159410583, + "grad_norm": 1.7578523712731393, + "learning_rate": 3.4866220735785956e-06, + "loss": 1.8658, + "step": 417 + }, + { + "epoch": 0.2799732083054253, + "grad_norm": 3.3523668637959028, + "learning_rate": 3.4949832775919733e-06, + "loss": 1.8021, + "step": 418 + }, + { + "epoch": 0.28064300066979236, + "grad_norm": 2.379161425399218, + "learning_rate": 3.5033444816053515e-06, + "loss": 1.8433, + "step": 419 + }, + { + "epoch": 0.2813127930341594, + "grad_norm": 1.9064525962772625, + "learning_rate": 3.511705685618729e-06, + "loss": 1.7901, + "step": 420 + }, + { + "epoch": 0.28198258539852644, + "grad_norm": 2.4643457792730215, + "learning_rate": 3.5200668896321073e-06, + "loss": 1.6551, + "step": 421 + }, + { + "epoch": 0.2826523777628935, + "grad_norm": 4.383682325154937, + "learning_rate": 3.528428093645485e-06, + "loss": 1.6842, + "step": 422 + }, + { + "epoch": 0.2833221701272606, + "grad_norm": 1.8225168631779762, + "learning_rate": 3.536789297658863e-06, + "loss": 1.8387, + "step": 423 + }, + { + "epoch": 0.2839919624916276, + "grad_norm": 2.7990317981870403, + "learning_rate": 3.5451505016722408e-06, + "loss": 1.8929, + "step": 424 + }, + { + "epoch": 0.28466175485599465, + "grad_norm": 2.2568649390986284, + "learning_rate": 3.553511705685619e-06, + "loss": 1.7896, 
+ "step": 425 + }, + { + "epoch": 0.2853315472203617, + "grad_norm": 2.4732331517940707, + "learning_rate": 3.5618729096989966e-06, + "loss": 1.8224, + "step": 426 + }, + { + "epoch": 0.28600133958472873, + "grad_norm": 1.8400744194909044, + "learning_rate": 3.5702341137123747e-06, + "loss": 1.725, + "step": 427 + }, + { + "epoch": 0.28667113194909577, + "grad_norm": 2.5718981437788844, + "learning_rate": 3.578595317725753e-06, + "loss": 1.7246, + "step": 428 + }, + { + "epoch": 0.2873409243134628, + "grad_norm": 1.7092316542940735, + "learning_rate": 3.5869565217391305e-06, + "loss": 1.9007, + "step": 429 + }, + { + "epoch": 0.28801071667782985, + "grad_norm": 4.532640714582798, + "learning_rate": 3.5953177257525087e-06, + "loss": 1.881, + "step": 430 + }, + { + "epoch": 0.28868050904219694, + "grad_norm": 2.8001383028201388, + "learning_rate": 3.6036789297658864e-06, + "loss": 1.9352, + "step": 431 + }, + { + "epoch": 0.289350301406564, + "grad_norm": 1.794008836601672, + "learning_rate": 3.6120401337792645e-06, + "loss": 1.9677, + "step": 432 + }, + { + "epoch": 0.290020093770931, + "grad_norm": 1.7183760359969582, + "learning_rate": 3.620401337792642e-06, + "loss": 1.7627, + "step": 433 + }, + { + "epoch": 0.29068988613529806, + "grad_norm": 1.833532897291909, + "learning_rate": 3.6287625418060203e-06, + "loss": 1.7883, + "step": 434 + }, + { + "epoch": 0.2913596784996651, + "grad_norm": 2.241865409660624, + "learning_rate": 3.637123745819398e-06, + "loss": 1.8787, + "step": 435 + }, + { + "epoch": 0.29202947086403214, + "grad_norm": 4.042545821428583, + "learning_rate": 3.645484949832776e-06, + "loss": 1.4776, + "step": 436 + }, + { + "epoch": 0.2926992632283992, + "grad_norm": 2.407340695157406, + "learning_rate": 3.653846153846154e-06, + "loss": 1.7905, + "step": 437 + }, + { + "epoch": 0.29336905559276627, + "grad_norm": 1.7479264576363596, + "learning_rate": 3.662207357859532e-06, + "loss": 1.9118, + "step": 438 + }, + { + "epoch": 0.2940388479571333, + "grad_norm": 2.8158253517232663, + "learning_rate": 3.6705685618729096e-06, + "loss": 1.6438, + "step": 439 + }, + { + "epoch": 0.29470864032150035, + "grad_norm": 1.668746957691386, + "learning_rate": 3.6789297658862878e-06, + "loss": 1.6515, + "step": 440 + }, + { + "epoch": 0.2953784326858674, + "grad_norm": 2.6184092324065356, + "learning_rate": 3.6872909698996655e-06, + "loss": 1.6502, + "step": 441 + }, + { + "epoch": 0.2960482250502344, + "grad_norm": 3.5187468467369207, + "learning_rate": 3.6956521739130436e-06, + "loss": 1.665, + "step": 442 + }, + { + "epoch": 0.29671801741460146, + "grad_norm": 2.768036909440129, + "learning_rate": 3.7040133779264213e-06, + "loss": 1.8257, + "step": 443 + }, + { + "epoch": 0.2973878097789685, + "grad_norm": 1.7359258338276617, + "learning_rate": 3.7123745819397994e-06, + "loss": 1.5708, + "step": 444 + }, + { + "epoch": 0.29805760214333554, + "grad_norm": 1.861116919088346, + "learning_rate": 3.720735785953177e-06, + "loss": 1.6562, + "step": 445 + }, + { + "epoch": 0.29872739450770264, + "grad_norm": 1.8255109541218428, + "learning_rate": 3.7290969899665552e-06, + "loss": 1.9129, + "step": 446 + }, + { + "epoch": 0.2993971868720697, + "grad_norm": 6.87743873262874, + "learning_rate": 3.7374581939799333e-06, + "loss": 1.7521, + "step": 447 + }, + { + "epoch": 0.3000669792364367, + "grad_norm": 4.092456373577083, + "learning_rate": 3.745819397993311e-06, + "loss": 1.8925, + "step": 448 + }, + { + "epoch": 0.30073677160080375, + "grad_norm": 3.662085972004648, + "learning_rate": 
3.7541806020066896e-06, + "loss": 1.8761, + "step": 449 + }, + { + "epoch": 0.3014065639651708, + "grad_norm": 1.8829820475587393, + "learning_rate": 3.7625418060200673e-06, + "loss": 1.805, + "step": 450 + }, + { + "epoch": 0.30207635632953783, + "grad_norm": 1.7225124440869766, + "learning_rate": 3.7709030100334454e-06, + "loss": 1.6915, + "step": 451 + }, + { + "epoch": 0.30274614869390487, + "grad_norm": 1.893407520994519, + "learning_rate": 3.7792642140468235e-06, + "loss": 1.7013, + "step": 452 + }, + { + "epoch": 0.3034159410582719, + "grad_norm": 4.263403026683554, + "learning_rate": 3.7876254180602012e-06, + "loss": 1.8972, + "step": 453 + }, + { + "epoch": 0.304085733422639, + "grad_norm": 1.9636245069162745, + "learning_rate": 3.7959866220735793e-06, + "loss": 1.8243, + "step": 454 + }, + { + "epoch": 0.30475552578700604, + "grad_norm": 4.167716246136424, + "learning_rate": 3.804347826086957e-06, + "loss": 1.7032, + "step": 455 + }, + { + "epoch": 0.3054253181513731, + "grad_norm": 1.636785450848718, + "learning_rate": 3.812709030100335e-06, + "loss": 1.7466, + "step": 456 + }, + { + "epoch": 0.3060951105157401, + "grad_norm": 13.95276006309857, + "learning_rate": 3.821070234113713e-06, + "loss": 1.6478, + "step": 457 + }, + { + "epoch": 0.30676490288010716, + "grad_norm": 1.910734098112084, + "learning_rate": 3.8294314381270906e-06, + "loss": 1.7253, + "step": 458 + }, + { + "epoch": 0.3074346952444742, + "grad_norm": 1.6935829026437719, + "learning_rate": 3.837792642140469e-06, + "loss": 1.9572, + "step": 459 + }, + { + "epoch": 0.30810448760884124, + "grad_norm": 3.451475769133562, + "learning_rate": 3.846153846153847e-06, + "loss": 1.8615, + "step": 460 + }, + { + "epoch": 0.30877427997320833, + "grad_norm": 2.5521480811668753, + "learning_rate": 3.854515050167225e-06, + "loss": 1.7278, + "step": 461 + }, + { + "epoch": 0.30944407233757537, + "grad_norm": 3.52484277056804, + "learning_rate": 3.862876254180602e-06, + "loss": 1.7541, + "step": 462 + }, + { + "epoch": 0.3101138647019424, + "grad_norm": 1.7257527418781042, + "learning_rate": 3.87123745819398e-06, + "loss": 1.922, + "step": 463 + }, + { + "epoch": 0.31078365706630945, + "grad_norm": 2.260413455771488, + "learning_rate": 3.8795986622073584e-06, + "loss": 1.7582, + "step": 464 + }, + { + "epoch": 0.3114534494306765, + "grad_norm": 2.032015910138601, + "learning_rate": 3.8879598662207366e-06, + "loss": 1.8653, + "step": 465 + }, + { + "epoch": 0.3121232417950435, + "grad_norm": 1.6413311777138264, + "learning_rate": 3.896321070234114e-06, + "loss": 1.9201, + "step": 466 + }, + { + "epoch": 0.31279303415941057, + "grad_norm": 1.7274956929986782, + "learning_rate": 3.904682274247492e-06, + "loss": 1.6442, + "step": 467 + }, + { + "epoch": 0.3134628265237776, + "grad_norm": 2.0244939779991, + "learning_rate": 3.91304347826087e-06, + "loss": 1.8438, + "step": 468 + }, + { + "epoch": 0.3141326188881447, + "grad_norm": 1.684981835139464, + "learning_rate": 3.921404682274248e-06, + "loss": 1.793, + "step": 469 + }, + { + "epoch": 0.31480241125251174, + "grad_norm": 1.7959022304718073, + "learning_rate": 3.929765886287626e-06, + "loss": 1.5633, + "step": 470 + }, + { + "epoch": 0.3154722036168788, + "grad_norm": 2.930026562650521, + "learning_rate": 3.938127090301004e-06, + "loss": 1.7087, + "step": 471 + }, + { + "epoch": 0.3161419959812458, + "grad_norm": 1.973486155850852, + "learning_rate": 3.946488294314382e-06, + "loss": 1.7479, + "step": 472 + }, + { + "epoch": 0.31681178834561285, + "grad_norm": 1.6815723770538218, 
+ "learning_rate": 3.95484949832776e-06, + "loss": 1.7912, + "step": 473 + }, + { + "epoch": 0.3174815807099799, + "grad_norm": 1.9282224388928138, + "learning_rate": 3.963210702341138e-06, + "loss": 1.8369, + "step": 474 + }, + { + "epoch": 0.31815137307434693, + "grad_norm": 2.01814726573713, + "learning_rate": 3.971571906354515e-06, + "loss": 1.7185, + "step": 475 + }, + { + "epoch": 0.31882116543871397, + "grad_norm": 2.7777660983353507, + "learning_rate": 3.979933110367893e-06, + "loss": 1.8334, + "step": 476 + }, + { + "epoch": 0.31949095780308107, + "grad_norm": 3.926583034119562, + "learning_rate": 3.9882943143812715e-06, + "loss": 1.6354, + "step": 477 + }, + { + "epoch": 0.3201607501674481, + "grad_norm": 2.4259578648755182, + "learning_rate": 3.99665551839465e-06, + "loss": 1.5596, + "step": 478 + }, + { + "epoch": 0.32083054253181514, + "grad_norm": 1.894661945511024, + "learning_rate": 4.005016722408027e-06, + "loss": 1.8944, + "step": 479 + }, + { + "epoch": 0.3215003348961822, + "grad_norm": 2.383106612230074, + "learning_rate": 4.013377926421405e-06, + "loss": 1.7678, + "step": 480 + }, + { + "epoch": 0.3221701272605492, + "grad_norm": 4.287349213395872, + "learning_rate": 4.021739130434783e-06, + "loss": 1.8941, + "step": 481 + }, + { + "epoch": 0.32283991962491626, + "grad_norm": 17.262739443384906, + "learning_rate": 4.030100334448161e-06, + "loss": 1.7974, + "step": 482 + }, + { + "epoch": 0.3235097119892833, + "grad_norm": 2.3263819849325587, + "learning_rate": 4.0384615384615385e-06, + "loss": 1.7708, + "step": 483 + }, + { + "epoch": 0.3241795043536504, + "grad_norm": 2.9198404055942464, + "learning_rate": 4.046822742474917e-06, + "loss": 1.925, + "step": 484 + }, + { + "epoch": 0.32484929671801743, + "grad_norm": 1.9231923397664437, + "learning_rate": 4.055183946488295e-06, + "loss": 1.8055, + "step": 485 + }, + { + "epoch": 0.32551908908238447, + "grad_norm": 2.0024257954121842, + "learning_rate": 4.063545150501673e-06, + "loss": 1.5806, + "step": 486 + }, + { + "epoch": 0.3261888814467515, + "grad_norm": 4.074096483422249, + "learning_rate": 4.071906354515051e-06, + "loss": 2.0105, + "step": 487 + }, + { + "epoch": 0.32685867381111855, + "grad_norm": 8.653632809914845, + "learning_rate": 4.080267558528428e-06, + "loss": 1.6827, + "step": 488 + }, + { + "epoch": 0.3275284661754856, + "grad_norm": 2.920798372001984, + "learning_rate": 4.088628762541806e-06, + "loss": 2.0726, + "step": 489 + }, + { + "epoch": 0.3281982585398526, + "grad_norm": 1.7836973873966844, + "learning_rate": 4.0969899665551845e-06, + "loss": 1.969, + "step": 490 + }, + { + "epoch": 0.32886805090421967, + "grad_norm": 2.1669460118128123, + "learning_rate": 4.105351170568563e-06, + "loss": 1.8843, + "step": 491 + }, + { + "epoch": 0.32953784326858676, + "grad_norm": 2.024411807417877, + "learning_rate": 4.11371237458194e-06, + "loss": 1.6812, + "step": 492 + }, + { + "epoch": 0.3302076356329538, + "grad_norm": 1.6744946809923227, + "learning_rate": 4.122073578595318e-06, + "loss": 1.7954, + "step": 493 + }, + { + "epoch": 0.33087742799732084, + "grad_norm": 2.0891961358667297, + "learning_rate": 4.130434782608696e-06, + "loss": 1.8219, + "step": 494 + }, + { + "epoch": 0.3315472203616879, + "grad_norm": 2.35559303603382, + "learning_rate": 4.138795986622074e-06, + "loss": 1.6313, + "step": 495 + }, + { + "epoch": 0.3322170127260549, + "grad_norm": 1.6586081100279004, + "learning_rate": 4.1471571906354515e-06, + "loss": 1.9108, + "step": 496 + }, + { + "epoch": 0.33288680509042196, + "grad_norm": 
1.9044423492498974, + "learning_rate": 4.15551839464883e-06, + "loss": 1.9776, + "step": 497 + }, + { + "epoch": 0.333556597454789, + "grad_norm": 1.8873250884753219, + "learning_rate": 4.163879598662208e-06, + "loss": 1.7207, + "step": 498 + }, + { + "epoch": 0.3342263898191561, + "grad_norm": 5.157341800508888, + "learning_rate": 4.172240802675586e-06, + "loss": 1.8805, + "step": 499 + }, + { + "epoch": 0.33489618218352313, + "grad_norm": 1.7887571819610584, + "learning_rate": 4.180602006688963e-06, + "loss": 1.808, + "step": 500 + }, + { + "epoch": 0.33556597454789017, + "grad_norm": 3.4529962466529596, + "learning_rate": 4.188963210702341e-06, + "loss": 1.6756, + "step": 501 + }, + { + "epoch": 0.3362357669122572, + "grad_norm": 3.1287953962727864, + "learning_rate": 4.197324414715719e-06, + "loss": 2.0067, + "step": 502 + }, + { + "epoch": 0.33690555927662424, + "grad_norm": 1.7673731734017915, + "learning_rate": 4.2056856187290975e-06, + "loss": 1.9651, + "step": 503 + }, + { + "epoch": 0.3375753516409913, + "grad_norm": 1.5986697554411962, + "learning_rate": 4.214046822742475e-06, + "loss": 1.9179, + "step": 504 + }, + { + "epoch": 0.3382451440053583, + "grad_norm": 2.4218635548423912, + "learning_rate": 4.222408026755853e-06, + "loss": 1.8456, + "step": 505 + }, + { + "epoch": 0.33891493636972536, + "grad_norm": 2.5255273844161397, + "learning_rate": 4.230769230769231e-06, + "loss": 1.8482, + "step": 506 + }, + { + "epoch": 0.33958472873409246, + "grad_norm": 6.023653501042373, + "learning_rate": 4.239130434782609e-06, + "loss": 1.7233, + "step": 507 + }, + { + "epoch": 0.3402545210984595, + "grad_norm": 1.7893662460270319, + "learning_rate": 4.247491638795987e-06, + "loss": 1.8472, + "step": 508 + }, + { + "epoch": 0.34092431346282653, + "grad_norm": 1.5559752211889448, + "learning_rate": 4.2558528428093646e-06, + "loss": 1.8129, + "step": 509 + }, + { + "epoch": 0.3415941058271936, + "grad_norm": 1.623136167408694, + "learning_rate": 4.264214046822743e-06, + "loss": 1.6959, + "step": 510 + }, + { + "epoch": 0.3422638981915606, + "grad_norm": 1.8093195127946324, + "learning_rate": 4.272575250836121e-06, + "loss": 1.8652, + "step": 511 + }, + { + "epoch": 0.34293369055592765, + "grad_norm": 2.8631731380434062, + "learning_rate": 4.280936454849499e-06, + "loss": 1.7024, + "step": 512 + }, + { + "epoch": 0.3436034829202947, + "grad_norm": 1.8792325468496325, + "learning_rate": 4.289297658862876e-06, + "loss": 1.9857, + "step": 513 + }, + { + "epoch": 0.34427327528466173, + "grad_norm": 2.424617834949338, + "learning_rate": 4.297658862876254e-06, + "loss": 1.9404, + "step": 514 + }, + { + "epoch": 0.3449430676490288, + "grad_norm": 1.7343815949489643, + "learning_rate": 4.3060200668896324e-06, + "loss": 1.76, + "step": 515 + }, + { + "epoch": 0.34561286001339586, + "grad_norm": 1.9392776072412408, + "learning_rate": 4.3143812709030106e-06, + "loss": 1.74, + "step": 516 + }, + { + "epoch": 0.3462826523777629, + "grad_norm": 2.736009524865389, + "learning_rate": 4.322742474916388e-06, + "loss": 1.8155, + "step": 517 + }, + { + "epoch": 0.34695244474212994, + "grad_norm": 2.564593490322407, + "learning_rate": 4.331103678929766e-06, + "loss": 1.7382, + "step": 518 + }, + { + "epoch": 0.347622237106497, + "grad_norm": 1.6707343298220867, + "learning_rate": 4.339464882943144e-06, + "loss": 1.6926, + "step": 519 + }, + { + "epoch": 0.348292029470864, + "grad_norm": 1.9108501285366601, + "learning_rate": 4.347826086956522e-06, + "loss": 1.4583, + "step": 520 + }, + { + "epoch": 
0.34896182183523106, + "grad_norm": 2.7234099789028905, + "learning_rate": 4.3561872909698995e-06, + "loss": 1.4684, + "step": 521 + }, + { + "epoch": 0.34963161419959815, + "grad_norm": 3.2495607453767676, + "learning_rate": 4.364548494983278e-06, + "loss": 1.8327, + "step": 522 + }, + { + "epoch": 0.3503014065639652, + "grad_norm": 1.6924757484658102, + "learning_rate": 4.372909698996656e-06, + "loss": 1.8901, + "step": 523 + }, + { + "epoch": 0.35097119892833223, + "grad_norm": 1.5764046525163686, + "learning_rate": 4.381270903010034e-06, + "loss": 1.7401, + "step": 524 + }, + { + "epoch": 0.35164099129269927, + "grad_norm": 9.21205666355632, + "learning_rate": 4.389632107023412e-06, + "loss": 1.5702, + "step": 525 + }, + { + "epoch": 0.3523107836570663, + "grad_norm": 2.6148122530230204, + "learning_rate": 4.397993311036789e-06, + "loss": 1.7683, + "step": 526 + }, + { + "epoch": 0.35298057602143335, + "grad_norm": 2.8246879430806895, + "learning_rate": 4.406354515050167e-06, + "loss": 1.7654, + "step": 527 + }, + { + "epoch": 0.3536503683858004, + "grad_norm": 2.790456497683111, + "learning_rate": 4.4147157190635455e-06, + "loss": 1.9526, + "step": 528 + }, + { + "epoch": 0.3543201607501674, + "grad_norm": 2.290518823991492, + "learning_rate": 4.423076923076924e-06, + "loss": 1.7873, + "step": 529 + }, + { + "epoch": 0.3549899531145345, + "grad_norm": 1.6333752164453508, + "learning_rate": 4.431438127090301e-06, + "loss": 1.8897, + "step": 530 + }, + { + "epoch": 0.35565974547890156, + "grad_norm": 1.6192495331675452, + "learning_rate": 4.439799331103679e-06, + "loss": 1.6359, + "step": 531 + }, + { + "epoch": 0.3563295378432686, + "grad_norm": 1.8172924200415748, + "learning_rate": 4.448160535117057e-06, + "loss": 1.7878, + "step": 532 + }, + { + "epoch": 0.35699933020763563, + "grad_norm": 1.7236536910834495, + "learning_rate": 4.456521739130435e-06, + "loss": 1.7951, + "step": 533 + }, + { + "epoch": 0.3576691225720027, + "grad_norm": 2.048242285370518, + "learning_rate": 4.4648829431438125e-06, + "loss": 1.7748, + "step": 534 + }, + { + "epoch": 0.3583389149363697, + "grad_norm": 2.1835192129974526, + "learning_rate": 4.473244147157191e-06, + "loss": 1.9695, + "step": 535 + }, + { + "epoch": 0.35900870730073675, + "grad_norm": 2.800703907494424, + "learning_rate": 4.481605351170569e-06, + "loss": 1.7675, + "step": 536 + }, + { + "epoch": 0.3596784996651038, + "grad_norm": 1.876627998032182, + "learning_rate": 4.489966555183947e-06, + "loss": 1.8646, + "step": 537 + }, + { + "epoch": 0.3603482920294709, + "grad_norm": 1.8418892826958253, + "learning_rate": 4.498327759197324e-06, + "loss": 1.8061, + "step": 538 + }, + { + "epoch": 0.3610180843938379, + "grad_norm": 4.151897776912136, + "learning_rate": 4.506688963210702e-06, + "loss": 1.6127, + "step": 539 + }, + { + "epoch": 0.36168787675820496, + "grad_norm": 2.513008859557772, + "learning_rate": 4.51505016722408e-06, + "loss": 1.653, + "step": 540 + }, + { + "epoch": 0.362357669122572, + "grad_norm": 3.6654961419338634, + "learning_rate": 4.5234113712374585e-06, + "loss": 1.8682, + "step": 541 + }, + { + "epoch": 0.36302746148693904, + "grad_norm": 1.6781072137869602, + "learning_rate": 4.531772575250837e-06, + "loss": 1.8939, + "step": 542 + }, + { + "epoch": 0.3636972538513061, + "grad_norm": 1.7572538990673425, + "learning_rate": 4.540133779264214e-06, + "loss": 1.9224, + "step": 543 + }, + { + "epoch": 0.3643670462156731, + "grad_norm": 3.6181530550865117, + "learning_rate": 4.548494983277592e-06, + "loss": 1.7177, + "step": 
544 + }, + { + "epoch": 0.3650368385800402, + "grad_norm": 2.7980268395733185, + "learning_rate": 4.55685618729097e-06, + "loss": 1.7844, + "step": 545 + }, + { + "epoch": 0.36570663094440725, + "grad_norm": 3.1812675826708743, + "learning_rate": 4.565217391304348e-06, + "loss": 1.5221, + "step": 546 + }, + { + "epoch": 0.3663764233087743, + "grad_norm": 4.52669533045387, + "learning_rate": 4.5735785953177255e-06, + "loss": 1.7086, + "step": 547 + }, + { + "epoch": 0.36704621567314133, + "grad_norm": 1.6463121776871146, + "learning_rate": 4.581939799331104e-06, + "loss": 1.7353, + "step": 548 + }, + { + "epoch": 0.36771600803750837, + "grad_norm": 1.8073862365612663, + "learning_rate": 4.590301003344483e-06, + "loss": 1.9638, + "step": 549 + }, + { + "epoch": 0.3683858004018754, + "grad_norm": 2.0179101333472893, + "learning_rate": 4.59866220735786e-06, + "loss": 1.7945, + "step": 550 + }, + { + "epoch": 0.36905559276624245, + "grad_norm": 1.6564780055548576, + "learning_rate": 4.607023411371238e-06, + "loss": 1.8202, + "step": 551 + }, + { + "epoch": 0.3697253851306095, + "grad_norm": 2.0148030765402654, + "learning_rate": 4.615384615384616e-06, + "loss": 1.9069, + "step": 552 + }, + { + "epoch": 0.3703951774949766, + "grad_norm": 2.4161963452664654, + "learning_rate": 4.623745819397994e-06, + "loss": 1.9185, + "step": 553 + }, + { + "epoch": 0.3710649698593436, + "grad_norm": 3.05169596195641, + "learning_rate": 4.6321070234113715e-06, + "loss": 1.8842, + "step": 554 + }, + { + "epoch": 0.37173476222371066, + "grad_norm": 2.979148872321494, + "learning_rate": 4.64046822742475e-06, + "loss": 1.7889, + "step": 555 + }, + { + "epoch": 0.3724045545880777, + "grad_norm": 1.57317041901, + "learning_rate": 4.648829431438128e-06, + "loss": 1.7531, + "step": 556 + }, + { + "epoch": 0.37307434695244474, + "grad_norm": 2.1202995952268804, + "learning_rate": 4.657190635451506e-06, + "loss": 1.5471, + "step": 557 + }, + { + "epoch": 0.3737441393168118, + "grad_norm": 2.36573256855359, + "learning_rate": 4.665551839464883e-06, + "loss": 1.8788, + "step": 558 + }, + { + "epoch": 0.3744139316811788, + "grad_norm": 1.746347304150331, + "learning_rate": 4.673913043478261e-06, + "loss": 1.722, + "step": 559 + }, + { + "epoch": 0.3750837240455459, + "grad_norm": 1.812749180422219, + "learning_rate": 4.6822742474916394e-06, + "loss": 1.61, + "step": 560 + }, + { + "epoch": 0.37575351640991295, + "grad_norm": 2.6230948506378917, + "learning_rate": 4.6906354515050175e-06, + "loss": 1.7787, + "step": 561 + }, + { + "epoch": 0.37642330877428, + "grad_norm": 2.2101814696040267, + "learning_rate": 4.698996655518395e-06, + "loss": 1.7179, + "step": 562 + }, + { + "epoch": 0.377093101138647, + "grad_norm": 2.892566657890582, + "learning_rate": 4.707357859531773e-06, + "loss": 2.0691, + "step": 563 + }, + { + "epoch": 0.37776289350301406, + "grad_norm": 1.6258723747483725, + "learning_rate": 4.715719063545151e-06, + "loss": 1.7493, + "step": 564 + }, + { + "epoch": 0.3784326858673811, + "grad_norm": 1.7806291417803495, + "learning_rate": 4.724080267558529e-06, + "loss": 1.8362, + "step": 565 + }, + { + "epoch": 0.37910247823174814, + "grad_norm": 2.753364781733513, + "learning_rate": 4.732441471571907e-06, + "loss": 2.0198, + "step": 566 + }, + { + "epoch": 0.3797722705961152, + "grad_norm": 1.5829897804961608, + "learning_rate": 4.740802675585285e-06, + "loss": 1.8578, + "step": 567 + }, + { + "epoch": 0.3804420629604823, + "grad_norm": 1.6467675935197494, + "learning_rate": 4.749163879598663e-06, + "loss": 1.8946, 
+ "step": 568 + }, + { + "epoch": 0.3811118553248493, + "grad_norm": 2.060400214278387, + "learning_rate": 4.757525083612041e-06, + "loss": 1.8322, + "step": 569 + }, + { + "epoch": 0.38178164768921635, + "grad_norm": 1.6477768581053345, + "learning_rate": 4.765886287625419e-06, + "loss": 1.5595, + "step": 570 + }, + { + "epoch": 0.3824514400535834, + "grad_norm": 3.532600452033511, + "learning_rate": 4.774247491638796e-06, + "loss": 1.6653, + "step": 571 + }, + { + "epoch": 0.38312123241795043, + "grad_norm": 1.648809601496548, + "learning_rate": 4.782608695652174e-06, + "loss": 1.9188, + "step": 572 + }, + { + "epoch": 0.38379102478231747, + "grad_norm": 2.55998175763192, + "learning_rate": 4.7909698996655525e-06, + "loss": 1.6643, + "step": 573 + }, + { + "epoch": 0.3844608171466845, + "grad_norm": 1.738695768332351, + "learning_rate": 4.799331103678931e-06, + "loss": 1.7798, + "step": 574 + }, + { + "epoch": 0.38513060951105155, + "grad_norm": 1.6808342141214436, + "learning_rate": 4.807692307692308e-06, + "loss": 1.8368, + "step": 575 + }, + { + "epoch": 0.38580040187541864, + "grad_norm": 2.3157726568277224, + "learning_rate": 4.816053511705686e-06, + "loss": 1.7718, + "step": 576 + }, + { + "epoch": 0.3864701942397857, + "grad_norm": 1.7126751987666322, + "learning_rate": 4.824414715719064e-06, + "loss": 1.6806, + "step": 577 + }, + { + "epoch": 0.3871399866041527, + "grad_norm": 2.2256035633126916, + "learning_rate": 4.832775919732442e-06, + "loss": 1.7348, + "step": 578 + }, + { + "epoch": 0.38780977896851976, + "grad_norm": 1.6603419994872395, + "learning_rate": 4.8411371237458195e-06, + "loss": 1.5764, + "step": 579 + }, + { + "epoch": 0.3884795713328868, + "grad_norm": 8.888767592260743, + "learning_rate": 4.849498327759198e-06, + "loss": 1.7403, + "step": 580 + }, + { + "epoch": 0.38914936369725384, + "grad_norm": 2.894179605021347, + "learning_rate": 4.857859531772576e-06, + "loss": 1.6321, + "step": 581 + }, + { + "epoch": 0.3898191560616209, + "grad_norm": 1.9576350655188808, + "learning_rate": 4.866220735785954e-06, + "loss": 1.6145, + "step": 582 + }, + { + "epoch": 0.39048894842598797, + "grad_norm": 1.6884451992417333, + "learning_rate": 4.874581939799332e-06, + "loss": 1.6868, + "step": 583 + }, + { + "epoch": 0.391158740790355, + "grad_norm": 1.6477071060395296, + "learning_rate": 4.882943143812709e-06, + "loss": 1.8702, + "step": 584 + }, + { + "epoch": 0.39182853315472205, + "grad_norm": 2.3181553620122015, + "learning_rate": 4.891304347826087e-06, + "loss": 1.8262, + "step": 585 + }, + { + "epoch": 0.3924983255190891, + "grad_norm": 1.5649739968692382, + "learning_rate": 4.8996655518394655e-06, + "loss": 1.8505, + "step": 586 + }, + { + "epoch": 0.3931681178834561, + "grad_norm": 1.7184654140906273, + "learning_rate": 4.908026755852844e-06, + "loss": 1.724, + "step": 587 + }, + { + "epoch": 0.39383791024782316, + "grad_norm": 1.6082306098105228, + "learning_rate": 4.916387959866221e-06, + "loss": 1.7596, + "step": 588 + }, + { + "epoch": 0.3945077026121902, + "grad_norm": 1.6738135205078282, + "learning_rate": 4.924749163879599e-06, + "loss": 1.9126, + "step": 589 + }, + { + "epoch": 0.39517749497655724, + "grad_norm": 1.739626447869813, + "learning_rate": 4.933110367892977e-06, + "loss": 1.8739, + "step": 590 + }, + { + "epoch": 0.39584728734092434, + "grad_norm": 1.9378437151933805, + "learning_rate": 4.941471571906355e-06, + "loss": 1.8099, + "step": 591 + }, + { + "epoch": 0.3965170797052914, + "grad_norm": 2.4028183854724317, + "learning_rate": 
4.9498327759197325e-06, + "loss": 1.8086, + "step": 592 + }, + { + "epoch": 0.3971868720696584, + "grad_norm": 3.4326504777253213, + "learning_rate": 4.958193979933111e-06, + "loss": 1.6477, + "step": 593 + }, + { + "epoch": 0.39785666443402545, + "grad_norm": 1.7789444740239244, + "learning_rate": 4.966555183946489e-06, + "loss": 1.6677, + "step": 594 + }, + { + "epoch": 0.3985264567983925, + "grad_norm": 2.9129370101170515, + "learning_rate": 4.974916387959867e-06, + "loss": 1.7614, + "step": 595 + }, + { + "epoch": 0.39919624916275953, + "grad_norm": 1.7075840244833347, + "learning_rate": 4.983277591973244e-06, + "loss": 1.724, + "step": 596 + }, + { + "epoch": 0.39986604152712657, + "grad_norm": 1.5255271869522227, + "learning_rate": 4.991638795986622e-06, + "loss": 1.7466, + "step": 597 + }, + { + "epoch": 0.40053583389149366, + "grad_norm": 1.8024094406404065, + "learning_rate": 5e-06, + "loss": 1.9782, + "step": 598 + }, + { + "epoch": 0.4012056262558607, + "grad_norm": 1.637680200654763, + "learning_rate": 4.9999995728165475e-06, + "loss": 1.8057, + "step": 599 + }, + { + "epoch": 0.40187541862022774, + "grad_norm": 2.6935738993512195, + "learning_rate": 4.9999982912663345e-06, + "loss": 1.8068, + "step": 600 + }, + { + "epoch": 0.4025452109845948, + "grad_norm": 3.2255905058013363, + "learning_rate": 4.9999961553498e-06, + "loss": 1.7269, + "step": 601 + }, + { + "epoch": 0.4032150033489618, + "grad_norm": 2.1702317095201393, + "learning_rate": 4.999993165067674e-06, + "loss": 1.6779, + "step": 602 + }, + { + "epoch": 0.40388479571332886, + "grad_norm": 2.9415575380619723, + "learning_rate": 4.999989320420977e-06, + "loss": 1.713, + "step": 603 + }, + { + "epoch": 0.4045545880776959, + "grad_norm": 3.1360096363399275, + "learning_rate": 4.999984621411024e-06, + "loss": 1.8201, + "step": 604 + }, + { + "epoch": 0.40522438044206294, + "grad_norm": 2.997618620572075, + "learning_rate": 4.99997906803942e-06, + "loss": 1.8102, + "step": 605 + }, + { + "epoch": 0.40589417280643003, + "grad_norm": 2.5376145532329617, + "learning_rate": 4.999972660308064e-06, + "loss": 1.8047, + "step": 606 + }, + { + "epoch": 0.40656396517079707, + "grad_norm": 1.6939883338148145, + "learning_rate": 4.9999653982191465e-06, + "loss": 1.8485, + "step": 607 + }, + { + "epoch": 0.4072337575351641, + "grad_norm": 2.465981211280775, + "learning_rate": 4.999957281775148e-06, + "loss": 1.9827, + "step": 608 + }, + { + "epoch": 0.40790354989953115, + "grad_norm": 1.6853999597278386, + "learning_rate": 4.999948310978842e-06, + "loss": 1.6904, + "step": 609 + }, + { + "epoch": 0.4085733422638982, + "grad_norm": 2.7578818604958477, + "learning_rate": 4.999938485833293e-06, + "loss": 1.7586, + "step": 610 + }, + { + "epoch": 0.4092431346282652, + "grad_norm": 1.7161074729027967, + "learning_rate": 4.999927806341863e-06, + "loss": 1.7549, + "step": 611 + }, + { + "epoch": 0.40991292699263226, + "grad_norm": 1.5496028853540131, + "learning_rate": 4.999916272508198e-06, + "loss": 1.8013, + "step": 612 + }, + { + "epoch": 0.4105827193569993, + "grad_norm": 4.299669215376763, + "learning_rate": 4.99990388433624e-06, + "loss": 1.7674, + "step": 613 + }, + { + "epoch": 0.4112525117213664, + "grad_norm": 2.540204998369217, + "learning_rate": 4.999890641830225e-06, + "loss": 1.5692, + "step": 614 + }, + { + "epoch": 0.41192230408573344, + "grad_norm": 1.817577577517568, + "learning_rate": 4.999876544994676e-06, + "loss": 1.7025, + "step": 615 + }, + { + "epoch": 0.4125920964501005, + "grad_norm": 2.509243940609527, + 
"learning_rate": 4.9998615938344106e-06, + "loss": 1.6595, + "step": 616 + }, + { + "epoch": 0.4132618888144675, + "grad_norm": 1.8580354982512808, + "learning_rate": 4.9998457883545405e-06, + "loss": 1.5812, + "step": 617 + }, + { + "epoch": 0.41393168117883455, + "grad_norm": 1.6526972717831616, + "learning_rate": 4.999829128560465e-06, + "loss": 1.768, + "step": 618 + }, + { + "epoch": 0.4146014735432016, + "grad_norm": 3.3532010447818803, + "learning_rate": 4.999811614457879e-06, + "loss": 1.7025, + "step": 619 + }, + { + "epoch": 0.41527126590756863, + "grad_norm": 1.7378644419733065, + "learning_rate": 4.999793246052768e-06, + "loss": 1.8465, + "step": 620 + }, + { + "epoch": 0.4159410582719357, + "grad_norm": 1.6767536521599544, + "learning_rate": 4.999774023351407e-06, + "loss": 1.9093, + "step": 621 + }, + { + "epoch": 0.41661085063630277, + "grad_norm": 2.1973922281995657, + "learning_rate": 4.999753946360368e-06, + "loss": 1.7524, + "step": 622 + }, + { + "epoch": 0.4172806430006698, + "grad_norm": 1.6763458098313042, + "learning_rate": 4.999733015086511e-06, + "loss": 1.6753, + "step": 623 + }, + { + "epoch": 0.41795043536503684, + "grad_norm": 2.64313650191554, + "learning_rate": 4.999711229536989e-06, + "loss": 1.9269, + "step": 624 + }, + { + "epoch": 0.4186202277294039, + "grad_norm": 1.7605171014826582, + "learning_rate": 4.999688589719248e-06, + "loss": 1.6209, + "step": 625 + }, + { + "epoch": 0.4192900200937709, + "grad_norm": 1.5407778054393315, + "learning_rate": 4.999665095641024e-06, + "loss": 1.7029, + "step": 626 + }, + { + "epoch": 0.41995981245813796, + "grad_norm": 2.468437554871246, + "learning_rate": 4.999640747310347e-06, + "loss": 1.7308, + "step": 627 + }, + { + "epoch": 0.420629604822505, + "grad_norm": 1.6553742236041205, + "learning_rate": 4.999615544735537e-06, + "loss": 1.8789, + "step": 628 + }, + { + "epoch": 0.4212993971868721, + "grad_norm": 4.636657907249763, + "learning_rate": 4.999589487925208e-06, + "loss": 1.6891, + "step": 629 + }, + { + "epoch": 0.42196918955123913, + "grad_norm": 7.750856552695607, + "learning_rate": 4.999562576888264e-06, + "loss": 1.7537, + "step": 630 + }, + { + "epoch": 0.42263898191560617, + "grad_norm": 1.6455780301144587, + "learning_rate": 4.999534811633903e-06, + "loss": 1.8158, + "step": 631 + }, + { + "epoch": 0.4233087742799732, + "grad_norm": 1.6111775169339808, + "learning_rate": 4.999506192171611e-06, + "loss": 1.8478, + "step": 632 + }, + { + "epoch": 0.42397856664434025, + "grad_norm": 4.171393351509834, + "learning_rate": 4.999476718511172e-06, + "loss": 1.6646, + "step": 633 + }, + { + "epoch": 0.4246483590087073, + "grad_norm": 2.033293875881388, + "learning_rate": 4.999446390662657e-06, + "loss": 1.5765, + "step": 634 + }, + { + "epoch": 0.4253181513730743, + "grad_norm": 2.707724069700208, + "learning_rate": 4.999415208636429e-06, + "loss": 1.9341, + "step": 635 + }, + { + "epoch": 0.42598794373744137, + "grad_norm": 3.419337173240486, + "learning_rate": 4.999383172443146e-06, + "loss": 1.8075, + "step": 636 + }, + { + "epoch": 0.42665773610180846, + "grad_norm": 1.8138518832868824, + "learning_rate": 4.999350282093757e-06, + "loss": 1.8447, + "step": 637 + }, + { + "epoch": 0.4273275284661755, + "grad_norm": 1.6288210328048967, + "learning_rate": 4.9993165375995e-06, + "loss": 1.7029, + "step": 638 + }, + { + "epoch": 0.42799732083054254, + "grad_norm": 1.593037461961953, + "learning_rate": 4.9992819389719085e-06, + "loss": 1.7538, + "step": 639 + }, + { + "epoch": 0.4286671131949096, + "grad_norm": 
1.7656901479859104, + "learning_rate": 4.999246486222806e-06, + "loss": 2.0202, + "step": 640 + }, + { + "epoch": 0.4293369055592766, + "grad_norm": 1.5251495333958622, + "learning_rate": 4.999210179364309e-06, + "loss": 1.8122, + "step": 641 + }, + { + "epoch": 0.43000669792364365, + "grad_norm": 3.5211670925353187, + "learning_rate": 4.999173018408824e-06, + "loss": 1.6459, + "step": 642 + }, + { + "epoch": 0.4306764902880107, + "grad_norm": 1.8438739793998244, + "learning_rate": 4.999135003369052e-06, + "loss": 1.6738, + "step": 643 + }, + { + "epoch": 0.4313462826523778, + "grad_norm": 3.074060964796996, + "learning_rate": 4.999096134257984e-06, + "loss": 1.7089, + "step": 644 + }, + { + "epoch": 0.4320160750167448, + "grad_norm": 3.254433685887632, + "learning_rate": 4.999056411088903e-06, + "loss": 1.875, + "step": 645 + }, + { + "epoch": 0.43268586738111187, + "grad_norm": 4.568637373511861, + "learning_rate": 4.999015833875385e-06, + "loss": 1.7922, + "step": 646 + }, + { + "epoch": 0.4333556597454789, + "grad_norm": 2.691286624981353, + "learning_rate": 4.998974402631296e-06, + "loss": 1.6918, + "step": 647 + }, + { + "epoch": 0.43402545210984594, + "grad_norm": 2.8953334384911558, + "learning_rate": 4.998932117370796e-06, + "loss": 1.9881, + "step": 648 + }, + { + "epoch": 0.434695244474213, + "grad_norm": 3.994880469745825, + "learning_rate": 4.998888978108336e-06, + "loss": 1.7304, + "step": 649 + }, + { + "epoch": 0.43536503683858, + "grad_norm": 2.756765348851459, + "learning_rate": 4.9988449848586585e-06, + "loss": 1.8374, + "step": 650 + }, + { + "epoch": 0.43603482920294706, + "grad_norm": 1.6057548304289995, + "learning_rate": 4.998800137636797e-06, + "loss": 1.7164, + "step": 651 + }, + { + "epoch": 0.43670462156731416, + "grad_norm": 2.1697054106546623, + "learning_rate": 4.9987544364580794e-06, + "loss": 1.6256, + "step": 652 + }, + { + "epoch": 0.4373744139316812, + "grad_norm": 1.780484223306451, + "learning_rate": 4.998707881338123e-06, + "loss": 1.7952, + "step": 653 + }, + { + "epoch": 0.43804420629604823, + "grad_norm": 3.018792173748664, + "learning_rate": 4.998660472292838e-06, + "loss": 1.6737, + "step": 654 + }, + { + "epoch": 0.43871399866041527, + "grad_norm": 1.8125731389149329, + "learning_rate": 4.998612209338426e-06, + "loss": 1.8052, + "step": 655 + }, + { + "epoch": 0.4393837910247823, + "grad_norm": 1.6991469117223725, + "learning_rate": 4.998563092491382e-06, + "loss": 2.0089, + "step": 656 + }, + { + "epoch": 0.44005358338914935, + "grad_norm": 1.4893069091645772, + "learning_rate": 4.998513121768489e-06, + "loss": 1.7303, + "step": 657 + }, + { + "epoch": 0.4407233757535164, + "grad_norm": 6.108135591841628, + "learning_rate": 4.998462297186828e-06, + "loss": 1.8064, + "step": 658 + }, + { + "epoch": 0.4413931681178835, + "grad_norm": 2.263279424015309, + "learning_rate": 4.998410618763765e-06, + "loss": 1.6255, + "step": 659 + }, + { + "epoch": 0.4420629604822505, + "grad_norm": 2.938188705787677, + "learning_rate": 4.998358086516962e-06, + "loss": 1.8447, + "step": 660 + }, + { + "epoch": 0.44273275284661756, + "grad_norm": 1.7027523312724893, + "learning_rate": 4.998304700464371e-06, + "loss": 1.7455, + "step": 661 + }, + { + "epoch": 0.4434025452109846, + "grad_norm": 2.4219857085305816, + "learning_rate": 4.998250460624239e-06, + "loss": 1.6717, + "step": 662 + }, + { + "epoch": 0.44407233757535164, + "grad_norm": 3.002808774547495, + "learning_rate": 4.9981953670151e-06, + "loss": 1.7746, + "step": 663 + }, + { + "epoch": 0.4447421299397187, 
+ "grad_norm": 1.6219259510719335, + "learning_rate": 4.998139419655782e-06, + "loss": 1.5888, + "step": 664 + }, + { + "epoch": 0.4454119223040857, + "grad_norm": 1.6934678947265738, + "learning_rate": 4.998082618565405e-06, + "loss": 1.8558, + "step": 665 + }, + { + "epoch": 0.44608171466845276, + "grad_norm": 1.6721942613853913, + "learning_rate": 4.998024963763383e-06, + "loss": 1.7906, + "step": 666 + }, + { + "epoch": 0.44675150703281985, + "grad_norm": 2.084592732299423, + "learning_rate": 4.997966455269415e-06, + "loss": 1.8061, + "step": 667 + }, + { + "epoch": 0.4474212993971869, + "grad_norm": 1.7825512284914542, + "learning_rate": 4.9979070931035e-06, + "loss": 1.7002, + "step": 668 + }, + { + "epoch": 0.44809109176155393, + "grad_norm": 1.4929707964466492, + "learning_rate": 4.997846877285923e-06, + "loss": 1.7401, + "step": 669 + }, + { + "epoch": 0.44876088412592097, + "grad_norm": 3.438040386200981, + "learning_rate": 4.997785807837263e-06, + "loss": 1.6773, + "step": 670 + }, + { + "epoch": 0.449430676490288, + "grad_norm": 2.580045966960885, + "learning_rate": 4.99772388477839e-06, + "loss": 1.936, + "step": 671 + }, + { + "epoch": 0.45010046885465504, + "grad_norm": 1.437803156294496, + "learning_rate": 4.9976611081304655e-06, + "loss": 1.6901, + "step": 672 + }, + { + "epoch": 0.4507702612190221, + "grad_norm": 1.503925231144287, + "learning_rate": 4.997597477914944e-06, + "loss": 1.8481, + "step": 673 + }, + { + "epoch": 0.4514400535833891, + "grad_norm": 2.0250365759724303, + "learning_rate": 4.997532994153571e-06, + "loss": 1.7169, + "step": 674 + }, + { + "epoch": 0.4521098459477562, + "grad_norm": 1.5035987412385643, + "learning_rate": 4.997467656868384e-06, + "loss": 1.799, + "step": 675 + }, + { + "epoch": 0.45277963831212326, + "grad_norm": 1.9738274528388422, + "learning_rate": 4.9974014660817105e-06, + "loss": 1.7637, + "step": 676 + }, + { + "epoch": 0.4534494306764903, + "grad_norm": 2.2465834337763106, + "learning_rate": 4.997334421816172e-06, + "loss": 1.6189, + "step": 677 + }, + { + "epoch": 0.45411922304085733, + "grad_norm": 1.6670349333665777, + "learning_rate": 4.99726652409468e-06, + "loss": 1.585, + "step": 678 + }, + { + "epoch": 0.4547890154052244, + "grad_norm": 1.5672945422820195, + "learning_rate": 4.997197772940439e-06, + "loss": 1.6647, + "step": 679 + }, + { + "epoch": 0.4554588077695914, + "grad_norm": 1.70891099170233, + "learning_rate": 4.9971281683769446e-06, + "loss": 1.6608, + "step": 680 + }, + { + "epoch": 0.45612860013395845, + "grad_norm": 3.5174087454993144, + "learning_rate": 4.997057710427983e-06, + "loss": 1.9431, + "step": 681 + }, + { + "epoch": 0.45679839249832555, + "grad_norm": 1.4942574869394278, + "learning_rate": 4.996986399117633e-06, + "loss": 1.7152, + "step": 682 + }, + { + "epoch": 0.4574681848626926, + "grad_norm": 2.65194710816166, + "learning_rate": 4.996914234470266e-06, + "loss": 1.632, + "step": 683 + }, + { + "epoch": 0.4581379772270596, + "grad_norm": 1.5745984032230491, + "learning_rate": 4.996841216510543e-06, + "loss": 1.6749, + "step": 684 + }, + { + "epoch": 0.45880776959142666, + "grad_norm": 1.8007268438791417, + "learning_rate": 4.996767345263419e-06, + "loss": 1.8869, + "step": 685 + }, + { + "epoch": 0.4594775619557937, + "grad_norm": 3.3624498548717874, + "learning_rate": 4.996692620754138e-06, + "loss": 1.7886, + "step": 686 + }, + { + "epoch": 0.46014735432016074, + "grad_norm": 2.3238922768218067, + "learning_rate": 4.996617043008237e-06, + "loss": 1.9652, + "step": 687 + }, + { + "epoch": 
0.4608171466845278, + "grad_norm": 2.183867345515146, + "learning_rate": 4.996540612051544e-06, + "loss": 1.8282, + "step": 688 + }, + { + "epoch": 0.4614869390488948, + "grad_norm": 2.4102980885917944, + "learning_rate": 4.996463327910182e-06, + "loss": 1.8408, + "step": 689 + }, + { + "epoch": 0.4621567314132619, + "grad_norm": 1.7024153615110251, + "learning_rate": 4.996385190610559e-06, + "loss": 1.7089, + "step": 690 + }, + { + "epoch": 0.46282652377762895, + "grad_norm": 1.5574585945343966, + "learning_rate": 4.996306200179379e-06, + "loss": 1.8266, + "step": 691 + }, + { + "epoch": 0.463496316141996, + "grad_norm": 1.6778201114858005, + "learning_rate": 4.996226356643638e-06, + "loss": 1.8571, + "step": 692 + }, + { + "epoch": 0.46416610850636303, + "grad_norm": 2.07701903952786, + "learning_rate": 4.9961456600306215e-06, + "loss": 1.5136, + "step": 693 + }, + { + "epoch": 0.46483590087073007, + "grad_norm": 1.782970493013676, + "learning_rate": 4.996064110367908e-06, + "loss": 1.7663, + "step": 694 + }, + { + "epoch": 0.4655056932350971, + "grad_norm": 3.119260616910519, + "learning_rate": 4.995981707683365e-06, + "loss": 1.7492, + "step": 695 + }, + { + "epoch": 0.46617548559946415, + "grad_norm": 6.24097623776597, + "learning_rate": 4.995898452005155e-06, + "loss": 1.7517, + "step": 696 + }, + { + "epoch": 0.46684527796383124, + "grad_norm": 1.5813929026070708, + "learning_rate": 4.99581434336173e-06, + "loss": 1.6905, + "step": 697 + }, + { + "epoch": 0.4675150703281983, + "grad_norm": 2.3395058426830997, + "learning_rate": 4.9957293817818335e-06, + "loss": 1.7943, + "step": 698 + }, + { + "epoch": 0.4681848626925653, + "grad_norm": 1.566390498177085, + "learning_rate": 4.995643567294502e-06, + "loss": 1.8643, + "step": 699 + }, + { + "epoch": 0.46885465505693236, + "grad_norm": 1.891913439752397, + "learning_rate": 4.995556899929059e-06, + "loss": 1.7982, + "step": 700 + }, + { + "epoch": 0.4695244474212994, + "grad_norm": 1.5179837376373224, + "learning_rate": 4.995469379715128e-06, + "loss": 1.9322, + "step": 701 + }, + { + "epoch": 0.47019423978566643, + "grad_norm": 1.601835503811913, + "learning_rate": 4.995381006682613e-06, + "loss": 1.7523, + "step": 702 + }, + { + "epoch": 0.4708640321500335, + "grad_norm": 1.8556616316808678, + "learning_rate": 4.995291780861721e-06, + "loss": 1.898, + "step": 703 + }, + { + "epoch": 0.4715338245144005, + "grad_norm": 1.60794522412261, + "learning_rate": 4.99520170228294e-06, + "loss": 1.7721, + "step": 704 + }, + { + "epoch": 0.4722036168787676, + "grad_norm": 1.48808764274547, + "learning_rate": 4.9951107709770565e-06, + "loss": 1.7259, + "step": 705 + }, + { + "epoch": 0.47287340924313465, + "grad_norm": 1.9658553725449575, + "learning_rate": 4.9950189869751455e-06, + "loss": 1.7998, + "step": 706 + }, + { + "epoch": 0.4735432016075017, + "grad_norm": 2.882382948814962, + "learning_rate": 4.9949263503085736e-06, + "loss": 1.7569, + "step": 707 + }, + { + "epoch": 0.4742129939718687, + "grad_norm": 1.5161473658557367, + "learning_rate": 4.994832861008999e-06, + "loss": 1.802, + "step": 708 + }, + { + "epoch": 0.47488278633623576, + "grad_norm": 3.774732466264579, + "learning_rate": 4.994738519108372e-06, + "loss": 1.7248, + "step": 709 + }, + { + "epoch": 0.4755525787006028, + "grad_norm": 2.594328634209691, + "learning_rate": 4.994643324638933e-06, + "loss": 1.9354, + "step": 710 + }, + { + "epoch": 0.47622237106496984, + "grad_norm": 3.410897417094479, + "learning_rate": 4.9945472776332135e-06, + "loss": 1.8595, + "step": 711 + }, 
+ { + "epoch": 0.4768921634293369, + "grad_norm": 1.8503049408853502, + "learning_rate": 4.994450378124039e-06, + "loss": 1.7759, + "step": 712 + }, + { + "epoch": 0.477561955793704, + "grad_norm": 6.6737542480549115, + "learning_rate": 4.994352626144525e-06, + "loss": 1.5739, + "step": 713 + }, + { + "epoch": 0.478231748158071, + "grad_norm": 3.6156223169290147, + "learning_rate": 4.994254021728075e-06, + "loss": 1.7815, + "step": 714 + }, + { + "epoch": 0.47890154052243805, + "grad_norm": 1.4780185950410563, + "learning_rate": 4.99415456490839e-06, + "loss": 1.825, + "step": 715 + }, + { + "epoch": 0.4795713328868051, + "grad_norm": 4.0577287382831235, + "learning_rate": 4.994054255719458e-06, + "loss": 1.7095, + "step": 716 + }, + { + "epoch": 0.48024112525117213, + "grad_norm": 1.788707163269408, + "learning_rate": 4.993953094195558e-06, + "loss": 1.8985, + "step": 717 + }, + { + "epoch": 0.48091091761553917, + "grad_norm": 2.5753249582695417, + "learning_rate": 4.993851080371262e-06, + "loss": 1.7885, + "step": 718 + }, + { + "epoch": 0.4815807099799062, + "grad_norm": 1.682800657400592, + "learning_rate": 4.993748214281435e-06, + "loss": 1.7665, + "step": 719 + }, + { + "epoch": 0.4822505023442733, + "grad_norm": 1.442505223954903, + "learning_rate": 4.993644495961229e-06, + "loss": 1.8327, + "step": 720 + }, + { + "epoch": 0.48292029470864034, + "grad_norm": 7.435043969228974, + "learning_rate": 4.99353992544609e-06, + "loss": 1.6489, + "step": 721 + }, + { + "epoch": 0.4835900870730074, + "grad_norm": 1.8319309257901006, + "learning_rate": 4.993434502771755e-06, + "loss": 1.8234, + "step": 722 + }, + { + "epoch": 0.4842598794373744, + "grad_norm": 1.5190277345700354, + "learning_rate": 4.9933282279742515e-06, + "loss": 1.9055, + "step": 723 + }, + { + "epoch": 0.48492967180174146, + "grad_norm": 2.090110648256617, + "learning_rate": 4.993221101089898e-06, + "loss": 1.8464, + "step": 724 + }, + { + "epoch": 0.4855994641661085, + "grad_norm": 2.4384211897961094, + "learning_rate": 4.9931131221553065e-06, + "loss": 1.6178, + "step": 725 + }, + { + "epoch": 0.48626925653047554, + "grad_norm": 3.606716353986938, + "learning_rate": 4.993004291207376e-06, + "loss": 1.6459, + "step": 726 + }, + { + "epoch": 0.4869390488948426, + "grad_norm": 4.7524277775506105, + "learning_rate": 4.992894608283301e-06, + "loss": 1.8369, + "step": 727 + }, + { + "epoch": 0.48760884125920967, + "grad_norm": 2.972499449497673, + "learning_rate": 4.992784073420565e-06, + "loss": 1.7159, + "step": 728 + }, + { + "epoch": 0.4882786336235767, + "grad_norm": 1.6238031084987437, + "learning_rate": 4.992672686656943e-06, + "loss": 1.8755, + "step": 729 + }, + { + "epoch": 0.48894842598794375, + "grad_norm": 3.280142093069612, + "learning_rate": 4.9925604480305e-06, + "loss": 1.5975, + "step": 730 + }, + { + "epoch": 0.4896182183523108, + "grad_norm": 1.788260972024729, + "learning_rate": 4.992447357579594e-06, + "loss": 1.6802, + "step": 731 + }, + { + "epoch": 0.4902880107166778, + "grad_norm": 1.7571996087818518, + "learning_rate": 4.992333415342874e-06, + "loss": 1.6989, + "step": 732 + }, + { + "epoch": 0.49095780308104486, + "grad_norm": 2.5148968004548498, + "learning_rate": 4.992218621359278e-06, + "loss": 1.618, + "step": 733 + }, + { + "epoch": 0.4916275954454119, + "grad_norm": 1.8237462501266546, + "learning_rate": 4.9921029756680374e-06, + "loss": 1.6519, + "step": 734 + }, + { + "epoch": 0.49229738780977894, + "grad_norm": 1.8691305802333953, + "learning_rate": 4.9919864783086725e-06, + "loss": 1.7924, 
+ "step": 735 + }, + { + "epoch": 0.49296718017414604, + "grad_norm": 1.5787227648558833, + "learning_rate": 4.991869129320997e-06, + "loss": 1.6936, + "step": 736 + }, + { + "epoch": 0.4936369725385131, + "grad_norm": 2.586434482807407, + "learning_rate": 4.991750928745116e-06, + "loss": 1.854, + "step": 737 + }, + { + "epoch": 0.4943067649028801, + "grad_norm": 1.6838933024016836, + "learning_rate": 4.991631876621421e-06, + "loss": 1.8276, + "step": 738 + }, + { + "epoch": 0.49497655726724715, + "grad_norm": 2.341344078436252, + "learning_rate": 4.9915119729906e-06, + "loss": 1.7424, + "step": 739 + }, + { + "epoch": 0.4956463496316142, + "grad_norm": 4.388796553787298, + "learning_rate": 4.991391217893628e-06, + "loss": 1.6936, + "step": 740 + }, + { + "epoch": 0.49631614199598123, + "grad_norm": 2.3860126185665553, + "learning_rate": 4.991269611371774e-06, + "loss": 1.8367, + "step": 741 + }, + { + "epoch": 0.49698593436034827, + "grad_norm": 2.2677527608715065, + "learning_rate": 4.991147153466596e-06, + "loss": 1.7976, + "step": 742 + }, + { + "epoch": 0.49765572672471536, + "grad_norm": 1.7095553831982901, + "learning_rate": 4.991023844219945e-06, + "loss": 1.8373, + "step": 743 + }, + { + "epoch": 0.4983255190890824, + "grad_norm": 1.6057829206294594, + "learning_rate": 4.9908996836739584e-06, + "loss": 1.7161, + "step": 744 + }, + { + "epoch": 0.49899531145344944, + "grad_norm": 2.233859365056613, + "learning_rate": 4.990774671871071e-06, + "loss": 1.8852, + "step": 745 + }, + { + "epoch": 0.4996651038178165, + "grad_norm": 1.5524357284948331, + "learning_rate": 4.9906488088540025e-06, + "loss": 1.6453, + "step": 746 + }, + { + "epoch": 0.5003348961821835, + "grad_norm": 1.62035268623331, + "learning_rate": 4.9905220946657685e-06, + "loss": 1.7661, + "step": 747 + }, + { + "epoch": 0.5010046885465506, + "grad_norm": 1.4764419440111767, + "learning_rate": 4.990394529349672e-06, + "loss": 1.6747, + "step": 748 + }, + { + "epoch": 0.5016744809109176, + "grad_norm": 1.692438986369742, + "learning_rate": 4.990266112949307e-06, + "loss": 1.943, + "step": 749 + }, + { + "epoch": 0.5023442732752846, + "grad_norm": 4.3298059275694785, + "learning_rate": 4.9901368455085616e-06, + "loss": 1.5511, + "step": 750 + }, + { + "epoch": 0.5030140656396517, + "grad_norm": 2.1095195823088964, + "learning_rate": 4.990006727071612e-06, + "loss": 1.7736, + "step": 751 + }, + { + "epoch": 0.5036838580040187, + "grad_norm": 3.205972185993652, + "learning_rate": 4.989875757682924e-06, + "loss": 1.9502, + "step": 752 + }, + { + "epoch": 0.5043536503683858, + "grad_norm": 1.7253011714812103, + "learning_rate": 4.989743937387257e-06, + "loss": 1.7056, + "step": 753 + }, + { + "epoch": 0.5050234427327528, + "grad_norm": 1.742066460368822, + "learning_rate": 4.989611266229662e-06, + "loss": 1.5018, + "step": 754 + }, + { + "epoch": 0.5056932350971199, + "grad_norm": 1.5854766984648385, + "learning_rate": 4.989477744255475e-06, + "loss": 1.8905, + "step": 755 + }, + { + "epoch": 0.506363027461487, + "grad_norm": 1.8224469369362293, + "learning_rate": 4.98934337151033e-06, + "loss": 2.0192, + "step": 756 + }, + { + "epoch": 0.507032819825854, + "grad_norm": 1.6325981640395286, + "learning_rate": 4.989208148040148e-06, + "loss": 1.8075, + "step": 757 + }, + { + "epoch": 0.5077026121902211, + "grad_norm": 1.7892475544243573, + "learning_rate": 4.9890720738911405e-06, + "loss": 1.6616, + "step": 758 + }, + { + "epoch": 0.5083724045545881, + "grad_norm": 2.1094284863294366, + "learning_rate": 4.9889351491098095e-06, 
+ "loss": 1.75, + "step": 759 + }, + { + "epoch": 0.5090421969189551, + "grad_norm": 1.5690725952955162, + "learning_rate": 4.9887973737429505e-06, + "loss": 1.6547, + "step": 760 + }, + { + "epoch": 0.5097119892833222, + "grad_norm": 1.7148024078737498, + "learning_rate": 4.988658747837647e-06, + "loss": 1.7151, + "step": 761 + }, + { + "epoch": 0.5103817816476892, + "grad_norm": 1.7831388017210332, + "learning_rate": 4.988519271441272e-06, + "loss": 1.901, + "step": 762 + }, + { + "epoch": 0.5110515740120563, + "grad_norm": 2.3037767614672235, + "learning_rate": 4.988378944601495e-06, + "loss": 1.6972, + "step": 763 + }, + { + "epoch": 0.5117213663764233, + "grad_norm": 2.3473640853367748, + "learning_rate": 4.98823776736627e-06, + "loss": 1.7187, + "step": 764 + }, + { + "epoch": 0.5123911587407903, + "grad_norm": 1.6951070250068938, + "learning_rate": 4.988095739783843e-06, + "loss": 1.734, + "step": 765 + }, + { + "epoch": 0.5130609511051574, + "grad_norm": 1.5405660779534895, + "learning_rate": 4.987952861902753e-06, + "loss": 1.9281, + "step": 766 + }, + { + "epoch": 0.5137307434695244, + "grad_norm": 2.6476368856263055, + "learning_rate": 4.987809133771828e-06, + "loss": 1.7846, + "step": 767 + }, + { + "epoch": 0.5144005358338914, + "grad_norm": 4.2279167599395615, + "learning_rate": 4.987664555440186e-06, + "loss": 1.7015, + "step": 768 + }, + { + "epoch": 0.5150703281982585, + "grad_norm": 2.9837211961812966, + "learning_rate": 4.9875191269572356e-06, + "loss": 1.7715, + "step": 769 + }, + { + "epoch": 0.5157401205626256, + "grad_norm": 3.4907304297088992, + "learning_rate": 4.987372848372678e-06, + "loss": 1.7226, + "step": 770 + }, + { + "epoch": 0.5164099129269927, + "grad_norm": 1.5572739207701136, + "learning_rate": 4.987225719736503e-06, + "loss": 1.8203, + "step": 771 + }, + { + "epoch": 0.5170797052913597, + "grad_norm": 1.8592125272375086, + "learning_rate": 4.987077741098991e-06, + "loss": 1.7703, + "step": 772 + }, + { + "epoch": 0.5177494976557268, + "grad_norm": 5.4344634768955675, + "learning_rate": 4.986928912510712e-06, + "loss": 1.7849, + "step": 773 + }, + { + "epoch": 0.5184192900200938, + "grad_norm": 1.681276941546417, + "learning_rate": 4.98677923402253e-06, + "loss": 1.7677, + "step": 774 + }, + { + "epoch": 0.5190890823844608, + "grad_norm": 1.5478691010793302, + "learning_rate": 4.986628705685597e-06, + "loss": 1.7595, + "step": 775 + }, + { + "epoch": 0.5197588747488279, + "grad_norm": 1.540136117929779, + "learning_rate": 4.986477327551353e-06, + "loss": 1.6986, + "step": 776 + }, + { + "epoch": 0.5204286671131949, + "grad_norm": 1.670030886420145, + "learning_rate": 4.986325099671534e-06, + "loss": 1.692, + "step": 777 + }, + { + "epoch": 0.521098459477562, + "grad_norm": 1.78663328612816, + "learning_rate": 4.986172022098161e-06, + "loss": 1.8683, + "step": 778 + }, + { + "epoch": 0.521768251841929, + "grad_norm": 1.619043861205127, + "learning_rate": 4.986018094883549e-06, + "loss": 1.7875, + "step": 779 + }, + { + "epoch": 0.522438044206296, + "grad_norm": 2.2378125678493386, + "learning_rate": 4.985863318080302e-06, + "loss": 1.6445, + "step": 780 + }, + { + "epoch": 0.5231078365706631, + "grad_norm": 1.7412804799514618, + "learning_rate": 4.985707691741315e-06, + "loss": 1.6881, + "step": 781 + }, + { + "epoch": 0.5237776289350301, + "grad_norm": 2.3669486834700253, + "learning_rate": 4.985551215919772e-06, + "loss": 1.7114, + "step": 782 + }, + { + "epoch": 0.5244474212993971, + "grad_norm": 2.02162835473235, + "learning_rate": 
4.985393890669148e-06, + "loss": 1.6902, + "step": 783 + }, + { + "epoch": 0.5251172136637642, + "grad_norm": 1.512901234838957, + "learning_rate": 4.9852357160432085e-06, + "loss": 1.758, + "step": 784 + }, + { + "epoch": 0.5257870060281313, + "grad_norm": 2.0092872186752784, + "learning_rate": 4.98507669209601e-06, + "loss": 1.5724, + "step": 785 + }, + { + "epoch": 0.5264567983924984, + "grad_norm": 3.059012194840155, + "learning_rate": 4.984916818881898e-06, + "loss": 1.7698, + "step": 786 + }, + { + "epoch": 0.5271265907568654, + "grad_norm": 1.836686749731867, + "learning_rate": 4.984756096455507e-06, + "loss": 1.6273, + "step": 787 + }, + { + "epoch": 0.5277963831212324, + "grad_norm": 2.8247958625487333, + "learning_rate": 4.984594524871765e-06, + "loss": 1.6998, + "step": 788 + }, + { + "epoch": 0.5284661754855995, + "grad_norm": 2.736372403650082, + "learning_rate": 4.98443210418589e-06, + "loss": 1.7412, + "step": 789 + }, + { + "epoch": 0.5291359678499665, + "grad_norm": 1.7473434756731738, + "learning_rate": 4.984268834453386e-06, + "loss": 1.7774, + "step": 790 + }, + { + "epoch": 0.5298057602143336, + "grad_norm": 1.5370220062263604, + "learning_rate": 4.9841047157300515e-06, + "loss": 1.787, + "step": 791 + }, + { + "epoch": 0.5304755525787006, + "grad_norm": 4.822916121929465, + "learning_rate": 4.983939748071972e-06, + "loss": 1.7053, + "step": 792 + }, + { + "epoch": 0.5311453449430676, + "grad_norm": 2.7338846095918172, + "learning_rate": 4.983773931535527e-06, + "loss": 1.8909, + "step": 793 + }, + { + "epoch": 0.5318151373074347, + "grad_norm": 1.7384119478946285, + "learning_rate": 4.983607266177381e-06, + "loss": 1.9146, + "step": 794 + }, + { + "epoch": 0.5324849296718017, + "grad_norm": 1.479064982452944, + "learning_rate": 4.9834397520544945e-06, + "loss": 1.8473, + "step": 795 + }, + { + "epoch": 0.5331547220361688, + "grad_norm": 1.9750815523701566, + "learning_rate": 4.983271389224113e-06, + "loss": 1.7307, + "step": 796 + }, + { + "epoch": 0.5338245144005358, + "grad_norm": 1.826723996935029, + "learning_rate": 4.983102177743774e-06, + "loss": 1.4684, + "step": 797 + }, + { + "epoch": 0.5344943067649028, + "grad_norm": 1.5657455508659233, + "learning_rate": 4.982932117671304e-06, + "loss": 1.4998, + "step": 798 + }, + { + "epoch": 0.5351640991292699, + "grad_norm": 1.5697922894778398, + "learning_rate": 4.982761209064823e-06, + "loss": 1.7719, + "step": 799 + }, + { + "epoch": 0.535833891493637, + "grad_norm": 2.0362235650630365, + "learning_rate": 4.9825894519827364e-06, + "loss": 1.8234, + "step": 800 + }, + { + "epoch": 0.5365036838580041, + "grad_norm": 1.6145900930378088, + "learning_rate": 4.982416846483743e-06, + "loss": 1.756, + "step": 801 + }, + { + "epoch": 0.5371734762223711, + "grad_norm": 2.2886001059809225, + "learning_rate": 4.982243392626829e-06, + "loss": 1.7453, + "step": 802 + }, + { + "epoch": 0.5378432685867381, + "grad_norm": 3.3457180965316997, + "learning_rate": 4.982069090471273e-06, + "loss": 1.7941, + "step": 803 + }, + { + "epoch": 0.5385130609511052, + "grad_norm": 1.6987589327097543, + "learning_rate": 4.9818939400766405e-06, + "loss": 1.6094, + "step": 804 + }, + { + "epoch": 0.5391828533154722, + "grad_norm": 1.6744274586379513, + "learning_rate": 4.981717941502789e-06, + "loss": 1.8013, + "step": 805 + }, + { + "epoch": 0.5398526456798393, + "grad_norm": 1.9905025231434494, + "learning_rate": 4.9815410948098675e-06, + "loss": 1.8216, + "step": 806 + }, + { + "epoch": 0.5405224380442063, + "grad_norm": 1.6688705807045319, + 
"learning_rate": 4.98136340005831e-06, + "loss": 1.5495, + "step": 807 + }, + { + "epoch": 0.5411922304085733, + "grad_norm": 1.8488907323825683, + "learning_rate": 4.9811848573088454e-06, + "loss": 2.0645, + "step": 808 + }, + { + "epoch": 0.5418620227729404, + "grad_norm": 1.8167418865456177, + "learning_rate": 4.981005466622488e-06, + "loss": 1.7042, + "step": 809 + }, + { + "epoch": 0.5425318151373074, + "grad_norm": 2.0723916092809347, + "learning_rate": 4.980825228060545e-06, + "loss": 1.7192, + "step": 810 + }, + { + "epoch": 0.5432016075016745, + "grad_norm": 2.0604290372136322, + "learning_rate": 4.980644141684613e-06, + "loss": 1.7692, + "step": 811 + }, + { + "epoch": 0.5438713998660415, + "grad_norm": 1.5435429467341564, + "learning_rate": 4.9804622075565775e-06, + "loss": 1.8163, + "step": 812 + }, + { + "epoch": 0.5445411922304085, + "grad_norm": 1.5356477929844097, + "learning_rate": 4.9802794257386125e-06, + "loss": 1.747, + "step": 813 + }, + { + "epoch": 0.5452109845947756, + "grad_norm": 1.4962974025873743, + "learning_rate": 4.980095796293184e-06, + "loss": 1.9696, + "step": 814 + }, + { + "epoch": 0.5458807769591426, + "grad_norm": 1.4665498050668826, + "learning_rate": 4.9799113192830475e-06, + "loss": 1.5641, + "step": 815 + }, + { + "epoch": 0.5465505693235098, + "grad_norm": 2.7544617674406644, + "learning_rate": 4.979725994771246e-06, + "loss": 1.7587, + "step": 816 + }, + { + "epoch": 0.5472203616878768, + "grad_norm": 1.4112553183704466, + "learning_rate": 4.979539822821115e-06, + "loss": 1.7352, + "step": 817 + }, + { + "epoch": 0.5478901540522438, + "grad_norm": 3.4458737820114247, + "learning_rate": 4.979352803496277e-06, + "loss": 1.467, + "step": 818 + }, + { + "epoch": 0.5485599464166109, + "grad_norm": 1.8539157845546546, + "learning_rate": 4.979164936860645e-06, + "loss": 1.746, + "step": 819 + }, + { + "epoch": 0.5492297387809779, + "grad_norm": 2.5109808201302073, + "learning_rate": 4.9789762229784235e-06, + "loss": 1.6603, + "step": 820 + }, + { + "epoch": 0.549899531145345, + "grad_norm": 2.7868791736874963, + "learning_rate": 4.978786661914103e-06, + "loss": 1.7359, + "step": 821 + }, + { + "epoch": 0.550569323509712, + "grad_norm": 1.854021509012841, + "learning_rate": 4.9785962537324674e-06, + "loss": 1.6183, + "step": 822 + }, + { + "epoch": 0.551239115874079, + "grad_norm": 1.6232441119900523, + "learning_rate": 4.978404998498586e-06, + "loss": 1.8811, + "step": 823 + }, + { + "epoch": 0.5519089082384461, + "grad_norm": 3.7242668430356916, + "learning_rate": 4.978212896277821e-06, + "loss": 1.589, + "step": 824 + }, + { + "epoch": 0.5525787006028131, + "grad_norm": 1.5557729224653203, + "learning_rate": 4.978019947135823e-06, + "loss": 1.7206, + "step": 825 + }, + { + "epoch": 0.5532484929671801, + "grad_norm": 1.6017156091714777, + "learning_rate": 4.977826151138529e-06, + "loss": 1.746, + "step": 826 + }, + { + "epoch": 0.5539182853315472, + "grad_norm": 1.663097646583482, + "learning_rate": 4.977631508352172e-06, + "loss": 1.7449, + "step": 827 + }, + { + "epoch": 0.5545880776959142, + "grad_norm": 1.6327325790568499, + "learning_rate": 4.977436018843268e-06, + "loss": 1.7726, + "step": 828 + }, + { + "epoch": 0.5552578700602813, + "grad_norm": 1.538875284267791, + "learning_rate": 4.977239682678626e-06, + "loss": 1.7636, + "step": 829 + }, + { + "epoch": 0.5559276624246483, + "grad_norm": 1.5179653092718632, + "learning_rate": 4.977042499925343e-06, + "loss": 1.7287, + "step": 830 + }, + { + "epoch": 0.5565974547890155, + "grad_norm": 
1.661024281738233, + "learning_rate": 4.976844470650804e-06, + "loss": 1.7549, + "step": 831 + }, + { + "epoch": 0.5572672471533825, + "grad_norm": 1.6478500100818505, + "learning_rate": 4.976645594922688e-06, + "loss": 1.8198, + "step": 832 + }, + { + "epoch": 0.5579370395177495, + "grad_norm": 1.5601702971045928, + "learning_rate": 4.976445872808958e-06, + "loss": 1.7706, + "step": 833 + }, + { + "epoch": 0.5586068318821166, + "grad_norm": 2.172645969516445, + "learning_rate": 4.976245304377869e-06, + "loss": 1.6348, + "step": 834 + }, + { + "epoch": 0.5592766242464836, + "grad_norm": 3.121009305898603, + "learning_rate": 4.976043889697964e-06, + "loss": 1.7909, + "step": 835 + }, + { + "epoch": 0.5599464166108507, + "grad_norm": 10.193498434563017, + "learning_rate": 4.975841628838076e-06, + "loss": 1.8837, + "step": 836 + }, + { + "epoch": 0.5606162089752177, + "grad_norm": 2.3487473342048606, + "learning_rate": 4.975638521867328e-06, + "loss": 1.7862, + "step": 837 + }, + { + "epoch": 0.5612860013395847, + "grad_norm": 1.7475462452981168, + "learning_rate": 4.97543456885513e-06, + "loss": 1.8766, + "step": 838 + }, + { + "epoch": 0.5619557937039518, + "grad_norm": 1.545543190989106, + "learning_rate": 4.975229769871183e-06, + "loss": 1.7949, + "step": 839 + }, + { + "epoch": 0.5626255860683188, + "grad_norm": 1.541011845205872, + "learning_rate": 4.975024124985476e-06, + "loss": 1.7496, + "step": 840 + }, + { + "epoch": 0.5632953784326858, + "grad_norm": 1.7289431253595926, + "learning_rate": 4.974817634268287e-06, + "loss": 1.8229, + "step": 841 + }, + { + "epoch": 0.5639651707970529, + "grad_norm": 1.654540221003204, + "learning_rate": 4.9746102977901845e-06, + "loss": 1.642, + "step": 842 + }, + { + "epoch": 0.5646349631614199, + "grad_norm": 1.608561473540289, + "learning_rate": 4.974402115622025e-06, + "loss": 1.8174, + "step": 843 + }, + { + "epoch": 0.565304755525787, + "grad_norm": 1.4559370741254065, + "learning_rate": 4.974193087834953e-06, + "loss": 1.6273, + "step": 844 + }, + { + "epoch": 0.565974547890154, + "grad_norm": 1.6937739132641314, + "learning_rate": 4.973983214500404e-06, + "loss": 1.5849, + "step": 845 + }, + { + "epoch": 0.5666443402545212, + "grad_norm": 2.5905063559389245, + "learning_rate": 4.973772495690103e-06, + "loss": 1.7293, + "step": 846 + }, + { + "epoch": 0.5673141326188882, + "grad_norm": 2.6301397241377638, + "learning_rate": 4.973560931476058e-06, + "loss": 1.7552, + "step": 847 + }, + { + "epoch": 0.5679839249832552, + "grad_norm": 1.91987483540413, + "learning_rate": 4.9733485219305746e-06, + "loss": 1.2358, + "step": 848 + }, + { + "epoch": 0.5686537173476223, + "grad_norm": 2.3754376183936223, + "learning_rate": 4.973135267126243e-06, + "loss": 1.644, + "step": 849 + }, + { + "epoch": 0.5693235097119893, + "grad_norm": 2.602874780357738, + "learning_rate": 4.972921167135939e-06, + "loss": 1.649, + "step": 850 + }, + { + "epoch": 0.5699933020763563, + "grad_norm": 3.89199374366504, + "learning_rate": 4.9727062220328324e-06, + "loss": 1.6751, + "step": 851 + }, + { + "epoch": 0.5706630944407234, + "grad_norm": 1.882175363439424, + "learning_rate": 4.972490431890381e-06, + "loss": 1.8581, + "step": 852 + }, + { + "epoch": 0.5713328868050904, + "grad_norm": 6.049975131093648, + "learning_rate": 4.97227379678233e-06, + "loss": 1.6719, + "step": 853 + }, + { + "epoch": 0.5720026791694575, + "grad_norm": 2.4219036188783543, + "learning_rate": 4.972056316782713e-06, + "loss": 1.7461, + "step": 854 + }, + { + "epoch": 0.5726724715338245, + 
"grad_norm": 1.643405310383024, + "learning_rate": 4.971837991965853e-06, + "loss": 1.6188, + "step": 855 + }, + { + "epoch": 0.5733422638981915, + "grad_norm": 4.362588693732724, + "learning_rate": 4.971618822406363e-06, + "loss": 1.903, + "step": 856 + }, + { + "epoch": 0.5740120562625586, + "grad_norm": 1.4782678583778999, + "learning_rate": 4.971398808179142e-06, + "loss": 1.7109, + "step": 857 + }, + { + "epoch": 0.5746818486269256, + "grad_norm": 1.6617895178391566, + "learning_rate": 4.9711779493593795e-06, + "loss": 1.7776, + "step": 858 + }, + { + "epoch": 0.5753516409912927, + "grad_norm": 2.6106541061126607, + "learning_rate": 4.970956246022555e-06, + "loss": 1.9226, + "step": 859 + }, + { + "epoch": 0.5760214333556597, + "grad_norm": 2.4440967372752715, + "learning_rate": 4.970733698244432e-06, + "loss": 1.72, + "step": 860 + }, + { + "epoch": 0.5766912257200268, + "grad_norm": 1.578698097134013, + "learning_rate": 4.970510306101067e-06, + "loss": 1.815, + "step": 861 + }, + { + "epoch": 0.5773610180843939, + "grad_norm": 2.171381933462649, + "learning_rate": 4.970286069668805e-06, + "loss": 1.7808, + "step": 862 + }, + { + "epoch": 0.5780308104487609, + "grad_norm": 1.563488000415158, + "learning_rate": 4.970060989024276e-06, + "loss": 1.8988, + "step": 863 + }, + { + "epoch": 0.578700602813128, + "grad_norm": 2.5700403922553123, + "learning_rate": 4.969835064244401e-06, + "loss": 1.7653, + "step": 864 + }, + { + "epoch": 0.579370395177495, + "grad_norm": 1.5246916199627156, + "learning_rate": 4.969608295406389e-06, + "loss": 1.7772, + "step": 865 + }, + { + "epoch": 0.580040187541862, + "grad_norm": 2.645595293549406, + "learning_rate": 4.969380682587738e-06, + "loss": 1.5953, + "step": 866 + }, + { + "epoch": 0.5807099799062291, + "grad_norm": 1.463606606050033, + "learning_rate": 4.969152225866234e-06, + "loss": 1.7247, + "step": 867 + }, + { + "epoch": 0.5813797722705961, + "grad_norm": 1.7804234120681675, + "learning_rate": 4.968922925319951e-06, + "loss": 1.6217, + "step": 868 + }, + { + "epoch": 0.5820495646349632, + "grad_norm": 1.52193788488823, + "learning_rate": 4.968692781027251e-06, + "loss": 1.803, + "step": 869 + }, + { + "epoch": 0.5827193569993302, + "grad_norm": 3.381383454210634, + "learning_rate": 4.968461793066787e-06, + "loss": 1.6141, + "step": 870 + }, + { + "epoch": 0.5833891493636972, + "grad_norm": 1.5568937619284606, + "learning_rate": 4.968229961517496e-06, + "loss": 1.5935, + "step": 871 + }, + { + "epoch": 0.5840589417280643, + "grad_norm": 1.9811177363049859, + "learning_rate": 4.967997286458608e-06, + "loss": 1.812, + "step": 872 + }, + { + "epoch": 0.5847287340924313, + "grad_norm": 2.737001935296344, + "learning_rate": 4.967763767969638e-06, + "loss": 1.7478, + "step": 873 + }, + { + "epoch": 0.5853985264567984, + "grad_norm": 1.5066425476946719, + "learning_rate": 4.967529406130389e-06, + "loss": 1.632, + "step": 874 + }, + { + "epoch": 0.5860683188211654, + "grad_norm": 2.7016698887217445, + "learning_rate": 4.967294201020954e-06, + "loss": 1.8217, + "step": 875 + }, + { + "epoch": 0.5867381111855325, + "grad_norm": 1.4799621951127266, + "learning_rate": 4.967058152721716e-06, + "loss": 1.647, + "step": 876 + }, + { + "epoch": 0.5874079035498996, + "grad_norm": 6.935123245622872, + "learning_rate": 4.966821261313341e-06, + "loss": 1.6817, + "step": 877 + }, + { + "epoch": 0.5880776959142666, + "grad_norm": 1.5230232174867322, + "learning_rate": 4.966583526876786e-06, + "loss": 1.6027, + "step": 878 + }, + { + "epoch": 0.5887474882786337, + 
"grad_norm": 3.532991435272279, + "learning_rate": 4.966344949493297e-06, + "loss": 1.6953, + "step": 879 + }, + { + "epoch": 0.5894172806430007, + "grad_norm": 2.1875855354426244, + "learning_rate": 4.966105529244407e-06, + "loss": 1.8187, + "step": 880 + }, + { + "epoch": 0.5900870730073677, + "grad_norm": 1.6910882143706614, + "learning_rate": 4.965865266211936e-06, + "loss": 1.4962, + "step": 881 + }, + { + "epoch": 0.5907568653717348, + "grad_norm": 1.8703921824892735, + "learning_rate": 4.965624160477995e-06, + "loss": 1.6336, + "step": 882 + }, + { + "epoch": 0.5914266577361018, + "grad_norm": 1.6567322227468302, + "learning_rate": 4.96538221212498e-06, + "loss": 1.6447, + "step": 883 + }, + { + "epoch": 0.5920964501004689, + "grad_norm": 1.6229378179371619, + "learning_rate": 4.965139421235575e-06, + "loss": 1.7328, + "step": 884 + }, + { + "epoch": 0.5927662424648359, + "grad_norm": 1.5248078317191611, + "learning_rate": 4.964895787892755e-06, + "loss": 1.6772, + "step": 885 + }, + { + "epoch": 0.5934360348292029, + "grad_norm": 2.164463591286396, + "learning_rate": 4.964651312179779e-06, + "loss": 1.8797, + "step": 886 + }, + { + "epoch": 0.59410582719357, + "grad_norm": 2.161024746592223, + "learning_rate": 4.964405994180197e-06, + "loss": 1.5791, + "step": 887 + }, + { + "epoch": 0.594775619557937, + "grad_norm": 2.038914330189904, + "learning_rate": 4.964159833977846e-06, + "loss": 1.6194, + "step": 888 + }, + { + "epoch": 0.595445411922304, + "grad_norm": 1.9819125444620285, + "learning_rate": 4.96391283165685e-06, + "loss": 1.8005, + "step": 889 + }, + { + "epoch": 0.5961152042866711, + "grad_norm": 1.6610469090879068, + "learning_rate": 4.96366498730162e-06, + "loss": 1.6094, + "step": 890 + }, + { + "epoch": 0.5967849966510381, + "grad_norm": 1.5699076937859378, + "learning_rate": 4.963416300996857e-06, + "loss": 1.7378, + "step": 891 + }, + { + "epoch": 0.5974547890154053, + "grad_norm": 3.275210291054124, + "learning_rate": 4.96316677282755e-06, + "loss": 1.8633, + "step": 892 + }, + { + "epoch": 0.5981245813797723, + "grad_norm": 1.4916140944020861, + "learning_rate": 4.962916402878972e-06, + "loss": 1.7514, + "step": 893 + }, + { + "epoch": 0.5987943737441394, + "grad_norm": 1.8030754769373485, + "learning_rate": 4.9626651912366885e-06, + "loss": 1.7396, + "step": 894 + }, + { + "epoch": 0.5994641661085064, + "grad_norm": 8.028785025690565, + "learning_rate": 4.962413137986549e-06, + "loss": 1.5479, + "step": 895 + }, + { + "epoch": 0.6001339584728734, + "grad_norm": 1.6252705099026852, + "learning_rate": 4.962160243214692e-06, + "loss": 1.709, + "step": 896 + }, + { + "epoch": 0.6008037508372405, + "grad_norm": 2.5694049962190615, + "learning_rate": 4.9619065070075435e-06, + "loss": 1.6972, + "step": 897 + }, + { + "epoch": 0.6014735432016075, + "grad_norm": 2.057116914151255, + "learning_rate": 4.9616519294518166e-06, + "loss": 1.754, + "step": 898 + }, + { + "epoch": 0.6021433355659745, + "grad_norm": 2.4478373639181097, + "learning_rate": 4.961396510634513e-06, + "loss": 1.7308, + "step": 899 + }, + { + "epoch": 0.6028131279303416, + "grad_norm": 1.7239214431440983, + "learning_rate": 4.961140250642921e-06, + "loss": 1.7046, + "step": 900 + }, + { + "epoch": 0.6034829202947086, + "grad_norm": 2.03814101004789, + "learning_rate": 4.960883149564616e-06, + "loss": 1.6238, + "step": 901 + }, + { + "epoch": 0.6041527126590757, + "grad_norm": 2.5696837525438774, + "learning_rate": 4.9606252074874624e-06, + "loss": 1.5441, + "step": 902 + }, + { + "epoch": 
0.6048225050234427, + "grad_norm": 1.5125309040282673, + "learning_rate": 4.9603664244996115e-06, + "loss": 1.805, + "step": 903 + }, + { + "epoch": 0.6054922973878097, + "grad_norm": 1.8187583395873934, + "learning_rate": 4.960106800689501e-06, + "loss": 1.6557, + "step": 904 + }, + { + "epoch": 0.6061620897521768, + "grad_norm": 1.592304197030378, + "learning_rate": 4.9598463361458556e-06, + "loss": 1.7147, + "step": 905 + }, + { + "epoch": 0.6068318821165438, + "grad_norm": 2.6955354249434444, + "learning_rate": 4.959585030957689e-06, + "loss": 1.7176, + "step": 906 + }, + { + "epoch": 0.607501674480911, + "grad_norm": 1.719331466388272, + "learning_rate": 4.959322885214302e-06, + "loss": 1.6144, + "step": 907 + }, + { + "epoch": 0.608171466845278, + "grad_norm": 2.110023084476623, + "learning_rate": 4.95905989900528e-06, + "loss": 1.7535, + "step": 908 + }, + { + "epoch": 0.608841259209645, + "grad_norm": 2.1536975372695073, + "learning_rate": 4.958796072420501e-06, + "loss": 1.4822, + "step": 909 + }, + { + "epoch": 0.6095110515740121, + "grad_norm": 1.5658018652041186, + "learning_rate": 4.958531405550124e-06, + "loss": 1.5868, + "step": 910 + }, + { + "epoch": 0.6101808439383791, + "grad_norm": 1.4803257757246429, + "learning_rate": 4.958265898484599e-06, + "loss": 1.536, + "step": 911 + }, + { + "epoch": 0.6108506363027462, + "grad_norm": 2.0357588765412418, + "learning_rate": 4.957999551314662e-06, + "loss": 1.5428, + "step": 912 + }, + { + "epoch": 0.6115204286671132, + "grad_norm": 1.4524803347097142, + "learning_rate": 4.957732364131337e-06, + "loss": 1.599, + "step": 913 + }, + { + "epoch": 0.6121902210314802, + "grad_norm": 1.3938621438001122, + "learning_rate": 4.957464337025934e-06, + "loss": 1.566, + "step": 914 + }, + { + "epoch": 0.6128600133958473, + "grad_norm": 3.5284134386134527, + "learning_rate": 4.957195470090049e-06, + "loss": 1.8166, + "step": 915 + }, + { + "epoch": 0.6135298057602143, + "grad_norm": 1.491169539260467, + "learning_rate": 4.956925763415569e-06, + "loss": 1.7096, + "step": 916 + }, + { + "epoch": 0.6141995981245814, + "grad_norm": 1.922966841795839, + "learning_rate": 4.956655217094663e-06, + "loss": 1.7448, + "step": 917 + }, + { + "epoch": 0.6148693904889484, + "grad_norm": 3.9506030970667836, + "learning_rate": 4.956383831219791e-06, + "loss": 1.5952, + "step": 918 + }, + { + "epoch": 0.6155391828533154, + "grad_norm": 3.7361589260398764, + "learning_rate": 4.956111605883697e-06, + "loss": 1.8224, + "step": 919 + }, + { + "epoch": 0.6162089752176825, + "grad_norm": 3.1058508433973153, + "learning_rate": 4.955838541179414e-06, + "loss": 1.7994, + "step": 920 + }, + { + "epoch": 0.6168787675820495, + "grad_norm": 2.52708153171011, + "learning_rate": 4.955564637200261e-06, + "loss": 1.6092, + "step": 921 + }, + { + "epoch": 0.6175485599464167, + "grad_norm": 1.7275886450195495, + "learning_rate": 4.955289894039843e-06, + "loss": 1.6748, + "step": 922 + }, + { + "epoch": 0.6182183523107837, + "grad_norm": 4.197191686900288, + "learning_rate": 4.955014311792054e-06, + "loss": 1.7168, + "step": 923 + }, + { + "epoch": 0.6188881446751507, + "grad_norm": 2.391972007478211, + "learning_rate": 4.954737890551072e-06, + "loss": 1.6814, + "step": 924 + }, + { + "epoch": 0.6195579370395178, + "grad_norm": 9.34111420362443, + "learning_rate": 4.954460630411363e-06, + "loss": 1.6488, + "step": 925 + }, + { + "epoch": 0.6202277294038848, + "grad_norm": 2.308972322549389, + "learning_rate": 4.954182531467681e-06, + "loss": 1.601, + "step": 926 + }, + { + 
"epoch": 0.6208975217682519, + "grad_norm": 2.0578512721370954, + "learning_rate": 4.953903593815063e-06, + "loss": 1.7623, + "step": 927 + }, + { + "epoch": 0.6215673141326189, + "grad_norm": 1.7919005453757475, + "learning_rate": 4.953623817548839e-06, + "loss": 1.8412, + "step": 928 + }, + { + "epoch": 0.6222371064969859, + "grad_norm": 1.5005534391820288, + "learning_rate": 4.953343202764618e-06, + "loss": 1.8631, + "step": 929 + }, + { + "epoch": 0.622906898861353, + "grad_norm": 1.8835939228898508, + "learning_rate": 4.953061749558301e-06, + "loss": 1.5625, + "step": 930 + }, + { + "epoch": 0.62357669122572, + "grad_norm": 1.4776387287561028, + "learning_rate": 4.952779458026073e-06, + "loss": 1.8223, + "step": 931 + }, + { + "epoch": 0.624246483590087, + "grad_norm": 1.6817757185419988, + "learning_rate": 4.952496328264407e-06, + "loss": 1.7767, + "step": 932 + }, + { + "epoch": 0.6249162759544541, + "grad_norm": 1.7447148121519491, + "learning_rate": 4.95221236037006e-06, + "loss": 1.6854, + "step": 933 + }, + { + "epoch": 0.6255860683188211, + "grad_norm": 1.5991092825494793, + "learning_rate": 4.951927554440079e-06, + "loss": 1.5999, + "step": 934 + }, + { + "epoch": 0.6262558606831882, + "grad_norm": 2.072304197175563, + "learning_rate": 4.951641910571795e-06, + "loss": 1.6847, + "step": 935 + }, + { + "epoch": 0.6269256530475552, + "grad_norm": 2.1588769586151817, + "learning_rate": 4.951355428862825e-06, + "loss": 1.7074, + "step": 936 + }, + { + "epoch": 0.6275954454119224, + "grad_norm": 2.800838168959561, + "learning_rate": 4.9510681094110746e-06, + "loss": 1.5846, + "step": 937 + }, + { + "epoch": 0.6282652377762894, + "grad_norm": 2.5160788960848337, + "learning_rate": 4.950779952314732e-06, + "loss": 1.7381, + "step": 938 + }, + { + "epoch": 0.6289350301406564, + "grad_norm": 1.6166301441977464, + "learning_rate": 4.950490957672277e-06, + "loss": 1.7768, + "step": 939 + }, + { + "epoch": 0.6296048225050235, + "grad_norm": 2.1639013567629855, + "learning_rate": 4.950201125582471e-06, + "loss": 1.8105, + "step": 940 + }, + { + "epoch": 0.6302746148693905, + "grad_norm": 3.540202294925917, + "learning_rate": 4.949910456144362e-06, + "loss": 1.8665, + "step": 941 + }, + { + "epoch": 0.6309444072337576, + "grad_norm": 2.081359472053604, + "learning_rate": 4.949618949457288e-06, + "loss": 1.592, + "step": 942 + }, + { + "epoch": 0.6316141995981246, + "grad_norm": 1.7226584218408652, + "learning_rate": 4.949326605620868e-06, + "loss": 1.4397, + "step": 943 + }, + { + "epoch": 0.6322839919624916, + "grad_norm": 3.625204725618973, + "learning_rate": 4.949033424735012e-06, + "loss": 1.5901, + "step": 944 + }, + { + "epoch": 0.6329537843268587, + "grad_norm": 1.7681868814362316, + "learning_rate": 4.94873940689991e-06, + "loss": 1.5041, + "step": 945 + }, + { + "epoch": 0.6336235766912257, + "grad_norm": 7.542351433889735, + "learning_rate": 4.948444552216045e-06, + "loss": 1.639, + "step": 946 + }, + { + "epoch": 0.6342933690555927, + "grad_norm": 1.626825625953941, + "learning_rate": 4.948148860784182e-06, + "loss": 1.5788, + "step": 947 + }, + { + "epoch": 0.6349631614199598, + "grad_norm": 1.5448967181723714, + "learning_rate": 4.947852332705372e-06, + "loss": 1.5026, + "step": 948 + }, + { + "epoch": 0.6356329537843268, + "grad_norm": 1.5039461722643535, + "learning_rate": 4.947554968080952e-06, + "loss": 1.8162, + "step": 949 + }, + { + "epoch": 0.6363027461486939, + "grad_norm": 2.67141675599732, + "learning_rate": 4.947256767012546e-06, + "loss": 1.6957, + "step": 950 + }, 
+ { + "epoch": 0.6369725385130609, + "grad_norm": 1.9940069238698552, + "learning_rate": 4.946957729602063e-06, + "loss": 1.6772, + "step": 951 + }, + { + "epoch": 0.6376423308774279, + "grad_norm": 6.22431912857819, + "learning_rate": 4.946657855951699e-06, + "loss": 1.8117, + "step": 952 + }, + { + "epoch": 0.6383121232417951, + "grad_norm": 2.2875060878697706, + "learning_rate": 4.946357146163934e-06, + "loss": 1.5808, + "step": 953 + }, + { + "epoch": 0.6389819156061621, + "grad_norm": 1.82137856177247, + "learning_rate": 4.946055600341535e-06, + "loss": 1.713, + "step": 954 + }, + { + "epoch": 0.6396517079705292, + "grad_norm": 1.8753338006260964, + "learning_rate": 4.945753218587553e-06, + "loss": 1.7986, + "step": 955 + }, + { + "epoch": 0.6403215003348962, + "grad_norm": 1.5457376444546806, + "learning_rate": 4.945450001005328e-06, + "loss": 1.7816, + "step": 956 + }, + { + "epoch": 0.6409912926992632, + "grad_norm": 2.205167135891539, + "learning_rate": 4.945145947698483e-06, + "loss": 1.7321, + "step": 957 + }, + { + "epoch": 0.6416610850636303, + "grad_norm": 1.7621329849510645, + "learning_rate": 4.944841058770926e-06, + "loss": 1.7347, + "step": 958 + }, + { + "epoch": 0.6423308774279973, + "grad_norm": 2.5783309618368824, + "learning_rate": 4.944535334326853e-06, + "loss": 1.8135, + "step": 959 + }, + { + "epoch": 0.6430006697923644, + "grad_norm": 1.462630270808878, + "learning_rate": 4.944228774470744e-06, + "loss": 1.6821, + "step": 960 + }, + { + "epoch": 0.6436704621567314, + "grad_norm": 1.4417415175946697, + "learning_rate": 4.943921379307365e-06, + "loss": 1.6535, + "step": 961 + }, + { + "epoch": 0.6443402545210984, + "grad_norm": 1.5081197799917094, + "learning_rate": 4.943613148941768e-06, + "loss": 1.7153, + "step": 962 + }, + { + "epoch": 0.6450100468854655, + "grad_norm": 2.113674015847343, + "learning_rate": 4.943304083479288e-06, + "loss": 1.6979, + "step": 963 + }, + { + "epoch": 0.6456798392498325, + "grad_norm": 3.2842210084101446, + "learning_rate": 4.942994183025548e-06, + "loss": 1.7788, + "step": 964 + }, + { + "epoch": 0.6463496316141996, + "grad_norm": 1.6334452455131891, + "learning_rate": 4.942683447686456e-06, + "loss": 1.7839, + "step": 965 + }, + { + "epoch": 0.6470194239785666, + "grad_norm": 1.835221359299977, + "learning_rate": 4.942371877568203e-06, + "loss": 1.8546, + "step": 966 + }, + { + "epoch": 0.6476892163429336, + "grad_norm": 2.412533590864445, + "learning_rate": 4.94205947277727e-06, + "loss": 1.607, + "step": 967 + }, + { + "epoch": 0.6483590087073008, + "grad_norm": 5.100983148006955, + "learning_rate": 4.941746233420418e-06, + "loss": 1.7152, + "step": 968 + }, + { + "epoch": 0.6490288010716678, + "grad_norm": 3.0844535986372765, + "learning_rate": 4.941432159604697e-06, + "loss": 1.5679, + "step": 969 + }, + { + "epoch": 0.6496985934360349, + "grad_norm": 1.7244121200585139, + "learning_rate": 4.941117251437438e-06, + "loss": 1.6886, + "step": 970 + }, + { + "epoch": 0.6503683858004019, + "grad_norm": 1.4344717270639946, + "learning_rate": 4.940801509026264e-06, + "loss": 1.6936, + "step": 971 + }, + { + "epoch": 0.6510381781647689, + "grad_norm": 2.097657069181618, + "learning_rate": 4.940484932479075e-06, + "loss": 1.7113, + "step": 972 + }, + { + "epoch": 0.651707970529136, + "grad_norm": 1.528032291603824, + "learning_rate": 4.940167521904064e-06, + "loss": 1.4766, + "step": 973 + }, + { + "epoch": 0.652377762893503, + "grad_norm": 2.4630222115193936, + "learning_rate": 4.939849277409701e-06, + "loss": 1.6794, + "step": 974 
+ }, + { + "epoch": 0.6530475552578701, + "grad_norm": 4.032357418296534, + "learning_rate": 4.9395301991047465e-06, + "loss": 1.5344, + "step": 975 + }, + { + "epoch": 0.6537173476222371, + "grad_norm": 1.7668598673074896, + "learning_rate": 4.939210287098246e-06, + "loss": 1.9876, + "step": 976 + }, + { + "epoch": 0.6543871399866041, + "grad_norm": 1.772909097051449, + "learning_rate": 4.938889541499526e-06, + "loss": 1.8169, + "step": 977 + }, + { + "epoch": 0.6550569323509712, + "grad_norm": 1.5974453450521113, + "learning_rate": 4.938567962418202e-06, + "loss": 1.8407, + "step": 978 + }, + { + "epoch": 0.6557267247153382, + "grad_norm": 1.5151695833849317, + "learning_rate": 4.938245549964174e-06, + "loss": 1.7248, + "step": 979 + }, + { + "epoch": 0.6563965170797053, + "grad_norm": 1.4988839301580668, + "learning_rate": 4.937922304247621e-06, + "loss": 1.695, + "step": 980 + }, + { + "epoch": 0.6570663094440723, + "grad_norm": 2.0718992796106566, + "learning_rate": 4.937598225379015e-06, + "loss": 1.7351, + "step": 981 + }, + { + "epoch": 0.6577361018084393, + "grad_norm": 2.0922121490819805, + "learning_rate": 4.9372733134691065e-06, + "loss": 1.6551, + "step": 982 + }, + { + "epoch": 0.6584058941728065, + "grad_norm": 4.780142134542246, + "learning_rate": 4.9369475686289345e-06, + "loss": 1.6633, + "step": 983 + }, + { + "epoch": 0.6590756865371735, + "grad_norm": 1.8857739861668033, + "learning_rate": 4.936620990969822e-06, + "loss": 1.5912, + "step": 984 + }, + { + "epoch": 0.6597454789015406, + "grad_norm": 1.8756580921024046, + "learning_rate": 4.9362935806033726e-06, + "loss": 1.7678, + "step": 985 + }, + { + "epoch": 0.6604152712659076, + "grad_norm": 1.5936291691433322, + "learning_rate": 4.9359653376414815e-06, + "loss": 1.7131, + "step": 986 + }, + { + "epoch": 0.6610850636302746, + "grad_norm": 1.599184122478421, + "learning_rate": 4.935636262196323e-06, + "loss": 1.5323, + "step": 987 + }, + { + "epoch": 0.6617548559946417, + "grad_norm": 2.5857670601614697, + "learning_rate": 4.935306354380357e-06, + "loss": 1.4067, + "step": 988 + }, + { + "epoch": 0.6624246483590087, + "grad_norm": 1.990577094539267, + "learning_rate": 4.9349756143063295e-06, + "loss": 1.7908, + "step": 989 + }, + { + "epoch": 0.6630944407233758, + "grad_norm": 1.8184184689078602, + "learning_rate": 4.93464404208727e-06, + "loss": 1.6009, + "step": 990 + }, + { + "epoch": 0.6637642330877428, + "grad_norm": 1.8920182650737996, + "learning_rate": 4.934311637836491e-06, + "loss": 1.6976, + "step": 991 + }, + { + "epoch": 0.6644340254521098, + "grad_norm": 2.2785166452192844, + "learning_rate": 4.93397840166759e-06, + "loss": 1.6941, + "step": 992 + }, + { + "epoch": 0.6651038178164769, + "grad_norm": 1.55725815878263, + "learning_rate": 4.933644333694453e-06, + "loss": 1.7963, + "step": 993 + }, + { + "epoch": 0.6657736101808439, + "grad_norm": 1.5954220692315655, + "learning_rate": 4.933309434031243e-06, + "loss": 1.7903, + "step": 994 + }, + { + "epoch": 0.666443402545211, + "grad_norm": 2.176831591679202, + "learning_rate": 4.932973702792412e-06, + "loss": 1.6026, + "step": 995 + }, + { + "epoch": 0.667113194909578, + "grad_norm": 3.6582979658267805, + "learning_rate": 4.9326371400926955e-06, + "loss": 1.6793, + "step": 996 + }, + { + "epoch": 0.667782987273945, + "grad_norm": 2.335538637841441, + "learning_rate": 4.932299746047112e-06, + "loss": 1.6896, + "step": 997 + }, + { + "epoch": 0.6684527796383122, + "grad_norm": 1.5442718817204628, + "learning_rate": 4.931961520770966e-06, + "loss": 1.6934, 
+ "step": 998 + }, + { + "epoch": 0.6691225720026792, + "grad_norm": 1.6391514748951286, + "learning_rate": 4.931622464379843e-06, + "loss": 1.5606, + "step": 999 + }, + { + "epoch": 0.6697923643670463, + "grad_norm": 2.543171958347597, + "learning_rate": 4.9312825769896166e-06, + "loss": 1.4591, + "step": 1000 + }, + { + "epoch": 0.6704621567314133, + "grad_norm": 1.6366545819536171, + "learning_rate": 4.9309418587164405e-06, + "loss": 1.8457, + "step": 1001 + }, + { + "epoch": 0.6711319490957803, + "grad_norm": 2.5817380937485743, + "learning_rate": 4.930600309676755e-06, + "loss": 1.7099, + "step": 1002 + }, + { + "epoch": 0.6718017414601474, + "grad_norm": 3.194068732340534, + "learning_rate": 4.930257929987283e-06, + "loss": 1.7366, + "step": 1003 + }, + { + "epoch": 0.6724715338245144, + "grad_norm": 1.6616536968164615, + "learning_rate": 4.929914719765032e-06, + "loss": 1.7705, + "step": 1004 + }, + { + "epoch": 0.6731413261888815, + "grad_norm": 5.775090608261412, + "learning_rate": 4.929570679127292e-06, + "loss": 1.4862, + "step": 1005 + }, + { + "epoch": 0.6738111185532485, + "grad_norm": 2.3271064947931706, + "learning_rate": 4.92922580819164e-06, + "loss": 1.5499, + "step": 1006 + }, + { + "epoch": 0.6744809109176155, + "grad_norm": 2.48194358565641, + "learning_rate": 4.928880107075932e-06, + "loss": 1.5563, + "step": 1007 + }, + { + "epoch": 0.6751507032819826, + "grad_norm": 2.794440062393993, + "learning_rate": 4.928533575898311e-06, + "loss": 1.5577, + "step": 1008 + }, + { + "epoch": 0.6758204956463496, + "grad_norm": 4.547425148342909, + "learning_rate": 4.928186214777204e-06, + "loss": 1.7267, + "step": 1009 + }, + { + "epoch": 0.6764902880107166, + "grad_norm": 2.4158204910893932, + "learning_rate": 4.92783802383132e-06, + "loss": 1.6919, + "step": 1010 + }, + { + "epoch": 0.6771600803750837, + "grad_norm": 1.630341003886277, + "learning_rate": 4.9274890031796505e-06, + "loss": 1.8014, + "step": 1011 + }, + { + "epoch": 0.6778298727394507, + "grad_norm": 1.5949128866535052, + "learning_rate": 4.927139152941474e-06, + "loss": 1.8183, + "step": 1012 + }, + { + "epoch": 0.6784996651038178, + "grad_norm": 1.5672348124196442, + "learning_rate": 4.92678847323635e-06, + "loss": 1.7464, + "step": 1013 + }, + { + "epoch": 0.6791694574681849, + "grad_norm": 1.7503818928268515, + "learning_rate": 4.926436964184124e-06, + "loss": 1.5256, + "step": 1014 + }, + { + "epoch": 0.679839249832552, + "grad_norm": 1.7507698338999411, + "learning_rate": 4.92608462590492e-06, + "loss": 1.7464, + "step": 1015 + }, + { + "epoch": 0.680509042196919, + "grad_norm": 2.6975314444260214, + "learning_rate": 4.9257314585191496e-06, + "loss": 1.7217, + "step": 1016 + }, + { + "epoch": 0.681178834561286, + "grad_norm": 7.090068374118402, + "learning_rate": 4.9253774621475075e-06, + "loss": 1.583, + "step": 1017 + }, + { + "epoch": 0.6818486269256531, + "grad_norm": 3.3563214346028887, + "learning_rate": 4.92502263691097e-06, + "loss": 1.5203, + "step": 1018 + }, + { + "epoch": 0.6825184192900201, + "grad_norm": 1.545687989844509, + "learning_rate": 4.924666982930798e-06, + "loss": 1.8523, + "step": 1019 + }, + { + "epoch": 0.6831882116543871, + "grad_norm": 2.350157908159858, + "learning_rate": 4.924310500328535e-06, + "loss": 1.7878, + "step": 1020 + }, + { + "epoch": 0.6838580040187542, + "grad_norm": 2.081648907208455, + "learning_rate": 4.923953189226007e-06, + "loss": 1.6868, + "step": 1021 + }, + { + "epoch": 0.6845277963831212, + "grad_norm": 2.8255540430721977, + "learning_rate": 
4.923595049745325e-06, + "loss": 1.8167, + "step": 1022 + }, + { + "epoch": 0.6851975887474883, + "grad_norm": 2.6654697475105023, + "learning_rate": 4.923236082008881e-06, + "loss": 1.7407, + "step": 1023 + }, + { + "epoch": 0.6858673811118553, + "grad_norm": 3.8238907553985846, + "learning_rate": 4.922876286139353e-06, + "loss": 1.5609, + "step": 1024 + }, + { + "epoch": 0.6865371734762223, + "grad_norm": 1.5278240874489508, + "learning_rate": 4.922515662259697e-06, + "loss": 1.5816, + "step": 1025 + }, + { + "epoch": 0.6872069658405894, + "grad_norm": 2.536434232478737, + "learning_rate": 4.922154210493158e-06, + "loss": 1.7182, + "step": 1026 + }, + { + "epoch": 0.6878767582049564, + "grad_norm": 3.9382037302928, + "learning_rate": 4.921791930963258e-06, + "loss": 1.6907, + "step": 1027 + }, + { + "epoch": 0.6885465505693235, + "grad_norm": 2.517335649822696, + "learning_rate": 4.921428823793807e-06, + "loss": 1.6977, + "step": 1028 + }, + { + "epoch": 0.6892163429336906, + "grad_norm": 2.443939683155804, + "learning_rate": 4.9210648891088965e-06, + "loss": 1.4476, + "step": 1029 + }, + { + "epoch": 0.6898861352980576, + "grad_norm": 3.397095492317555, + "learning_rate": 4.920700127032897e-06, + "loss": 1.7344, + "step": 1030 + }, + { + "epoch": 0.6905559276624247, + "grad_norm": 2.6599324773356305, + "learning_rate": 4.920334537690468e-06, + "loss": 1.5768, + "step": 1031 + }, + { + "epoch": 0.6912257200267917, + "grad_norm": 4.851848281120374, + "learning_rate": 4.919968121206546e-06, + "loss": 1.7226, + "step": 1032 + }, + { + "epoch": 0.6918955123911588, + "grad_norm": 1.6631104376604915, + "learning_rate": 4.9196008777063535e-06, + "loss": 1.3449, + "step": 1033 + }, + { + "epoch": 0.6925653047555258, + "grad_norm": 1.8277456953758842, + "learning_rate": 4.919232807315396e-06, + "loss": 1.8113, + "step": 1034 + }, + { + "epoch": 0.6932350971198928, + "grad_norm": 1.601111230121366, + "learning_rate": 4.918863910159458e-06, + "loss": 1.6743, + "step": 1035 + }, + { + "epoch": 0.6939048894842599, + "grad_norm": 2.4258333650071564, + "learning_rate": 4.918494186364611e-06, + "loss": 1.7977, + "step": 1036 + }, + { + "epoch": 0.6945746818486269, + "grad_norm": 1.5992577107716923, + "learning_rate": 4.9181236360572045e-06, + "loss": 1.7005, + "step": 1037 + }, + { + "epoch": 0.695244474212994, + "grad_norm": 1.9544632116946976, + "learning_rate": 4.917752259363876e-06, + "loss": 1.6768, + "step": 1038 + }, + { + "epoch": 0.695914266577361, + "grad_norm": 2.0948159991891804, + "learning_rate": 4.9173800564115405e-06, + "loss": 1.7116, + "step": 1039 + }, + { + "epoch": 0.696584058941728, + "grad_norm": 1.5726238013895826, + "learning_rate": 4.917007027327398e-06, + "loss": 1.8259, + "step": 1040 + }, + { + "epoch": 0.6972538513060951, + "grad_norm": 3.646037271463209, + "learning_rate": 4.916633172238927e-06, + "loss": 1.6803, + "step": 1041 + }, + { + "epoch": 0.6979236436704621, + "grad_norm": 4.82786002984369, + "learning_rate": 4.916258491273895e-06, + "loss": 1.5637, + "step": 1042 + }, + { + "epoch": 0.6985934360348292, + "grad_norm": 2.1507490314786355, + "learning_rate": 4.915882984560347e-06, + "loss": 1.5681, + "step": 1043 + }, + { + "epoch": 0.6992632283991963, + "grad_norm": 1.8054004506361925, + "learning_rate": 4.915506652226611e-06, + "loss": 1.5718, + "step": 1044 + }, + { + "epoch": 0.6999330207635633, + "grad_norm": 1.9769606678030651, + "learning_rate": 4.9151294944012965e-06, + "loss": 1.642, + "step": 1045 + }, + { + "epoch": 0.7006028131279304, + "grad_norm": 
1.7122523039452182, + "learning_rate": 4.914751511213296e-06, + "loss": 1.6396, + "step": 1046 + }, + { + "epoch": 0.7012726054922974, + "grad_norm": 2.375029508083482, + "learning_rate": 4.914372702791786e-06, + "loss": 1.8114, + "step": 1047 + }, + { + "epoch": 0.7019423978566645, + "grad_norm": 2.4387703471821145, + "learning_rate": 4.913993069266221e-06, + "loss": 1.6397, + "step": 1048 + }, + { + "epoch": 0.7026121902210315, + "grad_norm": 1.6881192953733755, + "learning_rate": 4.913612610766341e-06, + "loss": 1.7852, + "step": 1049 + }, + { + "epoch": 0.7032819825853985, + "grad_norm": 3.129404161473196, + "learning_rate": 4.913231327422164e-06, + "loss": 1.6472, + "step": 1050 + }, + { + "epoch": 0.7039517749497656, + "grad_norm": 2.8932758216863963, + "learning_rate": 4.912849219363996e-06, + "loss": 1.6615, + "step": 1051 + }, + { + "epoch": 0.7046215673141326, + "grad_norm": 1.5754054132529927, + "learning_rate": 4.912466286722418e-06, + "loss": 1.8393, + "step": 1052 + }, + { + "epoch": 0.7052913596784997, + "grad_norm": 1.8189039064938088, + "learning_rate": 4.912082529628298e-06, + "loss": 1.6943, + "step": 1053 + }, + { + "epoch": 0.7059611520428667, + "grad_norm": 1.5062059421560563, + "learning_rate": 4.911697948212783e-06, + "loss": 1.6255, + "step": 1054 + }, + { + "epoch": 0.7066309444072337, + "grad_norm": 2.3595540635581735, + "learning_rate": 4.911312542607301e-06, + "loss": 1.5383, + "step": 1055 + }, + { + "epoch": 0.7073007367716008, + "grad_norm": 3.4431763352077396, + "learning_rate": 4.910926312943566e-06, + "loss": 1.6359, + "step": 1056 + }, + { + "epoch": 0.7079705291359678, + "grad_norm": 1.6537572396256301, + "learning_rate": 4.910539259353569e-06, + "loss": 1.6072, + "step": 1057 + }, + { + "epoch": 0.7086403215003348, + "grad_norm": 3.372908401971047, + "learning_rate": 4.910151381969585e-06, + "loss": 1.5869, + "step": 1058 + }, + { + "epoch": 0.709310113864702, + "grad_norm": 1.5470152495338345, + "learning_rate": 4.909762680924168e-06, + "loss": 1.6675, + "step": 1059 + }, + { + "epoch": 0.709979906229069, + "grad_norm": 1.582359581223445, + "learning_rate": 4.909373156350159e-06, + "loss": 1.6356, + "step": 1060 + }, + { + "epoch": 0.7106496985934361, + "grad_norm": 1.5470216650164907, + "learning_rate": 4.908982808380672e-06, + "loss": 1.6536, + "step": 1061 + }, + { + "epoch": 0.7113194909578031, + "grad_norm": 2.9537435267037453, + "learning_rate": 4.908591637149112e-06, + "loss": 1.789, + "step": 1062 + }, + { + "epoch": 0.7119892833221702, + "grad_norm": 7.558918236923413, + "learning_rate": 4.908199642789156e-06, + "loss": 1.4554, + "step": 1063 + }, + { + "epoch": 0.7126590756865372, + "grad_norm": 1.6384632188817794, + "learning_rate": 4.907806825434771e-06, + "loss": 1.7894, + "step": 1064 + }, + { + "epoch": 0.7133288680509042, + "grad_norm": 1.8065610021010852, + "learning_rate": 4.907413185220198e-06, + "loss": 1.6299, + "step": 1065 + }, + { + "epoch": 0.7139986604152713, + "grad_norm": 1.5662395080692966, + "learning_rate": 4.907018722279964e-06, + "loss": 1.6955, + "step": 1066 + }, + { + "epoch": 0.7146684527796383, + "grad_norm": 1.5993011389024911, + "learning_rate": 4.906623436748874e-06, + "loss": 1.693, + "step": 1067 + }, + { + "epoch": 0.7153382451440053, + "grad_norm": 3.1128835509698773, + "learning_rate": 4.906227328762017e-06, + "loss": 1.6422, + "step": 1068 + }, + { + "epoch": 0.7160080375083724, + "grad_norm": 1.7993269639045648, + "learning_rate": 4.90583039845476e-06, + "loss": 1.7086, + "step": 1069 + }, + { + "epoch": 
0.7166778298727394, + "grad_norm": 1.9753655025887518, + "learning_rate": 4.905432645962754e-06, + "loss": 1.6025, + "step": 1070 + }, + { + "epoch": 0.7173476222371065, + "grad_norm": 2.3644468079767686, + "learning_rate": 4.905034071421929e-06, + "loss": 1.509, + "step": 1071 + }, + { + "epoch": 0.7180174146014735, + "grad_norm": 2.7793164847809595, + "learning_rate": 4.904634674968497e-06, + "loss": 1.7639, + "step": 1072 + }, + { + "epoch": 0.7186872069658405, + "grad_norm": 1.9646286414773058, + "learning_rate": 4.904234456738951e-06, + "loss": 1.5365, + "step": 1073 + }, + { + "epoch": 0.7193569993302076, + "grad_norm": 1.6839119005559027, + "learning_rate": 4.903833416870062e-06, + "loss": 1.5967, + "step": 1074 + }, + { + "epoch": 0.7200267916945747, + "grad_norm": 1.5891929156000952, + "learning_rate": 4.903431555498887e-06, + "loss": 1.6397, + "step": 1075 + }, + { + "epoch": 0.7206965840589418, + "grad_norm": 2.824713764782243, + "learning_rate": 4.903028872762758e-06, + "loss": 1.936, + "step": 1076 + }, + { + "epoch": 0.7213663764233088, + "grad_norm": 1.5704393898225653, + "learning_rate": 4.9026253687992925e-06, + "loss": 1.664, + "step": 1077 + }, + { + "epoch": 0.7220361687876758, + "grad_norm": 3.9988230884633267, + "learning_rate": 4.9022210437463856e-06, + "loss": 1.8865, + "step": 1078 + }, + { + "epoch": 0.7227059611520429, + "grad_norm": 1.6509367030142084, + "learning_rate": 4.901815897742216e-06, + "loss": 1.5844, + "step": 1079 + }, + { + "epoch": 0.7233757535164099, + "grad_norm": 2.180711740682503, + "learning_rate": 4.901409930925238e-06, + "loss": 1.7205, + "step": 1080 + }, + { + "epoch": 0.724045545880777, + "grad_norm": 1.7974297643964927, + "learning_rate": 4.901003143434192e-06, + "loss": 1.7493, + "step": 1081 + }, + { + "epoch": 0.724715338245144, + "grad_norm": 1.7994020757120992, + "learning_rate": 4.900595535408095e-06, + "loss": 1.7387, + "step": 1082 + }, + { + "epoch": 0.725385130609511, + "grad_norm": 2.2673454329365175, + "learning_rate": 4.900187106986246e-06, + "loss": 1.5875, + "step": 1083 + }, + { + "epoch": 0.7260549229738781, + "grad_norm": 1.8187602814523618, + "learning_rate": 4.899777858308224e-06, + "loss": 1.8545, + "step": 1084 + }, + { + "epoch": 0.7267247153382451, + "grad_norm": 1.9719909537917186, + "learning_rate": 4.899367789513889e-06, + "loss": 1.5066, + "step": 1085 + }, + { + "epoch": 0.7273945077026122, + "grad_norm": 2.847933474977862, + "learning_rate": 4.89895690074338e-06, + "loss": 1.6213, + "step": 1086 + }, + { + "epoch": 0.7280643000669792, + "grad_norm": 2.150959542343543, + "learning_rate": 4.898545192137117e-06, + "loss": 1.656, + "step": 1087 + }, + { + "epoch": 0.7287340924313462, + "grad_norm": 2.595494398809492, + "learning_rate": 4.898132663835801e-06, + "loss": 1.6395, + "step": 1088 + }, + { + "epoch": 0.7294038847957133, + "grad_norm": 1.7824268301913389, + "learning_rate": 4.8977193159804106e-06, + "loss": 1.824, + "step": 1089 + }, + { + "epoch": 0.7300736771600804, + "grad_norm": 1.5923656030796176, + "learning_rate": 4.897305148712207e-06, + "loss": 1.7206, + "step": 1090 + }, + { + "epoch": 0.7307434695244475, + "grad_norm": 1.5249189009305708, + "learning_rate": 4.896890162172731e-06, + "loss": 1.6023, + "step": 1091 + }, + { + "epoch": 0.7314132618888145, + "grad_norm": 1.7414321874495167, + "learning_rate": 4.896474356503802e-06, + "loss": 1.733, + "step": 1092 + }, + { + "epoch": 0.7320830542531815, + "grad_norm": 1.71499499791636, + "learning_rate": 4.896057731847521e-06, + "loss": 1.7152, + 
"step": 1093 + }, + { + "epoch": 0.7327528466175486, + "grad_norm": 1.689493350442763, + "learning_rate": 4.895640288346267e-06, + "loss": 1.8105, + "step": 1094 + }, + { + "epoch": 0.7334226389819156, + "grad_norm": 1.8718264383256817, + "learning_rate": 4.8952220261427016e-06, + "loss": 1.6164, + "step": 1095 + }, + { + "epoch": 0.7340924313462827, + "grad_norm": 3.587558949005464, + "learning_rate": 4.894802945379763e-06, + "loss": 1.7036, + "step": 1096 + }, + { + "epoch": 0.7347622237106497, + "grad_norm": 1.9659531951787526, + "learning_rate": 4.894383046200671e-06, + "loss": 1.794, + "step": 1097 + }, + { + "epoch": 0.7354320160750167, + "grad_norm": 2.1824679094481905, + "learning_rate": 4.893962328748927e-06, + "loss": 1.4813, + "step": 1098 + }, + { + "epoch": 0.7361018084393838, + "grad_norm": 2.4658929311286526, + "learning_rate": 4.893540793168306e-06, + "loss": 1.4608, + "step": 1099 + }, + { + "epoch": 0.7367716008037508, + "grad_norm": 2.0410116454115728, + "learning_rate": 4.893118439602869e-06, + "loss": 1.7734, + "step": 1100 + }, + { + "epoch": 0.7374413931681179, + "grad_norm": 1.722839049101332, + "learning_rate": 4.892695268196954e-06, + "loss": 1.5498, + "step": 1101 + }, + { + "epoch": 0.7381111855324849, + "grad_norm": 2.6715554691090673, + "learning_rate": 4.892271279095176e-06, + "loss": 1.626, + "step": 1102 + }, + { + "epoch": 0.7387809778968519, + "grad_norm": 2.26133991150912, + "learning_rate": 4.891846472442436e-06, + "loss": 1.6718, + "step": 1103 + }, + { + "epoch": 0.739450770261219, + "grad_norm": 2.167365781400644, + "learning_rate": 4.891420848383906e-06, + "loss": 1.7034, + "step": 1104 + }, + { + "epoch": 0.7401205626255861, + "grad_norm": 1.7195610299986006, + "learning_rate": 4.890994407065045e-06, + "loss": 1.8271, + "step": 1105 + }, + { + "epoch": 0.7407903549899532, + "grad_norm": 1.7650599343348226, + "learning_rate": 4.890567148631587e-06, + "loss": 1.51, + "step": 1106 + }, + { + "epoch": 0.7414601473543202, + "grad_norm": 6.5727185412062825, + "learning_rate": 4.890139073229545e-06, + "loss": 1.7638, + "step": 1107 + }, + { + "epoch": 0.7421299397186872, + "grad_norm": 2.158645291951229, + "learning_rate": 4.889710181005213e-06, + "loss": 1.6277, + "step": 1108 + }, + { + "epoch": 0.7427997320830543, + "grad_norm": 2.546052747728407, + "learning_rate": 4.889280472105164e-06, + "loss": 1.762, + "step": 1109 + }, + { + "epoch": 0.7434695244474213, + "grad_norm": 1.8174505463132797, + "learning_rate": 4.8888499466762485e-06, + "loss": 1.5821, + "step": 1110 + }, + { + "epoch": 0.7441393168117884, + "grad_norm": 1.7636816632262071, + "learning_rate": 4.888418604865599e-06, + "loss": 1.4306, + "step": 1111 + }, + { + "epoch": 0.7448091091761554, + "grad_norm": 1.4908096655914682, + "learning_rate": 4.887986446820624e-06, + "loss": 1.6034, + "step": 1112 + }, + { + "epoch": 0.7454789015405224, + "grad_norm": 2.048558922665236, + "learning_rate": 4.8875534726890115e-06, + "loss": 1.6305, + "step": 1113 + }, + { + "epoch": 0.7461486939048895, + "grad_norm": 1.6937874700557447, + "learning_rate": 4.887119682618731e-06, + "loss": 1.5805, + "step": 1114 + }, + { + "epoch": 0.7468184862692565, + "grad_norm": 2.4519727562845923, + "learning_rate": 4.8866850767580265e-06, + "loss": 1.5582, + "step": 1115 + }, + { + "epoch": 0.7474882786336235, + "grad_norm": 1.566467520851259, + "learning_rate": 4.8862496552554255e-06, + "loss": 1.7275, + "step": 1116 + }, + { + "epoch": 0.7481580709979906, + "grad_norm": 1.495320509582792, + "learning_rate": 
4.885813418259729e-06, + "loss": 1.8049, + "step": 1117 + }, + { + "epoch": 0.7488278633623576, + "grad_norm": 1.9546754880553152, + "learning_rate": 4.885376365920023e-06, + "loss": 1.6028, + "step": 1118 + }, + { + "epoch": 0.7494976557267247, + "grad_norm": 1.5649712290909048, + "learning_rate": 4.884938498385665e-06, + "loss": 1.8341, + "step": 1119 + }, + { + "epoch": 0.7501674480910918, + "grad_norm": 2.347424565859115, + "learning_rate": 4.8844998158062986e-06, + "loss": 1.7143, + "step": 1120 + }, + { + "epoch": 0.7508372404554589, + "grad_norm": 1.571529836386078, + "learning_rate": 4.884060318331839e-06, + "loss": 1.5407, + "step": 1121 + }, + { + "epoch": 0.7515070328198259, + "grad_norm": 2.98502274251386, + "learning_rate": 4.883620006112485e-06, + "loss": 1.646, + "step": 1122 + }, + { + "epoch": 0.7521768251841929, + "grad_norm": 2.0930590498952064, + "learning_rate": 4.8831788792987115e-06, + "loss": 1.5964, + "step": 1123 + }, + { + "epoch": 0.75284661754856, + "grad_norm": 3.786605566456868, + "learning_rate": 4.8827369380412715e-06, + "loss": 1.4837, + "step": 1124 + }, + { + "epoch": 0.753516409912927, + "grad_norm": 2.33328247759955, + "learning_rate": 4.882294182491198e-06, + "loss": 1.7146, + "step": 1125 + }, + { + "epoch": 0.754186202277294, + "grad_norm": 1.6545251963681706, + "learning_rate": 4.8818506127998e-06, + "loss": 1.7133, + "step": 1126 + }, + { + "epoch": 0.7548559946416611, + "grad_norm": 1.5031961329747614, + "learning_rate": 4.881406229118667e-06, + "loss": 1.7336, + "step": 1127 + }, + { + "epoch": 0.7555257870060281, + "grad_norm": 1.9036312790803243, + "learning_rate": 4.880961031599665e-06, + "loss": 1.5877, + "step": 1128 + }, + { + "epoch": 0.7561955793703952, + "grad_norm": 2.6789014127636124, + "learning_rate": 4.880515020394939e-06, + "loss": 1.4848, + "step": 1129 + }, + { + "epoch": 0.7568653717347622, + "grad_norm": 2.442893585224981, + "learning_rate": 4.880068195656913e-06, + "loss": 1.8165, + "step": 1130 + }, + { + "epoch": 0.7575351640991292, + "grad_norm": 2.119128475899313, + "learning_rate": 4.879620557538286e-06, + "loss": 1.7666, + "step": 1131 + }, + { + "epoch": 0.7582049564634963, + "grad_norm": 1.6705246487987617, + "learning_rate": 4.8791721061920385e-06, + "loss": 1.5406, + "step": 1132 + }, + { + "epoch": 0.7588747488278633, + "grad_norm": 2.128773132702044, + "learning_rate": 4.8787228417714265e-06, + "loss": 1.5618, + "step": 1133 + }, + { + "epoch": 0.7595445411922304, + "grad_norm": 2.6378806126915317, + "learning_rate": 4.878272764429985e-06, + "loss": 1.5401, + "step": 1134 + }, + { + "epoch": 0.7602143335565975, + "grad_norm": 1.8716356899745965, + "learning_rate": 4.877821874321526e-06, + "loss": 1.8266, + "step": 1135 + }, + { + "epoch": 0.7608841259209645, + "grad_norm": 2.4620897232846897, + "learning_rate": 4.87737017160014e-06, + "loss": 1.7151, + "step": 1136 + }, + { + "epoch": 0.7615539182853316, + "grad_norm": 2.6135902491658483, + "learning_rate": 4.876917656420195e-06, + "loss": 1.6728, + "step": 1137 + }, + { + "epoch": 0.7622237106496986, + "grad_norm": 1.8124754578367148, + "learning_rate": 4.8764643289363365e-06, + "loss": 1.5033, + "step": 1138 + }, + { + "epoch": 0.7628935030140657, + "grad_norm": 1.7109096952878087, + "learning_rate": 4.876010189303487e-06, + "loss": 1.5056, + "step": 1139 + }, + { + "epoch": 0.7635632953784327, + "grad_norm": 1.8905646547723332, + "learning_rate": 4.875555237676849e-06, + "loss": 1.5189, + "step": 1140 + }, + { + "epoch": 0.7642330877427997, + "grad_norm": 
3.3916279035982635, + "learning_rate": 4.875099474211899e-06, + "loss": 1.7128, + "step": 1141 + }, + { + "epoch": 0.7649028801071668, + "grad_norm": 2.060983734659227, + "learning_rate": 4.874642899064395e-06, + "loss": 1.6331, + "step": 1142 + }, + { + "epoch": 0.7655726724715338, + "grad_norm": 1.871800040368539, + "learning_rate": 4.874185512390367e-06, + "loss": 1.7967, + "step": 1143 + }, + { + "epoch": 0.7662424648359009, + "grad_norm": 2.6436966847714984, + "learning_rate": 4.8737273143461275e-06, + "loss": 1.5843, + "step": 1144 + }, + { + "epoch": 0.7669122572002679, + "grad_norm": 2.4336707763839738, + "learning_rate": 4.8732683050882635e-06, + "loss": 1.8022, + "step": 1145 + }, + { + "epoch": 0.7675820495646349, + "grad_norm": 3.322768662386125, + "learning_rate": 4.87280848477364e-06, + "loss": 1.7812, + "step": 1146 + }, + { + "epoch": 0.768251841929002, + "grad_norm": 1.7350888104259974, + "learning_rate": 4.872347853559399e-06, + "loss": 1.7043, + "step": 1147 + }, + { + "epoch": 0.768921634293369, + "grad_norm": 1.7918930349805349, + "learning_rate": 4.871886411602961e-06, + "loss": 1.8941, + "step": 1148 + }, + { + "epoch": 0.769591426657736, + "grad_norm": 1.526751983501587, + "learning_rate": 4.8714241590620195e-06, + "loss": 1.7585, + "step": 1149 + }, + { + "epoch": 0.7702612190221031, + "grad_norm": 1.5737057654058895, + "learning_rate": 4.870961096094551e-06, + "loss": 1.5603, + "step": 1150 + }, + { + "epoch": 0.7709310113864702, + "grad_norm": 5.6834892956552405, + "learning_rate": 4.870497222858803e-06, + "loss": 1.7137, + "step": 1151 + }, + { + "epoch": 0.7716008037508373, + "grad_norm": 5.308157143035082, + "learning_rate": 4.870032539513305e-06, + "loss": 1.7254, + "step": 1152 + }, + { + "epoch": 0.7722705961152043, + "grad_norm": 4.21813170585988, + "learning_rate": 4.869567046216859e-06, + "loss": 1.2512, + "step": 1153 + }, + { + "epoch": 0.7729403884795714, + "grad_norm": 2.554373403345984, + "learning_rate": 4.869100743128548e-06, + "loss": 1.7353, + "step": 1154 + }, + { + "epoch": 0.7736101808439384, + "grad_norm": 1.9584603497057096, + "learning_rate": 4.868633630407727e-06, + "loss": 1.7978, + "step": 1155 + }, + { + "epoch": 0.7742799732083054, + "grad_norm": 1.9214255668849656, + "learning_rate": 4.8681657082140325e-06, + "loss": 1.8084, + "step": 1156 + }, + { + "epoch": 0.7749497655726725, + "grad_norm": 3.0603788241822203, + "learning_rate": 4.867696976707373e-06, + "loss": 1.5603, + "step": 1157 + }, + { + "epoch": 0.7756195579370395, + "grad_norm": 1.6668815868135867, + "learning_rate": 4.867227436047939e-06, + "loss": 1.7193, + "step": 1158 + }, + { + "epoch": 0.7762893503014066, + "grad_norm": 1.5631600496032765, + "learning_rate": 4.866757086396193e-06, + "loss": 1.8323, + "step": 1159 + }, + { + "epoch": 0.7769591426657736, + "grad_norm": 1.5698815388628031, + "learning_rate": 4.866285927912875e-06, + "loss": 1.7643, + "step": 1160 + }, + { + "epoch": 0.7776289350301406, + "grad_norm": 2.553134719400903, + "learning_rate": 4.865813960759002e-06, + "loss": 1.8647, + "step": 1161 + }, + { + "epoch": 0.7782987273945077, + "grad_norm": 1.636035307436938, + "learning_rate": 4.8653411850958685e-06, + "loss": 1.4878, + "step": 1162 + }, + { + "epoch": 0.7789685197588747, + "grad_norm": 4.386168493718435, + "learning_rate": 4.8648676010850424e-06, + "loss": 1.4333, + "step": 1163 + }, + { + "epoch": 0.7796383121232418, + "grad_norm": 2.2693142709008134, + "learning_rate": 4.864393208888371e-06, + "loss": 1.5064, + "step": 1164 + }, + { + 
"epoch": 0.7803081044876088, + "grad_norm": 2.6669001449951204, + "learning_rate": 4.863918008667975e-06, + "loss": 1.4996, + "step": 1165 + }, + { + "epoch": 0.7809778968519759, + "grad_norm": 1.921497254603518, + "learning_rate": 4.863442000586253e-06, + "loss": 1.638, + "step": 1166 + }, + { + "epoch": 0.781647689216343, + "grad_norm": 1.707547682243014, + "learning_rate": 4.8629651848058805e-06, + "loss": 1.6393, + "step": 1167 + }, + { + "epoch": 0.78231748158071, + "grad_norm": 1.6089734999231418, + "learning_rate": 4.862487561489805e-06, + "loss": 1.7213, + "step": 1168 + }, + { + "epoch": 0.7829872739450771, + "grad_norm": 1.4948838464490282, + "learning_rate": 4.862009130801255e-06, + "loss": 1.7116, + "step": 1169 + }, + { + "epoch": 0.7836570663094441, + "grad_norm": 1.4909608828109753, + "learning_rate": 4.861529892903731e-06, + "loss": 1.8026, + "step": 1170 + }, + { + "epoch": 0.7843268586738111, + "grad_norm": 1.5523466661752996, + "learning_rate": 4.861049847961014e-06, + "loss": 1.6886, + "step": 1171 + }, + { + "epoch": 0.7849966510381782, + "grad_norm": 2.3406642409925373, + "learning_rate": 4.8605689961371536e-06, + "loss": 1.6358, + "step": 1172 + }, + { + "epoch": 0.7856664434025452, + "grad_norm": 1.7365532216661859, + "learning_rate": 4.860087337596482e-06, + "loss": 1.6752, + "step": 1173 + }, + { + "epoch": 0.7863362357669123, + "grad_norm": 1.755371796706508, + "learning_rate": 4.859604872503604e-06, + "loss": 1.8456, + "step": 1174 + }, + { + "epoch": 0.7870060281312793, + "grad_norm": 2.1115416814448995, + "learning_rate": 4.859121601023401e-06, + "loss": 1.7614, + "step": 1175 + }, + { + "epoch": 0.7876758204956463, + "grad_norm": 1.5704219809795763, + "learning_rate": 4.858637523321028e-06, + "loss": 1.5974, + "step": 1176 + }, + { + "epoch": 0.7883456128600134, + "grad_norm": 1.613015754013905, + "learning_rate": 4.8581526395619184e-06, + "loss": 1.5999, + "step": 1177 + }, + { + "epoch": 0.7890154052243804, + "grad_norm": 2.1661295655298844, + "learning_rate": 4.857666949911779e-06, + "loss": 1.8079, + "step": 1178 + }, + { + "epoch": 0.7896851975887474, + "grad_norm": 4.6993893066346315, + "learning_rate": 4.857180454536593e-06, + "loss": 1.6253, + "step": 1179 + }, + { + "epoch": 0.7903549899531145, + "grad_norm": 1.9815834615137635, + "learning_rate": 4.856693153602618e-06, + "loss": 1.5917, + "step": 1180 + }, + { + "epoch": 0.7910247823174816, + "grad_norm": 1.8972910818454105, + "learning_rate": 4.856205047276388e-06, + "loss": 1.6508, + "step": 1181 + }, + { + "epoch": 0.7916945746818487, + "grad_norm": 1.6192438643912788, + "learning_rate": 4.855716135724712e-06, + "loss": 1.6995, + "step": 1182 + }, + { + "epoch": 0.7923643670462157, + "grad_norm": 3.8466305159916585, + "learning_rate": 4.855226419114673e-06, + "loss": 1.3092, + "step": 1183 + }, + { + "epoch": 0.7930341594105828, + "grad_norm": 5.719691231300143, + "learning_rate": 4.854735897613631e-06, + "loss": 1.5428, + "step": 1184 + }, + { + "epoch": 0.7937039517749498, + "grad_norm": 1.582171158296338, + "learning_rate": 4.85424457138922e-06, + "loss": 1.6379, + "step": 1185 + }, + { + "epoch": 0.7943737441393168, + "grad_norm": 1.5065628202169727, + "learning_rate": 4.853752440609348e-06, + "loss": 1.6345, + "step": 1186 + }, + { + "epoch": 0.7950435365036839, + "grad_norm": 2.6961284793032756, + "learning_rate": 4.853259505442202e-06, + "loss": 1.7654, + "step": 1187 + }, + { + "epoch": 0.7957133288680509, + "grad_norm": 1.8150874161625645, + "learning_rate": 4.852765766056238e-06, + 
"loss": 1.7199, + "step": 1188 + }, + { + "epoch": 0.796383121232418, + "grad_norm": 1.5638702700742346, + "learning_rate": 4.85227122262019e-06, + "loss": 1.5384, + "step": 1189 + }, + { + "epoch": 0.797052913596785, + "grad_norm": 1.6674903815983344, + "learning_rate": 4.851775875303069e-06, + "loss": 1.5153, + "step": 1190 + }, + { + "epoch": 0.797722705961152, + "grad_norm": 2.2930372416031166, + "learning_rate": 4.851279724274155e-06, + "loss": 1.6218, + "step": 1191 + }, + { + "epoch": 0.7983924983255191, + "grad_norm": 2.442700305393262, + "learning_rate": 4.850782769703009e-06, + "loss": 1.6269, + "step": 1192 + }, + { + "epoch": 0.7990622906898861, + "grad_norm": 2.83710632065793, + "learning_rate": 4.850285011759462e-06, + "loss": 1.5009, + "step": 1193 + }, + { + "epoch": 0.7997320830542531, + "grad_norm": 1.9316802308222054, + "learning_rate": 4.849786450613622e-06, + "loss": 1.8363, + "step": 1194 + }, + { + "epoch": 0.8004018754186202, + "grad_norm": 3.9246785940217253, + "learning_rate": 4.849287086435871e-06, + "loss": 1.7326, + "step": 1195 + }, + { + "epoch": 0.8010716677829873, + "grad_norm": 1.5257702523196313, + "learning_rate": 4.848786919396864e-06, + "loss": 1.6052, + "step": 1196 + }, + { + "epoch": 0.8017414601473544, + "grad_norm": 1.815421495701149, + "learning_rate": 4.848285949667532e-06, + "loss": 1.5222, + "step": 1197 + }, + { + "epoch": 0.8024112525117214, + "grad_norm": 1.8233391019518355, + "learning_rate": 4.847784177419079e-06, + "loss": 1.6375, + "step": 1198 + }, + { + "epoch": 0.8030810448760884, + "grad_norm": 2.3058677100621927, + "learning_rate": 4.847281602822985e-06, + "loss": 1.5337, + "step": 1199 + }, + { + "epoch": 0.8037508372404555, + "grad_norm": 3.7268259378412947, + "learning_rate": 4.846778226051002e-06, + "loss": 1.338, + "step": 1200 + }, + { + "epoch": 0.8044206296048225, + "grad_norm": 1.7250112446400587, + "learning_rate": 4.846274047275159e-06, + "loss": 1.3757, + "step": 1201 + }, + { + "epoch": 0.8050904219691896, + "grad_norm": 3.1011193275985844, + "learning_rate": 4.845769066667757e-06, + "loss": 1.5848, + "step": 1202 + }, + { + "epoch": 0.8057602143335566, + "grad_norm": 1.5674944118947576, + "learning_rate": 4.845263284401371e-06, + "loss": 1.6559, + "step": 1203 + }, + { + "epoch": 0.8064300066979236, + "grad_norm": 1.6027225713544286, + "learning_rate": 4.844756700648852e-06, + "loss": 1.6152, + "step": 1204 + }, + { + "epoch": 0.8070997990622907, + "grad_norm": 3.667019584620633, + "learning_rate": 4.844249315583321e-06, + "loss": 1.6767, + "step": 1205 + }, + { + "epoch": 0.8077695914266577, + "grad_norm": 1.790359525150899, + "learning_rate": 4.843741129378175e-06, + "loss": 1.5675, + "step": 1206 + }, + { + "epoch": 0.8084393837910248, + "grad_norm": 1.7079453025909954, + "learning_rate": 4.843232142207088e-06, + "loss": 1.7911, + "step": 1207 + }, + { + "epoch": 0.8091091761553918, + "grad_norm": 1.6549947189129133, + "learning_rate": 4.842722354244001e-06, + "loss": 1.6391, + "step": 1208 + }, + { + "epoch": 0.8097789685197588, + "grad_norm": 1.5151331534807673, + "learning_rate": 4.842211765663136e-06, + "loss": 1.519, + "step": 1209 + }, + { + "epoch": 0.8104487608841259, + "grad_norm": 1.7343606210459428, + "learning_rate": 4.841700376638983e-06, + "loss": 1.5256, + "step": 1210 + }, + { + "epoch": 0.8111185532484929, + "grad_norm": 2.3776420135997083, + "learning_rate": 4.8411881873463085e-06, + "loss": 1.7327, + "step": 1211 + }, + { + "epoch": 0.8117883456128601, + "grad_norm": 2.415949880264286, + 
"learning_rate": 4.840675197960151e-06, + "loss": 1.7861, + "step": 1212 + }, + { + "epoch": 0.8124581379772271, + "grad_norm": 2.748453654111449, + "learning_rate": 4.840161408655822e-06, + "loss": 1.6449, + "step": 1213 + }, + { + "epoch": 0.8131279303415941, + "grad_norm": 2.4417604346784336, + "learning_rate": 4.839646819608908e-06, + "loss": 1.4764, + "step": 1214 + }, + { + "epoch": 0.8137977227059612, + "grad_norm": 1.675447403156417, + "learning_rate": 4.8391314309952695e-06, + "loss": 1.7677, + "step": 1215 + }, + { + "epoch": 0.8144675150703282, + "grad_norm": 1.735491424056127, + "learning_rate": 4.838615242991037e-06, + "loss": 1.5871, + "step": 1216 + }, + { + "epoch": 0.8151373074346953, + "grad_norm": 1.5184413194512627, + "learning_rate": 4.838098255772617e-06, + "loss": 1.6439, + "step": 1217 + }, + { + "epoch": 0.8158070997990623, + "grad_norm": 7.277164181563595, + "learning_rate": 4.837580469516688e-06, + "loss": 1.7299, + "step": 1218 + }, + { + "epoch": 0.8164768921634293, + "grad_norm": 1.9778229712269637, + "learning_rate": 4.837061884400202e-06, + "loss": 1.6209, + "step": 1219 + }, + { + "epoch": 0.8171466845277964, + "grad_norm": 4.052859609826651, + "learning_rate": 4.836542500600383e-06, + "loss": 1.5166, + "step": 1220 + }, + { + "epoch": 0.8178164768921634, + "grad_norm": 3.6773199277083015, + "learning_rate": 4.83602231829473e-06, + "loss": 1.7464, + "step": 1221 + }, + { + "epoch": 0.8184862692565305, + "grad_norm": 1.6672667831544674, + "learning_rate": 4.8355013376610124e-06, + "loss": 1.6142, + "step": 1222 + }, + { + "epoch": 0.8191560616208975, + "grad_norm": 2.40047782442564, + "learning_rate": 4.834979558877274e-06, + "loss": 1.5695, + "step": 1223 + }, + { + "epoch": 0.8198258539852645, + "grad_norm": 1.8304119938263237, + "learning_rate": 4.834456982121832e-06, + "loss": 1.8201, + "step": 1224 + }, + { + "epoch": 0.8204956463496316, + "grad_norm": 1.5187690469861264, + "learning_rate": 4.8339336075732745e-06, + "loss": 1.6975, + "step": 1225 + }, + { + "epoch": 0.8211654387139986, + "grad_norm": 3.9396155309515044, + "learning_rate": 4.833409435410463e-06, + "loss": 1.7527, + "step": 1226 + }, + { + "epoch": 0.8218352310783658, + "grad_norm": 1.791651375064501, + "learning_rate": 4.8328844658125305e-06, + "loss": 1.7165, + "step": 1227 + }, + { + "epoch": 0.8225050234427328, + "grad_norm": 2.5001437340509, + "learning_rate": 4.832358698958887e-06, + "loss": 1.4853, + "step": 1228 + }, + { + "epoch": 0.8231748158070998, + "grad_norm": 1.9536836108742395, + "learning_rate": 4.831832135029208e-06, + "loss": 1.7494, + "step": 1229 + }, + { + "epoch": 0.8238446081714669, + "grad_norm": 2.182816947556907, + "learning_rate": 4.831304774203447e-06, + "loss": 1.7729, + "step": 1230 + }, + { + "epoch": 0.8245144005358339, + "grad_norm": 1.5625943256129522, + "learning_rate": 4.830776616661827e-06, + "loss": 1.6622, + "step": 1231 + }, + { + "epoch": 0.825184192900201, + "grad_norm": 1.6088797381468614, + "learning_rate": 4.830247662584845e-06, + "loss": 1.7309, + "step": 1232 + }, + { + "epoch": 0.825853985264568, + "grad_norm": 2.0975869991711735, + "learning_rate": 4.829717912153268e-06, + "loss": 1.6362, + "step": 1233 + }, + { + "epoch": 0.826523777628935, + "grad_norm": 1.644127140657863, + "learning_rate": 4.829187365548138e-06, + "loss": 1.6818, + "step": 1234 + }, + { + "epoch": 0.8271935699933021, + "grad_norm": 1.8705096429498924, + "learning_rate": 4.828656022950767e-06, + "loss": 1.4804, + "step": 1235 + }, + { + "epoch": 0.8278633623576691, + 
"grad_norm": 1.773999225734142, + "learning_rate": 4.828123884542739e-06, + "loss": 1.6683, + "step": 1236 + }, + { + "epoch": 0.8285331547220361, + "grad_norm": 2.5636529745535666, + "learning_rate": 4.827590950505911e-06, + "loss": 1.6094, + "step": 1237 + }, + { + "epoch": 0.8292029470864032, + "grad_norm": 1.6222815314874792, + "learning_rate": 4.827057221022413e-06, + "loss": 1.634, + "step": 1238 + }, + { + "epoch": 0.8298727394507702, + "grad_norm": 1.827194787262136, + "learning_rate": 4.826522696274642e-06, + "loss": 1.6508, + "step": 1239 + }, + { + "epoch": 0.8305425318151373, + "grad_norm": 2.917663338602079, + "learning_rate": 4.825987376445273e-06, + "loss": 1.4963, + "step": 1240 + }, + { + "epoch": 0.8312123241795043, + "grad_norm": 1.8610339464600825, + "learning_rate": 4.82545126171725e-06, + "loss": 1.5919, + "step": 1241 + }, + { + "epoch": 0.8318821165438715, + "grad_norm": 1.5786371763620632, + "learning_rate": 4.824914352273786e-06, + "loss": 1.489, + "step": 1242 + }, + { + "epoch": 0.8325519089082385, + "grad_norm": 2.590110932000008, + "learning_rate": 4.82437664829837e-06, + "loss": 1.611, + "step": 1243 + }, + { + "epoch": 0.8332217012726055, + "grad_norm": 1.6321039400373702, + "learning_rate": 4.823838149974759e-06, + "loss": 1.6855, + "step": 1244 + }, + { + "epoch": 0.8338914936369726, + "grad_norm": 1.9835675933854497, + "learning_rate": 4.823298857486984e-06, + "loss": 1.4832, + "step": 1245 + }, + { + "epoch": 0.8345612860013396, + "grad_norm": 2.4514056467723955, + "learning_rate": 4.822758771019348e-06, + "loss": 1.5117, + "step": 1246 + }, + { + "epoch": 0.8352310783657066, + "grad_norm": 1.9107306696664474, + "learning_rate": 4.822217890756421e-06, + "loss": 1.7414, + "step": 1247 + }, + { + "epoch": 0.8359008707300737, + "grad_norm": 1.4556626186221049, + "learning_rate": 4.821676216883049e-06, + "loss": 1.5872, + "step": 1248 + }, + { + "epoch": 0.8365706630944407, + "grad_norm": 1.7444986445327346, + "learning_rate": 4.821133749584346e-06, + "loss": 1.6261, + "step": 1249 + }, + { + "epoch": 0.8372404554588078, + "grad_norm": 3.253154088016151, + "learning_rate": 4.820590489045699e-06, + "loss": 1.6343, + "step": 1250 + }, + { + "epoch": 0.8379102478231748, + "grad_norm": 2.0239765599337107, + "learning_rate": 4.820046435452767e-06, + "loss": 1.6006, + "step": 1251 + }, + { + "epoch": 0.8385800401875418, + "grad_norm": 1.8823675584783641, + "learning_rate": 4.819501588991475e-06, + "loss": 1.7829, + "step": 1252 + }, + { + "epoch": 0.8392498325519089, + "grad_norm": 2.6925348181387068, + "learning_rate": 4.818955949848027e-06, + "loss": 1.6649, + "step": 1253 + }, + { + "epoch": 0.8399196249162759, + "grad_norm": 2.1719325292631346, + "learning_rate": 4.81840951820889e-06, + "loss": 1.3972, + "step": 1254 + }, + { + "epoch": 0.840589417280643, + "grad_norm": 1.8948372237147797, + "learning_rate": 4.817862294260806e-06, + "loss": 1.6257, + "step": 1255 + }, + { + "epoch": 0.84125920964501, + "grad_norm": 1.638362453731948, + "learning_rate": 4.817314278190788e-06, + "loss": 1.6271, + "step": 1256 + }, + { + "epoch": 0.8419290020093771, + "grad_norm": 1.5629754520784647, + "learning_rate": 4.816765470186117e-06, + "loss": 1.7507, + "step": 1257 + }, + { + "epoch": 0.8425987943737442, + "grad_norm": 1.856187968492717, + "learning_rate": 4.816215870434349e-06, + "loss": 1.3833, + "step": 1258 + }, + { + "epoch": 0.8432685867381112, + "grad_norm": 1.4746728490617087, + "learning_rate": 4.815665479123306e-06, + "loss": 1.7959, + "step": 1259 + }, + { + 
"epoch": 0.8439383791024783, + "grad_norm": 2.3895142291333613, + "learning_rate": 4.815114296441082e-06, + "loss": 1.4512, + "step": 1260 + }, + { + "epoch": 0.8446081714668453, + "grad_norm": 1.5610638178092755, + "learning_rate": 4.814562322576043e-06, + "loss": 1.6328, + "step": 1261 + }, + { + "epoch": 0.8452779638312123, + "grad_norm": 1.5241673058199792, + "learning_rate": 4.814009557716825e-06, + "loss": 1.639, + "step": 1262 + }, + { + "epoch": 0.8459477561955794, + "grad_norm": 1.6005109089753695, + "learning_rate": 4.813456002052331e-06, + "loss": 1.7924, + "step": 1263 + }, + { + "epoch": 0.8466175485599464, + "grad_norm": 1.83285616957328, + "learning_rate": 4.81290165577174e-06, + "loss": 1.6262, + "step": 1264 + }, + { + "epoch": 0.8472873409243135, + "grad_norm": 1.7358142557912501, + "learning_rate": 4.812346519064496e-06, + "loss": 1.6168, + "step": 1265 + }, + { + "epoch": 0.8479571332886805, + "grad_norm": 1.9613307639431197, + "learning_rate": 4.811790592120316e-06, + "loss": 1.6025, + "step": 1266 + }, + { + "epoch": 0.8486269256530475, + "grad_norm": 1.639245057196164, + "learning_rate": 4.811233875129185e-06, + "loss": 1.6691, + "step": 1267 + }, + { + "epoch": 0.8492967180174146, + "grad_norm": 1.729287475958116, + "learning_rate": 4.810676368281361e-06, + "loss": 1.5754, + "step": 1268 + }, + { + "epoch": 0.8499665103817816, + "grad_norm": 2.5110053971976845, + "learning_rate": 4.8101180717673686e-06, + "loss": 1.6358, + "step": 1269 + }, + { + "epoch": 0.8506363027461487, + "grad_norm": 2.130835271849982, + "learning_rate": 4.809558985778004e-06, + "loss": 1.6425, + "step": 1270 + }, + { + "epoch": 0.8513060951105157, + "grad_norm": 1.74915115660789, + "learning_rate": 4.808999110504335e-06, + "loss": 1.6834, + "step": 1271 + }, + { + "epoch": 0.8519758874748827, + "grad_norm": 5.0863729751754105, + "learning_rate": 4.808438446137696e-06, + "loss": 1.6272, + "step": 1272 + }, + { + "epoch": 0.8526456798392499, + "grad_norm": 3.2037993433002843, + "learning_rate": 4.80787699286969e-06, + "loss": 1.7672, + "step": 1273 + }, + { + "epoch": 0.8533154722036169, + "grad_norm": 2.305337935492417, + "learning_rate": 4.807314750892195e-06, + "loss": 1.7249, + "step": 1274 + }, + { + "epoch": 0.853985264567984, + "grad_norm": 4.852091439974091, + "learning_rate": 4.806751720397354e-06, + "loss": 1.5197, + "step": 1275 + }, + { + "epoch": 0.854655056932351, + "grad_norm": 2.496130157556178, + "learning_rate": 4.806187901577581e-06, + "loss": 1.518, + "step": 1276 + }, + { + "epoch": 0.855324849296718, + "grad_norm": 2.9002679259076625, + "learning_rate": 4.8056232946255595e-06, + "loss": 1.4642, + "step": 1277 + }, + { + "epoch": 0.8559946416610851, + "grad_norm": 1.6181437988540903, + "learning_rate": 4.805057899734241e-06, + "loss": 1.7621, + "step": 1278 + }, + { + "epoch": 0.8566644340254521, + "grad_norm": 1.6261692089684898, + "learning_rate": 4.8044917170968496e-06, + "loss": 1.6941, + "step": 1279 + }, + { + "epoch": 0.8573342263898192, + "grad_norm": 1.667669719137197, + "learning_rate": 4.803924746906874e-06, + "loss": 1.6276, + "step": 1280 + }, + { + "epoch": 0.8580040187541862, + "grad_norm": 1.6606023010178188, + "learning_rate": 4.803356989358076e-06, + "loss": 1.8047, + "step": 1281 + }, + { + "epoch": 0.8586738111185532, + "grad_norm": 1.702239411602121, + "learning_rate": 4.802788444644484e-06, + "loss": 1.7671, + "step": 1282 + }, + { + "epoch": 0.8593436034829203, + "grad_norm": 2.5266521492045713, + "learning_rate": 4.802219112960397e-06, + "loss": 
1.5531, + "step": 1283 + }, + { + "epoch": 0.8600133958472873, + "grad_norm": 1.7755102385605375, + "learning_rate": 4.801648994500382e-06, + "loss": 1.54, + "step": 1284 + }, + { + "epoch": 0.8606831882116543, + "grad_norm": 1.6248192063066604, + "learning_rate": 4.801078089459275e-06, + "loss": 1.6275, + "step": 1285 + }, + { + "epoch": 0.8613529805760214, + "grad_norm": 2.630605310972499, + "learning_rate": 4.800506398032179e-06, + "loss": 1.5977, + "step": 1286 + }, + { + "epoch": 0.8620227729403884, + "grad_norm": 1.676091156156629, + "learning_rate": 4.7999339204144725e-06, + "loss": 1.6754, + "step": 1287 + }, + { + "epoch": 0.8626925653047556, + "grad_norm": 3.038125063484439, + "learning_rate": 4.799360656801794e-06, + "loss": 1.6448, + "step": 1288 + }, + { + "epoch": 0.8633623576691226, + "grad_norm": 3.2686266235726484, + "learning_rate": 4.798786607390056e-06, + "loss": 1.7736, + "step": 1289 + }, + { + "epoch": 0.8640321500334897, + "grad_norm": 3.2608623554553793, + "learning_rate": 4.798211772375437e-06, + "loss": 1.6538, + "step": 1290 + }, + { + "epoch": 0.8647019423978567, + "grad_norm": 2.4884808904353375, + "learning_rate": 4.797636151954386e-06, + "loss": 1.5752, + "step": 1291 + }, + { + "epoch": 0.8653717347622237, + "grad_norm": 1.874143841684833, + "learning_rate": 4.797059746323619e-06, + "loss": 1.7738, + "step": 1292 + }, + { + "epoch": 0.8660415271265908, + "grad_norm": 1.7393038550502105, + "learning_rate": 4.796482555680121e-06, + "loss": 1.398, + "step": 1293 + }, + { + "epoch": 0.8667113194909578, + "grad_norm": 4.6866305524719625, + "learning_rate": 4.795904580221144e-06, + "loss": 1.583, + "step": 1294 + }, + { + "epoch": 0.8673811118553248, + "grad_norm": 2.6996243876399983, + "learning_rate": 4.795325820144211e-06, + "loss": 1.4445, + "step": 1295 + }, + { + "epoch": 0.8680509042196919, + "grad_norm": 3.636506037867529, + "learning_rate": 4.79474627564711e-06, + "loss": 1.6301, + "step": 1296 + }, + { + "epoch": 0.8687206965840589, + "grad_norm": 1.660161983005232, + "learning_rate": 4.794165946927898e-06, + "loss": 1.5183, + "step": 1297 + }, + { + "epoch": 0.869390488948426, + "grad_norm": 2.131499114261242, + "learning_rate": 4.793584834184902e-06, + "loss": 1.5617, + "step": 1298 + }, + { + "epoch": 0.870060281312793, + "grad_norm": 1.9709651848816683, + "learning_rate": 4.793002937616715e-06, + "loss": 1.5489, + "step": 1299 + }, + { + "epoch": 0.87073007367716, + "grad_norm": 2.2433777347173973, + "learning_rate": 4.792420257422199e-06, + "loss": 1.2957, + "step": 1300 + }, + { + "epoch": 0.8713998660415271, + "grad_norm": 1.8836942706541067, + "learning_rate": 4.79183679380048e-06, + "loss": 1.6186, + "step": 1301 + }, + { + "epoch": 0.8720696584058941, + "grad_norm": 2.7667857592326537, + "learning_rate": 4.791252546950959e-06, + "loss": 1.6936, + "step": 1302 + }, + { + "epoch": 0.8727394507702613, + "grad_norm": 1.6196962392617835, + "learning_rate": 4.790667517073297e-06, + "loss": 1.7019, + "step": 1303 + }, + { + "epoch": 0.8734092431346283, + "grad_norm": 2.093723262999209, + "learning_rate": 4.7900817043674274e-06, + "loss": 1.5075, + "step": 1304 + }, + { + "epoch": 0.8740790354989953, + "grad_norm": 1.9775613353501162, + "learning_rate": 4.78949510903355e-06, + "loss": 1.5935, + "step": 1305 + }, + { + "epoch": 0.8747488278633624, + "grad_norm": 2.4402001500479544, + "learning_rate": 4.788907731272132e-06, + "loss": 1.5513, + "step": 1306 + }, + { + "epoch": 0.8754186202277294, + "grad_norm": 1.924925434133836, + "learning_rate": 
4.788319571283907e-06, + "loss": 1.3334, + "step": 1307 + }, + { + "epoch": 0.8760884125920965, + "grad_norm": 1.8418269122081046, + "learning_rate": 4.787730629269877e-06, + "loss": 1.7094, + "step": 1308 + }, + { + "epoch": 0.8767582049564635, + "grad_norm": 1.9471163261827173, + "learning_rate": 4.7871409054313125e-06, + "loss": 1.6477, + "step": 1309 + }, + { + "epoch": 0.8774279973208305, + "grad_norm": 1.8072387964860488, + "learning_rate": 4.786550399969748e-06, + "loss": 1.5329, + "step": 1310 + }, + { + "epoch": 0.8780977896851976, + "grad_norm": 1.7101854766557318, + "learning_rate": 4.7859591130869876e-06, + "loss": 1.5371, + "step": 1311 + }, + { + "epoch": 0.8787675820495646, + "grad_norm": 1.5999407024685512, + "learning_rate": 4.785367044985101e-06, + "loss": 1.609, + "step": 1312 + }, + { + "epoch": 0.8794373744139317, + "grad_norm": 1.487024440474636, + "learning_rate": 4.784774195866427e-06, + "loss": 1.769, + "step": 1313 + }, + { + "epoch": 0.8801071667782987, + "grad_norm": 1.820142261693874, + "learning_rate": 4.784180565933567e-06, + "loss": 1.711, + "step": 1314 + }, + { + "epoch": 0.8807769591426657, + "grad_norm": 5.200758992978447, + "learning_rate": 4.7835861553893955e-06, + "loss": 1.7043, + "step": 1315 + }, + { + "epoch": 0.8814467515070328, + "grad_norm": 3.4261550916472983, + "learning_rate": 4.782990964437048e-06, + "loss": 1.6082, + "step": 1316 + }, + { + "epoch": 0.8821165438713998, + "grad_norm": 5.217425870726873, + "learning_rate": 4.782394993279931e-06, + "loss": 1.6178, + "step": 1317 + }, + { + "epoch": 0.882786336235767, + "grad_norm": 1.7587972858240342, + "learning_rate": 4.7817982421217134e-06, + "loss": 1.6178, + "step": 1318 + }, + { + "epoch": 0.883456128600134, + "grad_norm": 1.6213736926215023, + "learning_rate": 4.781200711166335e-06, + "loss": 1.5891, + "step": 1319 + }, + { + "epoch": 0.884125920964501, + "grad_norm": 1.649634392556891, + "learning_rate": 4.780602400617999e-06, + "loss": 1.6739, + "step": 1320 + }, + { + "epoch": 0.8847957133288681, + "grad_norm": 1.6104273075556252, + "learning_rate": 4.780003310681177e-06, + "loss": 1.5753, + "step": 1321 + }, + { + "epoch": 0.8854655056932351, + "grad_norm": 1.9799235763841707, + "learning_rate": 4.779403441560604e-06, + "loss": 1.6984, + "step": 1322 + }, + { + "epoch": 0.8861352980576022, + "grad_norm": 1.6324128740132242, + "learning_rate": 4.778802793461287e-06, + "loss": 1.4628, + "step": 1323 + }, + { + "epoch": 0.8868050904219692, + "grad_norm": 1.7102874158202521, + "learning_rate": 4.778201366588491e-06, + "loss": 1.6145, + "step": 1324 + }, + { + "epoch": 0.8874748827863362, + "grad_norm": 1.634251556030998, + "learning_rate": 4.777599161147755e-06, + "loss": 1.6881, + "step": 1325 + }, + { + "epoch": 0.8881446751507033, + "grad_norm": 1.6574395864056426, + "learning_rate": 4.77699617734488e-06, + "loss": 1.5808, + "step": 1326 + }, + { + "epoch": 0.8888144675150703, + "grad_norm": 3.2070361439831405, + "learning_rate": 4.7763924153859334e-06, + "loss": 1.5242, + "step": 1327 + }, + { + "epoch": 0.8894842598794374, + "grad_norm": 1.6155111000076556, + "learning_rate": 4.775787875477248e-06, + "loss": 1.4715, + "step": 1328 + }, + { + "epoch": 0.8901540522438044, + "grad_norm": 3.168268611494752, + "learning_rate": 4.775182557825426e-06, + "loss": 1.458, + "step": 1329 + }, + { + "epoch": 0.8908238446081714, + "grad_norm": 1.7386487569006133, + "learning_rate": 4.77457646263733e-06, + "loss": 1.6481, + "step": 1330 + }, + { + "epoch": 0.8914936369725385, + "grad_norm": 
1.5505517976141991, + "learning_rate": 4.773969590120092e-06, + "loss": 1.4922, + "step": 1331 + }, + { + "epoch": 0.8921634293369055, + "grad_norm": 1.5301067166575544, + "learning_rate": 4.77336194048111e-06, + "loss": 1.433, + "step": 1332 + }, + { + "epoch": 0.8928332217012726, + "grad_norm": 2.5093474026830793, + "learning_rate": 4.7727535139280446e-06, + "loss": 1.5839, + "step": 1333 + }, + { + "epoch": 0.8935030140656397, + "grad_norm": 1.6790905075348797, + "learning_rate": 4.772144310668824e-06, + "loss": 1.8321, + "step": 1334 + }, + { + "epoch": 0.8941728064300067, + "grad_norm": 2.063078371426315, + "learning_rate": 4.771534330911641e-06, + "loss": 1.628, + "step": 1335 + }, + { + "epoch": 0.8948425987943738, + "grad_norm": 1.5006128090087962, + "learning_rate": 4.770923574864957e-06, + "loss": 1.6338, + "step": 1336 + }, + { + "epoch": 0.8955123911587408, + "grad_norm": 1.613105271310712, + "learning_rate": 4.770312042737491e-06, + "loss": 1.6878, + "step": 1337 + }, + { + "epoch": 0.8961821835231079, + "grad_norm": 3.0110026610511014, + "learning_rate": 4.769699734738237e-06, + "loss": 1.5998, + "step": 1338 + }, + { + "epoch": 0.8968519758874749, + "grad_norm": 1.6840867597507472, + "learning_rate": 4.769086651076447e-06, + "loss": 1.7849, + "step": 1339 + }, + { + "epoch": 0.8975217682518419, + "grad_norm": 2.3054886475358543, + "learning_rate": 4.768472791961639e-06, + "loss": 1.7082, + "step": 1340 + }, + { + "epoch": 0.898191560616209, + "grad_norm": 1.7919180198115396, + "learning_rate": 4.7678581576036e-06, + "loss": 1.3027, + "step": 1341 + }, + { + "epoch": 0.898861352980576, + "grad_norm": 2.464176121296641, + "learning_rate": 4.767242748212379e-06, + "loss": 1.5089, + "step": 1342 + }, + { + "epoch": 0.899531145344943, + "grad_norm": 2.1217387333008264, + "learning_rate": 4.766626563998288e-06, + "loss": 1.729, + "step": 1343 + }, + { + "epoch": 0.9002009377093101, + "grad_norm": 1.5994175475231163, + "learning_rate": 4.766009605171908e-06, + "loss": 1.3895, + "step": 1344 + }, + { + "epoch": 0.9008707300736771, + "grad_norm": 2.473661997231294, + "learning_rate": 4.765391871944081e-06, + "loss": 1.6935, + "step": 1345 + }, + { + "epoch": 0.9015405224380442, + "grad_norm": 1.8516134848314691, + "learning_rate": 4.764773364525916e-06, + "loss": 1.4127, + "step": 1346 + }, + { + "epoch": 0.9022103148024112, + "grad_norm": 4.104974638454503, + "learning_rate": 4.7641540831287884e-06, + "loss": 1.4963, + "step": 1347 + }, + { + "epoch": 0.9028801071667782, + "grad_norm": 2.2334996723452485, + "learning_rate": 4.763534027964332e-06, + "loss": 1.4485, + "step": 1348 + }, + { + "epoch": 0.9035498995311454, + "grad_norm": 2.001046503063752, + "learning_rate": 4.76291319924445e-06, + "loss": 1.5808, + "step": 1349 + }, + { + "epoch": 0.9042196918955124, + "grad_norm": 1.688428089079836, + "learning_rate": 4.762291597181309e-06, + "loss": 1.4627, + "step": 1350 + }, + { + "epoch": 0.9048894842598795, + "grad_norm": 3.7811625423721456, + "learning_rate": 4.7616692219873394e-06, + "loss": 1.4737, + "step": 1351 + }, + { + "epoch": 0.9055592766242465, + "grad_norm": 2.7723894518056196, + "learning_rate": 4.761046073875235e-06, + "loss": 1.6791, + "step": 1352 + }, + { + "epoch": 0.9062290689886136, + "grad_norm": 1.8468705038565394, + "learning_rate": 4.760422153057956e-06, + "loss": 1.6175, + "step": 1353 + }, + { + "epoch": 0.9068988613529806, + "grad_norm": 1.576930418681883, + "learning_rate": 4.759797459748724e-06, + "loss": 1.7295, + "step": 1354 + }, + { + "epoch": 
0.9075686537173476, + "grad_norm": 1.7203766949367765, + "learning_rate": 4.759171994161027e-06, + "loss": 1.6171, + "step": 1355 + }, + { + "epoch": 0.9082384460817147, + "grad_norm": 2.3150223835901973, + "learning_rate": 4.758545756508616e-06, + "loss": 1.716, + "step": 1356 + }, + { + "epoch": 0.9089082384460817, + "grad_norm": 2.4188244078780374, + "learning_rate": 4.757918747005504e-06, + "loss": 1.6435, + "step": 1357 + }, + { + "epoch": 0.9095780308104487, + "grad_norm": 2.1914905480796483, + "learning_rate": 4.757290965865971e-06, + "loss": 1.6938, + "step": 1358 + }, + { + "epoch": 0.9102478231748158, + "grad_norm": 3.2193561329064178, + "learning_rate": 4.756662413304559e-06, + "loss": 1.4748, + "step": 1359 + }, + { + "epoch": 0.9109176155391828, + "grad_norm": 2.287304200103759, + "learning_rate": 4.756033089536073e-06, + "loss": 1.5732, + "step": 1360 + }, + { + "epoch": 0.9115874079035499, + "grad_norm": 2.35578399679824, + "learning_rate": 4.755402994775583e-06, + "loss": 1.6088, + "step": 1361 + }, + { + "epoch": 0.9122572002679169, + "grad_norm": 1.7321349304426008, + "learning_rate": 4.754772129238423e-06, + "loss": 1.6808, + "step": 1362 + }, + { + "epoch": 0.9129269926322839, + "grad_norm": 5.470293948212696, + "learning_rate": 4.754140493140187e-06, + "loss": 1.6051, + "step": 1363 + }, + { + "epoch": 0.9135967849966511, + "grad_norm": 2.6648017461408777, + "learning_rate": 4.753508086696735e-06, + "loss": 1.5751, + "step": 1364 + }, + { + "epoch": 0.9142665773610181, + "grad_norm": 1.8253489781555927, + "learning_rate": 4.752874910124191e-06, + "loss": 1.6707, + "step": 1365 + }, + { + "epoch": 0.9149363697253852, + "grad_norm": 1.832751245059737, + "learning_rate": 4.75224096363894e-06, + "loss": 1.593, + "step": 1366 + }, + { + "epoch": 0.9156061620897522, + "grad_norm": 2.497528163805989, + "learning_rate": 4.751606247457632e-06, + "loss": 1.632, + "step": 1367 + }, + { + "epoch": 0.9162759544541192, + "grad_norm": 1.6626277008713557, + "learning_rate": 4.750970761797179e-06, + "loss": 1.6804, + "step": 1368 + }, + { + "epoch": 0.9169457468184863, + "grad_norm": 3.044946922263043, + "learning_rate": 4.750334506874756e-06, + "loss": 1.4384, + "step": 1369 + }, + { + "epoch": 0.9176155391828533, + "grad_norm": 1.8764861469475689, + "learning_rate": 4.749697482907801e-06, + "loss": 1.6007, + "step": 1370 + }, + { + "epoch": 0.9182853315472204, + "grad_norm": 1.471747566069408, + "learning_rate": 4.749059690114016e-06, + "loss": 1.4072, + "step": 1371 + }, + { + "epoch": 0.9189551239115874, + "grad_norm": 1.6756133291930904, + "learning_rate": 4.7484211287113625e-06, + "loss": 1.7875, + "step": 1372 + }, + { + "epoch": 0.9196249162759544, + "grad_norm": 1.5159347895847983, + "learning_rate": 4.747781798918068e-06, + "loss": 1.4402, + "step": 1373 + }, + { + "epoch": 0.9202947086403215, + "grad_norm": 1.4946763456946315, + "learning_rate": 4.7471417009526214e-06, + "loss": 1.6753, + "step": 1374 + }, + { + "epoch": 0.9209645010046885, + "grad_norm": 1.7261876811584878, + "learning_rate": 4.746500835033773e-06, + "loss": 1.633, + "step": 1375 + }, + { + "epoch": 0.9216342933690556, + "grad_norm": 2.5236657626311794, + "learning_rate": 4.745859201380539e-06, + "loss": 1.5886, + "step": 1376 + }, + { + "epoch": 0.9223040857334226, + "grad_norm": 2.3802164786563913, + "learning_rate": 4.7452168002121935e-06, + "loss": 1.442, + "step": 1377 + }, + { + "epoch": 0.9229738780977896, + "grad_norm": 1.6426233535890262, + "learning_rate": 4.744573631748276e-06, + "loss": 1.6041, 
+ "step": 1378 + }, + { + "epoch": 0.9236436704621568, + "grad_norm": 1.6376472680815275, + "learning_rate": 4.743929696208587e-06, + "loss": 1.5405, + "step": 1379 + }, + { + "epoch": 0.9243134628265238, + "grad_norm": 1.991742740392972, + "learning_rate": 4.743284993813189e-06, + "loss": 1.5713, + "step": 1380 + }, + { + "epoch": 0.9249832551908909, + "grad_norm": 2.052722345187809, + "learning_rate": 4.742639524782408e-06, + "loss": 1.4647, + "step": 1381 + }, + { + "epoch": 0.9256530475552579, + "grad_norm": 2.9790415253071862, + "learning_rate": 4.74199328933683e-06, + "loss": 1.5579, + "step": 1382 + }, + { + "epoch": 0.9263228399196249, + "grad_norm": 2.195708170267665, + "learning_rate": 4.7413462876973035e-06, + "loss": 1.5091, + "step": 1383 + }, + { + "epoch": 0.926992632283992, + "grad_norm": 2.2428351157545996, + "learning_rate": 4.740698520084941e-06, + "loss": 1.6194, + "step": 1384 + }, + { + "epoch": 0.927662424648359, + "grad_norm": 4.419541536367736, + "learning_rate": 4.740049986721112e-06, + "loss": 1.5986, + "step": 1385 + }, + { + "epoch": 0.9283322170127261, + "grad_norm": 2.0196027268371024, + "learning_rate": 4.739400687827454e-06, + "loss": 1.5814, + "step": 1386 + }, + { + "epoch": 0.9290020093770931, + "grad_norm": 1.5575974174216214, + "learning_rate": 4.738750623625862e-06, + "loss": 1.5368, + "step": 1387 + }, + { + "epoch": 0.9296718017414601, + "grad_norm": 2.311307140647184, + "learning_rate": 4.7380997943384925e-06, + "loss": 1.8701, + "step": 1388 + }, + { + "epoch": 0.9303415941058272, + "grad_norm": 1.67626631467201, + "learning_rate": 4.737448200187763e-06, + "loss": 1.5954, + "step": 1389 + }, + { + "epoch": 0.9310113864701942, + "grad_norm": 1.9027061796184248, + "learning_rate": 4.736795841396356e-06, + "loss": 1.4016, + "step": 1390 + }, + { + "epoch": 0.9316811788345613, + "grad_norm": 1.6265389294843269, + "learning_rate": 4.7361427181872125e-06, + "loss": 1.66, + "step": 1391 + }, + { + "epoch": 0.9323509711989283, + "grad_norm": 1.7523608068560732, + "learning_rate": 4.7354888307835344e-06, + "loss": 1.5466, + "step": 1392 + }, + { + "epoch": 0.9330207635632953, + "grad_norm": 1.5861916752275151, + "learning_rate": 4.734834179408786e-06, + "loss": 1.4305, + "step": 1393 + }, + { + "epoch": 0.9336905559276625, + "grad_norm": 1.7750386660173392, + "learning_rate": 4.7341787642866935e-06, + "loss": 1.6594, + "step": 1394 + }, + { + "epoch": 0.9343603482920295, + "grad_norm": 1.7210940247274633, + "learning_rate": 4.73352258564124e-06, + "loss": 1.4027, + "step": 1395 + }, + { + "epoch": 0.9350301406563966, + "grad_norm": 1.6329038325427965, + "learning_rate": 4.732865643696676e-06, + "loss": 1.6049, + "step": 1396 + }, + { + "epoch": 0.9356999330207636, + "grad_norm": 2.1853296707029335, + "learning_rate": 4.732207938677507e-06, + "loss": 1.29, + "step": 1397 + }, + { + "epoch": 0.9363697253851306, + "grad_norm": 2.487843592470518, + "learning_rate": 4.731549470808501e-06, + "loss": 1.6281, + "step": 1398 + }, + { + "epoch": 0.9370395177494977, + "grad_norm": 2.4183575795302756, + "learning_rate": 4.73089024031469e-06, + "loss": 1.5256, + "step": 1399 + }, + { + "epoch": 0.9377093101138647, + "grad_norm": 1.682416497996635, + "learning_rate": 4.730230247421361e-06, + "loss": 1.6384, + "step": 1400 + }, + { + "epoch": 0.9383791024782318, + "grad_norm": 1.6244367232494115, + "learning_rate": 4.729569492354066e-06, + "loss": 1.5693, + "step": 1401 + }, + { + "epoch": 0.9390488948425988, + "grad_norm": 2.229066465220502, + "learning_rate": 
4.728907975338615e-06, + "loss": 1.5709, + "step": 1402 + }, + { + "epoch": 0.9397186872069658, + "grad_norm": 1.6928852917514094, + "learning_rate": 4.728245696601081e-06, + "loss": 1.6245, + "step": 1403 + }, + { + "epoch": 0.9403884795713329, + "grad_norm": 5.001981627432854, + "learning_rate": 4.727582656367794e-06, + "loss": 1.3826, + "step": 1404 + }, + { + "epoch": 0.9410582719356999, + "grad_norm": 1.725179874697997, + "learning_rate": 4.726918854865345e-06, + "loss": 1.4981, + "step": 1405 + }, + { + "epoch": 0.941728064300067, + "grad_norm": 1.6632006020921049, + "learning_rate": 4.726254292320589e-06, + "loss": 1.645, + "step": 1406 + }, + { + "epoch": 0.942397856664434, + "grad_norm": 1.8179496406153572, + "learning_rate": 4.725588968960636e-06, + "loss": 1.6593, + "step": 1407 + }, + { + "epoch": 0.943067649028801, + "grad_norm": 3.3043882292262583, + "learning_rate": 4.724922885012859e-06, + "loss": 1.5246, + "step": 1408 + }, + { + "epoch": 0.9437374413931681, + "grad_norm": 2.354489807922938, + "learning_rate": 4.7242560407048886e-06, + "loss": 1.7145, + "step": 1409 + }, + { + "epoch": 0.9444072337575352, + "grad_norm": 2.3224766222566506, + "learning_rate": 4.723588436264619e-06, + "loss": 1.5106, + "step": 1410 + }, + { + "epoch": 0.9450770261219023, + "grad_norm": 4.723447978020994, + "learning_rate": 4.7229200719201994e-06, + "loss": 1.5877, + "step": 1411 + }, + { + "epoch": 0.9457468184862693, + "grad_norm": 1.9144934334401946, + "learning_rate": 4.7222509479000425e-06, + "loss": 1.7444, + "step": 1412 + }, + { + "epoch": 0.9464166108506363, + "grad_norm": 1.596037680880698, + "learning_rate": 4.721581064432818e-06, + "loss": 1.54, + "step": 1413 + }, + { + "epoch": 0.9470864032150034, + "grad_norm": 4.882666546569319, + "learning_rate": 4.7209104217474595e-06, + "loss": 1.7926, + "step": 1414 + }, + { + "epoch": 0.9477561955793704, + "grad_norm": 4.46766420081222, + "learning_rate": 4.720239020073154e-06, + "loss": 1.699, + "step": 1415 + }, + { + "epoch": 0.9484259879437374, + "grad_norm": 1.683231057316115, + "learning_rate": 4.719566859639352e-06, + "loss": 1.5223, + "step": 1416 + }, + { + "epoch": 0.9490957803081045, + "grad_norm": 1.6633790432598439, + "learning_rate": 4.7188939406757615e-06, + "loss": 1.6936, + "step": 1417 + }, + { + "epoch": 0.9497655726724715, + "grad_norm": 5.990825690931383, + "learning_rate": 4.71822026341235e-06, + "loss": 1.8062, + "step": 1418 + }, + { + "epoch": 0.9504353650368386, + "grad_norm": 2.62089606178454, + "learning_rate": 4.717545828079347e-06, + "loss": 1.6024, + "step": 1419 + }, + { + "epoch": 0.9511051574012056, + "grad_norm": 1.6639931214495889, + "learning_rate": 4.7168706349072355e-06, + "loss": 1.7526, + "step": 1420 + }, + { + "epoch": 0.9517749497655726, + "grad_norm": 2.3536170402635457, + "learning_rate": 4.716194684126762e-06, + "loss": 1.716, + "step": 1421 + }, + { + "epoch": 0.9524447421299397, + "grad_norm": 1.922598262167524, + "learning_rate": 4.715517975968932e-06, + "loss": 1.647, + "step": 1422 + }, + { + "epoch": 0.9531145344943067, + "grad_norm": 1.8895305453158846, + "learning_rate": 4.7148405106650055e-06, + "loss": 1.6025, + "step": 1423 + }, + { + "epoch": 0.9537843268586738, + "grad_norm": 2.7018282823285866, + "learning_rate": 4.714162288446505e-06, + "loss": 1.6643, + "step": 1424 + }, + { + "epoch": 0.9544541192230409, + "grad_norm": 1.7075606103544165, + "learning_rate": 4.713483309545212e-06, + "loss": 1.7405, + "step": 1425 + }, + { + "epoch": 0.955123911587408, + "grad_norm": 
1.6626375918592804, + "learning_rate": 4.712803574193165e-06, + "loss": 1.6432, + "step": 1426 + }, + { + "epoch": 0.955793703951775, + "grad_norm": 1.6984782978539645, + "learning_rate": 4.71212308262266e-06, + "loss": 1.6178, + "step": 1427 + }, + { + "epoch": 0.956463496316142, + "grad_norm": 2.1998366720398295, + "learning_rate": 4.711441835066253e-06, + "loss": 1.4646, + "step": 1428 + }, + { + "epoch": 0.9571332886805091, + "grad_norm": 2.1841927565469677, + "learning_rate": 4.710759831756759e-06, + "loss": 1.5912, + "step": 1429 + }, + { + "epoch": 0.9578030810448761, + "grad_norm": 1.9108313325780255, + "learning_rate": 4.71007707292725e-06, + "loss": 1.3493, + "step": 1430 + }, + { + "epoch": 0.9584728734092431, + "grad_norm": 3.8408396143611503, + "learning_rate": 4.7093935588110585e-06, + "loss": 1.6296, + "step": 1431 + }, + { + "epoch": 0.9591426657736102, + "grad_norm": 2.154079856141397, + "learning_rate": 4.708709289641769e-06, + "loss": 1.7323, + "step": 1432 + }, + { + "epoch": 0.9598124581379772, + "grad_norm": 3.546102525687208, + "learning_rate": 4.7080242656532326e-06, + "loss": 1.678, + "step": 1433 + }, + { + "epoch": 0.9604822505023443, + "grad_norm": 2.4793899573639444, + "learning_rate": 4.707338487079553e-06, + "loss": 1.5034, + "step": 1434 + }, + { + "epoch": 0.9611520428667113, + "grad_norm": 1.9390366084807495, + "learning_rate": 4.70665195415509e-06, + "loss": 1.7163, + "step": 1435 + }, + { + "epoch": 0.9618218352310783, + "grad_norm": 1.7154025232965795, + "learning_rate": 4.7059646671144665e-06, + "loss": 1.5922, + "step": 1436 + }, + { + "epoch": 0.9624916275954454, + "grad_norm": 2.4206872718117114, + "learning_rate": 4.705276626192561e-06, + "loss": 1.4659, + "step": 1437 + }, + { + "epoch": 0.9631614199598124, + "grad_norm": 2.80197570034924, + "learning_rate": 4.7045878316245075e-06, + "loss": 1.4661, + "step": 1438 + }, + { + "epoch": 0.9638312123241795, + "grad_norm": 13.13747451668712, + "learning_rate": 4.703898283645701e-06, + "loss": 1.5935, + "step": 1439 + }, + { + "epoch": 0.9645010046885466, + "grad_norm": 2.0524723402125113, + "learning_rate": 4.703207982491791e-06, + "loss": 1.4808, + "step": 1440 + }, + { + "epoch": 0.9651707970529136, + "grad_norm": 1.8459233793101222, + "learning_rate": 4.702516928398686e-06, + "loss": 1.7109, + "step": 1441 + }, + { + "epoch": 0.9658405894172807, + "grad_norm": 1.7491231542677455, + "learning_rate": 4.7018251216025525e-06, + "loss": 1.5025, + "step": 1442 + }, + { + "epoch": 0.9665103817816477, + "grad_norm": 1.6924361779867354, + "learning_rate": 4.701132562339812e-06, + "loss": 1.4804, + "step": 1443 + }, + { + "epoch": 0.9671801741460148, + "grad_norm": 1.6434374953113284, + "learning_rate": 4.7004392508471445e-06, + "loss": 1.5904, + "step": 1444 + }, + { + "epoch": 0.9678499665103818, + "grad_norm": 2.457800512388633, + "learning_rate": 4.699745187361487e-06, + "loss": 1.6993, + "step": 1445 + }, + { + "epoch": 0.9685197588747488, + "grad_norm": 2.3952611721654473, + "learning_rate": 4.699050372120035e-06, + "loss": 1.4595, + "step": 1446 + }, + { + "epoch": 0.9691895512391159, + "grad_norm": 2.962991013634981, + "learning_rate": 4.698354805360237e-06, + "loss": 1.4253, + "step": 1447 + }, + { + "epoch": 0.9698593436034829, + "grad_norm": 2.5001708855802955, + "learning_rate": 4.697658487319803e-06, + "loss": 1.6589, + "step": 1448 + }, + { + "epoch": 0.97052913596785, + "grad_norm": 2.256280837442918, + "learning_rate": 4.696961418236695e-06, + "loss": 1.6082, + "step": 1449 + }, + { + "epoch": 
0.971198928332217, + "grad_norm": 1.6009190801714777, + "learning_rate": 4.696263598349136e-06, + "loss": 1.5337, + "step": 1450 + }, + { + "epoch": 0.971868720696584, + "grad_norm": 1.9379894586741453, + "learning_rate": 4.6955650278956025e-06, + "loss": 1.7265, + "step": 1451 + }, + { + "epoch": 0.9725385130609511, + "grad_norm": 2.7529673086902626, + "learning_rate": 4.69486570711483e-06, + "loss": 1.645, + "step": 1452 + }, + { + "epoch": 0.9732083054253181, + "grad_norm": 2.1048056771391632, + "learning_rate": 4.6941656362458074e-06, + "loss": 1.7017, + "step": 1453 + }, + { + "epoch": 0.9738780977896851, + "grad_norm": 2.5798304864052173, + "learning_rate": 4.693464815527783e-06, + "loss": 1.5736, + "step": 1454 + }, + { + "epoch": 0.9745478901540523, + "grad_norm": 1.8001825107695868, + "learning_rate": 4.6927632452002595e-06, + "loss": 1.689, + "step": 1455 + }, + { + "epoch": 0.9752176825184193, + "grad_norm": 1.8031743568764238, + "learning_rate": 4.692060925502996e-06, + "loss": 1.5568, + "step": 1456 + }, + { + "epoch": 0.9758874748827864, + "grad_norm": 1.7567727401718056, + "learning_rate": 4.691357856676009e-06, + "loss": 1.6911, + "step": 1457 + }, + { + "epoch": 0.9765572672471534, + "grad_norm": 2.0838317221129827, + "learning_rate": 4.690654038959568e-06, + "loss": 1.7071, + "step": 1458 + }, + { + "epoch": 0.9772270596115205, + "grad_norm": 2.440202551361925, + "learning_rate": 4.689949472594203e-06, + "loss": 1.4912, + "step": 1459 + }, + { + "epoch": 0.9778968519758875, + "grad_norm": 1.6813032595981066, + "learning_rate": 4.689244157820696e-06, + "loss": 1.4478, + "step": 1460 + }, + { + "epoch": 0.9785666443402545, + "grad_norm": 2.916467908896828, + "learning_rate": 4.6885380948800854e-06, + "loss": 1.5451, + "step": 1461 + }, + { + "epoch": 0.9792364367046216, + "grad_norm": 2.4413468305788, + "learning_rate": 4.6878312840136675e-06, + "loss": 1.5708, + "step": 1462 + }, + { + "epoch": 0.9799062290689886, + "grad_norm": 2.4726394815688137, + "learning_rate": 4.687123725462991e-06, + "loss": 1.595, + "step": 1463 + }, + { + "epoch": 0.9805760214333556, + "grad_norm": 2.012740347669707, + "learning_rate": 4.686415419469863e-06, + "loss": 1.4247, + "step": 1464 + }, + { + "epoch": 0.9812458137977227, + "grad_norm": 1.8439998893968703, + "learning_rate": 4.685706366276344e-06, + "loss": 1.619, + "step": 1465 + }, + { + "epoch": 0.9819156061620897, + "grad_norm": 3.036425428617017, + "learning_rate": 4.684996566124751e-06, + "loss": 1.5606, + "step": 1466 + }, + { + "epoch": 0.9825853985264568, + "grad_norm": 1.9130913443941235, + "learning_rate": 4.684286019257656e-06, + "loss": 1.5351, + "step": 1467 + }, + { + "epoch": 0.9832551908908238, + "grad_norm": 3.491160310526752, + "learning_rate": 4.683574725917884e-06, + "loss": 1.6203, + "step": 1468 + }, + { + "epoch": 0.9839249832551908, + "grad_norm": 2.7344616414335134, + "learning_rate": 4.682862686348522e-06, + "loss": 1.5671, + "step": 1469 + }, + { + "epoch": 0.9845947756195579, + "grad_norm": 1.9237166205306495, + "learning_rate": 4.682149900792902e-06, + "loss": 1.6376, + "step": 1470 + }, + { + "epoch": 0.985264567983925, + "grad_norm": 2.350518263695344, + "learning_rate": 4.68143636949462e-06, + "loss": 1.508, + "step": 1471 + }, + { + "epoch": 0.9859343603482921, + "grad_norm": 1.8219213788573003, + "learning_rate": 4.680722092697521e-06, + "loss": 1.5197, + "step": 1472 + }, + { + "epoch": 0.9866041527126591, + "grad_norm": 1.683750383588073, + "learning_rate": 4.6800070706457065e-06, + "loss": 1.5538, + 
"step": 1473 + }, + { + "epoch": 0.9872739450770261, + "grad_norm": 2.2446998927851456, + "learning_rate": 4.679291303583534e-06, + "loss": 1.6721, + "step": 1474 + }, + { + "epoch": 0.9879437374413932, + "grad_norm": 1.558285050615945, + "learning_rate": 4.6785747917556145e-06, + "loss": 1.6552, + "step": 1475 + }, + { + "epoch": 0.9886135298057602, + "grad_norm": 2.147136773705294, + "learning_rate": 4.677857535406813e-06, + "loss": 1.4463, + "step": 1476 + }, + { + "epoch": 0.9892833221701273, + "grad_norm": 1.6671968601753926, + "learning_rate": 4.67713953478225e-06, + "loss": 1.6145, + "step": 1477 + }, + { + "epoch": 0.9899531145344943, + "grad_norm": 2.0705068639862936, + "learning_rate": 4.676420790127301e-06, + "loss": 1.6832, + "step": 1478 + }, + { + "epoch": 0.9906229068988613, + "grad_norm": 2.0674740194800014, + "learning_rate": 4.675701301687592e-06, + "loss": 1.5386, + "step": 1479 + }, + { + "epoch": 0.9912926992632284, + "grad_norm": 1.6437698287926656, + "learning_rate": 4.674981069709008e-06, + "loss": 1.6041, + "step": 1480 + }, + { + "epoch": 0.9919624916275954, + "grad_norm": 2.106464120856585, + "learning_rate": 4.674260094437685e-06, + "loss": 1.7701, + "step": 1481 + }, + { + "epoch": 0.9926322839919625, + "grad_norm": 3.300143284374255, + "learning_rate": 4.673538376120015e-06, + "loss": 1.292, + "step": 1482 + }, + { + "epoch": 0.9933020763563295, + "grad_norm": 1.8082721903240218, + "learning_rate": 4.672815915002642e-06, + "loss": 1.6726, + "step": 1483 + }, + { + "epoch": 0.9939718687206965, + "grad_norm": 10.211489048835286, + "learning_rate": 4.672092711332464e-06, + "loss": 2.0493, + "step": 1484 + }, + { + "epoch": 0.9946416610850636, + "grad_norm": 2.103131040931886, + "learning_rate": 4.671368765356634e-06, + "loss": 1.6676, + "step": 1485 + }, + { + "epoch": 0.9953114534494307, + "grad_norm": 1.675141570070665, + "learning_rate": 4.67064407732256e-06, + "loss": 1.3279, + "step": 1486 + }, + { + "epoch": 0.9959812458137978, + "grad_norm": 1.6668323009963546, + "learning_rate": 4.6699186474779e-06, + "loss": 1.571, + "step": 1487 + }, + { + "epoch": 0.9966510381781648, + "grad_norm": 1.7536971199043736, + "learning_rate": 4.669192476070567e-06, + "loss": 1.5566, + "step": 1488 + }, + { + "epoch": 0.9973208305425318, + "grad_norm": 1.6481652312662325, + "learning_rate": 4.6684655633487285e-06, + "loss": 1.7539, + "step": 1489 + }, + { + "epoch": 0.9979906229068989, + "grad_norm": 2.360229105460337, + "learning_rate": 4.667737909560805e-06, + "loss": 1.5327, + "step": 1490 + }, + { + "epoch": 0.9986604152712659, + "grad_norm": 2.2797324573276336, + "learning_rate": 4.667009514955469e-06, + "loss": 1.6085, + "step": 1491 + }, + { + "epoch": 0.999330207635633, + "grad_norm": 5.673294767735898, + "learning_rate": 4.666280379781647e-06, + "loss": 1.5339, + "step": 1492 + }, + { + "epoch": 1.0, + "grad_norm": 1.8012024819338672, + "learning_rate": 4.66555050428852e-06, + "loss": 1.6037, + "step": 1493 + }, + { + "epoch": 1.000669792364367, + "grad_norm": 1.9930903170920202, + "learning_rate": 4.664819888725518e-06, + "loss": 1.6338, + "step": 1494 + }, + { + "epoch": 1.001339584728734, + "grad_norm": 1.8442770836137086, + "learning_rate": 4.664088533342329e-06, + "loss": 1.4882, + "step": 1495 + }, + { + "epoch": 1.0020093770931011, + "grad_norm": 1.7996702096618904, + "learning_rate": 4.66335643838889e-06, + "loss": 1.4398, + "step": 1496 + }, + { + "epoch": 1.0026791694574682, + "grad_norm": 1.7097523011467717, + "learning_rate": 4.662623604115392e-06, + 
"loss": 1.4299, + "step": 1497 + }, + { + "epoch": 1.0033489618218352, + "grad_norm": 1.6583503466176486, + "learning_rate": 4.66189003077228e-06, + "loss": 1.4105, + "step": 1498 + }, + { + "epoch": 1.0040187541862022, + "grad_norm": 2.127584294258686, + "learning_rate": 4.661155718610248e-06, + "loss": 1.5838, + "step": 1499 + }, + { + "epoch": 1.0046885465505693, + "grad_norm": 2.0321463166052633, + "learning_rate": 4.660420667880247e-06, + "loss": 1.5058, + "step": 1500 + }, + { + "epoch": 1.0053583389149363, + "grad_norm": 1.8464276204518422, + "learning_rate": 4.659684878833478e-06, + "loss": 1.4943, + "step": 1501 + }, + { + "epoch": 1.0060281312793034, + "grad_norm": 1.7539820705050115, + "learning_rate": 4.658948351721393e-06, + "loss": 1.6047, + "step": 1502 + }, + { + "epoch": 1.0066979236436704, + "grad_norm": 2.06148549869631, + "learning_rate": 4.658211086795699e-06, + "loss": 1.3822, + "step": 1503 + }, + { + "epoch": 1.0073677160080374, + "grad_norm": 1.9307878997726668, + "learning_rate": 4.657473084308354e-06, + "loss": 1.3822, + "step": 1504 + }, + { + "epoch": 1.0080375083724045, + "grad_norm": 2.016549620652912, + "learning_rate": 4.656734344511568e-06, + "loss": 1.5476, + "step": 1505 + }, + { + "epoch": 1.0087073007367715, + "grad_norm": 1.6948004487904502, + "learning_rate": 4.655994867657802e-06, + "loss": 1.3975, + "step": 1506 + }, + { + "epoch": 1.0093770931011385, + "grad_norm": 2.065263505082478, + "learning_rate": 4.65525465399977e-06, + "loss": 1.2739, + "step": 1507 + }, + { + "epoch": 1.0100468854655056, + "grad_norm": 1.7782800254156157, + "learning_rate": 4.654513703790438e-06, + "loss": 1.3857, + "step": 1508 + }, + { + "epoch": 1.0107166778298728, + "grad_norm": 2.015225333328401, + "learning_rate": 4.653772017283024e-06, + "loss": 1.5582, + "step": 1509 + }, + { + "epoch": 1.0113864701942399, + "grad_norm": 1.8815311826929402, + "learning_rate": 4.653029594730995e-06, + "loss": 1.4665, + "step": 1510 + }, + { + "epoch": 1.012056262558607, + "grad_norm": 1.9119033074934046, + "learning_rate": 4.652286436388074e-06, + "loss": 1.4683, + "step": 1511 + }, + { + "epoch": 1.012726054922974, + "grad_norm": 1.9088985508587697, + "learning_rate": 4.651542542508231e-06, + "loss": 1.1496, + "step": 1512 + }, + { + "epoch": 1.013395847287341, + "grad_norm": 1.8279138329864129, + "learning_rate": 4.65079791334569e-06, + "loss": 1.5332, + "step": 1513 + }, + { + "epoch": 1.014065639651708, + "grad_norm": 1.8308656636495255, + "learning_rate": 4.650052549154926e-06, + "loss": 1.5382, + "step": 1514 + }, + { + "epoch": 1.014735432016075, + "grad_norm": 2.732855340033793, + "learning_rate": 4.6493064501906646e-06, + "loss": 1.3223, + "step": 1515 + }, + { + "epoch": 1.0154052243804421, + "grad_norm": 1.7171145241453056, + "learning_rate": 4.648559616707881e-06, + "loss": 1.5139, + "step": 1516 + }, + { + "epoch": 1.0160750167448092, + "grad_norm": 2.6938627381832525, + "learning_rate": 4.647812048961807e-06, + "loss": 1.552, + "step": 1517 + }, + { + "epoch": 1.0167448091091762, + "grad_norm": 1.7257423230309057, + "learning_rate": 4.6470637472079185e-06, + "loss": 1.4277, + "step": 1518 + }, + { + "epoch": 1.0174146014735432, + "grad_norm": 2.6814313355295254, + "learning_rate": 4.646314711701945e-06, + "loss": 1.537, + "step": 1519 + }, + { + "epoch": 1.0180843938379103, + "grad_norm": 1.7489916975169482, + "learning_rate": 4.645564942699869e-06, + "loss": 1.4295, + "step": 1520 + }, + { + "epoch": 1.0187541862022773, + "grad_norm": 2.0491425041222118, + 
"learning_rate": 4.6448144404579186e-06, + "loss": 1.6639, + "step": 1521 + }, + { + "epoch": 1.0194239785666444, + "grad_norm": 6.402978127833841, + "learning_rate": 4.644063205232579e-06, + "loss": 1.4222, + "step": 1522 + }, + { + "epoch": 1.0200937709310114, + "grad_norm": 1.7053130716115195, + "learning_rate": 4.64331123728058e-06, + "loss": 1.3871, + "step": 1523 + }, + { + "epoch": 1.0207635632953784, + "grad_norm": 1.6977427351266021, + "learning_rate": 4.6425585368589056e-06, + "loss": 1.4683, + "step": 1524 + }, + { + "epoch": 1.0214333556597455, + "grad_norm": 2.1374652978778608, + "learning_rate": 4.6418051042247865e-06, + "loss": 1.3919, + "step": 1525 + }, + { + "epoch": 1.0221031480241125, + "grad_norm": 2.469617676820888, + "learning_rate": 4.64105093963571e-06, + "loss": 1.6184, + "step": 1526 + }, + { + "epoch": 1.0227729403884795, + "grad_norm": 1.6733374834100956, + "learning_rate": 4.640296043349405e-06, + "loss": 1.4432, + "step": 1527 + }, + { + "epoch": 1.0234427327528466, + "grad_norm": 1.9776646099428337, + "learning_rate": 4.639540415623857e-06, + "loss": 1.5912, + "step": 1528 + }, + { + "epoch": 1.0241125251172136, + "grad_norm": 1.6966713485863305, + "learning_rate": 4.638784056717299e-06, + "loss": 1.4566, + "step": 1529 + }, + { + "epoch": 1.0247823174815807, + "grad_norm": 1.8183707763304569, + "learning_rate": 4.638026966888214e-06, + "loss": 1.3812, + "step": 1530 + }, + { + "epoch": 1.0254521098459477, + "grad_norm": 2.2274416673643556, + "learning_rate": 4.637269146395336e-06, + "loss": 1.38, + "step": 1531 + }, + { + "epoch": 1.0261219022103147, + "grad_norm": 2.2056274317484608, + "learning_rate": 4.636510595497647e-06, + "loss": 1.3773, + "step": 1532 + }, + { + "epoch": 1.0267916945746818, + "grad_norm": 2.768506340405935, + "learning_rate": 4.635751314454379e-06, + "loss": 1.4634, + "step": 1533 + }, + { + "epoch": 1.0274614869390488, + "grad_norm": 2.6625258937570018, + "learning_rate": 4.634991303525014e-06, + "loss": 1.4007, + "step": 1534 + }, + { + "epoch": 1.0281312793034159, + "grad_norm": 1.9118134849018, + "learning_rate": 4.634230562969283e-06, + "loss": 1.5504, + "step": 1535 + }, + { + "epoch": 1.028801071667783, + "grad_norm": 2.5555140122394686, + "learning_rate": 4.633469093047167e-06, + "loss": 1.2112, + "step": 1536 + }, + { + "epoch": 1.02947086403215, + "grad_norm": 4.112610462491212, + "learning_rate": 4.632706894018897e-06, + "loss": 1.4096, + "step": 1537 + }, + { + "epoch": 1.030140656396517, + "grad_norm": 2.601545188879586, + "learning_rate": 4.63194396614495e-06, + "loss": 1.3503, + "step": 1538 + }, + { + "epoch": 1.0308104487608842, + "grad_norm": 1.9889669107461916, + "learning_rate": 4.6311803096860555e-06, + "loss": 1.5277, + "step": 1539 + }, + { + "epoch": 1.0314802411252513, + "grad_norm": 1.9308489482419335, + "learning_rate": 4.63041592490319e-06, + "loss": 1.3494, + "step": 1540 + }, + { + "epoch": 1.0321500334896183, + "grad_norm": 1.758437912196147, + "learning_rate": 4.62965081205758e-06, + "loss": 1.4191, + "step": 1541 + }, + { + "epoch": 1.0328198258539854, + "grad_norm": 1.8578629781512752, + "learning_rate": 4.6288849714107e-06, + "loss": 1.4254, + "step": 1542 + }, + { + "epoch": 1.0334896182183524, + "grad_norm": 2.0781268701459297, + "learning_rate": 4.628118403224273e-06, + "loss": 1.5718, + "step": 1543 + }, + { + "epoch": 1.0341594105827194, + "grad_norm": 1.7717062337966902, + "learning_rate": 4.627351107760272e-06, + "loss": 1.5919, + "step": 1544 + }, + { + "epoch": 1.0348292029470865, + 
"grad_norm": 1.8264428465060079, + "learning_rate": 4.626583085280918e-06, + "loss": 1.3688, + "step": 1545 + }, + { + "epoch": 1.0354989953114535, + "grad_norm": 1.6872953328700653, + "learning_rate": 4.625814336048679e-06, + "loss": 1.5281, + "step": 1546 + }, + { + "epoch": 1.0361687876758205, + "grad_norm": 1.7542310103770242, + "learning_rate": 4.625044860326274e-06, + "loss": 1.4128, + "step": 1547 + }, + { + "epoch": 1.0368385800401876, + "grad_norm": 2.8563798007371397, + "learning_rate": 4.624274658376667e-06, + "loss": 1.3198, + "step": 1548 + }, + { + "epoch": 1.0375083724045546, + "grad_norm": 2.0738815615936836, + "learning_rate": 4.623503730463075e-06, + "loss": 1.3552, + "step": 1549 + }, + { + "epoch": 1.0381781647689217, + "grad_norm": 1.8676160191984224, + "learning_rate": 4.622732076848957e-06, + "loss": 1.361, + "step": 1550 + }, + { + "epoch": 1.0388479571332887, + "grad_norm": 1.8671843655452962, + "learning_rate": 4.621959697798024e-06, + "loss": 1.4361, + "step": 1551 + }, + { + "epoch": 1.0395177494976557, + "grad_norm": 3.4933813746956224, + "learning_rate": 4.621186593574236e-06, + "loss": 1.3293, + "step": 1552 + }, + { + "epoch": 1.0401875418620228, + "grad_norm": 1.9084604427249785, + "learning_rate": 4.620412764441796e-06, + "loss": 1.4673, + "step": 1553 + }, + { + "epoch": 1.0408573342263898, + "grad_norm": 1.7246751964243077, + "learning_rate": 4.619638210665159e-06, + "loss": 1.3991, + "step": 1554 + }, + { + "epoch": 1.0415271265907569, + "grad_norm": 1.9754904649985434, + "learning_rate": 4.6188629325090276e-06, + "loss": 1.6496, + "step": 1555 + }, + { + "epoch": 1.042196918955124, + "grad_norm": 2.380929655362163, + "learning_rate": 4.618086930238348e-06, + "loss": 1.4083, + "step": 1556 + }, + { + "epoch": 1.042866711319491, + "grad_norm": 2.2471273311664834, + "learning_rate": 4.617310204118317e-06, + "loss": 1.3994, + "step": 1557 + }, + { + "epoch": 1.043536503683858, + "grad_norm": 1.756361937824022, + "learning_rate": 4.61653275441438e-06, + "loss": 1.4573, + "step": 1558 + }, + { + "epoch": 1.044206296048225, + "grad_norm": 2.23443481545335, + "learning_rate": 4.6157545813922265e-06, + "loss": 1.3805, + "step": 1559 + }, + { + "epoch": 1.044876088412592, + "grad_norm": 2.379803030143016, + "learning_rate": 4.614975685317794e-06, + "loss": 1.2477, + "step": 1560 + }, + { + "epoch": 1.045545880776959, + "grad_norm": 2.750060685361732, + "learning_rate": 4.61419606645727e-06, + "loss": 1.2781, + "step": 1561 + }, + { + "epoch": 1.0462156731413261, + "grad_norm": 3.8335446192192215, + "learning_rate": 4.613415725077084e-06, + "loss": 1.308, + "step": 1562 + }, + { + "epoch": 1.0468854655056932, + "grad_norm": 1.7470431470089853, + "learning_rate": 4.612634661443918e-06, + "loss": 1.3739, + "step": 1563 + }, + { + "epoch": 1.0475552578700602, + "grad_norm": 1.8203976390742889, + "learning_rate": 4.611852875824695e-06, + "loss": 1.6121, + "step": 1564 + }, + { + "epoch": 1.0482250502344272, + "grad_norm": 2.350063169444484, + "learning_rate": 4.61107036848659e-06, + "loss": 1.3754, + "step": 1565 + }, + { + "epoch": 1.0488948425987943, + "grad_norm": 2.7722599388642806, + "learning_rate": 4.610287139697021e-06, + "loss": 1.5129, + "step": 1566 + }, + { + "epoch": 1.0495646349631613, + "grad_norm": 2.4270855385542514, + "learning_rate": 4.609503189723654e-06, + "loss": 1.3701, + "step": 1567 + }, + { + "epoch": 1.0502344273275284, + "grad_norm": 4.896564970248193, + "learning_rate": 4.608718518834402e-06, + "loss": 1.4419, + "step": 1568 + }, + { + 
"epoch": 1.0509042196918954, + "grad_norm": 1.889858546434338, + "learning_rate": 4.6079331272974246e-06, + "loss": 1.4337, + "step": 1569 + }, + { + "epoch": 1.0515740120562627, + "grad_norm": 1.7436527391196086, + "learning_rate": 4.6071470153811245e-06, + "loss": 1.3395, + "step": 1570 + }, + { + "epoch": 1.0522438044206297, + "grad_norm": 2.296901680607646, + "learning_rate": 4.606360183354155e-06, + "loss": 1.1789, + "step": 1571 + }, + { + "epoch": 1.0529135967849967, + "grad_norm": 1.6210551982188663, + "learning_rate": 4.605572631485412e-06, + "loss": 1.377, + "step": 1572 + }, + { + "epoch": 1.0535833891493638, + "grad_norm": 1.740242412498489, + "learning_rate": 4.604784360044039e-06, + "loss": 1.4605, + "step": 1573 + }, + { + "epoch": 1.0542531815137308, + "grad_norm": 2.2327320143069738, + "learning_rate": 4.603995369299425e-06, + "loss": 1.4671, + "step": 1574 + }, + { + "epoch": 1.0549229738780979, + "grad_norm": 1.800732340343318, + "learning_rate": 4.603205659521207e-06, + "loss": 1.2522, + "step": 1575 + }, + { + "epoch": 1.055592766242465, + "grad_norm": 1.922764199875792, + "learning_rate": 4.602415230979264e-06, + "loss": 1.6042, + "step": 1576 + }, + { + "epoch": 1.056262558606832, + "grad_norm": 1.7718792602334266, + "learning_rate": 4.601624083943722e-06, + "loss": 1.3544, + "step": 1577 + }, + { + "epoch": 1.056932350971199, + "grad_norm": 2.475651092327954, + "learning_rate": 4.600832218684954e-06, + "loss": 1.3781, + "step": 1578 + }, + { + "epoch": 1.057602143335566, + "grad_norm": 1.643983966442258, + "learning_rate": 4.600039635473577e-06, + "loss": 1.3207, + "step": 1579 + }, + { + "epoch": 1.058271935699933, + "grad_norm": 1.6755650343401287, + "learning_rate": 4.5992463345804536e-06, + "loss": 1.4277, + "step": 1580 + }, + { + "epoch": 1.0589417280643, + "grad_norm": 1.6502927650958685, + "learning_rate": 4.598452316276692e-06, + "loss": 1.2793, + "step": 1581 + }, + { + "epoch": 1.0596115204286671, + "grad_norm": 1.796075806610797, + "learning_rate": 4.5976575808336455e-06, + "loss": 1.408, + "step": 1582 + }, + { + "epoch": 1.0602813127930342, + "grad_norm": 1.724084437241363, + "learning_rate": 4.596862128522912e-06, + "loss": 1.2168, + "step": 1583 + }, + { + "epoch": 1.0609511051574012, + "grad_norm": 2.9069757535213987, + "learning_rate": 4.596065959616336e-06, + "loss": 1.4847, + "step": 1584 + }, + { + "epoch": 1.0616208975217682, + "grad_norm": 1.7967264467171542, + "learning_rate": 4.595269074386003e-06, + "loss": 1.4802, + "step": 1585 + }, + { + "epoch": 1.0622906898861353, + "grad_norm": 1.721689104547856, + "learning_rate": 4.5944714731042486e-06, + "loss": 1.4964, + "step": 1586 + }, + { + "epoch": 1.0629604822505023, + "grad_norm": 1.7917019974718058, + "learning_rate": 4.5936731560436495e-06, + "loss": 1.2884, + "step": 1587 + }, + { + "epoch": 1.0636302746148694, + "grad_norm": 1.9181633937476381, + "learning_rate": 4.592874123477028e-06, + "loss": 1.2153, + "step": 1588 + }, + { + "epoch": 1.0643000669792364, + "grad_norm": 1.939470244495616, + "learning_rate": 4.5920743756774505e-06, + "loss": 1.3696, + "step": 1589 + }, + { + "epoch": 1.0649698593436034, + "grad_norm": 2.447350403453829, + "learning_rate": 4.591273912918228e-06, + "loss": 1.4817, + "step": 1590 + }, + { + "epoch": 1.0656396517079705, + "grad_norm": 1.9561565680901483, + "learning_rate": 4.590472735472917e-06, + "loss": 1.5341, + "step": 1591 + }, + { + "epoch": 1.0663094440723375, + "grad_norm": 1.8460263226860276, + "learning_rate": 4.5896708436153174e-06, + "loss": 
1.3871, + "step": 1592 + }, + { + "epoch": 1.0669792364367046, + "grad_norm": 1.7084171249473057, + "learning_rate": 4.5888682376194726e-06, + "loss": 1.4796, + "step": 1593 + }, + { + "epoch": 1.0676490288010716, + "grad_norm": 1.8186788258707662, + "learning_rate": 4.588064917759671e-06, + "loss": 1.3588, + "step": 1594 + }, + { + "epoch": 1.0683188211654386, + "grad_norm": 1.7159875078682518, + "learning_rate": 4.587260884310443e-06, + "loss": 1.3852, + "step": 1595 + }, + { + "epoch": 1.0689886135298057, + "grad_norm": 1.860471006229174, + "learning_rate": 4.5864561375465654e-06, + "loss": 1.2234, + "step": 1596 + }, + { + "epoch": 1.0696584058941727, + "grad_norm": 1.7213988817615746, + "learning_rate": 4.5856506777430585e-06, + "loss": 1.2671, + "step": 1597 + }, + { + "epoch": 1.0703281982585398, + "grad_norm": 2.598531048037298, + "learning_rate": 4.584844505175186e-06, + "loss": 1.464, + "step": 1598 + }, + { + "epoch": 1.070997990622907, + "grad_norm": 2.233789660439796, + "learning_rate": 4.584037620118452e-06, + "loss": 1.3505, + "step": 1599 + }, + { + "epoch": 1.0716677829872738, + "grad_norm": 2.27329086648017, + "learning_rate": 4.58323002284861e-06, + "loss": 1.2676, + "step": 1600 + }, + { + "epoch": 1.072337575351641, + "grad_norm": 2.078098886550789, + "learning_rate": 4.582421713641653e-06, + "loss": 1.3482, + "step": 1601 + }, + { + "epoch": 1.0730073677160081, + "grad_norm": 3.5016215946056186, + "learning_rate": 4.5816126927738165e-06, + "loss": 1.4173, + "step": 1602 + }, + { + "epoch": 1.0736771600803752, + "grad_norm": 1.7972279027917826, + "learning_rate": 4.580802960521582e-06, + "loss": 1.5758, + "step": 1603 + }, + { + "epoch": 1.0743469524447422, + "grad_norm": 2.0503288778878432, + "learning_rate": 4.579992517161672e-06, + "loss": 1.433, + "step": 1604 + }, + { + "epoch": 1.0750167448091092, + "grad_norm": 1.574020457005811, + "learning_rate": 4.579181362971055e-06, + "loss": 1.2146, + "step": 1605 + }, + { + "epoch": 1.0756865371734763, + "grad_norm": 2.268359785239924, + "learning_rate": 4.578369498226938e-06, + "loss": 1.5551, + "step": 1606 + }, + { + "epoch": 1.0763563295378433, + "grad_norm": 2.039068900292723, + "learning_rate": 4.577556923206774e-06, + "loss": 1.5935, + "step": 1607 + }, + { + "epoch": 1.0770261219022104, + "grad_norm": 3.766803438731601, + "learning_rate": 4.576743638188258e-06, + "loss": 1.4818, + "step": 1608 + }, + { + "epoch": 1.0776959142665774, + "grad_norm": 2.193339735106668, + "learning_rate": 4.575929643449328e-06, + "loss": 1.3134, + "step": 1609 + }, + { + "epoch": 1.0783657066309444, + "grad_norm": 1.5783368348501519, + "learning_rate": 4.575114939268163e-06, + "loss": 1.3898, + "step": 1610 + }, + { + "epoch": 1.0790354989953115, + "grad_norm": 2.4097137360023138, + "learning_rate": 4.574299525923186e-06, + "loss": 1.1819, + "step": 1611 + }, + { + "epoch": 1.0797052913596785, + "grad_norm": 1.6639818842473058, + "learning_rate": 4.573483403693061e-06, + "loss": 1.5511, + "step": 1612 + }, + { + "epoch": 1.0803750837240456, + "grad_norm": 2.483025274226483, + "learning_rate": 4.572666572856697e-06, + "loss": 1.3635, + "step": 1613 + }, + { + "epoch": 1.0810448760884126, + "grad_norm": 3.865152446318553, + "learning_rate": 4.571849033693242e-06, + "loss": 1.2705, + "step": 1614 + }, + { + "epoch": 1.0817146684527796, + "grad_norm": 1.910610220661721, + "learning_rate": 4.571030786482088e-06, + "loss": 1.3719, + "step": 1615 + }, + { + "epoch": 1.0823844608171467, + "grad_norm": 2.5584445728362915, + "learning_rate": 
4.5702118315028675e-06, + "loss": 1.4269, + "step": 1616 + }, + { + "epoch": 1.0830542531815137, + "grad_norm": 2.225693644665274, + "learning_rate": 4.569392169035457e-06, + "loss": 1.3669, + "step": 1617 + }, + { + "epoch": 1.0837240455458808, + "grad_norm": 2.9668219989421285, + "learning_rate": 4.568571799359972e-06, + "loss": 1.1272, + "step": 1618 + }, + { + "epoch": 1.0843938379102478, + "grad_norm": 2.0632477130741123, + "learning_rate": 4.5677507227567726e-06, + "loss": 1.2312, + "step": 1619 + }, + { + "epoch": 1.0850636302746148, + "grad_norm": 2.06487821136432, + "learning_rate": 4.566928939506457e-06, + "loss": 1.5462, + "step": 1620 + }, + { + "epoch": 1.0857334226389819, + "grad_norm": 1.735662165613914, + "learning_rate": 4.56610644988987e-06, + "loss": 1.5262, + "step": 1621 + }, + { + "epoch": 1.086403215003349, + "grad_norm": 1.8149578809151075, + "learning_rate": 4.565283254188092e-06, + "loss": 1.5871, + "step": 1622 + }, + { + "epoch": 1.087073007367716, + "grad_norm": 2.5932558052485164, + "learning_rate": 4.5644593526824485e-06, + "loss": 1.4856, + "step": 1623 + }, + { + "epoch": 1.087742799732083, + "grad_norm": 1.7833434093410458, + "learning_rate": 4.563634745654506e-06, + "loss": 1.5316, + "step": 1624 + }, + { + "epoch": 1.08841259209645, + "grad_norm": 2.122505299736645, + "learning_rate": 4.5628094333860696e-06, + "loss": 1.7031, + "step": 1625 + }, + { + "epoch": 1.089082384460817, + "grad_norm": 2.0915171981584146, + "learning_rate": 4.5619834161591885e-06, + "loss": 1.3028, + "step": 1626 + }, + { + "epoch": 1.089752176825184, + "grad_norm": 1.7834357093361155, + "learning_rate": 4.5611566942561515e-06, + "loss": 1.4399, + "step": 1627 + }, + { + "epoch": 1.0904219691895511, + "grad_norm": 1.6619232098728276, + "learning_rate": 4.5603292679594865e-06, + "loss": 1.4911, + "step": 1628 + }, + { + "epoch": 1.0910917615539182, + "grad_norm": 1.9528634620154772, + "learning_rate": 4.559501137551965e-06, + "loss": 1.6165, + "step": 1629 + }, + { + "epoch": 1.0917615539182854, + "grad_norm": 1.741944287698139, + "learning_rate": 4.558672303316598e-06, + "loss": 1.5538, + "step": 1630 + }, + { + "epoch": 1.0924313462826523, + "grad_norm": 1.8266883381548422, + "learning_rate": 4.557842765536637e-06, + "loss": 1.4681, + "step": 1631 + }, + { + "epoch": 1.0931011386470195, + "grad_norm": 1.8359986768350083, + "learning_rate": 4.5570125244955726e-06, + "loss": 1.627, + "step": 1632 + }, + { + "epoch": 1.0937709310113866, + "grad_norm": 1.812587434364424, + "learning_rate": 4.556181580477138e-06, + "loss": 1.5029, + "step": 1633 + }, + { + "epoch": 1.0944407233757536, + "grad_norm": 1.7773063301674157, + "learning_rate": 4.555349933765306e-06, + "loss": 1.448, + "step": 1634 + }, + { + "epoch": 1.0951105157401206, + "grad_norm": 1.6954919825888486, + "learning_rate": 4.554517584644288e-06, + "loss": 1.4575, + "step": 1635 + }, + { + "epoch": 1.0957803081044877, + "grad_norm": 3.1715320021237408, + "learning_rate": 4.553684533398538e-06, + "loss": 1.3952, + "step": 1636 + }, + { + "epoch": 1.0964501004688547, + "grad_norm": 2.692491968118054, + "learning_rate": 4.552850780312747e-06, + "loss": 1.3464, + "step": 1637 + }, + { + "epoch": 1.0971198928332218, + "grad_norm": 2.5592361016519063, + "learning_rate": 4.552016325671848e-06, + "loss": 1.3508, + "step": 1638 + }, + { + "epoch": 1.0977896851975888, + "grad_norm": 2.0142855716160577, + "learning_rate": 4.5511811697610146e-06, + "loss": 1.4494, + "step": 1639 + }, + { + "epoch": 1.0984594775619558, + "grad_norm": 
2.0529121924697886, + "learning_rate": 4.550345312865657e-06, + "loss": 1.3807, + "step": 1640 + }, + { + "epoch": 1.0991292699263229, + "grad_norm": 1.5711533158129936, + "learning_rate": 4.5495087552714265e-06, + "loss": 1.3476, + "step": 1641 + }, + { + "epoch": 1.09979906229069, + "grad_norm": 2.156304910648002, + "learning_rate": 4.548671497264215e-06, + "loss": 1.4465, + "step": 1642 + }, + { + "epoch": 1.100468854655057, + "grad_norm": 2.4895038048329496, + "learning_rate": 4.547833539130152e-06, + "loss": 1.4463, + "step": 1643 + }, + { + "epoch": 1.101138647019424, + "grad_norm": 1.8739727181275347, + "learning_rate": 4.546994881155608e-06, + "loss": 1.3343, + "step": 1644 + }, + { + "epoch": 1.101808439383791, + "grad_norm": 2.5510431670354, + "learning_rate": 4.54615552362719e-06, + "loss": 1.4292, + "step": 1645 + }, + { + "epoch": 1.102478231748158, + "grad_norm": 1.804643877661367, + "learning_rate": 4.545315466831747e-06, + "loss": 1.451, + "step": 1646 + }, + { + "epoch": 1.103148024112525, + "grad_norm": 1.8690612108004687, + "learning_rate": 4.544474711056365e-06, + "loss": 1.3089, + "step": 1647 + }, + { + "epoch": 1.1038178164768921, + "grad_norm": 1.8734211332154127, + "learning_rate": 4.543633256588369e-06, + "loss": 1.3611, + "step": 1648 + }, + { + "epoch": 1.1044876088412592, + "grad_norm": 3.5329130610201687, + "learning_rate": 4.542791103715325e-06, + "loss": 1.473, + "step": 1649 + }, + { + "epoch": 1.1051574012056262, + "grad_norm": 1.775596677672033, + "learning_rate": 4.5419482527250346e-06, + "loss": 1.5031, + "step": 1650 + }, + { + "epoch": 1.1058271935699933, + "grad_norm": 2.1058566992754857, + "learning_rate": 4.541104703905541e-06, + "loss": 1.5026, + "step": 1651 + }, + { + "epoch": 1.1064969859343603, + "grad_norm": 1.9048949977036265, + "learning_rate": 4.540260457545123e-06, + "loss": 1.4555, + "step": 1652 + }, + { + "epoch": 1.1071667782987273, + "grad_norm": 2.2620628107542506, + "learning_rate": 4.539415513932298e-06, + "loss": 1.4714, + "step": 1653 + }, + { + "epoch": 1.1078365706630944, + "grad_norm": 2.730580402765484, + "learning_rate": 4.5385698733558245e-06, + "loss": 1.4264, + "step": 1654 + }, + { + "epoch": 1.1085063630274614, + "grad_norm": 2.6035678743103636, + "learning_rate": 4.537723536104698e-06, + "loss": 1.4049, + "step": 1655 + }, + { + "epoch": 1.1091761553918285, + "grad_norm": 2.143042381242636, + "learning_rate": 4.536876502468149e-06, + "loss": 1.296, + "step": 1656 + }, + { + "epoch": 1.1098459477561955, + "grad_norm": 5.141399423133512, + "learning_rate": 4.5360287727356515e-06, + "loss": 1.3562, + "step": 1657 + }, + { + "epoch": 1.1105157401205625, + "grad_norm": 2.8797132501654237, + "learning_rate": 4.535180347196912e-06, + "loss": 1.3431, + "step": 1658 + }, + { + "epoch": 1.1111855324849296, + "grad_norm": 2.0245258516636206, + "learning_rate": 4.534331226141878e-06, + "loss": 1.4478, + "step": 1659 + }, + { + "epoch": 1.1118553248492966, + "grad_norm": 1.7355493495741057, + "learning_rate": 4.533481409860734e-06, + "loss": 1.3992, + "step": 1660 + }, + { + "epoch": 1.1125251172136639, + "grad_norm": 2.032932075230013, + "learning_rate": 4.532630898643902e-06, + "loss": 1.3211, + "step": 1661 + }, + { + "epoch": 1.113194909578031, + "grad_norm": 2.285325926219152, + "learning_rate": 4.531779692782041e-06, + "loss": 1.4377, + "step": 1662 + }, + { + "epoch": 1.113864701942398, + "grad_norm": 2.4584428469363355, + "learning_rate": 4.5309277925660485e-06, + "loss": 1.3607, + "step": 1663 + }, + { + "epoch": 
1.114534494306765, + "grad_norm": 2.0182494486150873, + "learning_rate": 4.530075198287059e-06, + "loss": 1.4728, + "step": 1664 + }, + { + "epoch": 1.115204286671132, + "grad_norm": 1.7855950258236821, + "learning_rate": 4.529221910236442e-06, + "loss": 1.6508, + "step": 1665 + }, + { + "epoch": 1.115874079035499, + "grad_norm": 1.5799627262757714, + "learning_rate": 4.528367928705808e-06, + "loss": 1.3369, + "step": 1666 + }, + { + "epoch": 1.116543871399866, + "grad_norm": 1.8353830636876285, + "learning_rate": 4.527513253987002e-06, + "loss": 1.4727, + "step": 1667 + }, + { + "epoch": 1.1172136637642331, + "grad_norm": 2.243302651079927, + "learning_rate": 4.526657886372104e-06, + "loss": 1.1783, + "step": 1668 + }, + { + "epoch": 1.1178834561286002, + "grad_norm": 1.6314667023572862, + "learning_rate": 4.525801826153436e-06, + "loss": 1.3843, + "step": 1669 + }, + { + "epoch": 1.1185532484929672, + "grad_norm": 2.3822956741350456, + "learning_rate": 4.524945073623553e-06, + "loss": 1.3374, + "step": 1670 + }, + { + "epoch": 1.1192230408573343, + "grad_norm": 2.9063282679421314, + "learning_rate": 4.5240876290752465e-06, + "loss": 1.2109, + "step": 1671 + }, + { + "epoch": 1.1198928332217013, + "grad_norm": 1.7852643911797692, + "learning_rate": 4.5232294928015464e-06, + "loss": 1.4626, + "step": 1672 + }, + { + "epoch": 1.1205626255860683, + "grad_norm": 1.900671434489505, + "learning_rate": 4.522370665095718e-06, + "loss": 1.5914, + "step": 1673 + }, + { + "epoch": 1.1212324179504354, + "grad_norm": 1.6467133798304487, + "learning_rate": 4.5215111462512615e-06, + "loss": 1.4402, + "step": 1674 + }, + { + "epoch": 1.1219022103148024, + "grad_norm": 1.6800712540401652, + "learning_rate": 4.520650936561917e-06, + "loss": 1.4042, + "step": 1675 + }, + { + "epoch": 1.1225720026791695, + "grad_norm": 3.6983914732446097, + "learning_rate": 4.519790036321656e-06, + "loss": 1.203, + "step": 1676 + }, + { + "epoch": 1.1232417950435365, + "grad_norm": 1.7701982835535084, + "learning_rate": 4.51892844582469e-06, + "loss": 1.5236, + "step": 1677 + }, + { + "epoch": 1.1239115874079035, + "grad_norm": 1.6293393030437675, + "learning_rate": 4.518066165365464e-06, + "loss": 1.2952, + "step": 1678 + }, + { + "epoch": 1.1245813797722706, + "grad_norm": 2.744909641352959, + "learning_rate": 4.5172031952386596e-06, + "loss": 1.3266, + "step": 1679 + }, + { + "epoch": 1.1252511721366376, + "grad_norm": 1.848539397484213, + "learning_rate": 4.516339535739195e-06, + "loss": 1.4, + "step": 1680 + }, + { + "epoch": 1.1259209645010047, + "grad_norm": 1.8286206422617386, + "learning_rate": 4.515475187162221e-06, + "loss": 1.3371, + "step": 1681 + }, + { + "epoch": 1.1265907568653717, + "grad_norm": 1.8013410795312825, + "learning_rate": 4.514610149803129e-06, + "loss": 1.4811, + "step": 1682 + }, + { + "epoch": 1.1272605492297387, + "grad_norm": 1.6379547220474144, + "learning_rate": 4.51374442395754e-06, + "loss": 1.1907, + "step": 1683 + }, + { + "epoch": 1.1279303415941058, + "grad_norm": 2.3560305590956934, + "learning_rate": 4.512878009921314e-06, + "loss": 1.235, + "step": 1684 + }, + { + "epoch": 1.1286001339584728, + "grad_norm": 1.8244364688622356, + "learning_rate": 4.512010907990545e-06, + "loss": 1.4016, + "step": 1685 + }, + { + "epoch": 1.1292699263228398, + "grad_norm": 1.8655556923330934, + "learning_rate": 4.511143118461562e-06, + "loss": 1.239, + "step": 1686 + }, + { + "epoch": 1.1299397186872069, + "grad_norm": 1.8499489263085398, + "learning_rate": 4.51027464163093e-06, + "loss": 1.4779, + 
"step": 1687 + }, + { + "epoch": 1.130609511051574, + "grad_norm": 1.6246120445020242, + "learning_rate": 4.509405477795448e-06, + "loss": 1.4802, + "step": 1688 + }, + { + "epoch": 1.131279303415941, + "grad_norm": 1.7862577682946041, + "learning_rate": 4.50853562725215e-06, + "loss": 1.4686, + "step": 1689 + }, + { + "epoch": 1.1319490957803082, + "grad_norm": 1.8575080504767334, + "learning_rate": 4.507665090298303e-06, + "loss": 1.3642, + "step": 1690 + }, + { + "epoch": 1.132618888144675, + "grad_norm": 2.057179431160873, + "learning_rate": 4.5067938672314124e-06, + "loss": 1.5137, + "step": 1691 + }, + { + "epoch": 1.1332886805090423, + "grad_norm": 1.7988219276858277, + "learning_rate": 4.5059219583492145e-06, + "loss": 1.4179, + "step": 1692 + }, + { + "epoch": 1.1339584728734093, + "grad_norm": 3.9385536999354933, + "learning_rate": 4.505049363949683e-06, + "loss": 1.4915, + "step": 1693 + }, + { + "epoch": 1.1346282652377764, + "grad_norm": 1.8663798957697237, + "learning_rate": 4.504176084331021e-06, + "loss": 1.4723, + "step": 1694 + }, + { + "epoch": 1.1352980576021434, + "grad_norm": 1.8498589714161955, + "learning_rate": 4.503302119791673e-06, + "loss": 1.4901, + "step": 1695 + }, + { + "epoch": 1.1359678499665105, + "grad_norm": 1.7849965595765758, + "learning_rate": 4.5024274706303105e-06, + "loss": 1.3436, + "step": 1696 + }, + { + "epoch": 1.1366376423308775, + "grad_norm": 2.0349670963808753, + "learning_rate": 4.5015521371458436e-06, + "loss": 1.3787, + "step": 1697 + }, + { + "epoch": 1.1373074346952445, + "grad_norm": 3.9664013385529753, + "learning_rate": 4.500676119637414e-06, + "loss": 1.0913, + "step": 1698 + }, + { + "epoch": 1.1379772270596116, + "grad_norm": 2.46751761263172, + "learning_rate": 4.499799418404398e-06, + "loss": 1.313, + "step": 1699 + }, + { + "epoch": 1.1386470194239786, + "grad_norm": 1.8551947046383372, + "learning_rate": 4.4989220337464055e-06, + "loss": 1.4169, + "step": 1700 + }, + { + "epoch": 1.1393168117883457, + "grad_norm": 2.6767813470631667, + "learning_rate": 4.49804396596328e-06, + "loss": 1.3072, + "step": 1701 + }, + { + "epoch": 1.1399866041527127, + "grad_norm": 2.018191137919592, + "learning_rate": 4.497165215355097e-06, + "loss": 1.5375, + "step": 1702 + }, + { + "epoch": 1.1406563965170797, + "grad_norm": 2.9279741406063455, + "learning_rate": 4.496285782222169e-06, + "loss": 1.3145, + "step": 1703 + }, + { + "epoch": 1.1413261888814468, + "grad_norm": 3.6385920293663667, + "learning_rate": 4.495405666865038e-06, + "loss": 1.4816, + "step": 1704 + }, + { + "epoch": 1.1419959812458138, + "grad_norm": 2.021437736470645, + "learning_rate": 4.49452486958448e-06, + "loss": 1.3585, + "step": 1705 + }, + { + "epoch": 1.1426657736101808, + "grad_norm": 1.6767167738569968, + "learning_rate": 4.4936433906815055e-06, + "loss": 1.2662, + "step": 1706 + }, + { + "epoch": 1.1433355659745479, + "grad_norm": 9.013789258468615, + "learning_rate": 4.492761230457358e-06, + "loss": 1.387, + "step": 1707 + }, + { + "epoch": 1.144005358338915, + "grad_norm": 1.927740834837445, + "learning_rate": 4.491878389213511e-06, + "loss": 1.496, + "step": 1708 + }, + { + "epoch": 1.144675150703282, + "grad_norm": 2.17790203303502, + "learning_rate": 4.490994867251674e-06, + "loss": 1.2385, + "step": 1709 + }, + { + "epoch": 1.145344943067649, + "grad_norm": 3.0297634952210406, + "learning_rate": 4.490110664873787e-06, + "loss": 1.4506, + "step": 1710 + }, + { + "epoch": 1.146014735432016, + "grad_norm": 2.9196166801068086, + "learning_rate": 
4.489225782382023e-06, + "loss": 1.2492, + "step": 1711 + }, + { + "epoch": 1.146684527796383, + "grad_norm": 1.7667956271517526, + "learning_rate": 4.488340220078789e-06, + "loss": 1.3907, + "step": 1712 + }, + { + "epoch": 1.1473543201607501, + "grad_norm": 1.929516562606296, + "learning_rate": 4.4874539782667226e-06, + "loss": 1.4369, + "step": 1713 + }, + { + "epoch": 1.1480241125251172, + "grad_norm": 1.912267389319924, + "learning_rate": 4.486567057248693e-06, + "loss": 1.5987, + "step": 1714 + }, + { + "epoch": 1.1486939048894842, + "grad_norm": 3.216967961793869, + "learning_rate": 4.485679457327804e-06, + "loss": 1.345, + "step": 1715 + }, + { + "epoch": 1.1493636972538512, + "grad_norm": 1.9241622402063712, + "learning_rate": 4.484791178807389e-06, + "loss": 1.4942, + "step": 1716 + }, + { + "epoch": 1.1500334896182183, + "grad_norm": 2.005002130106839, + "learning_rate": 4.483902221991015e-06, + "loss": 1.3148, + "step": 1717 + }, + { + "epoch": 1.1507032819825853, + "grad_norm": 3.6285653118216543, + "learning_rate": 4.48301258718248e-06, + "loss": 1.2917, + "step": 1718 + }, + { + "epoch": 1.1513730743469524, + "grad_norm": 1.6849936800774346, + "learning_rate": 4.482122274685813e-06, + "loss": 1.4801, + "step": 1719 + }, + { + "epoch": 1.1520428667113194, + "grad_norm": 1.659110775286233, + "learning_rate": 4.481231284805277e-06, + "loss": 1.4498, + "step": 1720 + }, + { + "epoch": 1.1527126590756867, + "grad_norm": 1.868523754120658, + "learning_rate": 4.480339617845363e-06, + "loss": 1.4431, + "step": 1721 + }, + { + "epoch": 1.1533824514400535, + "grad_norm": 1.9848545907038089, + "learning_rate": 4.479447274110796e-06, + "loss": 1.5503, + "step": 1722 + }, + { + "epoch": 1.1540522438044207, + "grad_norm": 1.6967846020908002, + "learning_rate": 4.478554253906533e-06, + "loss": 1.4324, + "step": 1723 + }, + { + "epoch": 1.1547220361687878, + "grad_norm": 2.7191986638542813, + "learning_rate": 4.477660557537759e-06, + "loss": 1.4418, + "step": 1724 + }, + { + "epoch": 1.1553918285331548, + "grad_norm": 1.8650996525106907, + "learning_rate": 4.476766185309892e-06, + "loss": 1.2762, + "step": 1725 + }, + { + "epoch": 1.1560616208975218, + "grad_norm": 1.8198188108143076, + "learning_rate": 4.475871137528582e-06, + "loss": 1.4415, + "step": 1726 + }, + { + "epoch": 1.1567314132618889, + "grad_norm": 1.911628940031355, + "learning_rate": 4.474975414499707e-06, + "loss": 1.4071, + "step": 1727 + }, + { + "epoch": 1.157401205626256, + "grad_norm": 2.015807970883756, + "learning_rate": 4.47407901652938e-06, + "loss": 1.3616, + "step": 1728 + }, + { + "epoch": 1.158070997990623, + "grad_norm": 2.957151010590823, + "learning_rate": 4.4731819439239385e-06, + "loss": 1.3139, + "step": 1729 + }, + { + "epoch": 1.15874079035499, + "grad_norm": 1.6709081975707543, + "learning_rate": 4.472284196989958e-06, + "loss": 1.4212, + "step": 1730 + }, + { + "epoch": 1.159410582719357, + "grad_norm": 1.6562476843913363, + "learning_rate": 4.471385776034238e-06, + "loss": 1.2852, + "step": 1731 + }, + { + "epoch": 1.160080375083724, + "grad_norm": 1.532192301959975, + "learning_rate": 4.470486681363812e-06, + "loss": 1.3985, + "step": 1732 + }, + { + "epoch": 1.1607501674480911, + "grad_norm": 2.02249518832034, + "learning_rate": 4.469586913285941e-06, + "loss": 1.3036, + "step": 1733 + }, + { + "epoch": 1.1614199598124582, + "grad_norm": 1.7626420089779533, + "learning_rate": 4.4686864721081205e-06, + "loss": 1.3271, + "step": 1734 + }, + { + "epoch": 1.1620897521768252, + "grad_norm": 
2.075884653818596, + "learning_rate": 4.467785358138072e-06, + "loss": 1.1212, + "step": 1735 + }, + { + "epoch": 1.1627595445411922, + "grad_norm": 2.0807327754679323, + "learning_rate": 4.466883571683748e-06, + "loss": 1.3612, + "step": 1736 + }, + { + "epoch": 1.1634293369055593, + "grad_norm": 2.5160978715351727, + "learning_rate": 4.465981113053331e-06, + "loss": 1.3648, + "step": 1737 + }, + { + "epoch": 1.1640991292699263, + "grad_norm": 1.8699079610208411, + "learning_rate": 4.465077982555235e-06, + "loss": 1.4425, + "step": 1738 + }, + { + "epoch": 1.1647689216342934, + "grad_norm": 3.3032436569644488, + "learning_rate": 4.4641741804981e-06, + "loss": 1.3185, + "step": 1739 + }, + { + "epoch": 1.1654387139986604, + "grad_norm": 2.5980271829533064, + "learning_rate": 4.463269707190798e-06, + "loss": 1.2746, + "step": 1740 + }, + { + "epoch": 1.1661085063630274, + "grad_norm": 2.006574690186742, + "learning_rate": 4.462364562942431e-06, + "loss": 1.1495, + "step": 1741 + }, + { + "epoch": 1.1667782987273945, + "grad_norm": 2.614254469616762, + "learning_rate": 4.461458748062327e-06, + "loss": 1.3477, + "step": 1742 + }, + { + "epoch": 1.1674480910917615, + "grad_norm": 2.931264296282635, + "learning_rate": 4.460552262860046e-06, + "loss": 1.3696, + "step": 1743 + }, + { + "epoch": 1.1681178834561285, + "grad_norm": 1.7978351756786841, + "learning_rate": 4.459645107645378e-06, + "loss": 1.4181, + "step": 1744 + }, + { + "epoch": 1.1687876758204956, + "grad_norm": 1.746153903477942, + "learning_rate": 4.458737282728338e-06, + "loss": 1.3444, + "step": 1745 + }, + { + "epoch": 1.1694574681848626, + "grad_norm": 5.163696403486195, + "learning_rate": 4.457828788419174e-06, + "loss": 1.1937, + "step": 1746 + }, + { + "epoch": 1.1701272605492297, + "grad_norm": 4.1416570531970684, + "learning_rate": 4.45691962502836e-06, + "loss": 1.2459, + "step": 1747 + }, + { + "epoch": 1.1707970529135967, + "grad_norm": 3.322938737896554, + "learning_rate": 4.4560097928666e-06, + "loss": 1.3973, + "step": 1748 + }, + { + "epoch": 1.1714668452779637, + "grad_norm": 3.142914979572626, + "learning_rate": 4.455099292244827e-06, + "loss": 1.3121, + "step": 1749 + }, + { + "epoch": 1.1721366376423308, + "grad_norm": 1.7332485393061179, + "learning_rate": 4.454188123474199e-06, + "loss": 1.522, + "step": 1750 + }, + { + "epoch": 1.1728064300066978, + "grad_norm": 2.4532010563185995, + "learning_rate": 4.453276286866108e-06, + "loss": 1.4833, + "step": 1751 + }, + { + "epoch": 1.173476222371065, + "grad_norm": 7.3945004047128915, + "learning_rate": 4.45236378273217e-06, + "loss": 1.3762, + "step": 1752 + }, + { + "epoch": 1.174146014735432, + "grad_norm": 1.7327814189764157, + "learning_rate": 4.45145061138423e-06, + "loss": 1.3278, + "step": 1753 + }, + { + "epoch": 1.1748158070997992, + "grad_norm": 2.9468624506216012, + "learning_rate": 4.450536773134363e-06, + "loss": 1.4029, + "step": 1754 + }, + { + "epoch": 1.1754855994641662, + "grad_norm": 2.1933647013353226, + "learning_rate": 4.449622268294868e-06, + "loss": 1.4238, + "step": 1755 + }, + { + "epoch": 1.1761553918285332, + "grad_norm": 1.674561117729518, + "learning_rate": 4.448707097178274e-06, + "loss": 1.4727, + "step": 1756 + }, + { + "epoch": 1.1768251841929003, + "grad_norm": 1.7325364405178445, + "learning_rate": 4.447791260097339e-06, + "loss": 1.4364, + "step": 1757 + }, + { + "epoch": 1.1774949765572673, + "grad_norm": 2.1419435875774293, + "learning_rate": 4.446874757365049e-06, + "loss": 1.3122, + "step": 1758 + }, + { + "epoch": 
1.1781647689216344, + "grad_norm": 2.0395263600346825, + "learning_rate": 4.445957589294611e-06, + "loss": 1.3765, + "step": 1759 + }, + { + "epoch": 1.1788345612860014, + "grad_norm": 2.0024914825325815, + "learning_rate": 4.44503975619947e-06, + "loss": 1.2246, + "step": 1760 + }, + { + "epoch": 1.1795043536503684, + "grad_norm": 3.443382557044529, + "learning_rate": 4.444121258393289e-06, + "loss": 1.446, + "step": 1761 + }, + { + "epoch": 1.1801741460147355, + "grad_norm": 1.7356792463544637, + "learning_rate": 4.443202096189962e-06, + "loss": 1.3142, + "step": 1762 + }, + { + "epoch": 1.1808439383791025, + "grad_norm": 1.6290108914058026, + "learning_rate": 4.442282269903609e-06, + "loss": 1.2151, + "step": 1763 + }, + { + "epoch": 1.1815137307434695, + "grad_norm": 1.8194049744049956, + "learning_rate": 4.4413617798485795e-06, + "loss": 1.2996, + "step": 1764 + }, + { + "epoch": 1.1821835231078366, + "grad_norm": 2.699754285663659, + "learning_rate": 4.440440626339447e-06, + "loss": 1.2605, + "step": 1765 + }, + { + "epoch": 1.1828533154722036, + "grad_norm": 1.936160535473525, + "learning_rate": 4.439518809691013e-06, + "loss": 1.4218, + "step": 1766 + }, + { + "epoch": 1.1835231078365707, + "grad_norm": 2.1885762821542305, + "learning_rate": 4.438596330218307e-06, + "loss": 1.2529, + "step": 1767 + }, + { + "epoch": 1.1841929002009377, + "grad_norm": 1.708050891929493, + "learning_rate": 4.43767318823658e-06, + "loss": 1.4834, + "step": 1768 + }, + { + "epoch": 1.1848626925653047, + "grad_norm": 3.2233583464003366, + "learning_rate": 4.436749384061314e-06, + "loss": 1.143, + "step": 1769 + }, + { + "epoch": 1.1855324849296718, + "grad_norm": 2.764278410569707, + "learning_rate": 4.435824918008217e-06, + "loss": 1.3845, + "step": 1770 + }, + { + "epoch": 1.1862022772940388, + "grad_norm": 3.2303449239639774, + "learning_rate": 4.434899790393222e-06, + "loss": 1.3278, + "step": 1771 + }, + { + "epoch": 1.1868720696584059, + "grad_norm": 2.102089759975298, + "learning_rate": 4.433974001532488e-06, + "loss": 1.4716, + "step": 1772 + }, + { + "epoch": 1.187541862022773, + "grad_norm": 2.629747260102024, + "learning_rate": 4.433047551742401e-06, + "loss": 1.2438, + "step": 1773 + }, + { + "epoch": 1.18821165438714, + "grad_norm": 1.672083472186788, + "learning_rate": 4.4321204413395715e-06, + "loss": 1.3683, + "step": 1774 + }, + { + "epoch": 1.188881446751507, + "grad_norm": 2.5412094933168454, + "learning_rate": 4.431192670640836e-06, + "loss": 1.5197, + "step": 1775 + }, + { + "epoch": 1.189551239115874, + "grad_norm": 1.68113091997307, + "learning_rate": 4.430264239963259e-06, + "loss": 1.5056, + "step": 1776 + }, + { + "epoch": 1.190221031480241, + "grad_norm": 1.6802892042683435, + "learning_rate": 4.4293351496241265e-06, + "loss": 1.4042, + "step": 1777 + }, + { + "epoch": 1.190890823844608, + "grad_norm": 2.2791242115230395, + "learning_rate": 4.428405399940954e-06, + "loss": 1.3515, + "step": 1778 + }, + { + "epoch": 1.1915606162089751, + "grad_norm": 1.9856600072320751, + "learning_rate": 4.427474991231479e-06, + "loss": 1.4546, + "step": 1779 + }, + { + "epoch": 1.1922304085733422, + "grad_norm": 1.5297613454226886, + "learning_rate": 4.426543923813666e-06, + "loss": 1.3848, + "step": 1780 + }, + { + "epoch": 1.1929002009377094, + "grad_norm": 1.8725882718868723, + "learning_rate": 4.425612198005704e-06, + "loss": 1.5728, + "step": 1781 + }, + { + "epoch": 1.1935699933020762, + "grad_norm": 1.9232350636472157, + "learning_rate": 4.42467981412601e-06, + "loss": 1.4921, + 
"step": 1782 + }, + { + "epoch": 1.1942397856664435, + "grad_norm": 1.7796068159507141, + "learning_rate": 4.423746772493219e-06, + "loss": 1.5444, + "step": 1783 + }, + { + "epoch": 1.1949095780308103, + "grad_norm": 1.9732070762507994, + "learning_rate": 4.422813073426198e-06, + "loss": 1.4131, + "step": 1784 + }, + { + "epoch": 1.1955793703951776, + "grad_norm": 1.8852720098243836, + "learning_rate": 4.421878717244033e-06, + "loss": 1.3333, + "step": 1785 + }, + { + "epoch": 1.1962491627595446, + "grad_norm": 1.8814865349646634, + "learning_rate": 4.42094370426604e-06, + "loss": 1.3928, + "step": 1786 + }, + { + "epoch": 1.1969189551239117, + "grad_norm": 2.8191390478338185, + "learning_rate": 4.420008034811756e-06, + "loss": 1.3438, + "step": 1787 + }, + { + "epoch": 1.1975887474882787, + "grad_norm": 2.803978636001589, + "learning_rate": 4.419071709200942e-06, + "loss": 1.3645, + "step": 1788 + }, + { + "epoch": 1.1982585398526457, + "grad_norm": 2.252424045446934, + "learning_rate": 4.418134727753584e-06, + "loss": 1.2723, + "step": 1789 + }, + { + "epoch": 1.1989283322170128, + "grad_norm": 2.3396580209507514, + "learning_rate": 4.417197090789893e-06, + "loss": 1.3798, + "step": 1790 + }, + { + "epoch": 1.1995981245813798, + "grad_norm": 1.7713551976204645, + "learning_rate": 4.416258798630304e-06, + "loss": 1.3596, + "step": 1791 + }, + { + "epoch": 1.2002679169457469, + "grad_norm": 1.8970154241250448, + "learning_rate": 4.415319851595474e-06, + "loss": 1.3764, + "step": 1792 + }, + { + "epoch": 1.200937709310114, + "grad_norm": 2.4887801725223557, + "learning_rate": 4.414380250006287e-06, + "loss": 1.3166, + "step": 1793 + }, + { + "epoch": 1.201607501674481, + "grad_norm": 2.1327283038117972, + "learning_rate": 4.413439994183847e-06, + "loss": 1.3836, + "step": 1794 + }, + { + "epoch": 1.202277294038848, + "grad_norm": 1.7977339809133601, + "learning_rate": 4.412499084449484e-06, + "loss": 1.4283, + "step": 1795 + }, + { + "epoch": 1.202947086403215, + "grad_norm": 2.360419058527206, + "learning_rate": 4.411557521124751e-06, + "loss": 1.4073, + "step": 1796 + }, + { + "epoch": 1.203616878767582, + "grad_norm": 1.6858164847563692, + "learning_rate": 4.410615304531424e-06, + "loss": 1.3425, + "step": 1797 + }, + { + "epoch": 1.204286671131949, + "grad_norm": 1.9742109798385439, + "learning_rate": 4.409672434991503e-06, + "loss": 1.3954, + "step": 1798 + }, + { + "epoch": 1.2049564634963161, + "grad_norm": 1.625628981998633, + "learning_rate": 4.4087289128272095e-06, + "loss": 1.2732, + "step": 1799 + }, + { + "epoch": 1.2056262558606832, + "grad_norm": 1.7861427313214155, + "learning_rate": 4.407784738360991e-06, + "loss": 1.3684, + "step": 1800 + }, + { + "epoch": 1.2062960482250502, + "grad_norm": 2.1222658640759096, + "learning_rate": 4.406839911915513e-06, + "loss": 1.2657, + "step": 1801 + }, + { + "epoch": 1.2069658405894172, + "grad_norm": 2.249828135786986, + "learning_rate": 4.405894433813671e-06, + "loss": 1.3487, + "step": 1802 + }, + { + "epoch": 1.2076356329537843, + "grad_norm": 1.6028832315039825, + "learning_rate": 4.404948304378575e-06, + "loss": 1.5222, + "step": 1803 + }, + { + "epoch": 1.2083054253181513, + "grad_norm": 1.8671026622917022, + "learning_rate": 4.404001523933564e-06, + "loss": 1.4907, + "step": 1804 + }, + { + "epoch": 1.2089752176825184, + "grad_norm": 2.3946388467776996, + "learning_rate": 4.403054092802198e-06, + "loss": 1.2591, + "step": 1805 + }, + { + "epoch": 1.2096450100468854, + "grad_norm": 1.6933048724689848, + "learning_rate": 
4.402106011308256e-06, + "loss": 1.2135, + "step": 1806 + }, + { + "epoch": 1.2103148024112524, + "grad_norm": 4.682210256520438, + "learning_rate": 4.401157279775743e-06, + "loss": 1.3311, + "step": 1807 + }, + { + "epoch": 1.2109845947756195, + "grad_norm": 2.0548422835903395, + "learning_rate": 4.400207898528885e-06, + "loss": 1.2869, + "step": 1808 + }, + { + "epoch": 1.2116543871399865, + "grad_norm": 2.4117330523815363, + "learning_rate": 4.399257867892131e-06, + "loss": 0.9027, + "step": 1809 + }, + { + "epoch": 1.2123241795043536, + "grad_norm": 3.640263468960856, + "learning_rate": 4.398307188190149e-06, + "loss": 1.3467, + "step": 1810 + }, + { + "epoch": 1.2129939718687206, + "grad_norm": 2.1447858769483643, + "learning_rate": 4.397355859747831e-06, + "loss": 1.3188, + "step": 1811 + }, + { + "epoch": 1.2136637642330879, + "grad_norm": 1.7057108251365318, + "learning_rate": 4.39640388289029e-06, + "loss": 1.4251, + "step": 1812 + }, + { + "epoch": 1.2143335565974547, + "grad_norm": 1.610339301124247, + "learning_rate": 4.395451257942864e-06, + "loss": 1.372, + "step": 1813 + }, + { + "epoch": 1.215003348961822, + "grad_norm": 1.7887454457904406, + "learning_rate": 4.394497985231107e-06, + "loss": 1.3876, + "step": 1814 + }, + { + "epoch": 1.215673141326189, + "grad_norm": 1.830398597772064, + "learning_rate": 4.3935440650807955e-06, + "loss": 1.3215, + "step": 1815 + }, + { + "epoch": 1.216342933690556, + "grad_norm": 2.5541525995431513, + "learning_rate": 4.392589497817933e-06, + "loss": 1.48, + "step": 1816 + }, + { + "epoch": 1.217012726054923, + "grad_norm": 2.7131147942369283, + "learning_rate": 4.391634283768735e-06, + "loss": 1.2993, + "step": 1817 + }, + { + "epoch": 1.21768251841929, + "grad_norm": 1.6215062316687192, + "learning_rate": 4.3906784232596464e-06, + "loss": 1.246, + "step": 1818 + }, + { + "epoch": 1.2183523107836571, + "grad_norm": 1.6588431286315388, + "learning_rate": 4.389721916617328e-06, + "loss": 1.2974, + "step": 1819 + }, + { + "epoch": 1.2190221031480242, + "grad_norm": 1.7128413958824131, + "learning_rate": 4.388764764168661e-06, + "loss": 1.2962, + "step": 1820 + }, + { + "epoch": 1.2196918955123912, + "grad_norm": 1.766136251680633, + "learning_rate": 4.387806966240753e-06, + "loss": 1.396, + "step": 1821 + }, + { + "epoch": 1.2203616878767582, + "grad_norm": 1.731148739352855, + "learning_rate": 4.386848523160926e-06, + "loss": 1.3245, + "step": 1822 + }, + { + "epoch": 1.2210314802411253, + "grad_norm": 1.7205535942680403, + "learning_rate": 4.385889435256725e-06, + "loss": 1.493, + "step": 1823 + }, + { + "epoch": 1.2217012726054923, + "grad_norm": 2.238538118282545, + "learning_rate": 4.384929702855915e-06, + "loss": 1.4964, + "step": 1824 + }, + { + "epoch": 1.2223710649698594, + "grad_norm": 2.475127938584194, + "learning_rate": 4.3839693262864816e-06, + "loss": 0.9998, + "step": 1825 + }, + { + "epoch": 1.2230408573342264, + "grad_norm": 2.3401014678543803, + "learning_rate": 4.383008305876632e-06, + "loss": 1.2297, + "step": 1826 + }, + { + "epoch": 1.2237106496985934, + "grad_norm": 2.236349700782146, + "learning_rate": 4.382046641954789e-06, + "loss": 1.396, + "step": 1827 + }, + { + "epoch": 1.2243804420629605, + "grad_norm": 2.0004272312842986, + "learning_rate": 4.3810843348496e-06, + "loss": 1.3487, + "step": 1828 + }, + { + "epoch": 1.2250502344273275, + "grad_norm": 1.8754114258290298, + "learning_rate": 4.3801213848899295e-06, + "loss": 1.2837, + "step": 1829 + }, + { + "epoch": 1.2257200267916946, + "grad_norm": 
1.7962296391783033, + "learning_rate": 4.379157792404864e-06, + "loss": 1.335, + "step": 1830 + }, + { + "epoch": 1.2263898191560616, + "grad_norm": 1.9243233242544795, + "learning_rate": 4.378193557723706e-06, + "loss": 1.3301, + "step": 1831 + }, + { + "epoch": 1.2270596115204286, + "grad_norm": 2.0302096948218384, + "learning_rate": 4.37722868117598e-06, + "loss": 1.3713, + "step": 1832 + }, + { + "epoch": 1.2277294038847957, + "grad_norm": 1.6331558370910697, + "learning_rate": 4.376263163091431e-06, + "loss": 1.3815, + "step": 1833 + }, + { + "epoch": 1.2283991962491627, + "grad_norm": 1.8726604288234898, + "learning_rate": 4.375297003800021e-06, + "loss": 1.2592, + "step": 1834 + }, + { + "epoch": 1.2290689886135298, + "grad_norm": 2.3546485253331246, + "learning_rate": 4.37433020363193e-06, + "loss": 1.2736, + "step": 1835 + }, + { + "epoch": 1.2297387809778968, + "grad_norm": 1.8723559850804423, + "learning_rate": 4.373362762917561e-06, + "loss": 1.3897, + "step": 1836 + }, + { + "epoch": 1.2304085733422638, + "grad_norm": 1.963072516122455, + "learning_rate": 4.372394681987534e-06, + "loss": 1.4724, + "step": 1837 + }, + { + "epoch": 1.2310783657066309, + "grad_norm": 1.7327665653285766, + "learning_rate": 4.371425961172686e-06, + "loss": 1.3559, + "step": 1838 + }, + { + "epoch": 1.231748158070998, + "grad_norm": 1.900247616972825, + "learning_rate": 4.370456600804075e-06, + "loss": 1.497, + "step": 1839 + }, + { + "epoch": 1.232417950435365, + "grad_norm": 1.731276835059715, + "learning_rate": 4.369486601212976e-06, + "loss": 1.4379, + "step": 1840 + }, + { + "epoch": 1.233087742799732, + "grad_norm": 3.2021345006910074, + "learning_rate": 4.368515962730883e-06, + "loss": 1.1669, + "step": 1841 + }, + { + "epoch": 1.233757535164099, + "grad_norm": 3.2422080858167615, + "learning_rate": 4.367544685689511e-06, + "loss": 1.3164, + "step": 1842 + }, + { + "epoch": 1.2344273275284663, + "grad_norm": 1.782093024809426, + "learning_rate": 4.36657277042079e-06, + "loss": 1.3271, + "step": 1843 + }, + { + "epoch": 1.235097119892833, + "grad_norm": 1.7584556778219695, + "learning_rate": 4.365600217256866e-06, + "loss": 1.3306, + "step": 1844 + }, + { + "epoch": 1.2357669122572004, + "grad_norm": 1.9051224425843465, + "learning_rate": 4.364627026530108e-06, + "loss": 1.5085, + "step": 1845 + }, + { + "epoch": 1.2364367046215674, + "grad_norm": 2.0798352482413986, + "learning_rate": 4.363653198573101e-06, + "loss": 1.2614, + "step": 1846 + }, + { + "epoch": 1.2371064969859344, + "grad_norm": 2.2341856822112462, + "learning_rate": 4.362678733718649e-06, + "loss": 1.3336, + "step": 1847 + }, + { + "epoch": 1.2377762893503015, + "grad_norm": 1.7486209735026836, + "learning_rate": 4.361703632299768e-06, + "loss": 1.2205, + "step": 1848 + }, + { + "epoch": 1.2384460817146685, + "grad_norm": 2.108966910534176, + "learning_rate": 4.360727894649699e-06, + "loss": 1.4214, + "step": 1849 + }, + { + "epoch": 1.2391158740790356, + "grad_norm": 1.9832685370799097, + "learning_rate": 4.359751521101896e-06, + "loss": 1.448, + "step": 1850 + }, + { + "epoch": 1.2397856664434026, + "grad_norm": 2.171169153737893, + "learning_rate": 4.3587745119900325e-06, + "loss": 1.2916, + "step": 1851 + }, + { + "epoch": 1.2404554588077696, + "grad_norm": 2.1474836285354955, + "learning_rate": 4.357796867647998e-06, + "loss": 1.412, + "step": 1852 + }, + { + "epoch": 1.2411252511721367, + "grad_norm": 1.8639088190300686, + "learning_rate": 4.356818588409898e-06, + "loss": 1.3872, + "step": 1853 + }, + { + "epoch": 
1.2417950435365037, + "grad_norm": 1.7448166861848189, + "learning_rate": 4.355839674610058e-06, + "loss": 1.3809, + "step": 1854 + }, + { + "epoch": 1.2424648359008708, + "grad_norm": 1.7642948794330844, + "learning_rate": 4.354860126583017e-06, + "loss": 1.412, + "step": 1855 + }, + { + "epoch": 1.2431346282652378, + "grad_norm": 3.107265762893665, + "learning_rate": 4.3538799446635335e-06, + "loss": 1.4064, + "step": 1856 + }, + { + "epoch": 1.2438044206296048, + "grad_norm": 2.019748092463195, + "learning_rate": 4.352899129186581e-06, + "loss": 1.3599, + "step": 1857 + }, + { + "epoch": 1.2444742129939719, + "grad_norm": 1.994706396406876, + "learning_rate": 4.351917680487351e-06, + "loss": 1.3759, + "step": 1858 + }, + { + "epoch": 1.245144005358339, + "grad_norm": 1.9762233308321662, + "learning_rate": 4.350935598901248e-06, + "loss": 1.4341, + "step": 1859 + }, + { + "epoch": 1.245813797722706, + "grad_norm": 1.9774118333663246, + "learning_rate": 4.349952884763898e-06, + "loss": 1.3248, + "step": 1860 + }, + { + "epoch": 1.246483590087073, + "grad_norm": 1.830934767256062, + "learning_rate": 4.348969538411138e-06, + "loss": 1.3353, + "step": 1861 + }, + { + "epoch": 1.24715338245144, + "grad_norm": 1.8817979309301123, + "learning_rate": 4.347985560179025e-06, + "loss": 1.3439, + "step": 1862 + }, + { + "epoch": 1.247823174815807, + "grad_norm": 4.521027450421792, + "learning_rate": 4.347000950403831e-06, + "loss": 1.3724, + "step": 1863 + }, + { + "epoch": 1.248492967180174, + "grad_norm": 1.9925844661951433, + "learning_rate": 4.3460157094220415e-06, + "loss": 1.5284, + "step": 1864 + }, + { + "epoch": 1.2491627595445411, + "grad_norm": 2.0685520381190052, + "learning_rate": 4.345029837570359e-06, + "loss": 1.4832, + "step": 1865 + }, + { + "epoch": 1.2498325519089082, + "grad_norm": 2.559917002568953, + "learning_rate": 4.344043335185705e-06, + "loss": 1.3849, + "step": 1866 + }, + { + "epoch": 1.2505023442732752, + "grad_norm": 2.5019968035299787, + "learning_rate": 4.34305620260521e-06, + "loss": 1.6048, + "step": 1867 + }, + { + "epoch": 1.2511721366376423, + "grad_norm": 1.8325401957911267, + "learning_rate": 4.342068440166225e-06, + "loss": 1.2796, + "step": 1868 + }, + { + "epoch": 1.2518419290020093, + "grad_norm": 3.268462203817769, + "learning_rate": 4.3410800482063155e-06, + "loss": 1.3612, + "step": 1869 + }, + { + "epoch": 1.2525117213663763, + "grad_norm": 2.194006016041873, + "learning_rate": 4.34009102706326e-06, + "loss": 1.3685, + "step": 1870 + }, + { + "epoch": 1.2531815137307434, + "grad_norm": 2.067882887250671, + "learning_rate": 4.339101377075054e-06, + "loss": 1.3419, + "step": 1871 + }, + { + "epoch": 1.2538513060951106, + "grad_norm": 4.1609460797659645, + "learning_rate": 4.338111098579905e-06, + "loss": 1.118, + "step": 1872 + }, + { + "epoch": 1.2545210984594775, + "grad_norm": 5.861331429773086, + "learning_rate": 4.337120191916241e-06, + "loss": 1.3778, + "step": 1873 + }, + { + "epoch": 1.2551908908238447, + "grad_norm": 2.1652343806994203, + "learning_rate": 4.336128657422698e-06, + "loss": 1.3171, + "step": 1874 + }, + { + "epoch": 1.2558606831882115, + "grad_norm": 2.865605499624319, + "learning_rate": 4.335136495438132e-06, + "loss": 1.4807, + "step": 1875 + }, + { + "epoch": 1.2565304755525788, + "grad_norm": 1.7914148947414639, + "learning_rate": 4.33414370630161e-06, + "loss": 1.395, + "step": 1876 + }, + { + "epoch": 1.2572002679169456, + "grad_norm": 1.8742575140906748, + "learning_rate": 4.333150290352415e-06, + "loss": 1.4936, + "step": 
1877 + }, + { + "epoch": 1.2578700602813129, + "grad_norm": 1.898993379382328, + "learning_rate": 4.332156247930043e-06, + "loss": 1.224, + "step": 1878 + }, + { + "epoch": 1.25853985264568, + "grad_norm": 1.9161265625903872, + "learning_rate": 4.331161579374205e-06, + "loss": 1.4836, + "step": 1879 + }, + { + "epoch": 1.259209645010047, + "grad_norm": 2.5035205307286863, + "learning_rate": 4.330166285024827e-06, + "loss": 1.3586, + "step": 1880 + }, + { + "epoch": 1.259879437374414, + "grad_norm": 2.134357147805428, + "learning_rate": 4.329170365222045e-06, + "loss": 1.3307, + "step": 1881 + }, + { + "epoch": 1.260549229738781, + "grad_norm": 2.4866004806861572, + "learning_rate": 4.328173820306214e-06, + "loss": 1.2604, + "step": 1882 + }, + { + "epoch": 1.261219022103148, + "grad_norm": 1.8011892795429845, + "learning_rate": 4.327176650617898e-06, + "loss": 1.2361, + "step": 1883 + }, + { + "epoch": 1.261888814467515, + "grad_norm": 1.6112713234080114, + "learning_rate": 4.326178856497878e-06, + "loss": 1.2775, + "step": 1884 + }, + { + "epoch": 1.2625586068318821, + "grad_norm": 5.194007962168302, + "learning_rate": 4.325180438287147e-06, + "loss": 1.1699, + "step": 1885 + }, + { + "epoch": 1.2632283991962492, + "grad_norm": 2.2789736374891705, + "learning_rate": 4.324181396326909e-06, + "loss": 1.4749, + "step": 1886 + }, + { + "epoch": 1.2638981915606162, + "grad_norm": 1.8824142295651125, + "learning_rate": 4.3231817309585855e-06, + "loss": 1.5185, + "step": 1887 + }, + { + "epoch": 1.2645679839249833, + "grad_norm": 1.7633875118801325, + "learning_rate": 4.322181442523809e-06, + "loss": 1.0859, + "step": 1888 + }, + { + "epoch": 1.2652377762893503, + "grad_norm": 1.78687227424992, + "learning_rate": 4.321180531364423e-06, + "loss": 1.4312, + "step": 1889 + }, + { + "epoch": 1.2659075686537173, + "grad_norm": 2.678470575057133, + "learning_rate": 4.320178997822487e-06, + "loss": 1.4419, + "step": 1890 + }, + { + "epoch": 1.2665773610180844, + "grad_norm": 2.731380347068121, + "learning_rate": 4.3191768422402725e-06, + "loss": 1.4199, + "step": 1891 + }, + { + "epoch": 1.2672471533824514, + "grad_norm": 1.7656078386996754, + "learning_rate": 4.318174064960261e-06, + "loss": 1.4968, + "step": 1892 + }, + { + "epoch": 1.2679169457468185, + "grad_norm": 3.39978638146202, + "learning_rate": 4.31717066632515e-06, + "loss": 1.4351, + "step": 1893 + }, + { + "epoch": 1.2685867381111855, + "grad_norm": 1.946589484636482, + "learning_rate": 4.316166646677847e-06, + "loss": 1.1024, + "step": 1894 + }, + { + "epoch": 1.2692565304755525, + "grad_norm": 2.4944814380558924, + "learning_rate": 4.315162006361472e-06, + "loss": 1.2135, + "step": 1895 + }, + { + "epoch": 1.2699263228399196, + "grad_norm": 2.792092549881359, + "learning_rate": 4.314156745719359e-06, + "loss": 1.3295, + "step": 1896 + }, + { + "epoch": 1.2705961152042866, + "grad_norm": 2.808361145912672, + "learning_rate": 4.313150865095052e-06, + "loss": 1.371, + "step": 1897 + }, + { + "epoch": 1.2712659075686537, + "grad_norm": 1.882485768478413, + "learning_rate": 4.312144364832307e-06, + "loss": 1.29, + "step": 1898 + }, + { + "epoch": 1.2719356999330207, + "grad_norm": 1.9036282626306507, + "learning_rate": 4.311137245275091e-06, + "loss": 1.3343, + "step": 1899 + }, + { + "epoch": 1.2726054922973877, + "grad_norm": 1.7910620862199071, + "learning_rate": 4.310129506767587e-06, + "loss": 1.4141, + "step": 1900 + }, + { + "epoch": 1.273275284661755, + "grad_norm": 2.447346326762054, + "learning_rate": 4.309121149654184e-06, + 
"loss": 1.2362, + "step": 1901 + }, + { + "epoch": 1.2739450770261218, + "grad_norm": 1.762418867353042, + "learning_rate": 4.308112174279485e-06, + "loss": 1.3976, + "step": 1902 + }, + { + "epoch": 1.274614869390489, + "grad_norm": 2.401921579095633, + "learning_rate": 4.307102580988305e-06, + "loss": 1.3446, + "step": 1903 + }, + { + "epoch": 1.2752846617548559, + "grad_norm": 2.0780505863447445, + "learning_rate": 4.3060923701256686e-06, + "loss": 1.4426, + "step": 1904 + }, + { + "epoch": 1.2759544541192231, + "grad_norm": 1.883320001915085, + "learning_rate": 4.305081542036813e-06, + "loss": 1.4013, + "step": 1905 + }, + { + "epoch": 1.27662424648359, + "grad_norm": 1.6387482298674974, + "learning_rate": 4.3040700970671835e-06, + "loss": 1.2904, + "step": 1906 + }, + { + "epoch": 1.2772940388479572, + "grad_norm": 1.9213514987935778, + "learning_rate": 4.30305803556244e-06, + "loss": 1.6254, + "step": 1907 + }, + { + "epoch": 1.277963831212324, + "grad_norm": 1.9204759450059, + "learning_rate": 4.30204535786845e-06, + "loss": 1.321, + "step": 1908 + }, + { + "epoch": 1.2786336235766913, + "grad_norm": 1.9528115255101992, + "learning_rate": 4.3010320643312935e-06, + "loss": 1.3101, + "step": 1909 + }, + { + "epoch": 1.2793034159410583, + "grad_norm": 2.283620901454181, + "learning_rate": 4.30001815529726e-06, + "loss": 1.4888, + "step": 1910 + }, + { + "epoch": 1.2799732083054254, + "grad_norm": 2.4211537964911765, + "learning_rate": 4.299003631112849e-06, + "loss": 1.5042, + "step": 1911 + }, + { + "epoch": 1.2806430006697924, + "grad_norm": 1.8697737536863157, + "learning_rate": 4.2979884921247726e-06, + "loss": 1.2854, + "step": 1912 + }, + { + "epoch": 1.2813127930341595, + "grad_norm": 3.2166893045760334, + "learning_rate": 4.296972738679951e-06, + "loss": 1.3163, + "step": 1913 + }, + { + "epoch": 1.2819825853985265, + "grad_norm": 1.7422116346565444, + "learning_rate": 4.295956371125513e-06, + "loss": 1.3305, + "step": 1914 + }, + { + "epoch": 1.2826523777628935, + "grad_norm": 1.7194024714072917, + "learning_rate": 4.2949393898088e-06, + "loss": 1.3748, + "step": 1915 + }, + { + "epoch": 1.2833221701272606, + "grad_norm": 1.6555557314830986, + "learning_rate": 4.293921795077361e-06, + "loss": 1.5408, + "step": 1916 + }, + { + "epoch": 1.2839919624916276, + "grad_norm": 1.800778203668544, + "learning_rate": 4.292903587278958e-06, + "loss": 1.2949, + "step": 1917 + }, + { + "epoch": 1.2846617548559947, + "grad_norm": 1.731880240669278, + "learning_rate": 4.291884766761558e-06, + "loss": 1.4168, + "step": 1918 + }, + { + "epoch": 1.2853315472203617, + "grad_norm": 1.8726674634821614, + "learning_rate": 4.290865333873341e-06, + "loss": 1.2396, + "step": 1919 + }, + { + "epoch": 1.2860013395847287, + "grad_norm": 1.7994636635332875, + "learning_rate": 4.289845288962694e-06, + "loss": 1.3146, + "step": 1920 + }, + { + "epoch": 1.2866711319490958, + "grad_norm": 1.846873974431505, + "learning_rate": 4.2888246323782145e-06, + "loss": 1.1866, + "step": 1921 + }, + { + "epoch": 1.2873409243134628, + "grad_norm": 2.4825818230426204, + "learning_rate": 4.287803364468709e-06, + "loss": 1.3117, + "step": 1922 + }, + { + "epoch": 1.2880107166778298, + "grad_norm": 2.3262739682715607, + "learning_rate": 4.286781485583191e-06, + "loss": 1.3925, + "step": 1923 + }, + { + "epoch": 1.2886805090421969, + "grad_norm": 2.191666566633194, + "learning_rate": 4.285758996070886e-06, + "loss": 1.1128, + "step": 1924 + }, + { + "epoch": 1.289350301406564, + "grad_norm": 2.4399403213093187, + 
"learning_rate": 4.2847358962812255e-06, + "loss": 1.3262, + "step": 1925 + }, + { + "epoch": 1.290020093770931, + "grad_norm": 2.4023218368335337, + "learning_rate": 4.283712186563851e-06, + "loss": 1.238, + "step": 1926 + }, + { + "epoch": 1.290689886135298, + "grad_norm": 2.145802680758944, + "learning_rate": 4.282687867268612e-06, + "loss": 1.3757, + "step": 1927 + }, + { + "epoch": 1.291359678499665, + "grad_norm": 3.8208321733135984, + "learning_rate": 4.281662938745565e-06, + "loss": 1.3677, + "step": 1928 + }, + { + "epoch": 1.292029470864032, + "grad_norm": 1.7791644872566432, + "learning_rate": 4.280637401344978e-06, + "loss": 1.3568, + "step": 1929 + }, + { + "epoch": 1.2926992632283991, + "grad_norm": 1.84363789750736, + "learning_rate": 4.279611255417324e-06, + "loss": 1.5021, + "step": 1930 + }, + { + "epoch": 1.2933690555927662, + "grad_norm": 1.693825329826023, + "learning_rate": 4.278584501313286e-06, + "loss": 1.4072, + "step": 1931 + }, + { + "epoch": 1.2940388479571334, + "grad_norm": 1.5767367808123567, + "learning_rate": 4.277557139383752e-06, + "loss": 1.3804, + "step": 1932 + }, + { + "epoch": 1.2947086403215002, + "grad_norm": 2.377003862165646, + "learning_rate": 4.2765291699798205e-06, + "loss": 1.3262, + "step": 1933 + }, + { + "epoch": 1.2953784326858675, + "grad_norm": 3.00083859741379, + "learning_rate": 4.275500593452798e-06, + "loss": 1.1451, + "step": 1934 + }, + { + "epoch": 1.2960482250502343, + "grad_norm": 2.2951374954172463, + "learning_rate": 4.274471410154195e-06, + "loss": 1.4083, + "step": 1935 + }, + { + "epoch": 1.2967180174146016, + "grad_norm": 2.4732560669484616, + "learning_rate": 4.2734416204357335e-06, + "loss": 1.178, + "step": 1936 + }, + { + "epoch": 1.2973878097789684, + "grad_norm": 1.757447638940287, + "learning_rate": 4.27241122464934e-06, + "loss": 1.2833, + "step": 1937 + }, + { + "epoch": 1.2980576021433357, + "grad_norm": 2.5171275062704814, + "learning_rate": 4.271380223147149e-06, + "loss": 1.2134, + "step": 1938 + }, + { + "epoch": 1.2987273945077027, + "grad_norm": 2.2526758386701387, + "learning_rate": 4.270348616281501e-06, + "loss": 1.3791, + "step": 1939 + }, + { + "epoch": 1.2993971868720697, + "grad_norm": 1.7361961222476043, + "learning_rate": 4.269316404404946e-06, + "loss": 1.2432, + "step": 1940 + }, + { + "epoch": 1.3000669792364368, + "grad_norm": 1.7644721428994605, + "learning_rate": 4.2682835878702376e-06, + "loss": 1.4277, + "step": 1941 + }, + { + "epoch": 1.3007367716008038, + "grad_norm": 2.7962721147062166, + "learning_rate": 4.267250167030339e-06, + "loss": 1.2773, + "step": 1942 + }, + { + "epoch": 1.3014065639651708, + "grad_norm": 1.868546026550747, + "learning_rate": 4.266216142238417e-06, + "loss": 1.1319, + "step": 1943 + }, + { + "epoch": 1.3020763563295379, + "grad_norm": 1.6699533011571315, + "learning_rate": 4.2651815138478465e-06, + "loss": 1.3146, + "step": 1944 + }, + { + "epoch": 1.302746148693905, + "grad_norm": 1.7735266022956386, + "learning_rate": 4.264146282212209e-06, + "loss": 1.3962, + "step": 1945 + }, + { + "epoch": 1.303415941058272, + "grad_norm": 2.156496126200323, + "learning_rate": 4.263110447685291e-06, + "loss": 0.8897, + "step": 1946 + }, + { + "epoch": 1.304085733422639, + "grad_norm": 2.3173165945214462, + "learning_rate": 4.262074010621087e-06, + "loss": 1.392, + "step": 1947 + }, + { + "epoch": 1.304755525787006, + "grad_norm": 2.165932026984, + "learning_rate": 4.261036971373794e-06, + "loss": 1.1826, + "step": 1948 + }, + { + "epoch": 1.305425318151373, + 
"grad_norm": 2.0803447910558073, + "learning_rate": 4.259999330297817e-06, + "loss": 1.4372, + "step": 1949 + }, + { + "epoch": 1.3060951105157401, + "grad_norm": 1.5797806888035641, + "learning_rate": 4.2589610877477685e-06, + "loss": 1.443, + "step": 1950 + }, + { + "epoch": 1.3067649028801072, + "grad_norm": 1.7174141874458775, + "learning_rate": 4.2579222440784625e-06, + "loss": 1.3531, + "step": 1951 + }, + { + "epoch": 1.3074346952444742, + "grad_norm": 1.5855981285414582, + "learning_rate": 4.2568827996449205e-06, + "loss": 1.123, + "step": 1952 + }, + { + "epoch": 1.3081044876088412, + "grad_norm": 2.5142172004890706, + "learning_rate": 4.2558427548023704e-06, + "loss": 1.4415, + "step": 1953 + }, + { + "epoch": 1.3087742799732083, + "grad_norm": 2.1524789271950455, + "learning_rate": 4.254802109906244e-06, + "loss": 1.3193, + "step": 1954 + }, + { + "epoch": 1.3094440723375753, + "grad_norm": 1.7048016626357552, + "learning_rate": 4.253760865312178e-06, + "loss": 1.223, + "step": 1955 + }, + { + "epoch": 1.3101138647019424, + "grad_norm": 2.204548232682556, + "learning_rate": 4.252719021376014e-06, + "loss": 1.2822, + "step": 1956 + }, + { + "epoch": 1.3107836570663094, + "grad_norm": 4.198339231765006, + "learning_rate": 4.2516765784537994e-06, + "loss": 1.2193, + "step": 1957 + }, + { + "epoch": 1.3114534494306764, + "grad_norm": 1.8828816624974503, + "learning_rate": 4.2506335369017844e-06, + "loss": 1.2627, + "step": 1958 + }, + { + "epoch": 1.3121232417950435, + "grad_norm": 3.58892255137571, + "learning_rate": 4.249589897076427e-06, + "loss": 1.5224, + "step": 1959 + }, + { + "epoch": 1.3127930341594105, + "grad_norm": 2.191156099238444, + "learning_rate": 4.248545659334386e-06, + "loss": 1.3273, + "step": 1960 + }, + { + "epoch": 1.3134628265237775, + "grad_norm": 2.828563572800012, + "learning_rate": 4.247500824032529e-06, + "loss": 1.1629, + "step": 1961 + }, + { + "epoch": 1.3141326188881446, + "grad_norm": 2.417739302151157, + "learning_rate": 4.246455391527921e-06, + "loss": 1.2121, + "step": 1962 + }, + { + "epoch": 1.3148024112525118, + "grad_norm": 1.7175190581345852, + "learning_rate": 4.2454093621778396e-06, + "loss": 1.5281, + "step": 1963 + }, + { + "epoch": 1.3154722036168787, + "grad_norm": 1.867453224204404, + "learning_rate": 4.244362736339758e-06, + "loss": 1.3194, + "step": 1964 + }, + { + "epoch": 1.316141995981246, + "grad_norm": 1.8750582289240179, + "learning_rate": 4.243315514371359e-06, + "loss": 1.2949, + "step": 1965 + }, + { + "epoch": 1.3168117883456127, + "grad_norm": 2.5514088585206625, + "learning_rate": 4.2422676966305285e-06, + "loss": 1.549, + "step": 1966 + }, + { + "epoch": 1.31748158070998, + "grad_norm": 3.2502667461310826, + "learning_rate": 4.241219283475352e-06, + "loss": 1.3817, + "step": 1967 + }, + { + "epoch": 1.3181513730743468, + "grad_norm": 1.8570017171989737, + "learning_rate": 4.240170275264124e-06, + "loss": 1.5579, + "step": 1968 + }, + { + "epoch": 1.318821165438714, + "grad_norm": 1.6614463214705442, + "learning_rate": 4.239120672355338e-06, + "loss": 1.3013, + "step": 1969 + }, + { + "epoch": 1.3194909578030811, + "grad_norm": 2.5092687171135517, + "learning_rate": 4.2380704751076925e-06, + "loss": 1.1448, + "step": 1970 + }, + { + "epoch": 1.3201607501674482, + "grad_norm": 1.7307349522342967, + "learning_rate": 4.23701968388009e-06, + "loss": 1.3138, + "step": 1971 + }, + { + "epoch": 1.3208305425318152, + "grad_norm": 1.6998140311806529, + "learning_rate": 4.235968299031634e-06, + "loss": 1.3302, + "step": 1972 + }, 
+ { + "epoch": 1.3215003348961822, + "grad_norm": 2.1075632003487756, + "learning_rate": 4.234916320921633e-06, + "loss": 1.3692, + "step": 1973 + }, + { + "epoch": 1.3221701272605493, + "grad_norm": 2.4167679991839046, + "learning_rate": 4.2338637499095956e-06, + "loss": 1.2862, + "step": 1974 + }, + { + "epoch": 1.3228399196249163, + "grad_norm": 1.7656040607052295, + "learning_rate": 4.232810586355235e-06, + "loss": 1.347, + "step": 1975 + }, + { + "epoch": 1.3235097119892834, + "grad_norm": 1.8091816049602971, + "learning_rate": 4.231756830618467e-06, + "loss": 1.4129, + "step": 1976 + }, + { + "epoch": 1.3241795043536504, + "grad_norm": 2.0709824167383886, + "learning_rate": 4.23070248305941e-06, + "loss": 1.2522, + "step": 1977 + }, + { + "epoch": 1.3248492967180174, + "grad_norm": 1.953022597540587, + "learning_rate": 4.229647544038382e-06, + "loss": 1.3628, + "step": 1978 + }, + { + "epoch": 1.3255190890823845, + "grad_norm": 1.8443580812801286, + "learning_rate": 4.228592013915905e-06, + "loss": 1.424, + "step": 1979 + }, + { + "epoch": 1.3261888814467515, + "grad_norm": 1.860525636280478, + "learning_rate": 4.227535893052705e-06, + "loss": 1.4487, + "step": 1980 + }, + { + "epoch": 1.3268586738111185, + "grad_norm": 1.6139708938648327, + "learning_rate": 4.2264791818097075e-06, + "loss": 1.4291, + "step": 1981 + }, + { + "epoch": 1.3275284661754856, + "grad_norm": 1.8487594966605425, + "learning_rate": 4.225421880548038e-06, + "loss": 1.1799, + "step": 1982 + }, + { + "epoch": 1.3281982585398526, + "grad_norm": 2.6255990787013097, + "learning_rate": 4.2243639896290264e-06, + "loss": 1.2353, + "step": 1983 + }, + { + "epoch": 1.3288680509042197, + "grad_norm": 3.4772579060299664, + "learning_rate": 4.223305509414206e-06, + "loss": 1.2857, + "step": 1984 + }, + { + "epoch": 1.3295378432685867, + "grad_norm": 1.5569931266132837, + "learning_rate": 4.222246440265306e-06, + "loss": 1.1664, + "step": 1985 + }, + { + "epoch": 1.3302076356329537, + "grad_norm": 2.3817195997173473, + "learning_rate": 4.221186782544262e-06, + "loss": 1.4646, + "step": 1986 + }, + { + "epoch": 1.3308774279973208, + "grad_norm": 1.9040857552152337, + "learning_rate": 4.220126536613207e-06, + "loss": 1.3728, + "step": 1987 + }, + { + "epoch": 1.3315472203616878, + "grad_norm": 1.6220585145590096, + "learning_rate": 4.219065702834478e-06, + "loss": 1.281, + "step": 1988 + }, + { + "epoch": 1.3322170127260549, + "grad_norm": 1.5114390244074658, + "learning_rate": 4.21800428157061e-06, + "loss": 1.2091, + "step": 1989 + }, + { + "epoch": 1.332886805090422, + "grad_norm": 2.5208339855885633, + "learning_rate": 4.216942273184341e-06, + "loss": 1.1999, + "step": 1990 + }, + { + "epoch": 1.333556597454789, + "grad_norm": 1.7642978699587446, + "learning_rate": 4.215879678038609e-06, + "loss": 1.3766, + "step": 1991 + }, + { + "epoch": 1.3342263898191562, + "grad_norm": 3.993184622827881, + "learning_rate": 4.214816496496553e-06, + "loss": 1.4132, + "step": 1992 + }, + { + "epoch": 1.334896182183523, + "grad_norm": 1.9818614738848495, + "learning_rate": 4.2137527289215115e-06, + "loss": 1.29, + "step": 1993 + }, + { + "epoch": 1.3355659745478903, + "grad_norm": 3.0974736893912334, + "learning_rate": 4.2126883756770235e-06, + "loss": 1.3054, + "step": 1994 + }, + { + "epoch": 1.336235766912257, + "grad_norm": 2.7272030223437094, + "learning_rate": 4.211623437126827e-06, + "loss": 1.299, + "step": 1995 + }, + { + "epoch": 1.3369055592766244, + "grad_norm": 4.589808216563492, + "learning_rate": 4.210557913634864e-06, + 
"loss": 1.1858, + "step": 1996 + }, + { + "epoch": 1.3375753516409912, + "grad_norm": 1.951514614134133, + "learning_rate": 4.209491805565271e-06, + "loss": 1.2034, + "step": 1997 + }, + { + "epoch": 1.3382451440053584, + "grad_norm": 1.7018084806107698, + "learning_rate": 4.208425113282388e-06, + "loss": 1.2929, + "step": 1998 + }, + { + "epoch": 1.3389149363697253, + "grad_norm": 2.020417529482415, + "learning_rate": 4.207357837150755e-06, + "loss": 1.2251, + "step": 1999 + }, + { + "epoch": 1.3395847287340925, + "grad_norm": 2.0420490900781134, + "learning_rate": 4.206289977535108e-06, + "loss": 1.3697, + "step": 2000 + }, + { + "epoch": 1.3402545210984596, + "grad_norm": 3.0761331010601562, + "learning_rate": 4.205221534800386e-06, + "loss": 1.4084, + "step": 2001 + }, + { + "epoch": 1.3409243134628266, + "grad_norm": 2.198830632742176, + "learning_rate": 4.204152509311726e-06, + "loss": 1.1554, + "step": 2002 + }, + { + "epoch": 1.3415941058271936, + "grad_norm": 1.665795241515319, + "learning_rate": 4.203082901434463e-06, + "loss": 1.4456, + "step": 2003 + }, + { + "epoch": 1.3422638981915607, + "grad_norm": 2.3681924610395333, + "learning_rate": 4.202012711534132e-06, + "loss": 1.2545, + "step": 2004 + }, + { + "epoch": 1.3429336905559277, + "grad_norm": 2.184750425697016, + "learning_rate": 4.200941939976467e-06, + "loss": 1.3542, + "step": 2005 + }, + { + "epoch": 1.3436034829202947, + "grad_norm": 1.7311972688494852, + "learning_rate": 4.199870587127402e-06, + "loss": 1.1565, + "step": 2006 + }, + { + "epoch": 1.3442732752846618, + "grad_norm": 6.7992897036570215, + "learning_rate": 4.1987986533530675e-06, + "loss": 1.3786, + "step": 2007 + }, + { + "epoch": 1.3449430676490288, + "grad_norm": 1.896105066846702, + "learning_rate": 4.197726139019792e-06, + "loss": 1.2597, + "step": 2008 + }, + { + "epoch": 1.3456128600133959, + "grad_norm": 3.1413320409815753, + "learning_rate": 4.196653044494107e-06, + "loss": 1.3371, + "step": 2009 + }, + { + "epoch": 1.346282652377763, + "grad_norm": 1.7737144531320113, + "learning_rate": 4.195579370142736e-06, + "loss": 1.2351, + "step": 2010 + }, + { + "epoch": 1.34695244474213, + "grad_norm": 1.7526319438586644, + "learning_rate": 4.194505116332606e-06, + "loss": 1.3015, + "step": 2011 + }, + { + "epoch": 1.347622237106497, + "grad_norm": 2.0567774554551304, + "learning_rate": 4.193430283430837e-06, + "loss": 1.4182, + "step": 2012 + }, + { + "epoch": 1.348292029470864, + "grad_norm": 2.0540701982271434, + "learning_rate": 4.192354871804753e-06, + "loss": 1.3312, + "step": 2013 + }, + { + "epoch": 1.348961821835231, + "grad_norm": 1.8019193325200906, + "learning_rate": 4.19127888182187e-06, + "loss": 1.0298, + "step": 2014 + }, + { + "epoch": 1.349631614199598, + "grad_norm": 2.588487148359236, + "learning_rate": 4.190202313849905e-06, + "loss": 1.2805, + "step": 2015 + }, + { + "epoch": 1.3503014065639651, + "grad_norm": 4.743453760250258, + "learning_rate": 4.189125168256771e-06, + "loss": 1.2509, + "step": 2016 + }, + { + "epoch": 1.3509711989283322, + "grad_norm": 2.1843876669706757, + "learning_rate": 4.18804744541058e-06, + "loss": 1.2772, + "step": 2017 + }, + { + "epoch": 1.3516409912926992, + "grad_norm": 1.7617304019439994, + "learning_rate": 4.18696914567964e-06, + "loss": 1.3985, + "step": 2018 + }, + { + "epoch": 1.3523107836570663, + "grad_norm": 1.798173611593727, + "learning_rate": 4.185890269432456e-06, + "loss": 1.455, + "step": 2019 + }, + { + "epoch": 1.3529805760214333, + "grad_norm": 1.849683147367904, + "learning_rate": 
4.184810817037731e-06, + "loss": 1.4295, + "step": 2020 + }, + { + "epoch": 1.3536503683858003, + "grad_norm": 1.978856051996764, + "learning_rate": 4.183730788864365e-06, + "loss": 1.4334, + "step": 2021 + }, + { + "epoch": 1.3543201607501674, + "grad_norm": 1.8979370763448156, + "learning_rate": 4.182650185281452e-06, + "loss": 1.4228, + "step": 2022 + }, + { + "epoch": 1.3549899531145346, + "grad_norm": 1.9598588940998476, + "learning_rate": 4.181569006658286e-06, + "loss": 1.4188, + "step": 2023 + }, + { + "epoch": 1.3556597454789014, + "grad_norm": 2.5139138621134576, + "learning_rate": 4.1804872533643574e-06, + "loss": 1.2175, + "step": 2024 + }, + { + "epoch": 1.3563295378432687, + "grad_norm": 1.8472909357281793, + "learning_rate": 4.17940492576935e-06, + "loss": 1.1938, + "step": 2025 + }, + { + "epoch": 1.3569993302076355, + "grad_norm": 1.8804520149355812, + "learning_rate": 4.1783220242431465e-06, + "loss": 1.1925, + "step": 2026 + }, + { + "epoch": 1.3576691225720028, + "grad_norm": 1.8276090640078417, + "learning_rate": 4.177238549155825e-06, + "loss": 1.2951, + "step": 2027 + }, + { + "epoch": 1.3583389149363696, + "grad_norm": 1.8223246239344255, + "learning_rate": 4.17615450087766e-06, + "loss": 1.195, + "step": 2028 + }, + { + "epoch": 1.3590087073007369, + "grad_norm": 1.8319425314088196, + "learning_rate": 4.175069879779121e-06, + "loss": 1.2321, + "step": 2029 + }, + { + "epoch": 1.3596784996651037, + "grad_norm": 2.1101222071665537, + "learning_rate": 4.173984686230873e-06, + "loss": 1.388, + "step": 2030 + }, + { + "epoch": 1.360348292029471, + "grad_norm": 2.7133988647466003, + "learning_rate": 4.172898920603779e-06, + "loss": 1.3056, + "step": 2031 + }, + { + "epoch": 1.361018084393838, + "grad_norm": 2.587158054392974, + "learning_rate": 4.171812583268896e-06, + "loss": 1.1034, + "step": 2032 + }, + { + "epoch": 1.361687876758205, + "grad_norm": 2.402866025755026, + "learning_rate": 4.170725674597474e-06, + "loss": 1.1802, + "step": 2033 + }, + { + "epoch": 1.362357669122572, + "grad_norm": 2.112025625266841, + "learning_rate": 4.169638194960962e-06, + "loss": 1.1495, + "step": 2034 + }, + { + "epoch": 1.363027461486939, + "grad_norm": 1.7928853725446832, + "learning_rate": 4.168550144731004e-06, + "loss": 1.3751, + "step": 2035 + }, + { + "epoch": 1.3636972538513061, + "grad_norm": 2.808523116757509, + "learning_rate": 4.1674615242794346e-06, + "loss": 1.3986, + "step": 2036 + }, + { + "epoch": 1.3643670462156732, + "grad_norm": 1.9549613635808485, + "learning_rate": 4.166372333978289e-06, + "loss": 1.3216, + "step": 2037 + }, + { + "epoch": 1.3650368385800402, + "grad_norm": 1.8137362497271023, + "learning_rate": 4.165282574199793e-06, + "loss": 1.2044, + "step": 2038 + }, + { + "epoch": 1.3657066309444073, + "grad_norm": 1.6946115626881677, + "learning_rate": 4.164192245316369e-06, + "loss": 1.3277, + "step": 2039 + }, + { + "epoch": 1.3663764233087743, + "grad_norm": 1.6430946808274005, + "learning_rate": 4.163101347700633e-06, + "loss": 1.3799, + "step": 2040 + }, + { + "epoch": 1.3670462156731413, + "grad_norm": 2.4501571294024735, + "learning_rate": 4.162009881725396e-06, + "loss": 1.126, + "step": 2041 + }, + { + "epoch": 1.3677160080375084, + "grad_norm": 1.903287568339633, + "learning_rate": 4.160917847763663e-06, + "loss": 1.4744, + "step": 2042 + }, + { + "epoch": 1.3683858004018754, + "grad_norm": 1.7198348767606133, + "learning_rate": 4.159825246188633e-06, + "loss": 1.2792, + "step": 2043 + }, + { + "epoch": 1.3690555927662424, + "grad_norm": 
2.147984816899298, + "learning_rate": 4.1587320773737e-06, + "loss": 1.336, + "step": 2044 + }, + { + "epoch": 1.3697253851306095, + "grad_norm": 2.429546955476504, + "learning_rate": 4.1576383416924485e-06, + "loss": 1.3762, + "step": 2045 + }, + { + "epoch": 1.3703951774949765, + "grad_norm": 1.7386678026558706, + "learning_rate": 4.156544039518662e-06, + "loss": 1.1499, + "step": 2046 + }, + { + "epoch": 1.3710649698593436, + "grad_norm": 1.6303292478843487, + "learning_rate": 4.155449171226312e-06, + "loss": 1.2292, + "step": 2047 + }, + { + "epoch": 1.3717347622237106, + "grad_norm": 1.986021243988692, + "learning_rate": 4.154353737189568e-06, + "loss": 1.394, + "step": 2048 + }, + { + "epoch": 1.3724045545880776, + "grad_norm": 1.6047982651464054, + "learning_rate": 4.153257737782791e-06, + "loss": 1.3647, + "step": 2049 + }, + { + "epoch": 1.3730743469524447, + "grad_norm": 2.030997095179918, + "learning_rate": 4.152161173380534e-06, + "loss": 1.4031, + "step": 2050 + }, + { + "epoch": 1.3737441393168117, + "grad_norm": 2.3069586197140546, + "learning_rate": 4.151064044357545e-06, + "loss": 1.0984, + "step": 2051 + }, + { + "epoch": 1.3744139316811788, + "grad_norm": 1.9839345312411318, + "learning_rate": 4.149966351088765e-06, + "loss": 1.2871, + "step": 2052 + }, + { + "epoch": 1.3750837240455458, + "grad_norm": 2.890773109755685, + "learning_rate": 4.148868093949326e-06, + "loss": 1.3874, + "step": 2053 + }, + { + "epoch": 1.375753516409913, + "grad_norm": 2.131410586384643, + "learning_rate": 4.147769273314555e-06, + "loss": 1.2611, + "step": 2054 + }, + { + "epoch": 1.3764233087742799, + "grad_norm": 2.0943539139562266, + "learning_rate": 4.146669889559969e-06, + "loss": 1.3415, + "step": 2055 + }, + { + "epoch": 1.3770931011386471, + "grad_norm": 1.6043771070547714, + "learning_rate": 4.145569943061281e-06, + "loss": 1.2417, + "step": 2056 + }, + { + "epoch": 1.377762893503014, + "grad_norm": 1.816895834344982, + "learning_rate": 4.144469434194391e-06, + "loss": 1.3904, + "step": 2057 + }, + { + "epoch": 1.3784326858673812, + "grad_norm": 1.8836125475327863, + "learning_rate": 4.143368363335397e-06, + "loss": 1.4475, + "step": 2058 + }, + { + "epoch": 1.379102478231748, + "grad_norm": 1.9348748908090125, + "learning_rate": 4.142266730860586e-06, + "loss": 1.3506, + "step": 2059 + }, + { + "epoch": 1.3797722705961153, + "grad_norm": 2.7056449274127723, + "learning_rate": 4.141164537146436e-06, + "loss": 1.2694, + "step": 2060 + }, + { + "epoch": 1.3804420629604823, + "grad_norm": 5.846856339695571, + "learning_rate": 4.140061782569619e-06, + "loss": 1.3575, + "step": 2061 + }, + { + "epoch": 1.3811118553248494, + "grad_norm": 2.112143539543931, + "learning_rate": 4.138958467506998e-06, + "loss": 1.4403, + "step": 2062 + }, + { + "epoch": 1.3817816476892164, + "grad_norm": 1.8206069443783193, + "learning_rate": 4.137854592335627e-06, + "loss": 1.3557, + "step": 2063 + }, + { + "epoch": 1.3824514400535834, + "grad_norm": 3.2408740902382873, + "learning_rate": 4.136750157432752e-06, + "loss": 1.139, + "step": 2064 + }, + { + "epoch": 1.3831212324179505, + "grad_norm": 2.6863968249449512, + "learning_rate": 4.13564516317581e-06, + "loss": 1.3532, + "step": 2065 + }, + { + "epoch": 1.3837910247823175, + "grad_norm": 1.964184355708665, + "learning_rate": 4.134539609942428e-06, + "loss": 1.2906, + "step": 2066 + }, + { + "epoch": 1.3844608171466846, + "grad_norm": 1.8515627828152719, + "learning_rate": 4.133433498110428e-06, + "loss": 1.5321, + "step": 2067 + }, + { + "epoch": 
1.3851306095110516, + "grad_norm": 3.1567607544735563, + "learning_rate": 4.132326828057817e-06, + "loss": 1.4281, + "step": 2068 + }, + { + "epoch": 1.3858004018754186, + "grad_norm": 1.8290270088624965, + "learning_rate": 4.131219600162799e-06, + "loss": 1.1936, + "step": 2069 + }, + { + "epoch": 1.3864701942397857, + "grad_norm": 2.529230315921409, + "learning_rate": 4.130111814803763e-06, + "loss": 1.363, + "step": 2070 + }, + { + "epoch": 1.3871399866041527, + "grad_norm": 2.3073311663524394, + "learning_rate": 4.129003472359292e-06, + "loss": 1.3572, + "step": 2071 + }, + { + "epoch": 1.3878097789685198, + "grad_norm": 1.8144635351352254, + "learning_rate": 4.127894573208159e-06, + "loss": 1.4682, + "step": 2072 + }, + { + "epoch": 1.3884795713328868, + "grad_norm": 1.9035051626860617, + "learning_rate": 4.126785117729326e-06, + "loss": 1.4518, + "step": 2073 + }, + { + "epoch": 1.3891493636972538, + "grad_norm": 2.8118809953573107, + "learning_rate": 4.1256751063019455e-06, + "loss": 1.3213, + "step": 2074 + }, + { + "epoch": 1.3898191560616209, + "grad_norm": 1.9009971205180647, + "learning_rate": 4.124564539305361e-06, + "loss": 1.4474, + "step": 2075 + }, + { + "epoch": 1.390488948425988, + "grad_norm": 2.288212806645739, + "learning_rate": 4.123453417119105e-06, + "loss": 1.1333, + "step": 2076 + }, + { + "epoch": 1.391158740790355, + "grad_norm": 1.9910806156357614, + "learning_rate": 4.122341740122901e-06, + "loss": 1.0692, + "step": 2077 + }, + { + "epoch": 1.391828533154722, + "grad_norm": 1.8102620175927975, + "learning_rate": 4.121229508696658e-06, + "loss": 1.3991, + "step": 2078 + }, + { + "epoch": 1.392498325519089, + "grad_norm": 2.0770060956109364, + "learning_rate": 4.1201167232204805e-06, + "loss": 1.1463, + "step": 2079 + }, + { + "epoch": 1.393168117883456, + "grad_norm": 2.0559885514640346, + "learning_rate": 4.119003384074658e-06, + "loss": 1.267, + "step": 2080 + }, + { + "epoch": 1.393837910247823, + "grad_norm": 2.369339777277258, + "learning_rate": 4.117889491639671e-06, + "loss": 1.0707, + "step": 2081 + }, + { + "epoch": 1.3945077026121901, + "grad_norm": 2.4983512918944673, + "learning_rate": 4.1167750462961875e-06, + "loss": 1.2891, + "step": 2082 + }, + { + "epoch": 1.3951774949765572, + "grad_norm": 2.0141382859209265, + "learning_rate": 4.1156600484250676e-06, + "loss": 1.3794, + "step": 2083 + }, + { + "epoch": 1.3958472873409242, + "grad_norm": 1.7129661410209565, + "learning_rate": 4.114544498407356e-06, + "loss": 1.269, + "step": 2084 + }, + { + "epoch": 1.3965170797052915, + "grad_norm": 1.7554764095543083, + "learning_rate": 4.113428396624291e-06, + "loss": 1.403, + "step": 2085 + }, + { + "epoch": 1.3971868720696583, + "grad_norm": 1.6429832049593385, + "learning_rate": 4.112311743457293e-06, + "loss": 1.257, + "step": 2086 + }, + { + "epoch": 1.3978566644340256, + "grad_norm": 3.426114993794384, + "learning_rate": 4.111194539287978e-06, + "loss": 1.3284, + "step": 2087 + }, + { + "epoch": 1.3985264567983924, + "grad_norm": 1.7834980240809482, + "learning_rate": 4.110076784498146e-06, + "loss": 1.3605, + "step": 2088 + }, + { + "epoch": 1.3991962491627596, + "grad_norm": 1.7544137511330613, + "learning_rate": 4.1089584794697854e-06, + "loss": 1.217, + "step": 2089 + }, + { + "epoch": 1.3998660415271265, + "grad_norm": 2.714960247847016, + "learning_rate": 4.107839624585073e-06, + "loss": 1.3204, + "step": 2090 + }, + { + "epoch": 1.4005358338914937, + "grad_norm": 1.6704926054758382, + "learning_rate": 4.1067202202263756e-06, + "loss": 1.2056, 
+ "step": 2091 + }, + { + "epoch": 1.4012056262558608, + "grad_norm": 2.2264183171529917, + "learning_rate": 4.105600266776245e-06, + "loss": 1.3921, + "step": 2092 + }, + { + "epoch": 1.4018754186202278, + "grad_norm": 1.765664171278173, + "learning_rate": 4.10447976461742e-06, + "loss": 1.3588, + "step": 2093 + }, + { + "epoch": 1.4025452109845948, + "grad_norm": 1.6792077300713757, + "learning_rate": 4.103358714132832e-06, + "loss": 1.3831, + "step": 2094 + }, + { + "epoch": 1.4032150033489619, + "grad_norm": 2.626022546217674, + "learning_rate": 4.102237115705594e-06, + "loss": 1.2892, + "step": 2095 + }, + { + "epoch": 1.403884795713329, + "grad_norm": 1.5560007735752794, + "learning_rate": 4.101114969719009e-06, + "loss": 1.1911, + "step": 2096 + }, + { + "epoch": 1.404554588077696, + "grad_norm": 2.9522802467901545, + "learning_rate": 4.099992276556567e-06, + "loss": 1.1092, + "step": 2097 + }, + { + "epoch": 1.405224380442063, + "grad_norm": 3.2561270717004462, + "learning_rate": 4.0988690366019446e-06, + "loss": 1.3423, + "step": 2098 + }, + { + "epoch": 1.40589417280643, + "grad_norm": 2.5999447914048277, + "learning_rate": 4.097745250239006e-06, + "loss": 1.3268, + "step": 2099 + }, + { + "epoch": 1.406563965170797, + "grad_norm": 1.7290764737661501, + "learning_rate": 4.0966209178518e-06, + "loss": 1.3733, + "step": 2100 + }, + { + "epoch": 1.407233757535164, + "grad_norm": 5.533561482879472, + "learning_rate": 4.095496039824566e-06, + "loss": 1.2877, + "step": 2101 + }, + { + "epoch": 1.4079035498995311, + "grad_norm": 3.2472010505185414, + "learning_rate": 4.094370616541725e-06, + "loss": 1.2567, + "step": 2102 + }, + { + "epoch": 1.4085733422638982, + "grad_norm": 1.72830658009572, + "learning_rate": 4.093244648387889e-06, + "loss": 1.4026, + "step": 2103 + }, + { + "epoch": 1.4092431346282652, + "grad_norm": 2.0756550668683467, + "learning_rate": 4.092118135747852e-06, + "loss": 1.3066, + "step": 2104 + }, + { + "epoch": 1.4099129269926323, + "grad_norm": 1.6613262038724965, + "learning_rate": 4.0909910790065965e-06, + "loss": 1.4124, + "step": 2105 + }, + { + "epoch": 1.4105827193569993, + "grad_norm": 2.4974651991549597, + "learning_rate": 4.089863478549291e-06, + "loss": 1.3673, + "step": 2106 + }, + { + "epoch": 1.4112525117213663, + "grad_norm": 2.194630116947152, + "learning_rate": 4.088735334761289e-06, + "loss": 1.1635, + "step": 2107 + }, + { + "epoch": 1.4119223040857334, + "grad_norm": 1.7715233768133454, + "learning_rate": 4.087606648028131e-06, + "loss": 1.4594, + "step": 2108 + }, + { + "epoch": 1.4125920964501004, + "grad_norm": 2.69478928843965, + "learning_rate": 4.08647741873554e-06, + "loss": 1.4618, + "step": 2109 + }, + { + "epoch": 1.4132618888144675, + "grad_norm": 2.6626441994719685, + "learning_rate": 4.085347647269429e-06, + "loss": 1.3427, + "step": 2110 + }, + { + "epoch": 1.4139316811788345, + "grad_norm": 1.7070210925046205, + "learning_rate": 4.0842173340158905e-06, + "loss": 1.2584, + "step": 2111 + }, + { + "epoch": 1.4146014735432015, + "grad_norm": 2.2180377042153956, + "learning_rate": 4.083086479361209e-06, + "loss": 1.2656, + "step": 2112 + }, + { + "epoch": 1.4152712659075686, + "grad_norm": 1.5626526481909393, + "learning_rate": 4.081955083691848e-06, + "loss": 1.3549, + "step": 2113 + }, + { + "epoch": 1.4159410582719358, + "grad_norm": 1.5429317227355144, + "learning_rate": 4.080823147394459e-06, + "loss": 1.3784, + "step": 2114 + }, + { + "epoch": 1.4166108506363027, + "grad_norm": 1.651075119491224, + "learning_rate": 
4.079690670855877e-06, + "loss": 1.1914, + "step": 2115 + }, + { + "epoch": 1.41728064300067, + "grad_norm": 1.9732121960447389, + "learning_rate": 4.078557654463123e-06, + "loss": 1.5183, + "step": 2116 + }, + { + "epoch": 1.4179504353650367, + "grad_norm": 1.611205514363595, + "learning_rate": 4.077424098603402e-06, + "loss": 1.4084, + "step": 2117 + }, + { + "epoch": 1.418620227729404, + "grad_norm": 1.6249555649301144, + "learning_rate": 4.0762900036641015e-06, + "loss": 1.2819, + "step": 2118 + }, + { + "epoch": 1.4192900200937708, + "grad_norm": 3.237046017914554, + "learning_rate": 4.075155370032796e-06, + "loss": 1.2579, + "step": 2119 + }, + { + "epoch": 1.419959812458138, + "grad_norm": 2.01828134418352, + "learning_rate": 4.074020198097244e-06, + "loss": 1.1466, + "step": 2120 + }, + { + "epoch": 1.4206296048225049, + "grad_norm": 1.7315029178619419, + "learning_rate": 4.072884488245384e-06, + "loss": 1.1515, + "step": 2121 + }, + { + "epoch": 1.4212993971868721, + "grad_norm": 1.6560015525775278, + "learning_rate": 4.071748240865343e-06, + "loss": 1.4577, + "step": 2122 + }, + { + "epoch": 1.4219691895512392, + "grad_norm": 2.0651688971608735, + "learning_rate": 4.070611456345429e-06, + "loss": 1.1794, + "step": 2123 + }, + { + "epoch": 1.4226389819156062, + "grad_norm": 1.8478501793208872, + "learning_rate": 4.069474135074135e-06, + "loss": 1.2852, + "step": 2124 + }, + { + "epoch": 1.4233087742799733, + "grad_norm": 2.850170894271534, + "learning_rate": 4.068336277440137e-06, + "loss": 1.1889, + "step": 2125 + }, + { + "epoch": 1.4239785666443403, + "grad_norm": 2.0042598302751355, + "learning_rate": 4.067197883832294e-06, + "loss": 1.5267, + "step": 2126 + }, + { + "epoch": 1.4246483590087073, + "grad_norm": 2.2665331228793044, + "learning_rate": 4.066058954639648e-06, + "loss": 1.333, + "step": 2127 + }, + { + "epoch": 1.4253181513730744, + "grad_norm": 1.8182457010660746, + "learning_rate": 4.064919490251425e-06, + "loss": 1.3725, + "step": 2128 + }, + { + "epoch": 1.4259879437374414, + "grad_norm": 1.836127633902497, + "learning_rate": 4.063779491057033e-06, + "loss": 1.2923, + "step": 2129 + }, + { + "epoch": 1.4266577361018085, + "grad_norm": 2.317977126096382, + "learning_rate": 4.062638957446062e-06, + "loss": 1.2757, + "step": 2130 + }, + { + "epoch": 1.4273275284661755, + "grad_norm": 5.356246409351548, + "learning_rate": 4.0614978898082865e-06, + "loss": 1.0912, + "step": 2131 + }, + { + "epoch": 1.4279973208305425, + "grad_norm": 1.8148839227228173, + "learning_rate": 4.060356288533664e-06, + "loss": 1.4314, + "step": 2132 + }, + { + "epoch": 1.4286671131949096, + "grad_norm": 1.684990545351784, + "learning_rate": 4.059214154012331e-06, + "loss": 1.4385, + "step": 2133 + }, + { + "epoch": 1.4293369055592766, + "grad_norm": 1.8403671062858222, + "learning_rate": 4.058071486634609e-06, + "loss": 1.2922, + "step": 2134 + }, + { + "epoch": 1.4300066979236437, + "grad_norm": 3.5135760245474534, + "learning_rate": 4.056928286791001e-06, + "loss": 1.2618, + "step": 2135 + }, + { + "epoch": 1.4306764902880107, + "grad_norm": 1.769886246253838, + "learning_rate": 4.055784554872192e-06, + "loss": 1.2373, + "step": 2136 + }, + { + "epoch": 1.4313462826523777, + "grad_norm": 1.6273398209594603, + "learning_rate": 4.054640291269049e-06, + "loss": 1.3991, + "step": 2137 + }, + { + "epoch": 1.4320160750167448, + "grad_norm": 2.5094492111754785, + "learning_rate": 4.053495496372619e-06, + "loss": 1.1215, + "step": 2138 + }, + { + "epoch": 1.4326858673811118, + "grad_norm": 
1.8211237729469871, + "learning_rate": 4.052350170574132e-06, + "loss": 1.3104, + "step": 2139 + }, + { + "epoch": 1.4333556597454788, + "grad_norm": 1.676978452969212, + "learning_rate": 4.0512043142650015e-06, + "loss": 1.2638, + "step": 2140 + }, + { + "epoch": 1.4340254521098459, + "grad_norm": 1.7456841013515811, + "learning_rate": 4.050057927836819e-06, + "loss": 1.1415, + "step": 2141 + }, + { + "epoch": 1.434695244474213, + "grad_norm": 2.762017480371968, + "learning_rate": 4.048911011681357e-06, + "loss": 1.1993, + "step": 2142 + }, + { + "epoch": 1.43536503683858, + "grad_norm": 2.6673878204040444, + "learning_rate": 4.047763566190573e-06, + "loss": 1.4304, + "step": 2143 + }, + { + "epoch": 1.436034829202947, + "grad_norm": 2.781994273312807, + "learning_rate": 4.0466155917566e-06, + "loss": 1.2269, + "step": 2144 + }, + { + "epoch": 1.4367046215673143, + "grad_norm": 1.8993066856961627, + "learning_rate": 4.045467088771758e-06, + "loss": 1.4041, + "step": 2145 + }, + { + "epoch": 1.437374413931681, + "grad_norm": 1.931463246131812, + "learning_rate": 4.044318057628539e-06, + "loss": 1.0703, + "step": 2146 + }, + { + "epoch": 1.4380442062960483, + "grad_norm": 2.94936792447949, + "learning_rate": 4.043168498719626e-06, + "loss": 1.2755, + "step": 2147 + }, + { + "epoch": 1.4387139986604152, + "grad_norm": 1.6472314485180322, + "learning_rate": 4.042018412437874e-06, + "loss": 1.3207, + "step": 2148 + }, + { + "epoch": 1.4393837910247824, + "grad_norm": 1.8078073578304699, + "learning_rate": 4.040867799176323e-06, + "loss": 1.3777, + "step": 2149 + }, + { + "epoch": 1.4400535833891492, + "grad_norm": 1.8321398455908742, + "learning_rate": 4.03971665932819e-06, + "loss": 1.3251, + "step": 2150 + }, + { + "epoch": 1.4407233757535165, + "grad_norm": 1.6428089276722562, + "learning_rate": 4.038564993286873e-06, + "loss": 1.2959, + "step": 2151 + }, + { + "epoch": 1.4413931681178835, + "grad_norm": 2.044466784680703, + "learning_rate": 4.037412801445952e-06, + "loss": 1.2917, + "step": 2152 + }, + { + "epoch": 1.4420629604822506, + "grad_norm": 2.0542121704512386, + "learning_rate": 4.036260084199183e-06, + "loss": 1.4105, + "step": 2153 + }, + { + "epoch": 1.4427327528466176, + "grad_norm": 1.721641112818315, + "learning_rate": 4.035106841940503e-06, + "loss": 1.4785, + "step": 2154 + }, + { + "epoch": 1.4434025452109847, + "grad_norm": 4.177992644732162, + "learning_rate": 4.033953075064032e-06, + "loss": 1.213, + "step": 2155 + }, + { + "epoch": 1.4440723375753517, + "grad_norm": 2.0787183377689615, + "learning_rate": 4.032798783964063e-06, + "loss": 1.1912, + "step": 2156 + }, + { + "epoch": 1.4447421299397187, + "grad_norm": 1.8153389608711694, + "learning_rate": 4.0316439690350725e-06, + "loss": 1.4782, + "step": 2157 + }, + { + "epoch": 1.4454119223040858, + "grad_norm": 1.651546991159091, + "learning_rate": 4.030488630671714e-06, + "loss": 1.2444, + "step": 2158 + }, + { + "epoch": 1.4460817146684528, + "grad_norm": 1.635420983551353, + "learning_rate": 4.029332769268822e-06, + "loss": 1.3658, + "step": 2159 + }, + { + "epoch": 1.4467515070328199, + "grad_norm": 2.3196671942347042, + "learning_rate": 4.028176385221408e-06, + "loss": 1.2625, + "step": 2160 + }, + { + "epoch": 1.447421299397187, + "grad_norm": 1.8516406140039872, + "learning_rate": 4.02701947892466e-06, + "loss": 1.5045, + "step": 2161 + }, + { + "epoch": 1.448091091761554, + "grad_norm": 3.839161927592724, + "learning_rate": 4.02586205077395e-06, + "loss": 1.1734, + "step": 2162 + }, + { + "epoch": 
1.448760884125921, + "grad_norm": 2.0370674499847645, + "learning_rate": 4.024704101164824e-06, + "loss": 1.2966, + "step": 2163 + }, + { + "epoch": 1.449430676490288, + "grad_norm": 1.5981032604022867, + "learning_rate": 4.023545630493009e-06, + "loss": 1.3432, + "step": 2164 + }, + { + "epoch": 1.450100468854655, + "grad_norm": 1.836987675370408, + "learning_rate": 4.0223866391544075e-06, + "loss": 1.3773, + "step": 2165 + }, + { + "epoch": 1.450770261219022, + "grad_norm": 1.8095527318018443, + "learning_rate": 4.0212271275451e-06, + "loss": 1.2527, + "step": 2166 + }, + { + "epoch": 1.4514400535833891, + "grad_norm": 1.7486265367048601, + "learning_rate": 4.020067096061347e-06, + "loss": 1.3036, + "step": 2167 + }, + { + "epoch": 1.4521098459477562, + "grad_norm": 1.6895569278544775, + "learning_rate": 4.018906545099587e-06, + "loss": 1.2065, + "step": 2168 + }, + { + "epoch": 1.4527796383121232, + "grad_norm": 1.767790415180234, + "learning_rate": 4.017745475056431e-06, + "loss": 1.4101, + "step": 2169 + }, + { + "epoch": 1.4534494306764902, + "grad_norm": 1.6243130936040342, + "learning_rate": 4.016583886328673e-06, + "loss": 1.2721, + "step": 2170 + }, + { + "epoch": 1.4541192230408573, + "grad_norm": 1.7742602212307348, + "learning_rate": 4.015421779313283e-06, + "loss": 1.3011, + "step": 2171 + }, + { + "epoch": 1.4547890154052243, + "grad_norm": 1.7233685876542852, + "learning_rate": 4.014259154407405e-06, + "loss": 1.0952, + "step": 2172 + }, + { + "epoch": 1.4554588077695914, + "grad_norm": 1.6883528858380468, + "learning_rate": 4.013096012008365e-06, + "loss": 1.3987, + "step": 2173 + }, + { + "epoch": 1.4561286001339584, + "grad_norm": 2.0716441756238706, + "learning_rate": 4.011932352513661e-06, + "loss": 1.3277, + "step": 2174 + }, + { + "epoch": 1.4567983924983254, + "grad_norm": 1.9411004628112776, + "learning_rate": 4.010768176320971e-06, + "loss": 1.2721, + "step": 2175 + }, + { + "epoch": 1.4574681848626927, + "grad_norm": 1.754628006702524, + "learning_rate": 4.009603483828147e-06, + "loss": 1.2617, + "step": 2176 + }, + { + "epoch": 1.4581379772270595, + "grad_norm": 1.7377544241331906, + "learning_rate": 4.008438275433221e-06, + "loss": 1.3613, + "step": 2177 + }, + { + "epoch": 1.4588077695914268, + "grad_norm": 1.97217805936069, + "learning_rate": 4.007272551534398e-06, + "loss": 1.2785, + "step": 2178 + }, + { + "epoch": 1.4594775619557936, + "grad_norm": 5.103260755081333, + "learning_rate": 4.006106312530061e-06, + "loss": 1.2583, + "step": 2179 + }, + { + "epoch": 1.4601473543201609, + "grad_norm": 4.917257077638042, + "learning_rate": 4.0049395588187676e-06, + "loss": 1.1594, + "step": 2180 + }, + { + "epoch": 1.4608171466845277, + "grad_norm": 3.0274329560773046, + "learning_rate": 4.003772290799252e-06, + "loss": 1.1876, + "step": 2181 + }, + { + "epoch": 1.461486939048895, + "grad_norm": 4.042682010781464, + "learning_rate": 4.002604508870425e-06, + "loss": 1.3268, + "step": 2182 + }, + { + "epoch": 1.462156731413262, + "grad_norm": 1.709370860559288, + "learning_rate": 4.001436213431372e-06, + "loss": 1.2147, + "step": 2183 + }, + { + "epoch": 1.462826523777629, + "grad_norm": 1.9385659044064383, + "learning_rate": 4.000267404881354e-06, + "loss": 1.3492, + "step": 2184 + }, + { + "epoch": 1.463496316141996, + "grad_norm": 1.6637442302299976, + "learning_rate": 3.999098083619808e-06, + "loss": 1.2662, + "step": 2185 + }, + { + "epoch": 1.464166108506363, + "grad_norm": 1.597136786941643, + "learning_rate": 3.9979282500463455e-06, + "loss": 1.2658, + 
"step": 2186 + }, + { + "epoch": 1.4648359008707301, + "grad_norm": 2.4429289116590676, + "learning_rate": 3.996757904560754e-06, + "loss": 1.2035, + "step": 2187 + }, + { + "epoch": 1.4655056932350972, + "grad_norm": 1.7900805405641673, + "learning_rate": 3.995587047562994e-06, + "loss": 1.4503, + "step": 2188 + }, + { + "epoch": 1.4661754855994642, + "grad_norm": 1.6238150170596297, + "learning_rate": 3.994415679453202e-06, + "loss": 1.3223, + "step": 2189 + }, + { + "epoch": 1.4668452779638312, + "grad_norm": 1.6031572963963265, + "learning_rate": 3.993243800631691e-06, + "loss": 1.3261, + "step": 2190 + }, + { + "epoch": 1.4675150703281983, + "grad_norm": 2.0074328426007844, + "learning_rate": 3.992071411498945e-06, + "loss": 1.35, + "step": 2191 + }, + { + "epoch": 1.4681848626925653, + "grad_norm": 2.6520910523242582, + "learning_rate": 3.990898512455625e-06, + "loss": 1.2815, + "step": 2192 + }, + { + "epoch": 1.4688546550569324, + "grad_norm": 2.5882672078146394, + "learning_rate": 3.9897251039025655e-06, + "loss": 1.198, + "step": 2193 + }, + { + "epoch": 1.4695244474212994, + "grad_norm": 42.517924196524, + "learning_rate": 3.988551186240775e-06, + "loss": 1.489, + "step": 2194 + }, + { + "epoch": 1.4701942397856664, + "grad_norm": 3.292128677252889, + "learning_rate": 3.987376759871436e-06, + "loss": 1.3136, + "step": 2195 + }, + { + "epoch": 1.4708640321500335, + "grad_norm": 3.8102936843710027, + "learning_rate": 3.986201825195905e-06, + "loss": 1.3915, + "step": 2196 + }, + { + "epoch": 1.4715338245144005, + "grad_norm": 1.717293257229791, + "learning_rate": 3.985026382615712e-06, + "loss": 1.2175, + "step": 2197 + }, + { + "epoch": 1.4722036168787676, + "grad_norm": 1.6623382500923725, + "learning_rate": 3.983850432532561e-06, + "loss": 1.3098, + "step": 2198 + }, + { + "epoch": 1.4728734092431346, + "grad_norm": 1.5202668679501716, + "learning_rate": 3.982673975348328e-06, + "loss": 1.2187, + "step": 2199 + }, + { + "epoch": 1.4735432016075016, + "grad_norm": 2.218756265383963, + "learning_rate": 3.981497011465065e-06, + "loss": 1.2462, + "step": 2200 + }, + { + "epoch": 1.4742129939718687, + "grad_norm": 1.8695273235254448, + "learning_rate": 3.980319541284995e-06, + "loss": 1.1222, + "step": 2201 + }, + { + "epoch": 1.4748827863362357, + "grad_norm": 1.9246592745420978, + "learning_rate": 3.979141565210514e-06, + "loss": 1.3363, + "step": 2202 + }, + { + "epoch": 1.4755525787006027, + "grad_norm": 2.1657817059429405, + "learning_rate": 3.977963083644192e-06, + "loss": 1.334, + "step": 2203 + }, + { + "epoch": 1.4762223710649698, + "grad_norm": 2.4183420323015645, + "learning_rate": 3.9767840969887726e-06, + "loss": 1.2894, + "step": 2204 + }, + { + "epoch": 1.4768921634293368, + "grad_norm": 1.7232082588810649, + "learning_rate": 3.975604605647167e-06, + "loss": 1.3276, + "step": 2205 + }, + { + "epoch": 1.4775619557937039, + "grad_norm": 1.5906931380901315, + "learning_rate": 3.974424610022467e-06, + "loss": 1.2676, + "step": 2206 + }, + { + "epoch": 1.4782317481580711, + "grad_norm": 2.2172570984386737, + "learning_rate": 3.97324411051793e-06, + "loss": 1.29, + "step": 2207 + }, + { + "epoch": 1.478901540522438, + "grad_norm": 1.7857719769861709, + "learning_rate": 3.972063107536987e-06, + "loss": 1.2414, + "step": 2208 + }, + { + "epoch": 1.4795713328868052, + "grad_norm": 1.9887796356989622, + "learning_rate": 3.970881601483244e-06, + "loss": 1.1845, + "step": 2209 + }, + { + "epoch": 1.480241125251172, + "grad_norm": 1.868678048863761, + "learning_rate": 
3.969699592760476e-06, + "loss": 1.3051, + "step": 2210 + }, + { + "epoch": 1.4809109176155393, + "grad_norm": 1.8589070986840803, + "learning_rate": 3.968517081772629e-06, + "loss": 1.3636, + "step": 2211 + }, + { + "epoch": 1.481580709979906, + "grad_norm": 2.2632882165865906, + "learning_rate": 3.967334068923825e-06, + "loss": 1.4655, + "step": 2212 + }, + { + "epoch": 1.4822505023442734, + "grad_norm": 2.262426073557919, + "learning_rate": 3.966150554618355e-06, + "loss": 1.4636, + "step": 2213 + }, + { + "epoch": 1.4829202947086404, + "grad_norm": 1.8670694550790616, + "learning_rate": 3.964966539260677e-06, + "loss": 1.2432, + "step": 2214 + }, + { + "epoch": 1.4835900870730074, + "grad_norm": 2.2119126910033486, + "learning_rate": 3.963782023255429e-06, + "loss": 1.3188, + "step": 2215 + }, + { + "epoch": 1.4842598794373745, + "grad_norm": 2.050427419655218, + "learning_rate": 3.962597007007414e-06, + "loss": 1.3645, + "step": 2216 + }, + { + "epoch": 1.4849296718017415, + "grad_norm": 1.7394894426790164, + "learning_rate": 3.961411490921606e-06, + "loss": 1.2005, + "step": 2217 + }, + { + "epoch": 1.4855994641661086, + "grad_norm": 1.9900572604622093, + "learning_rate": 3.960225475403153e-06, + "loss": 1.3608, + "step": 2218 + }, + { + "epoch": 1.4862692565304756, + "grad_norm": 1.698544489087216, + "learning_rate": 3.959038960857372e-06, + "loss": 1.235, + "step": 2219 + }, + { + "epoch": 1.4869390488948426, + "grad_norm": 2.6787745009565413, + "learning_rate": 3.95785194768975e-06, + "loss": 1.2772, + "step": 2220 + }, + { + "epoch": 1.4876088412592097, + "grad_norm": 2.605516078659256, + "learning_rate": 3.956664436305945e-06, + "loss": 1.263, + "step": 2221 + }, + { + "epoch": 1.4882786336235767, + "grad_norm": 1.8568988352787272, + "learning_rate": 3.9554764271117844e-06, + "loss": 1.2597, + "step": 2222 + }, + { + "epoch": 1.4889484259879437, + "grad_norm": 1.8687684646284533, + "learning_rate": 3.954287920513267e-06, + "loss": 1.386, + "step": 2223 + }, + { + "epoch": 1.4896182183523108, + "grad_norm": 2.08047087560056, + "learning_rate": 3.953098916916563e-06, + "loss": 1.3059, + "step": 2224 + }, + { + "epoch": 1.4902880107166778, + "grad_norm": 1.6272148802722834, + "learning_rate": 3.951909416728007e-06, + "loss": 1.3385, + "step": 2225 + }, + { + "epoch": 1.4909578030810449, + "grad_norm": 1.7896224253362, + "learning_rate": 3.950719420354109e-06, + "loss": 1.4265, + "step": 2226 + }, + { + "epoch": 1.491627595445412, + "grad_norm": 1.7521003685011618, + "learning_rate": 3.9495289282015455e-06, + "loss": 1.2256, + "step": 2227 + }, + { + "epoch": 1.492297387809779, + "grad_norm": 1.6914505505290076, + "learning_rate": 3.948337940677165e-06, + "loss": 1.3983, + "step": 2228 + }, + { + "epoch": 1.492967180174146, + "grad_norm": 1.5892339834866527, + "learning_rate": 3.947146458187982e-06, + "loss": 1.4029, + "step": 2229 + }, + { + "epoch": 1.493636972538513, + "grad_norm": 2.044012122212964, + "learning_rate": 3.945954481141181e-06, + "loss": 1.1994, + "step": 2230 + }, + { + "epoch": 1.49430676490288, + "grad_norm": 2.1043702304170226, + "learning_rate": 3.9447620099441195e-06, + "loss": 1.3175, + "step": 2231 + }, + { + "epoch": 1.494976557267247, + "grad_norm": 1.9383711246817603, + "learning_rate": 3.943569045004317e-06, + "loss": 1.0745, + "step": 2232 + }, + { + "epoch": 1.4956463496316141, + "grad_norm": 1.774721915605331, + "learning_rate": 3.942375586729469e-06, + "loss": 1.4537, + "step": 2233 + }, + { + "epoch": 1.4963161419959812, + "grad_norm": 
2.0649463031298905, + "learning_rate": 3.941181635527433e-06, + "loss": 1.2166, + "step": 2234 + }, + { + "epoch": 1.4969859343603482, + "grad_norm": 1.7319917116336743, + "learning_rate": 3.939987191806239e-06, + "loss": 1.3047, + "step": 2235 + }, + { + "epoch": 1.4976557267247155, + "grad_norm": 2.4602772390316225, + "learning_rate": 3.938792255974085e-06, + "loss": 1.1222, + "step": 2236 + }, + { + "epoch": 1.4983255190890823, + "grad_norm": 2.236298026858651, + "learning_rate": 3.937596828439336e-06, + "loss": 1.2913, + "step": 2237 + }, + { + "epoch": 1.4989953114534496, + "grad_norm": 1.6459167891564876, + "learning_rate": 3.936400909610525e-06, + "loss": 1.1493, + "step": 2238 + }, + { + "epoch": 1.4996651038178164, + "grad_norm": 2.9225107634787317, + "learning_rate": 3.9352044998963535e-06, + "loss": 1.2844, + "step": 2239 + }, + { + "epoch": 1.5003348961821836, + "grad_norm": 2.337626586965114, + "learning_rate": 3.93400759970569e-06, + "loss": 1.2366, + "step": 2240 + }, + { + "epoch": 1.5010046885465504, + "grad_norm": 2.398226223055486, + "learning_rate": 3.932810209447574e-06, + "loss": 1.161, + "step": 2241 + }, + { + "epoch": 1.5016744809109177, + "grad_norm": 2.5142234283515554, + "learning_rate": 3.931612329531207e-06, + "loss": 1.3371, + "step": 2242 + }, + { + "epoch": 1.5023442732752845, + "grad_norm": 1.955270858419112, + "learning_rate": 3.930413960365961e-06, + "loss": 1.3817, + "step": 2243 + }, + { + "epoch": 1.5030140656396518, + "grad_norm": 1.7268573209716274, + "learning_rate": 3.929215102361376e-06, + "loss": 1.3423, + "step": 2244 + }, + { + "epoch": 1.5036838580040186, + "grad_norm": 1.8716027846880123, + "learning_rate": 3.928015755927156e-06, + "loss": 1.3537, + "step": 2245 + }, + { + "epoch": 1.5043536503683859, + "grad_norm": 1.892562457848226, + "learning_rate": 3.926815921473176e-06, + "loss": 1.1431, + "step": 2246 + }, + { + "epoch": 1.5050234427327527, + "grad_norm": 1.5698101657801626, + "learning_rate": 3.925615599409473e-06, + "loss": 1.2906, + "step": 2247 + }, + { + "epoch": 1.50569323509712, + "grad_norm": 4.296187175040848, + "learning_rate": 3.924414790146256e-06, + "loss": 1.2743, + "step": 2248 + }, + { + "epoch": 1.506363027461487, + "grad_norm": 2.3605832239197087, + "learning_rate": 3.923213494093896e-06, + "loss": 1.2718, + "step": 2249 + }, + { + "epoch": 1.507032819825854, + "grad_norm": 1.794009616096358, + "learning_rate": 3.922011711662932e-06, + "loss": 1.2842, + "step": 2250 + }, + { + "epoch": 1.507702612190221, + "grad_norm": 2.8830014703880074, + "learning_rate": 3.920809443264069e-06, + "loss": 1.231, + "step": 2251 + }, + { + "epoch": 1.508372404554588, + "grad_norm": 1.7321818132749338, + "learning_rate": 3.91960668930818e-06, + "loss": 1.4377, + "step": 2252 + }, + { + "epoch": 1.5090421969189551, + "grad_norm": 1.9611774578176888, + "learning_rate": 3.918403450206299e-06, + "loss": 1.2887, + "step": 2253 + }, + { + "epoch": 1.5097119892833222, + "grad_norm": 1.7655233994521153, + "learning_rate": 3.917199726369634e-06, + "loss": 1.5276, + "step": 2254 + }, + { + "epoch": 1.5103817816476892, + "grad_norm": 1.5914785262710909, + "learning_rate": 3.915995518209549e-06, + "loss": 1.3384, + "step": 2255 + }, + { + "epoch": 1.5110515740120563, + "grad_norm": 2.2669768937347605, + "learning_rate": 3.914790826137581e-06, + "loss": 1.2942, + "step": 2256 + }, + { + "epoch": 1.5117213663764233, + "grad_norm": 3.2129999864669325, + "learning_rate": 3.913585650565428e-06, + "loss": 1.2397, + "step": 2257 + }, + { + "epoch": 
1.5123911587407903, + "grad_norm": 2.023264168354295, + "learning_rate": 3.912379991904956e-06, + "loss": 1.2005, + "step": 2258 + }, + { + "epoch": 1.5130609511051574, + "grad_norm": 1.7093951027492484, + "learning_rate": 3.9111738505681945e-06, + "loss": 1.3881, + "step": 2259 + }, + { + "epoch": 1.5137307434695244, + "grad_norm": 3.313375301834002, + "learning_rate": 3.909967226967339e-06, + "loss": 1.1879, + "step": 2260 + }, + { + "epoch": 1.5144005358338914, + "grad_norm": 2.5755775121567543, + "learning_rate": 3.908760121514748e-06, + "loss": 1.2749, + "step": 2261 + }, + { + "epoch": 1.5150703281982585, + "grad_norm": 1.8501061895573208, + "learning_rate": 3.907552534622947e-06, + "loss": 1.3763, + "step": 2262 + }, + { + "epoch": 1.5157401205626257, + "grad_norm": 1.8141340018505618, + "learning_rate": 3.906344466704624e-06, + "loss": 1.2412, + "step": 2263 + }, + { + "epoch": 1.5164099129269926, + "grad_norm": 2.4104054606977297, + "learning_rate": 3.905135918172633e-06, + "loss": 1.4044, + "step": 2264 + }, + { + "epoch": 1.5170797052913598, + "grad_norm": 1.741776590714456, + "learning_rate": 3.903926889439991e-06, + "loss": 1.3435, + "step": 2265 + }, + { + "epoch": 1.5177494976557266, + "grad_norm": 1.6944931522973075, + "learning_rate": 3.90271738091988e-06, + "loss": 1.4187, + "step": 2266 + }, + { + "epoch": 1.518419290020094, + "grad_norm": 2.2437642567798934, + "learning_rate": 3.901507393025646e-06, + "loss": 1.3736, + "step": 2267 + }, + { + "epoch": 1.5190890823844607, + "grad_norm": 1.7543257613277663, + "learning_rate": 3.900296926170798e-06, + "loss": 1.2773, + "step": 2268 + }, + { + "epoch": 1.519758874748828, + "grad_norm": 1.7329502455909442, + "learning_rate": 3.899085980769009e-06, + "loss": 1.2813, + "step": 2269 + }, + { + "epoch": 1.5204286671131948, + "grad_norm": 3.9307318778403664, + "learning_rate": 3.897874557234116e-06, + "loss": 1.2513, + "step": 2270 + }, + { + "epoch": 1.521098459477562, + "grad_norm": 1.8790164227314028, + "learning_rate": 3.896662655980119e-06, + "loss": 1.2104, + "step": 2271 + }, + { + "epoch": 1.5217682518419289, + "grad_norm": 3.2102090123335825, + "learning_rate": 3.895450277421181e-06, + "loss": 1.3195, + "step": 2272 + }, + { + "epoch": 1.5224380442062961, + "grad_norm": 1.5548179368396997, + "learning_rate": 3.894237421971628e-06, + "loss": 1.2057, + "step": 2273 + }, + { + "epoch": 1.523107836570663, + "grad_norm": 1.608067862852761, + "learning_rate": 3.893024090045951e-06, + "loss": 1.1312, + "step": 2274 + }, + { + "epoch": 1.5237776289350302, + "grad_norm": 5.4836502533533045, + "learning_rate": 3.8918102820588014e-06, + "loss": 1.2653, + "step": 2275 + }, + { + "epoch": 1.524447421299397, + "grad_norm": 2.5020915567529345, + "learning_rate": 3.890595998424994e-06, + "loss": 1.2177, + "step": 2276 + }, + { + "epoch": 1.5251172136637643, + "grad_norm": 1.5207584533276395, + "learning_rate": 3.889381239559506e-06, + "loss": 1.4844, + "step": 2277 + }, + { + "epoch": 1.5257870060281313, + "grad_norm": 1.632315953851437, + "learning_rate": 3.8881660058774776e-06, + "loss": 1.2228, + "step": 2278 + }, + { + "epoch": 1.5264567983924984, + "grad_norm": 1.6056268032491043, + "learning_rate": 3.886950297794212e-06, + "loss": 1.3659, + "step": 2279 + }, + { + "epoch": 1.5271265907568654, + "grad_norm": 2.02019059083204, + "learning_rate": 3.885734115725171e-06, + "loss": 1.2745, + "step": 2280 + }, + { + "epoch": 1.5277963831212324, + "grad_norm": 2.0537720949373517, + "learning_rate": 3.884517460085983e-06, + "loss": 1.2451, 
+ "step": 2281 + }, + { + "epoch": 1.5284661754855995, + "grad_norm": 1.7859908668847044, + "learning_rate": 3.883300331292437e-06, + "loss": 1.4375, + "step": 2282 + }, + { + "epoch": 1.5291359678499665, + "grad_norm": 2.2993984912894527, + "learning_rate": 3.8820827297604806e-06, + "loss": 1.1983, + "step": 2283 + }, + { + "epoch": 1.5298057602143336, + "grad_norm": 2.7221223300863295, + "learning_rate": 3.880864655906226e-06, + "loss": 1.1556, + "step": 2284 + }, + { + "epoch": 1.5304755525787006, + "grad_norm": 1.77654975728684, + "learning_rate": 3.879646110145946e-06, + "loss": 1.2827, + "step": 2285 + }, + { + "epoch": 1.5311453449430676, + "grad_norm": 2.3192815479959625, + "learning_rate": 3.878427092896074e-06, + "loss": 1.4754, + "step": 2286 + }, + { + "epoch": 1.5318151373074347, + "grad_norm": 2.0630218924735613, + "learning_rate": 3.877207604573207e-06, + "loss": 1.311, + "step": 2287 + }, + { + "epoch": 1.5324849296718017, + "grad_norm": 4.068383927912117, + "learning_rate": 3.875987645594099e-06, + "loss": 1.1567, + "step": 2288 + }, + { + "epoch": 1.5331547220361688, + "grad_norm": 1.6267079885842968, + "learning_rate": 3.874767216375669e-06, + "loss": 1.3587, + "step": 2289 + }, + { + "epoch": 1.5338245144005358, + "grad_norm": 1.6528283048326688, + "learning_rate": 3.873546317334993e-06, + "loss": 1.1086, + "step": 2290 + }, + { + "epoch": 1.5344943067649028, + "grad_norm": 1.6375734160998132, + "learning_rate": 3.872324948889311e-06, + "loss": 1.2982, + "step": 2291 + }, + { + "epoch": 1.5351640991292699, + "grad_norm": 1.8875932106234534, + "learning_rate": 3.8711031114560206e-06, + "loss": 1.2714, + "step": 2292 + }, + { + "epoch": 1.535833891493637, + "grad_norm": 1.7056957217336524, + "learning_rate": 3.86988080545268e-06, + "loss": 1.3788, + "step": 2293 + }, + { + "epoch": 1.5365036838580042, + "grad_norm": 5.895377124473947, + "learning_rate": 3.86865803129701e-06, + "loss": 1.2943, + "step": 2294 + }, + { + "epoch": 1.537173476222371, + "grad_norm": 2.38948531150826, + "learning_rate": 3.8674347894068895e-06, + "loss": 1.3886, + "step": 2295 + }, + { + "epoch": 1.5378432685867383, + "grad_norm": 8.605810537536636, + "learning_rate": 3.866211080200356e-06, + "loss": 1.5151, + "step": 2296 + }, + { + "epoch": 1.538513060951105, + "grad_norm": 1.5483648786525572, + "learning_rate": 3.864986904095612e-06, + "loss": 1.2745, + "step": 2297 + }, + { + "epoch": 1.5391828533154723, + "grad_norm": 2.183201255831603, + "learning_rate": 3.8637622615110094e-06, + "loss": 1.4829, + "step": 2298 + }, + { + "epoch": 1.5398526456798391, + "grad_norm": 1.6346792910079895, + "learning_rate": 3.8625371528650705e-06, + "loss": 1.3623, + "step": 2299 + }, + { + "epoch": 1.5405224380442064, + "grad_norm": 1.6738239823547794, + "learning_rate": 3.861311578576471e-06, + "loss": 1.3291, + "step": 2300 + }, + { + "epoch": 1.5411922304085732, + "grad_norm": 2.999265719298927, + "learning_rate": 3.860085539064047e-06, + "loss": 1.4093, + "step": 2301 + }, + { + "epoch": 1.5418620227729405, + "grad_norm": 1.7119651177441944, + "learning_rate": 3.858859034746793e-06, + "loss": 1.312, + "step": 2302 + }, + { + "epoch": 1.5425318151373073, + "grad_norm": 4.674233714594149, + "learning_rate": 3.857632066043865e-06, + "loss": 1.3301, + "step": 2303 + }, + { + "epoch": 1.5432016075016746, + "grad_norm": 1.681489090703136, + "learning_rate": 3.8564046333745734e-06, + "loss": 1.2572, + "step": 2304 + }, + { + "epoch": 1.5438713998660414, + "grad_norm": 1.936663186073155, + "learning_rate": 
3.855176737158389e-06, + "loss": 1.2774, + "step": 2305 + }, + { + "epoch": 1.5445411922304086, + "grad_norm": 1.559878098348486, + "learning_rate": 3.853948377814943e-06, + "loss": 1.2297, + "step": 2306 + }, + { + "epoch": 1.5452109845947755, + "grad_norm": 1.5251392586546633, + "learning_rate": 3.8527195557640215e-06, + "loss": 1.1472, + "step": 2307 + }, + { + "epoch": 1.5458807769591427, + "grad_norm": 2.267276749691073, + "learning_rate": 3.8514902714255725e-06, + "loss": 1.3719, + "step": 2308 + }, + { + "epoch": 1.5465505693235098, + "grad_norm": 2.1505999798499444, + "learning_rate": 3.850260525219699e-06, + "loss": 1.261, + "step": 2309 + }, + { + "epoch": 1.5472203616878768, + "grad_norm": 1.751940688653868, + "learning_rate": 3.849030317566662e-06, + "loss": 1.2682, + "step": 2310 + }, + { + "epoch": 1.5478901540522438, + "grad_norm": 1.835266843269253, + "learning_rate": 3.847799648886882e-06, + "loss": 1.3701, + "step": 2311 + }, + { + "epoch": 1.5485599464166109, + "grad_norm": 1.5872019222782519, + "learning_rate": 3.8465685196009356e-06, + "loss": 1.2434, + "step": 2312 + }, + { + "epoch": 1.549229738780978, + "grad_norm": 3.9915620925834006, + "learning_rate": 3.845336930129557e-06, + "loss": 1.3768, + "step": 2313 + }, + { + "epoch": 1.549899531145345, + "grad_norm": 2.425347502240774, + "learning_rate": 3.8441048808936385e-06, + "loss": 1.3469, + "step": 2314 + }, + { + "epoch": 1.550569323509712, + "grad_norm": 1.8439705516787483, + "learning_rate": 3.842872372314229e-06, + "loss": 1.3142, + "step": 2315 + }, + { + "epoch": 1.551239115874079, + "grad_norm": 2.2318733479055144, + "learning_rate": 3.841639404812534e-06, + "loss": 1.3337, + "step": 2316 + }, + { + "epoch": 1.551908908238446, + "grad_norm": 2.6967934360547217, + "learning_rate": 3.840405978809916e-06, + "loss": 1.446, + "step": 2317 + }, + { + "epoch": 1.552578700602813, + "grad_norm": 2.7820610077634247, + "learning_rate": 3.839172094727895e-06, + "loss": 1.1081, + "step": 2318 + }, + { + "epoch": 1.5532484929671801, + "grad_norm": 2.901720358030242, + "learning_rate": 3.837937752988146e-06, + "loss": 1.3207, + "step": 2319 + }, + { + "epoch": 1.5539182853315472, + "grad_norm": 2.1262868174707252, + "learning_rate": 3.836702954012501e-06, + "loss": 1.3168, + "step": 2320 + }, + { + "epoch": 1.5545880776959142, + "grad_norm": 1.890172666963, + "learning_rate": 3.8354676982229495e-06, + "loss": 1.4757, + "step": 2321 + }, + { + "epoch": 1.5552578700602813, + "grad_norm": 2.6909608757178636, + "learning_rate": 3.834231986041637e-06, + "loss": 1.3061, + "step": 2322 + }, + { + "epoch": 1.5559276624246483, + "grad_norm": 9.577453389553165, + "learning_rate": 3.832995817890862e-06, + "loss": 1.3537, + "step": 2323 + }, + { + "epoch": 1.5565974547890153, + "grad_norm": 2.544416402953666, + "learning_rate": 3.831759194193082e-06, + "loss": 1.1309, + "step": 2324 + }, + { + "epoch": 1.5572672471533826, + "grad_norm": 1.5037953966756668, + "learning_rate": 3.83052211537091e-06, + "loss": 1.3688, + "step": 2325 + }, + { + "epoch": 1.5579370395177494, + "grad_norm": 1.6027722469712684, + "learning_rate": 3.829284581847112e-06, + "loss": 1.2503, + "step": 2326 + }, + { + "epoch": 1.5586068318821167, + "grad_norm": 3.4119094436438657, + "learning_rate": 3.828046594044612e-06, + "loss": 1.1739, + "step": 2327 + }, + { + "epoch": 1.5592766242464835, + "grad_norm": 2.441435602337923, + "learning_rate": 3.8268081523864896e-06, + "loss": 1.2215, + "step": 2328 + }, + { + "epoch": 1.5599464166108508, + "grad_norm": 
2.3762373195211817, + "learning_rate": 3.825569257295975e-06, + "loss": 1.4326, + "step": 2329 + }, + { + "epoch": 1.5606162089752176, + "grad_norm": 1.6711100549118703, + "learning_rate": 3.82432990919646e-06, + "loss": 1.4171, + "step": 2330 + }, + { + "epoch": 1.5612860013395848, + "grad_norm": 2.259378234314998, + "learning_rate": 3.823090108511485e-06, + "loss": 1.406, + "step": 2331 + }, + { + "epoch": 1.5619557937039517, + "grad_norm": 1.5904703616202178, + "learning_rate": 3.82184985566475e-06, + "loss": 1.2745, + "step": 2332 + }, + { + "epoch": 1.562625586068319, + "grad_norm": 1.7142649906208798, + "learning_rate": 3.820609151080106e-06, + "loss": 1.3397, + "step": 2333 + }, + { + "epoch": 1.5632953784326857, + "grad_norm": 1.9096070921431476, + "learning_rate": 3.81936799518156e-06, + "loss": 1.1994, + "step": 2334 + }, + { + "epoch": 1.563965170797053, + "grad_norm": 1.607581464364536, + "learning_rate": 3.8181263883932735e-06, + "loss": 1.1638, + "step": 2335 + }, + { + "epoch": 1.5646349631614198, + "grad_norm": 1.5379780518391715, + "learning_rate": 3.816884331139562e-06, + "loss": 1.326, + "step": 2336 + }, + { + "epoch": 1.565304755525787, + "grad_norm": 1.9387832055370877, + "learning_rate": 3.8156418238448935e-06, + "loss": 1.3633, + "step": 2337 + }, + { + "epoch": 1.565974547890154, + "grad_norm": 2.019303070582434, + "learning_rate": 3.814398866933891e-06, + "loss": 1.2996, + "step": 2338 + }, + { + "epoch": 1.5666443402545212, + "grad_norm": 4.540308769653747, + "learning_rate": 3.8131554608313315e-06, + "loss": 1.4588, + "step": 2339 + }, + { + "epoch": 1.5673141326188882, + "grad_norm": 1.6776636596128396, + "learning_rate": 3.811911605962145e-06, + "loss": 1.2803, + "step": 2340 + }, + { + "epoch": 1.5679839249832552, + "grad_norm": 1.7742579640875809, + "learning_rate": 3.8106673027514148e-06, + "loss": 1.3263, + "step": 2341 + }, + { + "epoch": 1.5686537173476223, + "grad_norm": 2.065558347093205, + "learning_rate": 3.8094225516243773e-06, + "loss": 1.1214, + "step": 2342 + }, + { + "epoch": 1.5693235097119893, + "grad_norm": 1.9504759519011532, + "learning_rate": 3.8081773530064215e-06, + "loss": 1.2852, + "step": 2343 + }, + { + "epoch": 1.5699933020763563, + "grad_norm": 1.5579706517950442, + "learning_rate": 3.8069317073230917e-06, + "loss": 1.1457, + "step": 2344 + }, + { + "epoch": 1.5706630944407234, + "grad_norm": 2.3148125279613265, + "learning_rate": 3.805685615000082e-06, + "loss": 1.3588, + "step": 2345 + }, + { + "epoch": 1.5713328868050904, + "grad_norm": 2.7399820070018355, + "learning_rate": 3.8044390764632404e-06, + "loss": 1.3199, + "step": 2346 + }, + { + "epoch": 1.5720026791694575, + "grad_norm": 1.6049428587098251, + "learning_rate": 3.8031920921385678e-06, + "loss": 1.387, + "step": 2347 + }, + { + "epoch": 1.5726724715338245, + "grad_norm": 1.5364403105545894, + "learning_rate": 3.8019446624522177e-06, + "loss": 1.4168, + "step": 2348 + }, + { + "epoch": 1.5733422638981915, + "grad_norm": 1.6080024552686454, + "learning_rate": 3.800696787830494e-06, + "loss": 1.3352, + "step": 2349 + }, + { + "epoch": 1.5740120562625586, + "grad_norm": 3.003388728458768, + "learning_rate": 3.7994484686998547e-06, + "loss": 1.2626, + "step": 2350 + }, + { + "epoch": 1.5746818486269256, + "grad_norm": 1.7844155199502556, + "learning_rate": 3.7981997054869076e-06, + "loss": 1.2503, + "step": 2351 + }, + { + "epoch": 1.5753516409912927, + "grad_norm": 2.3938195877812394, + "learning_rate": 3.7969504986184145e-06, + "loss": 1.2068, + "step": 2352 + }, + { + 
"epoch": 1.5760214333556597, + "grad_norm": 3.2668371750655556, + "learning_rate": 3.7957008485212874e-06, + "loss": 1.3004, + "step": 2353 + }, + { + "epoch": 1.576691225720027, + "grad_norm": 1.7085358277861886, + "learning_rate": 3.79445075562259e-06, + "loss": 1.2526, + "step": 2354 + }, + { + "epoch": 1.5773610180843938, + "grad_norm": 1.5769050358816763, + "learning_rate": 3.7932002203495386e-06, + "loss": 1.4311, + "step": 2355 + }, + { + "epoch": 1.578030810448761, + "grad_norm": 1.7971788355057112, + "learning_rate": 3.791949243129499e-06, + "loss": 1.3181, + "step": 2356 + }, + { + "epoch": 1.5787006028131279, + "grad_norm": 4.43133578471168, + "learning_rate": 3.7906978243899885e-06, + "loss": 1.2083, + "step": 2357 + }, + { + "epoch": 1.5793703951774951, + "grad_norm": 1.7017645281366283, + "learning_rate": 3.7894459645586745e-06, + "loss": 1.2683, + "step": 2358 + }, + { + "epoch": 1.580040187541862, + "grad_norm": 2.6998634395524914, + "learning_rate": 3.7881936640633774e-06, + "loss": 1.3212, + "step": 2359 + }, + { + "epoch": 1.5807099799062292, + "grad_norm": 1.8245920101177517, + "learning_rate": 3.7869409233320657e-06, + "loss": 1.365, + "step": 2360 + }, + { + "epoch": 1.581379772270596, + "grad_norm": 1.861185790991835, + "learning_rate": 3.7856877427928616e-06, + "loss": 1.1683, + "step": 2361 + }, + { + "epoch": 1.5820495646349633, + "grad_norm": 1.7314937220869984, + "learning_rate": 3.784434122874032e-06, + "loss": 1.2394, + "step": 2362 + }, + { + "epoch": 1.58271935699933, + "grad_norm": 1.8653301233145723, + "learning_rate": 3.7831800640040015e-06, + "loss": 1.2405, + "step": 2363 + }, + { + "epoch": 1.5833891493636973, + "grad_norm": 2.0248817786381554, + "learning_rate": 3.7819255666113375e-06, + "loss": 1.3272, + "step": 2364 + }, + { + "epoch": 1.5840589417280642, + "grad_norm": 1.6197353391802078, + "learning_rate": 3.780670631124762e-06, + "loss": 1.1057, + "step": 2365 + }, + { + "epoch": 1.5847287340924314, + "grad_norm": 5.15985192921762, + "learning_rate": 3.7794152579731434e-06, + "loss": 1.1447, + "step": 2366 + }, + { + "epoch": 1.5853985264567982, + "grad_norm": 1.8527533985134124, + "learning_rate": 3.7781594475855044e-06, + "loss": 1.2387, + "step": 2367 + }, + { + "epoch": 1.5860683188211655, + "grad_norm": 1.763573086317251, + "learning_rate": 3.776903200391011e-06, + "loss": 1.2449, + "step": 2368 + }, + { + "epoch": 1.5867381111855325, + "grad_norm": 2.1924802714523266, + "learning_rate": 3.775646516818985e-06, + "loss": 1.274, + "step": 2369 + }, + { + "epoch": 1.5874079035498996, + "grad_norm": 1.738883190294344, + "learning_rate": 3.7743893972988898e-06, + "loss": 1.2684, + "step": 2370 + }, + { + "epoch": 1.5880776959142666, + "grad_norm": 1.633639110560793, + "learning_rate": 3.7731318422603458e-06, + "loss": 1.4382, + "step": 2371 + }, + { + "epoch": 1.5887474882786337, + "grad_norm": 3.750539569504206, + "learning_rate": 3.7718738521331153e-06, + "loss": 1.3088, + "step": 2372 + }, + { + "epoch": 1.5894172806430007, + "grad_norm": 1.8702270951366156, + "learning_rate": 3.7706154273471146e-06, + "loss": 1.2798, + "step": 2373 + }, + { + "epoch": 1.5900870730073677, + "grad_norm": 1.5863295557323398, + "learning_rate": 3.7693565683324052e-06, + "loss": 1.3534, + "step": 2374 + }, + { + "epoch": 1.5907568653717348, + "grad_norm": 3.484555672542899, + "learning_rate": 3.768097275519198e-06, + "loss": 1.4312, + "step": 2375 + }, + { + "epoch": 1.5914266577361018, + "grad_norm": 1.7225288463777653, + "learning_rate": 3.766837549337853e-06, + 
"loss": 1.4018, + "step": 2376 + }, + { + "epoch": 1.5920964501004689, + "grad_norm": 1.9193114885301754, + "learning_rate": 3.765577390218877e-06, + "loss": 1.2664, + "step": 2377 + }, + { + "epoch": 1.592766242464836, + "grad_norm": 1.869596217391189, + "learning_rate": 3.7643167985929246e-06, + "loss": 1.3086, + "step": 2378 + }, + { + "epoch": 1.593436034829203, + "grad_norm": 1.800478595314331, + "learning_rate": 3.7630557748908007e-06, + "loss": 1.3589, + "step": 2379 + }, + { + "epoch": 1.59410582719357, + "grad_norm": 1.7952106236897085, + "learning_rate": 3.7617943195434546e-06, + "loss": 1.1981, + "step": 2380 + }, + { + "epoch": 1.594775619557937, + "grad_norm": 2.025054126493236, + "learning_rate": 3.7605324329819858e-06, + "loss": 1.1699, + "step": 2381 + }, + { + "epoch": 1.595445411922304, + "grad_norm": 1.632023425902775, + "learning_rate": 3.7592701156376378e-06, + "loss": 1.1979, + "step": 2382 + }, + { + "epoch": 1.596115204286671, + "grad_norm": 2.820328073003248, + "learning_rate": 3.758007367941806e-06, + "loss": 1.075, + "step": 2383 + }, + { + "epoch": 1.5967849966510381, + "grad_norm": 1.9326785079871105, + "learning_rate": 3.756744190326028e-06, + "loss": 1.3656, + "step": 2384 + }, + { + "epoch": 1.5974547890154054, + "grad_norm": 1.7069908641935707, + "learning_rate": 3.755480583221993e-06, + "loss": 1.2939, + "step": 2385 + }, + { + "epoch": 1.5981245813797722, + "grad_norm": 1.8089769468339671, + "learning_rate": 3.754216547061533e-06, + "loss": 1.2731, + "step": 2386 + }, + { + "epoch": 1.5987943737441395, + "grad_norm": 1.7092546226516565, + "learning_rate": 3.7529520822766295e-06, + "loss": 1.2827, + "step": 2387 + }, + { + "epoch": 1.5994641661085063, + "grad_norm": 3.1884546821619852, + "learning_rate": 3.751687189299408e-06, + "loss": 1.2827, + "step": 2388 + }, + { + "epoch": 1.6001339584728735, + "grad_norm": 1.7901567806733163, + "learning_rate": 3.7504218685621417e-06, + "loss": 1.3609, + "step": 2389 + }, + { + "epoch": 1.6008037508372404, + "grad_norm": 2.0383231832663453, + "learning_rate": 3.7491561204972504e-06, + "loss": 1.3737, + "step": 2390 + }, + { + "epoch": 1.6014735432016076, + "grad_norm": 1.9243648707846173, + "learning_rate": 3.7478899455372997e-06, + "loss": 1.3477, + "step": 2391 + }, + { + "epoch": 1.6021433355659744, + "grad_norm": 1.736864375407767, + "learning_rate": 3.746623344115e-06, + "loss": 1.3883, + "step": 2392 + }, + { + "epoch": 1.6028131279303417, + "grad_norm": 1.639276048644666, + "learning_rate": 3.745356316663209e-06, + "loss": 1.1856, + "step": 2393 + }, + { + "epoch": 1.6034829202947085, + "grad_norm": 2.1415343050440194, + "learning_rate": 3.744088863614928e-06, + "loss": 1.384, + "step": 2394 + }, + { + "epoch": 1.6041527126590758, + "grad_norm": 2.1086990809796466, + "learning_rate": 3.7428209854033065e-06, + "loss": 1.3816, + "step": 2395 + }, + { + "epoch": 1.6048225050234426, + "grad_norm": 1.7250662995725108, + "learning_rate": 3.7415526824616367e-06, + "loss": 1.2821, + "step": 2396 + }, + { + "epoch": 1.6054922973878099, + "grad_norm": 2.186320454082252, + "learning_rate": 3.740283955223357e-06, + "loss": 1.1346, + "step": 2397 + }, + { + "epoch": 1.6061620897521767, + "grad_norm": 1.7979956514619402, + "learning_rate": 3.739014804122052e-06, + "loss": 1.3461, + "step": 2398 + }, + { + "epoch": 1.606831882116544, + "grad_norm": 3.7289480176948913, + "learning_rate": 3.7377452295914487e-06, + "loss": 1.1628, + "step": 2399 + }, + { + "epoch": 1.607501674480911, + "grad_norm": 2.094257334000137, + 
"learning_rate": 3.736475232065421e-06, + "loss": 1.2128, + "step": 2400 + }, + { + "epoch": 1.608171466845278, + "grad_norm": 2.169581340294522, + "learning_rate": 3.7352048119779855e-06, + "loss": 1.3135, + "step": 2401 + }, + { + "epoch": 1.608841259209645, + "grad_norm": 1.7115448425102089, + "learning_rate": 3.7339339697633053e-06, + "loss": 1.3048, + "step": 2402 + }, + { + "epoch": 1.609511051574012, + "grad_norm": 2.2102475805795163, + "learning_rate": 3.7326627058556854e-06, + "loss": 1.2625, + "step": 2403 + }, + { + "epoch": 1.6101808439383791, + "grad_norm": 5.755663924076286, + "learning_rate": 3.7313910206895776e-06, + "loss": 1.4903, + "step": 2404 + }, + { + "epoch": 1.6108506363027462, + "grad_norm": 1.5916572144701882, + "learning_rate": 3.7301189146995737e-06, + "loss": 1.183, + "step": 2405 + }, + { + "epoch": 1.6115204286671132, + "grad_norm": 1.6409061003126262, + "learning_rate": 3.728846388320415e-06, + "loss": 1.1734, + "step": 2406 + }, + { + "epoch": 1.6121902210314802, + "grad_norm": 2.4404723356693805, + "learning_rate": 3.7275734419869804e-06, + "loss": 1.3232, + "step": 2407 + }, + { + "epoch": 1.6128600133958473, + "grad_norm": 2.1971446254964606, + "learning_rate": 3.7263000761342976e-06, + "loss": 1.0462, + "step": 2408 + }, + { + "epoch": 1.6135298057602143, + "grad_norm": 1.8380253429100595, + "learning_rate": 3.7250262911975333e-06, + "loss": 1.2272, + "step": 2409 + }, + { + "epoch": 1.6141995981245814, + "grad_norm": 2.5096862973515455, + "learning_rate": 3.723752087612001e-06, + "loss": 1.1725, + "step": 2410 + }, + { + "epoch": 1.6148693904889484, + "grad_norm": 1.7364568473526991, + "learning_rate": 3.7224774658131536e-06, + "loss": 1.2827, + "step": 2411 + }, + { + "epoch": 1.6155391828533154, + "grad_norm": 1.5675846591347387, + "learning_rate": 3.7212024262365913e-06, + "loss": 1.4052, + "step": 2412 + }, + { + "epoch": 1.6162089752176825, + "grad_norm": 1.784109942587139, + "learning_rate": 3.719926969318053e-06, + "loss": 1.3626, + "step": 2413 + }, + { + "epoch": 1.6168787675820495, + "grad_norm": 2.0248556198957686, + "learning_rate": 3.7186510954934236e-06, + "loss": 1.2069, + "step": 2414 + }, + { + "epoch": 1.6175485599464166, + "grad_norm": 1.664324163266575, + "learning_rate": 3.7173748051987264e-06, + "loss": 1.2784, + "step": 2415 + }, + { + "epoch": 1.6182183523107838, + "grad_norm": 2.1407701150225735, + "learning_rate": 3.7160980988701323e-06, + "loss": 1.3462, + "step": 2416 + }, + { + "epoch": 1.6188881446751506, + "grad_norm": 2.419767026258912, + "learning_rate": 3.714820976943949e-06, + "loss": 1.1674, + "step": 2417 + }, + { + "epoch": 1.619557937039518, + "grad_norm": 1.6666302754910338, + "learning_rate": 3.7135434398566318e-06, + "loss": 1.3459, + "step": 2418 + }, + { + "epoch": 1.6202277294038847, + "grad_norm": 2.0877437497978226, + "learning_rate": 3.712265488044771e-06, + "loss": 1.2011, + "step": 2419 + }, + { + "epoch": 1.620897521768252, + "grad_norm": 1.741031850183577, + "learning_rate": 3.710987121945106e-06, + "loss": 1.425, + "step": 2420 + }, + { + "epoch": 1.6215673141326188, + "grad_norm": 2.2046086638117903, + "learning_rate": 3.709708341994512e-06, + "loss": 1.3417, + "step": 2421 + }, + { + "epoch": 1.622237106496986, + "grad_norm": 3.1760416171750605, + "learning_rate": 3.7084291486300093e-06, + "loss": 1.2022, + "step": 2422 + }, + { + "epoch": 1.6229068988613529, + "grad_norm": 1.4716617077677623, + "learning_rate": 3.7071495422887573e-06, + "loss": 1.3061, + "step": 2423 + }, + { + "epoch": 
1.6235766912257201, + "grad_norm": 1.6885306537115676, + "learning_rate": 3.7058695234080577e-06, + "loss": 1.2269, + "step": 2424 + }, + { + "epoch": 1.624246483590087, + "grad_norm": 1.649797446677467, + "learning_rate": 3.704589092425352e-06, + "loss": 1.3464, + "step": 2425 + }, + { + "epoch": 1.6249162759544542, + "grad_norm": 2.101579926584569, + "learning_rate": 3.7033082497782246e-06, + "loss": 1.145, + "step": 2426 + }, + { + "epoch": 1.625586068318821, + "grad_norm": 1.9248974965994967, + "learning_rate": 3.7020269959043988e-06, + "loss": 1.2195, + "step": 2427 + }, + { + "epoch": 1.6262558606831883, + "grad_norm": 1.9441772656230343, + "learning_rate": 3.7007453312417383e-06, + "loss": 1.3828, + "step": 2428 + }, + { + "epoch": 1.626925653047555, + "grad_norm": 1.7448364154674014, + "learning_rate": 3.699463256228249e-06, + "loss": 1.309, + "step": 2429 + }, + { + "epoch": 1.6275954454119224, + "grad_norm": 1.4854456555517968, + "learning_rate": 3.698180771302075e-06, + "loss": 1.3395, + "step": 2430 + }, + { + "epoch": 1.6282652377762894, + "grad_norm": 2.0991481382111767, + "learning_rate": 3.696897876901502e-06, + "loss": 1.4382, + "step": 2431 + }, + { + "epoch": 1.6289350301406564, + "grad_norm": 1.7265809620943657, + "learning_rate": 3.6956145734649547e-06, + "loss": 1.1734, + "step": 2432 + }, + { + "epoch": 1.6296048225050235, + "grad_norm": 1.914513327927906, + "learning_rate": 3.694330861430998e-06, + "loss": 1.4768, + "step": 2433 + }, + { + "epoch": 1.6302746148693905, + "grad_norm": 1.5930907500851033, + "learning_rate": 3.6930467412383357e-06, + "loss": 1.1767, + "step": 2434 + }, + { + "epoch": 1.6309444072337576, + "grad_norm": 3.313195548801877, + "learning_rate": 3.6917622133258125e-06, + "loss": 1.444, + "step": 2435 + }, + { + "epoch": 1.6316141995981246, + "grad_norm": 1.712424917343722, + "learning_rate": 3.6904772781324116e-06, + "loss": 1.2971, + "step": 2436 + }, + { + "epoch": 1.6322839919624916, + "grad_norm": 2.613665847297047, + "learning_rate": 3.689191936097255e-06, + "loss": 1.198, + "step": 2437 + }, + { + "epoch": 1.6329537843268587, + "grad_norm": 2.4403439131614246, + "learning_rate": 3.6879061876596044e-06, + "loss": 1.3042, + "step": 2438 + }, + { + "epoch": 1.6336235766912257, + "grad_norm": 1.6994637441825093, + "learning_rate": 3.686620033258861e-06, + "loss": 1.2746, + "step": 2439 + }, + { + "epoch": 1.6342933690555927, + "grad_norm": 1.559732078785083, + "learning_rate": 3.6853334733345626e-06, + "loss": 1.301, + "step": 2440 + }, + { + "epoch": 1.6349631614199598, + "grad_norm": 2.84602307123071, + "learning_rate": 3.684046508326387e-06, + "loss": 1.3063, + "step": 2441 + }, + { + "epoch": 1.6356329537843268, + "grad_norm": 1.6299659892977076, + "learning_rate": 3.68275913867415e-06, + "loss": 1.1707, + "step": 2442 + }, + { + "epoch": 1.6363027461486939, + "grad_norm": 2.041835549262483, + "learning_rate": 3.6814713648178087e-06, + "loss": 1.1636, + "step": 2443 + }, + { + "epoch": 1.636972538513061, + "grad_norm": 2.5198988534832987, + "learning_rate": 3.680183187197452e-06, + "loss": 1.1714, + "step": 2444 + }, + { + "epoch": 1.637642330877428, + "grad_norm": 8.307559349592406, + "learning_rate": 3.678894606253313e-06, + "loss": 1.3233, + "step": 2445 + }, + { + "epoch": 1.638312123241795, + "grad_norm": 2.0205357757846727, + "learning_rate": 3.677605622425759e-06, + "loss": 1.2158, + "step": 2446 + }, + { + "epoch": 1.6389819156061622, + "grad_norm": 1.610391123849678, + "learning_rate": 3.676316236155296e-06, + "loss": 1.19, + 
"step": 2447 + }, + { + "epoch": 1.639651707970529, + "grad_norm": 1.89368653492746, + "learning_rate": 3.6750264478825675e-06, + "loss": 1.2132, + "step": 2448 + }, + { + "epoch": 1.6403215003348963, + "grad_norm": 2.565970872348289, + "learning_rate": 3.673736258048356e-06, + "loss": 1.3789, + "step": 2449 + }, + { + "epoch": 1.6409912926992631, + "grad_norm": 2.1091821106472994, + "learning_rate": 3.672445667093577e-06, + "loss": 1.2755, + "step": 2450 + }, + { + "epoch": 1.6416610850636304, + "grad_norm": 4.941564722438434, + "learning_rate": 3.6711546754592885e-06, + "loss": 1.2816, + "step": 2451 + }, + { + "epoch": 1.6423308774279972, + "grad_norm": 1.9170147664023423, + "learning_rate": 3.6698632835866805e-06, + "loss": 1.4736, + "step": 2452 + }, + { + "epoch": 1.6430006697923645, + "grad_norm": 1.7462101584408385, + "learning_rate": 3.6685714919170834e-06, + "loss": 1.3765, + "step": 2453 + }, + { + "epoch": 1.6436704621567313, + "grad_norm": 1.7177971944650388, + "learning_rate": 3.667279300891962e-06, + "loss": 1.1413, + "step": 2454 + }, + { + "epoch": 1.6443402545210986, + "grad_norm": 6.207430911260847, + "learning_rate": 3.6659867109529197e-06, + "loss": 1.3353, + "step": 2455 + }, + { + "epoch": 1.6450100468854654, + "grad_norm": 2.6223651954549223, + "learning_rate": 3.664693722541692e-06, + "loss": 1.0354, + "step": 2456 + }, + { + "epoch": 1.6456798392498326, + "grad_norm": 2.1425991417142614, + "learning_rate": 3.663400336100158e-06, + "loss": 1.2866, + "step": 2457 + }, + { + "epoch": 1.6463496316141994, + "grad_norm": 1.7101303671950434, + "learning_rate": 3.662106552070324e-06, + "loss": 1.2191, + "step": 2458 + }, + { + "epoch": 1.6470194239785667, + "grad_norm": 2.053508575941495, + "learning_rate": 3.6608123708943393e-06, + "loss": 1.3352, + "step": 2459 + }, + { + "epoch": 1.6476892163429335, + "grad_norm": 2.528039393979537, + "learning_rate": 3.659517793014484e-06, + "loss": 1.2888, + "step": 2460 + }, + { + "epoch": 1.6483590087073008, + "grad_norm": 1.5461271078498182, + "learning_rate": 3.6582228188731783e-06, + "loss": 1.3372, + "step": 2461 + }, + { + "epoch": 1.6490288010716678, + "grad_norm": 2.5377618371698287, + "learning_rate": 3.6569274489129737e-06, + "loss": 1.2261, + "step": 2462 + }, + { + "epoch": 1.6496985934360349, + "grad_norm": 2.143345652693287, + "learning_rate": 3.6556316835765593e-06, + "loss": 1.1341, + "step": 2463 + }, + { + "epoch": 1.650368385800402, + "grad_norm": 2.8779746297443185, + "learning_rate": 3.6543355233067586e-06, + "loss": 1.3486, + "step": 2464 + }, + { + "epoch": 1.651038178164769, + "grad_norm": 1.8160138242240875, + "learning_rate": 3.6530389685465297e-06, + "loss": 1.1031, + "step": 2465 + }, + { + "epoch": 1.651707970529136, + "grad_norm": 1.5875559352236734, + "learning_rate": 3.651742019738967e-06, + "loss": 1.282, + "step": 2466 + }, + { + "epoch": 1.652377762893503, + "grad_norm": 1.618565013553261, + "learning_rate": 3.6504446773272978e-06, + "loss": 1.3885, + "step": 2467 + }, + { + "epoch": 1.65304755525787, + "grad_norm": 2.3882527606388733, + "learning_rate": 3.6491469417548845e-06, + "loss": 1.2897, + "step": 2468 + }, + { + "epoch": 1.653717347622237, + "grad_norm": 2.0209506134906934, + "learning_rate": 3.6478488134652248e-06, + "loss": 1.2461, + "step": 2469 + }, + { + "epoch": 1.6543871399866041, + "grad_norm": 3.0844086644045006, + "learning_rate": 3.646550292901949e-06, + "loss": 1.3275, + "step": 2470 + }, + { + "epoch": 1.6550569323509712, + "grad_norm": 1.9185234750336333, + "learning_rate": 
3.6452513805088236e-06, + "loss": 1.1581, + "step": 2471 + }, + { + "epoch": 1.6557267247153382, + "grad_norm": 1.7660511096464198, + "learning_rate": 3.643952076729746e-06, + "loss": 1.3748, + "step": 2472 + }, + { + "epoch": 1.6563965170797053, + "grad_norm": 2.1205350691965306, + "learning_rate": 3.64265238200875e-06, + "loss": 1.1351, + "step": 2473 + }, + { + "epoch": 1.6570663094440723, + "grad_norm": 1.7067996560736096, + "learning_rate": 3.641352296790003e-06, + "loss": 1.3552, + "step": 2474 + }, + { + "epoch": 1.6577361018084393, + "grad_norm": 1.5545099299574254, + "learning_rate": 3.640051821517803e-06, + "loss": 1.2891, + "step": 2475 + }, + { + "epoch": 1.6584058941728066, + "grad_norm": 1.3977573604349325, + "learning_rate": 3.6387509566365836e-06, + "loss": 1.3226, + "step": 2476 + }, + { + "epoch": 1.6590756865371734, + "grad_norm": 1.5999162940827307, + "learning_rate": 3.6374497025909124e-06, + "loss": 1.1349, + "step": 2477 + }, + { + "epoch": 1.6597454789015407, + "grad_norm": 1.5154394439971057, + "learning_rate": 3.636148059825487e-06, + "loss": 1.1719, + "step": 2478 + }, + { + "epoch": 1.6604152712659075, + "grad_norm": 1.6249651850897302, + "learning_rate": 3.6348460287851415e-06, + "loss": 1.3126, + "step": 2479 + }, + { + "epoch": 1.6610850636302747, + "grad_norm": 3.202211068567698, + "learning_rate": 3.63354360991484e-06, + "loss": 1.3248, + "step": 2480 + }, + { + "epoch": 1.6617548559946416, + "grad_norm": 1.5835992954346565, + "learning_rate": 3.6322408036596803e-06, + "loss": 1.3194, + "step": 2481 + }, + { + "epoch": 1.6624246483590088, + "grad_norm": 1.7288819607441483, + "learning_rate": 3.6309376104648907e-06, + "loss": 1.2331, + "step": 2482 + }, + { + "epoch": 1.6630944407233756, + "grad_norm": 1.7084726888507775, + "learning_rate": 3.6296340307758347e-06, + "loss": 1.4133, + "step": 2483 + }, + { + "epoch": 1.663764233087743, + "grad_norm": 2.6343809858928653, + "learning_rate": 3.628330065038006e-06, + "loss": 1.1341, + "step": 2484 + }, + { + "epoch": 1.6644340254521097, + "grad_norm": 1.6527810476043565, + "learning_rate": 3.6270257136970304e-06, + "loss": 1.3531, + "step": 2485 + }, + { + "epoch": 1.665103817816477, + "grad_norm": 1.98208077525281, + "learning_rate": 3.625720977198667e-06, + "loss": 1.319, + "step": 2486 + }, + { + "epoch": 1.6657736101808438, + "grad_norm": 1.7751300146962805, + "learning_rate": 3.6244158559888025e-06, + "loss": 1.2378, + "step": 2487 + }, + { + "epoch": 1.666443402545211, + "grad_norm": 1.4913988041184205, + "learning_rate": 3.6231103505134618e-06, + "loss": 1.0398, + "step": 2488 + }, + { + "epoch": 1.6671131949095779, + "grad_norm": 1.8053998313684927, + "learning_rate": 3.6218044612187935e-06, + "loss": 1.3374, + "step": 2489 + }, + { + "epoch": 1.6677829872739451, + "grad_norm": 1.8606598568054762, + "learning_rate": 3.6204981885510835e-06, + "loss": 1.2479, + "step": 2490 + }, + { + "epoch": 1.6684527796383122, + "grad_norm": 2.35282688448881, + "learning_rate": 3.619191532956745e-06, + "loss": 1.3062, + "step": 2491 + }, + { + "epoch": 1.6691225720026792, + "grad_norm": 2.499042133018218, + "learning_rate": 3.6178844948823246e-06, + "loss": 1.3099, + "step": 2492 + }, + { + "epoch": 1.6697923643670463, + "grad_norm": 1.6361522237677073, + "learning_rate": 3.6165770747744955e-06, + "loss": 1.1667, + "step": 2493 + }, + { + "epoch": 1.6704621567314133, + "grad_norm": 2.2917035498804452, + "learning_rate": 3.6152692730800677e-06, + "loss": 1.1419, + "step": 2494 + }, + { + "epoch": 1.6711319490957803, + 
"grad_norm": 2.3998760111110014, + "learning_rate": 3.613961090245976e-06, + "loss": 1.3066, + "step": 2495 + }, + { + "epoch": 1.6718017414601474, + "grad_norm": 2.7657248895488498, + "learning_rate": 3.612652526719289e-06, + "loss": 1.2363, + "step": 2496 + }, + { + "epoch": 1.6724715338245144, + "grad_norm": 1.8500054323989237, + "learning_rate": 3.611343582947202e-06, + "loss": 1.4431, + "step": 2497 + }, + { + "epoch": 1.6731413261888815, + "grad_norm": 1.6855246111678335, + "learning_rate": 3.610034259377045e-06, + "loss": 1.389, + "step": 2498 + }, + { + "epoch": 1.6738111185532485, + "grad_norm": 1.7480559004438811, + "learning_rate": 3.6087245564562733e-06, + "loss": 1.3328, + "step": 2499 + }, + { + "epoch": 1.6744809109176155, + "grad_norm": 2.614113271261021, + "learning_rate": 3.607414474632474e-06, + "loss": 1.3413, + "step": 2500 + }, + { + "epoch": 1.6751507032819826, + "grad_norm": 2.5443944260357307, + "learning_rate": 3.606104014353364e-06, + "loss": 1.3649, + "step": 2501 + }, + { + "epoch": 1.6758204956463496, + "grad_norm": 1.9996902880651477, + "learning_rate": 3.6047931760667877e-06, + "loss": 1.182, + "step": 2502 + }, + { + "epoch": 1.6764902880107166, + "grad_norm": 2.0419449048047253, + "learning_rate": 3.6034819602207204e-06, + "loss": 1.0722, + "step": 2503 + }, + { + "epoch": 1.6771600803750837, + "grad_norm": 1.5537804598124234, + "learning_rate": 3.6021703672632657e-06, + "loss": 1.3429, + "step": 2504 + }, + { + "epoch": 1.6778298727394507, + "grad_norm": 1.8857712633343067, + "learning_rate": 3.600858397642657e-06, + "loss": 1.2504, + "step": 2505 + }, + { + "epoch": 1.6784996651038178, + "grad_norm": 2.4391804036736597, + "learning_rate": 3.5995460518072545e-06, + "loss": 1.16, + "step": 2506 + }, + { + "epoch": 1.679169457468185, + "grad_norm": 2.2660844722440823, + "learning_rate": 3.5982333302055496e-06, + "loss": 1.3294, + "step": 2507 + }, + { + "epoch": 1.6798392498325518, + "grad_norm": 1.8921303914225251, + "learning_rate": 3.5969202332861593e-06, + "loss": 1.1954, + "step": 2508 + }, + { + "epoch": 1.680509042196919, + "grad_norm": 1.8459447576500592, + "learning_rate": 3.5956067614978307e-06, + "loss": 1.1763, + "step": 2509 + }, + { + "epoch": 1.681178834561286, + "grad_norm": 2.0036056245007847, + "learning_rate": 3.5942929152894386e-06, + "loss": 1.1657, + "step": 2510 + }, + { + "epoch": 1.6818486269256532, + "grad_norm": 1.6044836622906475, + "learning_rate": 3.5929786951099856e-06, + "loss": 1.1992, + "step": 2511 + }, + { + "epoch": 1.68251841929002, + "grad_norm": 1.6263061146779625, + "learning_rate": 3.591664101408602e-06, + "loss": 1.2793, + "step": 2512 + }, + { + "epoch": 1.6831882116543873, + "grad_norm": 1.9024895398159012, + "learning_rate": 3.5903491346345466e-06, + "loss": 1.276, + "step": 2513 + }, + { + "epoch": 1.683858004018754, + "grad_norm": 2.162109382855802, + "learning_rate": 3.5890337952372045e-06, + "loss": 1.2941, + "step": 2514 + }, + { + "epoch": 1.6845277963831213, + "grad_norm": 1.5505924278899534, + "learning_rate": 3.5877180836660885e-06, + "loss": 1.236, + "step": 2515 + }, + { + "epoch": 1.6851975887474882, + "grad_norm": 1.7653961820114228, + "learning_rate": 3.5864020003708392e-06, + "loss": 1.3154, + "step": 2516 + }, + { + "epoch": 1.6858673811118554, + "grad_norm": 1.773899048333638, + "learning_rate": 3.585085545801224e-06, + "loss": 1.3275, + "step": 2517 + }, + { + "epoch": 1.6865371734762222, + "grad_norm": 3.3202271132315757, + "learning_rate": 3.5837687204071365e-06, + "loss": 1.4092, + "step": 
2518 + }, + { + "epoch": 1.6872069658405895, + "grad_norm": 1.7083010738829112, + "learning_rate": 3.582451524638597e-06, + "loss": 1.3854, + "step": 2519 + }, + { + "epoch": 1.6878767582049563, + "grad_norm": 2.682102059698821, + "learning_rate": 3.5811339589457543e-06, + "loss": 1.3659, + "step": 2520 + }, + { + "epoch": 1.6885465505693236, + "grad_norm": 1.6021614711674192, + "learning_rate": 3.5798160237788813e-06, + "loss": 1.3487, + "step": 2521 + }, + { + "epoch": 1.6892163429336906, + "grad_norm": 1.559697936299408, + "learning_rate": 3.5784977195883787e-06, + "loss": 1.262, + "step": 2522 + }, + { + "epoch": 1.6898861352980576, + "grad_norm": 1.6028262096812986, + "learning_rate": 3.5771790468247715e-06, + "loss": 1.1861, + "step": 2523 + }, + { + "epoch": 1.6905559276624247, + "grad_norm": 2.882073363840671, + "learning_rate": 3.575860005938713e-06, + "loss": 1.2186, + "step": 2524 + }, + { + "epoch": 1.6912257200267917, + "grad_norm": 3.271692642769882, + "learning_rate": 3.5745405973809806e-06, + "loss": 1.0583, + "step": 2525 + }, + { + "epoch": 1.6918955123911588, + "grad_norm": 1.8817503317190494, + "learning_rate": 3.5732208216024776e-06, + "loss": 1.2758, + "step": 2526 + }, + { + "epoch": 1.6925653047555258, + "grad_norm": 2.695473636727527, + "learning_rate": 3.5719006790542343e-06, + "loss": 1.2531, + "step": 2527 + }, + { + "epoch": 1.6932350971198928, + "grad_norm": 1.6903647345263975, + "learning_rate": 3.570580170187404e-06, + "loss": 1.3725, + "step": 2528 + }, + { + "epoch": 1.6939048894842599, + "grad_norm": 2.2333530427152506, + "learning_rate": 3.569259295453267e-06, + "loss": 1.2276, + "step": 2529 + }, + { + "epoch": 1.694574681848627, + "grad_norm": 3.2502713821506415, + "learning_rate": 3.567938055303227e-06, + "loss": 1.1994, + "step": 2530 + }, + { + "epoch": 1.695244474212994, + "grad_norm": 2.670708679422478, + "learning_rate": 3.5666164501888147e-06, + "loss": 1.2901, + "step": 2531 + }, + { + "epoch": 1.695914266577361, + "grad_norm": 1.5019295262693562, + "learning_rate": 3.565294480561683e-06, + "loss": 1.2566, + "step": 2532 + }, + { + "epoch": 1.696584058941728, + "grad_norm": 1.7236776571074193, + "learning_rate": 3.563972146873613e-06, + "loss": 1.3185, + "step": 2533 + }, + { + "epoch": 1.697253851306095, + "grad_norm": 2.621704751245828, + "learning_rate": 3.562649449576505e-06, + "loss": 1.1956, + "step": 2534 + }, + { + "epoch": 1.6979236436704621, + "grad_norm": 1.9882051553700097, + "learning_rate": 3.561326389122389e-06, + "loss": 1.1713, + "step": 2535 + }, + { + "epoch": 1.6985934360348292, + "grad_norm": 1.9290571727911279, + "learning_rate": 3.560002965963416e-06, + "loss": 1.1925, + "step": 2536 + }, + { + "epoch": 1.6992632283991962, + "grad_norm": 2.39181832958909, + "learning_rate": 3.5586791805518616e-06, + "loss": 1.097, + "step": 2537 + }, + { + "epoch": 1.6999330207635635, + "grad_norm": 1.7325170841518265, + "learning_rate": 3.5573550333401242e-06, + "loss": 1.3305, + "step": 2538 + }, + { + "epoch": 1.7006028131279303, + "grad_norm": 2.143137493349388, + "learning_rate": 3.5560305247807273e-06, + "loss": 1.3651, + "step": 2539 + }, + { + "epoch": 1.7012726054922975, + "grad_norm": 2.9312138883163463, + "learning_rate": 3.554705655326318e-06, + "loss": 1.2242, + "step": 2540 + }, + { + "epoch": 1.7019423978566643, + "grad_norm": 1.8321008909805439, + "learning_rate": 3.553380425429666e-06, + "loss": 1.2653, + "step": 2541 + }, + { + "epoch": 1.7026121902210316, + "grad_norm": 2.050671309697764, + "learning_rate": 
3.552054835543664e-06, + "loss": 1.1125, + "step": 2542 + }, + { + "epoch": 1.7032819825853984, + "grad_norm": 4.411202337330066, + "learning_rate": 3.550728886121328e-06, + "loss": 1.214, + "step": 2543 + }, + { + "epoch": 1.7039517749497657, + "grad_norm": 1.614052245405937, + "learning_rate": 3.549402577615797e-06, + "loss": 1.2923, + "step": 2544 + }, + { + "epoch": 1.7046215673141325, + "grad_norm": 2.0707033153852046, + "learning_rate": 3.548075910480333e-06, + "loss": 1.1378, + "step": 2545 + }, + { + "epoch": 1.7052913596784998, + "grad_norm": 1.752795660296258, + "learning_rate": 3.5467488851683195e-06, + "loss": 1.3398, + "step": 2546 + }, + { + "epoch": 1.7059611520428666, + "grad_norm": 1.7643689360804016, + "learning_rate": 3.545421502133264e-06, + "loss": 1.1726, + "step": 2547 + }, + { + "epoch": 1.7066309444072338, + "grad_norm": 1.9645230207942161, + "learning_rate": 3.544093761828794e-06, + "loss": 1.1884, + "step": 2548 + }, + { + "epoch": 1.7073007367716007, + "grad_norm": 2.904194267736768, + "learning_rate": 3.542765664708662e-06, + "loss": 1.31, + "step": 2549 + }, + { + "epoch": 1.707970529135968, + "grad_norm": 1.6659230832856302, + "learning_rate": 3.54143721122674e-06, + "loss": 1.2247, + "step": 2550 + }, + { + "epoch": 1.7086403215003347, + "grad_norm": 2.1412216413867906, + "learning_rate": 3.540108401837023e-06, + "loss": 1.1264, + "step": 2551 + }, + { + "epoch": 1.709310113864702, + "grad_norm": 1.6895893602561594, + "learning_rate": 3.5387792369936268e-06, + "loss": 1.2444, + "step": 2552 + }, + { + "epoch": 1.709979906229069, + "grad_norm": 1.8588052871189042, + "learning_rate": 3.5374497171507897e-06, + "loss": 1.4201, + "step": 2553 + }, + { + "epoch": 1.710649698593436, + "grad_norm": 1.5562090799030757, + "learning_rate": 3.5361198427628706e-06, + "loss": 1.184, + "step": 2554 + }, + { + "epoch": 1.7113194909578031, + "grad_norm": 1.6113230742049922, + "learning_rate": 3.534789614284351e-06, + "loss": 1.2986, + "step": 2555 + }, + { + "epoch": 1.7119892833221702, + "grad_norm": 2.040723893040833, + "learning_rate": 3.533459032169829e-06, + "loss": 1.2991, + "step": 2556 + }, + { + "epoch": 1.7126590756865372, + "grad_norm": 1.6279141473422705, + "learning_rate": 3.5321280968740303e-06, + "loss": 1.3273, + "step": 2557 + }, + { + "epoch": 1.7133288680509042, + "grad_norm": 1.5806880448301586, + "learning_rate": 3.5307968088517954e-06, + "loss": 1.3855, + "step": 2558 + }, + { + "epoch": 1.7139986604152713, + "grad_norm": 1.832720081486646, + "learning_rate": 3.529465168558089e-06, + "loss": 1.1857, + "step": 2559 + }, + { + "epoch": 1.7146684527796383, + "grad_norm": 1.5714153314529609, + "learning_rate": 3.528133176447994e-06, + "loss": 1.2704, + "step": 2560 + }, + { + "epoch": 1.7153382451440053, + "grad_norm": 2.49821412795672, + "learning_rate": 3.5268008329767144e-06, + "loss": 1.2929, + "step": 2561 + }, + { + "epoch": 1.7160080375083724, + "grad_norm": 1.8717128028703478, + "learning_rate": 3.525468138599575e-06, + "loss": 1.0531, + "step": 2562 + }, + { + "epoch": 1.7166778298727394, + "grad_norm": 2.0576859132391134, + "learning_rate": 3.524135093772019e-06, + "loss": 1.2348, + "step": 2563 + }, + { + "epoch": 1.7173476222371065, + "grad_norm": 1.9735528384036625, + "learning_rate": 3.5228016989496107e-06, + "loss": 1.2087, + "step": 2564 + }, + { + "epoch": 1.7180174146014735, + "grad_norm": 1.5204088933141537, + "learning_rate": 3.5214679545880332e-06, + "loss": 1.1953, + "step": 2565 + }, + { + "epoch": 1.7186872069658405, + "grad_norm": 
2.28210126995331, + "learning_rate": 3.520133861143089e-06, + "loss": 1.3203, + "step": 2566 + }, + { + "epoch": 1.7193569993302076, + "grad_norm": 1.5823333277637253, + "learning_rate": 3.5187994190707016e-06, + "loss": 1.237, + "step": 2567 + }, + { + "epoch": 1.7200267916945746, + "grad_norm": 2.4800075760920826, + "learning_rate": 3.5174646288269106e-06, + "loss": 1.1377, + "step": 2568 + }, + { + "epoch": 1.7206965840589419, + "grad_norm": 1.6848215117176228, + "learning_rate": 3.516129490867877e-06, + "loss": 1.2009, + "step": 2569 + }, + { + "epoch": 1.7213663764233087, + "grad_norm": 1.7798996507252556, + "learning_rate": 3.5147940056498788e-06, + "loss": 1.2812, + "step": 2570 + }, + { + "epoch": 1.722036168787676, + "grad_norm": 1.767111029413533, + "learning_rate": 3.513458173629316e-06, + "loss": 1.223, + "step": 2571 + }, + { + "epoch": 1.7227059611520428, + "grad_norm": 1.7498226416885085, + "learning_rate": 3.5121219952627027e-06, + "loss": 1.0476, + "step": 2572 + }, + { + "epoch": 1.72337575351641, + "grad_norm": 3.315394502865384, + "learning_rate": 3.5107854710066757e-06, + "loss": 1.1033, + "step": 2573 + }, + { + "epoch": 1.7240455458807769, + "grad_norm": 1.780235504928905, + "learning_rate": 3.5094486013179853e-06, + "loss": 1.2751, + "step": 2574 + }, + { + "epoch": 1.7247153382451441, + "grad_norm": 3.4148391142930747, + "learning_rate": 3.5081113866535045e-06, + "loss": 1.3034, + "step": 2575 + }, + { + "epoch": 1.725385130609511, + "grad_norm": 2.9638773524740936, + "learning_rate": 3.5067738274702202e-06, + "loss": 1.3307, + "step": 2576 + }, + { + "epoch": 1.7260549229738782, + "grad_norm": 1.9849211985842392, + "learning_rate": 3.5054359242252407e-06, + "loss": 1.2129, + "step": 2577 + }, + { + "epoch": 1.726724715338245, + "grad_norm": 1.6602189918302805, + "learning_rate": 3.504097677375789e-06, + "loss": 1.289, + "step": 2578 + }, + { + "epoch": 1.7273945077026123, + "grad_norm": 8.831142819771557, + "learning_rate": 3.5027590873792074e-06, + "loss": 1.2409, + "step": 2579 + }, + { + "epoch": 1.728064300066979, + "grad_norm": 1.6669535029242613, + "learning_rate": 3.501420154692954e-06, + "loss": 1.2795, + "step": 2580 + }, + { + "epoch": 1.7287340924313463, + "grad_norm": 1.663092145831909, + "learning_rate": 3.500080879774605e-06, + "loss": 1.1523, + "step": 2581 + }, + { + "epoch": 1.7294038847957132, + "grad_norm": 1.6461350804026635, + "learning_rate": 3.4987412630818534e-06, + "loss": 1.3203, + "step": 2582 + }, + { + "epoch": 1.7300736771600804, + "grad_norm": 2.08322824572389, + "learning_rate": 3.4974013050725087e-06, + "loss": 1.2853, + "step": 2583 + }, + { + "epoch": 1.7307434695244475, + "grad_norm": 1.61901189626801, + "learning_rate": 3.496061006204497e-06, + "loss": 1.235, + "step": 2584 + }, + { + "epoch": 1.7314132618888145, + "grad_norm": 1.631804729352645, + "learning_rate": 3.4947203669358615e-06, + "loss": 1.3843, + "step": 2585 + }, + { + "epoch": 1.7320830542531815, + "grad_norm": 1.6664495350162487, + "learning_rate": 3.4933793877247614e-06, + "loss": 1.2236, + "step": 2586 + }, + { + "epoch": 1.7327528466175486, + "grad_norm": 1.8261595334210459, + "learning_rate": 3.4920380690294714e-06, + "loss": 1.3828, + "step": 2587 + }, + { + "epoch": 1.7334226389819156, + "grad_norm": 1.4007707959511715, + "learning_rate": 3.490696411308383e-06, + "loss": 1.2306, + "step": 2588 + }, + { + "epoch": 1.7340924313462827, + "grad_norm": 1.6906317078425919, + "learning_rate": 3.4893544150200043e-06, + "loss": 1.1964, + "step": 2589 + }, + { + 
"epoch": 1.7347622237106497, + "grad_norm": 1.6152299783519732, + "learning_rate": 3.488012080622957e-06, + "loss": 1.0677, + "step": 2590 + }, + { + "epoch": 1.7354320160750167, + "grad_norm": 2.5494715906663323, + "learning_rate": 3.48666940857598e-06, + "loss": 1.3874, + "step": 2591 + }, + { + "epoch": 1.7361018084393838, + "grad_norm": 2.6245548177778315, + "learning_rate": 3.4853263993379272e-06, + "loss": 1.2601, + "step": 2592 + }, + { + "epoch": 1.7367716008037508, + "grad_norm": 1.8204855817190633, + "learning_rate": 3.483983053367767e-06, + "loss": 1.1656, + "step": 2593 + }, + { + "epoch": 1.7374413931681179, + "grad_norm": 1.8113536878327534, + "learning_rate": 3.4826393711245847e-06, + "loss": 1.3529, + "step": 2594 + }, + { + "epoch": 1.738111185532485, + "grad_norm": 2.708239572986339, + "learning_rate": 3.4812953530675784e-06, + "loss": 1.2323, + "step": 2595 + }, + { + "epoch": 1.738780977896852, + "grad_norm": 1.6287636490297375, + "learning_rate": 3.4799509996560625e-06, + "loss": 1.2307, + "step": 2596 + }, + { + "epoch": 1.739450770261219, + "grad_norm": 1.5879485021128845, + "learning_rate": 3.4786063113494656e-06, + "loss": 1.3012, + "step": 2597 + }, + { + "epoch": 1.7401205626255862, + "grad_norm": 3.046797995072702, + "learning_rate": 3.4772612886073297e-06, + "loss": 1.2595, + "step": 2598 + }, + { + "epoch": 1.740790354989953, + "grad_norm": 2.114932556226385, + "learning_rate": 3.4759159318893125e-06, + "loss": 1.2246, + "step": 2599 + }, + { + "epoch": 1.7414601473543203, + "grad_norm": 2.933461437296341, + "learning_rate": 3.474570241655185e-06, + "loss": 1.2093, + "step": 2600 + }, + { + "epoch": 1.7421299397186871, + "grad_norm": 1.5289507912336322, + "learning_rate": 3.4732242183648336e-06, + "loss": 1.3141, + "step": 2601 + }, + { + "epoch": 1.7427997320830544, + "grad_norm": 2.122060902211787, + "learning_rate": 3.4718778624782557e-06, + "loss": 1.2621, + "step": 2602 + }, + { + "epoch": 1.7434695244474212, + "grad_norm": 2.336367955556416, + "learning_rate": 3.4705311744555655e-06, + "loss": 1.1836, + "step": 2603 + }, + { + "epoch": 1.7441393168117885, + "grad_norm": 2.540101434644091, + "learning_rate": 3.4691841547569883e-06, + "loss": 1.3093, + "step": 2604 + }, + { + "epoch": 1.7448091091761553, + "grad_norm": 4.559897411667288, + "learning_rate": 3.4678368038428633e-06, + "loss": 1.3558, + "step": 2605 + }, + { + "epoch": 1.7454789015405225, + "grad_norm": 1.6262862716522204, + "learning_rate": 3.466489122173646e-06, + "loss": 1.3674, + "step": 2606 + }, + { + "epoch": 1.7461486939048894, + "grad_norm": 2.405330092716937, + "learning_rate": 3.4651411102098985e-06, + "loss": 1.1824, + "step": 2607 + }, + { + "epoch": 1.7468184862692566, + "grad_norm": 4.224872611208813, + "learning_rate": 3.463792768412303e-06, + "loss": 1.3535, + "step": 2608 + }, + { + "epoch": 1.7474882786336234, + "grad_norm": 1.8460967692752077, + "learning_rate": 3.4624440972416478e-06, + "loss": 1.2902, + "step": 2609 + }, + { + "epoch": 1.7481580709979907, + "grad_norm": 1.8472032973030057, + "learning_rate": 3.4610950971588395e-06, + "loss": 1.1249, + "step": 2610 + }, + { + "epoch": 1.7488278633623575, + "grad_norm": 2.47471905108916, + "learning_rate": 3.4597457686248927e-06, + "loss": 1.1638, + "step": 2611 + }, + { + "epoch": 1.7494976557267248, + "grad_norm": 1.9135859283745145, + "learning_rate": 3.458396112100938e-06, + "loss": 1.335, + "step": 2612 + }, + { + "epoch": 1.7501674480910918, + "grad_norm": 1.6339155880211294, + "learning_rate": 3.4570461280482133e-06, 
+ "loss": 1.1654, + "step": 2613 + }, + { + "epoch": 1.7508372404554589, + "grad_norm": 1.6298545384110066, + "learning_rate": 3.455695816928074e-06, + "loss": 1.3272, + "step": 2614 + }, + { + "epoch": 1.751507032819826, + "grad_norm": 1.9564347757246703, + "learning_rate": 3.454345179201983e-06, + "loss": 1.1774, + "step": 2615 + }, + { + "epoch": 1.752176825184193, + "grad_norm": 1.7180224931339567, + "learning_rate": 3.4529942153315178e-06, + "loss": 1.138, + "step": 2616 + }, + { + "epoch": 1.75284661754856, + "grad_norm": 2.3208582737331045, + "learning_rate": 3.4516429257783634e-06, + "loss": 1.0825, + "step": 2617 + }, + { + "epoch": 1.753516409912927, + "grad_norm": 1.6425536054702796, + "learning_rate": 3.4502913110043213e-06, + "loss": 1.1383, + "step": 2618 + }, + { + "epoch": 1.754186202277294, + "grad_norm": 2.3146847967662594, + "learning_rate": 3.4489393714713e-06, + "loss": 1.192, + "step": 2619 + }, + { + "epoch": 1.754855994641661, + "grad_norm": 2.190295386144859, + "learning_rate": 3.4475871076413214e-06, + "loss": 0.9548, + "step": 2620 + }, + { + "epoch": 1.7555257870060281, + "grad_norm": 1.4727293927058518, + "learning_rate": 3.4462345199765156e-06, + "loss": 1.3537, + "step": 2621 + }, + { + "epoch": 1.7561955793703952, + "grad_norm": 1.722481505861712, + "learning_rate": 3.4448816089391266e-06, + "loss": 1.2428, + "step": 2622 + }, + { + "epoch": 1.7568653717347622, + "grad_norm": 1.5220932833471834, + "learning_rate": 3.443528374991507e-06, + "loss": 1.353, + "step": 2623 + }, + { + "epoch": 1.7575351640991292, + "grad_norm": 2.311139075609325, + "learning_rate": 3.442174818596121e-06, + "loss": 1.0399, + "step": 2624 + }, + { + "epoch": 1.7582049564634963, + "grad_norm": 1.864274683551395, + "learning_rate": 3.44082094021554e-06, + "loss": 1.2659, + "step": 2625 + }, + { + "epoch": 1.7588747488278633, + "grad_norm": 2.456180178316321, + "learning_rate": 3.439466740312449e-06, + "loss": 1.1331, + "step": 2626 + }, + { + "epoch": 1.7595445411922304, + "grad_norm": 2.914609450137891, + "learning_rate": 3.4381122193496418e-06, + "loss": 1.1573, + "step": 2627 + }, + { + "epoch": 1.7602143335565974, + "grad_norm": 1.7054240290230855, + "learning_rate": 3.4367573777900206e-06, + "loss": 1.2949, + "step": 2628 + }, + { + "epoch": 1.7608841259209647, + "grad_norm": 1.8345305094221855, + "learning_rate": 3.435402216096598e-06, + "loss": 1.1114, + "step": 2629 + }, + { + "epoch": 1.7615539182853315, + "grad_norm": 1.7517094257964918, + "learning_rate": 3.4340467347324967e-06, + "loss": 1.1718, + "step": 2630 + }, + { + "epoch": 1.7622237106496987, + "grad_norm": 1.7682831377899517, + "learning_rate": 3.4326909341609482e-06, + "loss": 1.2046, + "step": 2631 + }, + { + "epoch": 1.7628935030140656, + "grad_norm": 1.7323362241923466, + "learning_rate": 3.4313348148452922e-06, + "loss": 1.2382, + "step": 2632 + }, + { + "epoch": 1.7635632953784328, + "grad_norm": 1.6730238034728289, + "learning_rate": 3.4299783772489794e-06, + "loss": 1.3234, + "step": 2633 + }, + { + "epoch": 1.7642330877427996, + "grad_norm": 1.807389815456828, + "learning_rate": 3.4286216218355663e-06, + "loss": 1.2852, + "step": 2634 + }, + { + "epoch": 1.764902880107167, + "grad_norm": 1.8024403628713055, + "learning_rate": 3.4272645490687205e-06, + "loss": 1.1452, + "step": 2635 + }, + { + "epoch": 1.7655726724715337, + "grad_norm": 1.6624860166192537, + "learning_rate": 3.425907159412218e-06, + "loss": 1.4176, + "step": 2636 + }, + { + "epoch": 1.766242464835901, + "grad_norm": 2.387538516880066, + 
"learning_rate": 3.424549453329941e-06, + "loss": 1.3321, + "step": 2637 + }, + { + "epoch": 1.7669122572002678, + "grad_norm": 1.7568062166907372, + "learning_rate": 3.4231914312858817e-06, + "loss": 1.2168, + "step": 2638 + }, + { + "epoch": 1.767582049564635, + "grad_norm": 1.6705906148722758, + "learning_rate": 3.4218330937441397e-06, + "loss": 1.2703, + "step": 2639 + }, + { + "epoch": 1.7682518419290019, + "grad_norm": 1.7256361551321984, + "learning_rate": 3.420474441168923e-06, + "loss": 1.208, + "step": 2640 + }, + { + "epoch": 1.7689216342933691, + "grad_norm": 1.835567696608471, + "learning_rate": 3.4191154740245458e-06, + "loss": 1.2433, + "step": 2641 + }, + { + "epoch": 1.769591426657736, + "grad_norm": 2.2388771580801183, + "learning_rate": 3.41775619277543e-06, + "loss": 1.1768, + "step": 2642 + }, + { + "epoch": 1.7702612190221032, + "grad_norm": 2.7783388805231075, + "learning_rate": 3.4163965978861085e-06, + "loss": 1.3156, + "step": 2643 + }, + { + "epoch": 1.7709310113864702, + "grad_norm": 2.5729325072331086, + "learning_rate": 3.4150366898212157e-06, + "loss": 1.1916, + "step": 2644 + }, + { + "epoch": 1.7716008037508373, + "grad_norm": 1.5979060580032978, + "learning_rate": 3.4136764690454972e-06, + "loss": 1.378, + "step": 2645 + }, + { + "epoch": 1.7722705961152043, + "grad_norm": 1.9329792377355366, + "learning_rate": 3.412315936023803e-06, + "loss": 1.3286, + "step": 2646 + }, + { + "epoch": 1.7729403884795714, + "grad_norm": 1.9332119978599336, + "learning_rate": 3.410955091221092e-06, + "loss": 1.2905, + "step": 2647 + }, + { + "epoch": 1.7736101808439384, + "grad_norm": 2.207808344879476, + "learning_rate": 3.4095939351024275e-06, + "loss": 1.0379, + "step": 2648 + }, + { + "epoch": 1.7742799732083054, + "grad_norm": 3.94873689700767, + "learning_rate": 3.4082324681329815e-06, + "loss": 1.0932, + "step": 2649 + }, + { + "epoch": 1.7749497655726725, + "grad_norm": 1.6734337135599975, + "learning_rate": 3.4068706907780287e-06, + "loss": 1.2552, + "step": 2650 + }, + { + "epoch": 1.7756195579370395, + "grad_norm": 2.460425236433966, + "learning_rate": 3.4055086035029544e-06, + "loss": 1.3239, + "step": 2651 + }, + { + "epoch": 1.7762893503014066, + "grad_norm": 1.6913478139643046, + "learning_rate": 3.404146206773246e-06, + "loss": 1.3403, + "step": 2652 + }, + { + "epoch": 1.7769591426657736, + "grad_norm": 3.568370017640396, + "learning_rate": 3.4027835010545e-06, + "loss": 1.2728, + "step": 2653 + }, + { + "epoch": 1.7776289350301406, + "grad_norm": 1.5213035867613458, + "learning_rate": 3.4014204868124136e-06, + "loss": 1.1454, + "step": 2654 + }, + { + "epoch": 1.7782987273945077, + "grad_norm": 4.598462149146965, + "learning_rate": 3.4000571645127958e-06, + "loss": 1.1348, + "step": 2655 + }, + { + "epoch": 1.7789685197588747, + "grad_norm": 2.969382456981848, + "learning_rate": 3.398693534621555e-06, + "loss": 1.1621, + "step": 2656 + }, + { + "epoch": 1.7796383121232418, + "grad_norm": 2.708967470096107, + "learning_rate": 3.3973295976047093e-06, + "loss": 1.1843, + "step": 2657 + }, + { + "epoch": 1.7803081044876088, + "grad_norm": 1.9669598188080522, + "learning_rate": 3.3959653539283775e-06, + "loss": 1.3078, + "step": 2658 + }, + { + "epoch": 1.7809778968519758, + "grad_norm": 2.055449875528196, + "learning_rate": 3.3946008040587883e-06, + "loss": 1.364, + "step": 2659 + }, + { + "epoch": 1.781647689216343, + "grad_norm": 1.8906912383282841, + "learning_rate": 3.3932359484622703e-06, + "loss": 1.2715, + "step": 2660 + }, + { + "epoch": 
1.78231748158071, + "grad_norm": 1.742186663368326, + "learning_rate": 3.3918707876052594e-06, + "loss": 1.1736, + "step": 2661 + }, + { + "epoch": 1.7829872739450772, + "grad_norm": 1.7378014215434612, + "learning_rate": 3.390505321954293e-06, + "loss": 1.1054, + "step": 2662 + }, + { + "epoch": 1.783657066309444, + "grad_norm": 1.7466999336985476, + "learning_rate": 3.3891395519760173e-06, + "loss": 1.308, + "step": 2663 + }, + { + "epoch": 1.7843268586738112, + "grad_norm": 2.1120817643988357, + "learning_rate": 3.3877734781371775e-06, + "loss": 1.139, + "step": 2664 + }, + { + "epoch": 1.784996651038178, + "grad_norm": 1.8600984710096233, + "learning_rate": 3.3864071009046263e-06, + "loss": 1.376, + "step": 2665 + }, + { + "epoch": 1.7856664434025453, + "grad_norm": 2.0126036406747274, + "learning_rate": 3.385040420745318e-06, + "loss": 1.1088, + "step": 2666 + }, + { + "epoch": 1.7863362357669121, + "grad_norm": 1.6925886578684102, + "learning_rate": 3.3836734381263116e-06, + "loss": 1.0756, + "step": 2667 + }, + { + "epoch": 1.7870060281312794, + "grad_norm": 1.960446571236234, + "learning_rate": 3.382306153514768e-06, + "loss": 1.3112, + "step": 2668 + }, + { + "epoch": 1.7876758204956462, + "grad_norm": 2.600593943201656, + "learning_rate": 3.3809385673779545e-06, + "loss": 1.0401, + "step": 2669 + }, + { + "epoch": 1.7883456128600135, + "grad_norm": 3.887381584923467, + "learning_rate": 3.3795706801832367e-06, + "loss": 1.1462, + "step": 2670 + }, + { + "epoch": 1.7890154052243803, + "grad_norm": 1.7258989408402392, + "learning_rate": 3.378202492398087e-06, + "loss": 1.2788, + "step": 2671 + }, + { + "epoch": 1.7896851975887476, + "grad_norm": 2.6396279707733217, + "learning_rate": 3.376834004490079e-06, + "loss": 1.2171, + "step": 2672 + }, + { + "epoch": 1.7903549899531144, + "grad_norm": 2.96000573379365, + "learning_rate": 3.375465216926889e-06, + "loss": 1.1465, + "step": 2673 + }, + { + "epoch": 1.7910247823174816, + "grad_norm": 2.0172804574078054, + "learning_rate": 3.374096130176295e-06, + "loss": 1.3142, + "step": 2674 + }, + { + "epoch": 1.7916945746818487, + "grad_norm": 1.6769642608937971, + "learning_rate": 3.3727267447061785e-06, + "loss": 1.2882, + "step": 2675 + }, + { + "epoch": 1.7923643670462157, + "grad_norm": 1.7841079062901797, + "learning_rate": 3.371357060984523e-06, + "loss": 1.2402, + "step": 2676 + }, + { + "epoch": 1.7930341594105828, + "grad_norm": 1.6252195787459904, + "learning_rate": 3.369987079479413e-06, + "loss": 1.2238, + "step": 2677 + }, + { + "epoch": 1.7937039517749498, + "grad_norm": 2.6812178500857273, + "learning_rate": 3.3686168006590346e-06, + "loss": 0.9979, + "step": 2678 + }, + { + "epoch": 1.7943737441393168, + "grad_norm": 1.4574355478899663, + "learning_rate": 3.3672462249916783e-06, + "loss": 1.1217, + "step": 2679 + }, + { + "epoch": 1.7950435365036839, + "grad_norm": 1.855141249516822, + "learning_rate": 3.365875352945731e-06, + "loss": 1.2115, + "step": 2680 + }, + { + "epoch": 1.795713328868051, + "grad_norm": 2.131206933997555, + "learning_rate": 3.3645041849896852e-06, + "loss": 1.2575, + "step": 2681 + }, + { + "epoch": 1.796383121232418, + "grad_norm": 1.8010911243214887, + "learning_rate": 3.363132721592135e-06, + "loss": 1.3271, + "step": 2682 + }, + { + "epoch": 1.797052913596785, + "grad_norm": 3.797161207380243, + "learning_rate": 3.3617609632217694e-06, + "loss": 1.219, + "step": 2683 + }, + { + "epoch": 1.797722705961152, + "grad_norm": 2.1260349560299683, + "learning_rate": 3.360388910347387e-06, + "loss": 
1.2408, + "step": 2684 + }, + { + "epoch": 1.798392498325519, + "grad_norm": 1.5353380534365144, + "learning_rate": 3.359016563437878e-06, + "loss": 1.3632, + "step": 2685 + }, + { + "epoch": 1.799062290689886, + "grad_norm": 3.2135436412614578, + "learning_rate": 3.3576439229622414e-06, + "loss": 1.3092, + "step": 2686 + }, + { + "epoch": 1.7997320830542531, + "grad_norm": 2.675009322881139, + "learning_rate": 3.3562709893895696e-06, + "loss": 1.1167, + "step": 2687 + }, + { + "epoch": 1.8004018754186202, + "grad_norm": 1.831051321806097, + "learning_rate": 3.35489776318906e-06, + "loss": 1.2795, + "step": 2688 + }, + { + "epoch": 1.8010716677829874, + "grad_norm": 1.5795576033487555, + "learning_rate": 3.3535242448300076e-06, + "loss": 1.1655, + "step": 2689 + }, + { + "epoch": 1.8017414601473543, + "grad_norm": 1.9338775164594806, + "learning_rate": 3.3521504347818083e-06, + "loss": 1.2046, + "step": 2690 + }, + { + "epoch": 1.8024112525117215, + "grad_norm": 1.630384093139639, + "learning_rate": 3.350776333513956e-06, + "loss": 1.3108, + "step": 2691 + }, + { + "epoch": 1.8030810448760883, + "grad_norm": 2.66951823401117, + "learning_rate": 3.3494019414960477e-06, + "loss": 1.1657, + "step": 2692 + }, + { + "epoch": 1.8037508372404556, + "grad_norm": 1.9116829454116742, + "learning_rate": 3.348027259197774e-06, + "loss": 1.1047, + "step": 2693 + }, + { + "epoch": 1.8044206296048224, + "grad_norm": 1.61558260911413, + "learning_rate": 3.3466522870889316e-06, + "loss": 1.3621, + "step": 2694 + }, + { + "epoch": 1.8050904219691897, + "grad_norm": 2.775755237004802, + "learning_rate": 3.3452770256394107e-06, + "loss": 1.1051, + "step": 2695 + }, + { + "epoch": 1.8057602143335565, + "grad_norm": 2.1155868025750286, + "learning_rate": 3.3439014753192033e-06, + "loss": 1.2541, + "step": 2696 + }, + { + "epoch": 1.8064300066979238, + "grad_norm": 2.3990710019805044, + "learning_rate": 3.3425256365983983e-06, + "loss": 1.1618, + "step": 2697 + }, + { + "epoch": 1.8070997990622906, + "grad_norm": 1.6391559572489065, + "learning_rate": 3.3411495099471857e-06, + "loss": 1.3042, + "step": 2698 + }, + { + "epoch": 1.8077695914266578, + "grad_norm": 1.8338764873056785, + "learning_rate": 3.339773095835851e-06, + "loss": 1.4012, + "step": 2699 + }, + { + "epoch": 1.8084393837910246, + "grad_norm": 2.1275749036524996, + "learning_rate": 3.3383963947347803e-06, + "loss": 1.2642, + "step": 2700 + }, + { + "epoch": 1.809109176155392, + "grad_norm": 1.9227423637159597, + "learning_rate": 3.3370194071144547e-06, + "loss": 1.3365, + "step": 2701 + }, + { + "epoch": 1.8097789685197587, + "grad_norm": 1.7576593690288491, + "learning_rate": 3.335642133445458e-06, + "loss": 1.2764, + "step": 2702 + }, + { + "epoch": 1.810448760884126, + "grad_norm": 2.247122893878837, + "learning_rate": 3.334264574198467e-06, + "loss": 1.2542, + "step": 2703 + }, + { + "epoch": 1.8111185532484928, + "grad_norm": 1.6277728099182713, + "learning_rate": 3.3328867298442597e-06, + "loss": 1.3128, + "step": 2704 + }, + { + "epoch": 1.81178834561286, + "grad_norm": 2.1138279385704117, + "learning_rate": 3.331508600853708e-06, + "loss": 1.1417, + "step": 2705 + }, + { + "epoch": 1.812458137977227, + "grad_norm": 1.7754891887364719, + "learning_rate": 3.330130187697785e-06, + "loss": 1.4404, + "step": 2706 + }, + { + "epoch": 1.8131279303415941, + "grad_norm": 2.0497270424342835, + "learning_rate": 3.328751490847557e-06, + "loss": 1.3269, + "step": 2707 + }, + { + "epoch": 1.8137977227059612, + "grad_norm": 1.771129966299264, + 
"learning_rate": 3.327372510774191e-06, + "loss": 1.2809, + "step": 2708 + }, + { + "epoch": 1.8144675150703282, + "grad_norm": 1.8129569295124925, + "learning_rate": 3.3259932479489477e-06, + "loss": 1.2191, + "step": 2709 + }, + { + "epoch": 1.8151373074346953, + "grad_norm": 1.6465547178620248, + "learning_rate": 3.3246137028431864e-06, + "loss": 1.3431, + "step": 2710 + }, + { + "epoch": 1.8158070997990623, + "grad_norm": 1.6413673467024281, + "learning_rate": 3.323233875928362e-06, + "loss": 1.0879, + "step": 2711 + }, + { + "epoch": 1.8164768921634293, + "grad_norm": 3.086827931907578, + "learning_rate": 3.3218537676760255e-06, + "loss": 1.1117, + "step": 2712 + }, + { + "epoch": 1.8171466845277964, + "grad_norm": 2.093965454053697, + "learning_rate": 3.3204733785578254e-06, + "loss": 1.2122, + "step": 2713 + }, + { + "epoch": 1.8178164768921634, + "grad_norm": 1.7251287317485458, + "learning_rate": 3.3190927090455046e-06, + "loss": 1.2644, + "step": 2714 + }, + { + "epoch": 1.8184862692565305, + "grad_norm": 1.856347859803868, + "learning_rate": 3.317711759610902e-06, + "loss": 1.3099, + "step": 2715 + }, + { + "epoch": 1.8191560616208975, + "grad_norm": 1.9289398949843666, + "learning_rate": 3.316330530725953e-06, + "loss": 1.2638, + "step": 2716 + }, + { + "epoch": 1.8198258539852645, + "grad_norm": 1.5823538928363365, + "learning_rate": 3.3149490228626886e-06, + "loss": 1.3133, + "step": 2717 + }, + { + "epoch": 1.8204956463496316, + "grad_norm": 2.2188695343080647, + "learning_rate": 3.3135672364932337e-06, + "loss": 1.1866, + "step": 2718 + }, + { + "epoch": 1.8211654387139986, + "grad_norm": 1.782694954835786, + "learning_rate": 3.3121851720898094e-06, + "loss": 1.3861, + "step": 2719 + }, + { + "epoch": 1.8218352310783659, + "grad_norm": 1.739613973373911, + "learning_rate": 3.310802830124733e-06, + "loss": 1.1623, + "step": 2720 + }, + { + "epoch": 1.8225050234427327, + "grad_norm": 1.9418126083519613, + "learning_rate": 3.3094202110704145e-06, + "loss": 1.2794, + "step": 2721 + }, + { + "epoch": 1.8231748158071, + "grad_norm": 1.7261798116626093, + "learning_rate": 3.308037315399359e-06, + "loss": 1.2387, + "step": 2722 + }, + { + "epoch": 1.8238446081714668, + "grad_norm": 1.5980539132982903, + "learning_rate": 3.306654143584167e-06, + "loss": 1.0253, + "step": 2723 + }, + { + "epoch": 1.824514400535834, + "grad_norm": 3.961738146108842, + "learning_rate": 3.3052706960975332e-06, + "loss": 1.2756, + "step": 2724 + }, + { + "epoch": 1.8251841929002008, + "grad_norm": 3.0508192393981712, + "learning_rate": 3.3038869734122466e-06, + "loss": 1.3138, + "step": 2725 + }, + { + "epoch": 1.825853985264568, + "grad_norm": 1.7487086804826562, + "learning_rate": 3.3025029760011883e-06, + "loss": 1.2451, + "step": 2726 + }, + { + "epoch": 1.826523777628935, + "grad_norm": 12.962764720054658, + "learning_rate": 3.3011187043373378e-06, + "loss": 1.3768, + "step": 2727 + }, + { + "epoch": 1.8271935699933022, + "grad_norm": 2.2355514033813013, + "learning_rate": 3.2997341588937624e-06, + "loss": 1.2077, + "step": 2728 + }, + { + "epoch": 1.827863362357669, + "grad_norm": 1.7258295613257426, + "learning_rate": 3.298349340143629e-06, + "loss": 1.285, + "step": 2729 + }, + { + "epoch": 1.8285331547220363, + "grad_norm": 1.6099660223707395, + "learning_rate": 3.2969642485601923e-06, + "loss": 1.4221, + "step": 2730 + }, + { + "epoch": 1.829202947086403, + "grad_norm": 1.8100438126180725, + "learning_rate": 3.2955788846168046e-06, + "loss": 1.141, + "step": 2731 + }, + { + "epoch": 
1.8298727394507703, + "grad_norm": 1.6144900620269458, + "learning_rate": 3.294193248786908e-06, + "loss": 1.1884, + "step": 2732 + }, + { + "epoch": 1.8305425318151372, + "grad_norm": 1.8869717032674582, + "learning_rate": 3.292807341544041e-06, + "loss": 1.0794, + "step": 2733 + }, + { + "epoch": 1.8312123241795044, + "grad_norm": 1.9185594664361854, + "learning_rate": 3.291421163361831e-06, + "loss": 1.2673, + "step": 2734 + }, + { + "epoch": 1.8318821165438715, + "grad_norm": 2.2842303661636514, + "learning_rate": 3.2900347147140015e-06, + "loss": 1.3018, + "step": 2735 + }, + { + "epoch": 1.8325519089082385, + "grad_norm": 1.523094127579154, + "learning_rate": 3.2886479960743655e-06, + "loss": 0.9921, + "step": 2736 + }, + { + "epoch": 1.8332217012726055, + "grad_norm": 2.6024796372197776, + "learning_rate": 3.287261007916831e-06, + "loss": 1.1237, + "step": 2737 + }, + { + "epoch": 1.8338914936369726, + "grad_norm": 3.345699175240575, + "learning_rate": 3.2858737507153947e-06, + "loss": 1.3996, + "step": 2738 + }, + { + "epoch": 1.8345612860013396, + "grad_norm": 1.662857208525645, + "learning_rate": 3.2844862249441496e-06, + "loss": 1.2616, + "step": 2739 + }, + { + "epoch": 1.8352310783657066, + "grad_norm": 1.690046700571559, + "learning_rate": 3.2830984310772764e-06, + "loss": 1.1717, + "step": 2740 + }, + { + "epoch": 1.8359008707300737, + "grad_norm": 1.867428248153463, + "learning_rate": 3.28171036958905e-06, + "loss": 1.1309, + "step": 2741 + }, + { + "epoch": 1.8365706630944407, + "grad_norm": 2.7649563974705287, + "learning_rate": 3.280322040953835e-06, + "loss": 1.3798, + "step": 2742 + }, + { + "epoch": 1.8372404554588078, + "grad_norm": 1.9861682229531097, + "learning_rate": 3.27893344564609e-06, + "loss": 1.0996, + "step": 2743 + }, + { + "epoch": 1.8379102478231748, + "grad_norm": 1.6362044418746826, + "learning_rate": 3.277544584140361e-06, + "loss": 1.2592, + "step": 2744 + }, + { + "epoch": 1.8385800401875418, + "grad_norm": 2.4820463293181856, + "learning_rate": 3.2761554569112887e-06, + "loss": 1.1588, + "step": 2745 + }, + { + "epoch": 1.8392498325519089, + "grad_norm": 1.6763652541609124, + "learning_rate": 3.274766064433601e-06, + "loss": 1.3461, + "step": 2746 + }, + { + "epoch": 1.839919624916276, + "grad_norm": 1.698829460855164, + "learning_rate": 3.27337640718212e-06, + "loss": 1.1169, + "step": 2747 + }, + { + "epoch": 1.840589417280643, + "grad_norm": 1.7286107997516547, + "learning_rate": 3.271986485631755e-06, + "loss": 1.1286, + "step": 2748 + }, + { + "epoch": 1.84125920964501, + "grad_norm": 1.5893018852287575, + "learning_rate": 3.270596300257509e-06, + "loss": 1.2331, + "step": 2749 + }, + { + "epoch": 1.841929002009377, + "grad_norm": 1.6396827673959464, + "learning_rate": 3.269205851534471e-06, + "loss": 1.2745, + "step": 2750 + }, + { + "epoch": 1.8425987943737443, + "grad_norm": 1.6972265052281899, + "learning_rate": 3.2678151399378245e-06, + "loss": 1.161, + "step": 2751 + }, + { + "epoch": 1.8432685867381111, + "grad_norm": 4.063404560837545, + "learning_rate": 3.2664241659428396e-06, + "loss": 1.3527, + "step": 2752 + }, + { + "epoch": 1.8439383791024784, + "grad_norm": 2.488843363201023, + "learning_rate": 3.2650329300248772e-06, + "loss": 1.2373, + "step": 2753 + }, + { + "epoch": 1.8446081714668452, + "grad_norm": 1.5766876655742301, + "learning_rate": 3.2636414326593884e-06, + "loss": 1.2254, + "step": 2754 + }, + { + "epoch": 1.8452779638312125, + "grad_norm": 1.8619579214835065, + "learning_rate": 3.262249674321912e-06, + "loss": 
1.3503, + "step": 2755 + }, + { + "epoch": 1.8459477561955793, + "grad_norm": 3.0253083677875012, + "learning_rate": 3.2608576554880775e-06, + "loss": 1.3299, + "step": 2756 + }, + { + "epoch": 1.8466175485599465, + "grad_norm": 2.021335078198997, + "learning_rate": 3.2594653766336027e-06, + "loss": 1.2384, + "step": 2757 + }, + { + "epoch": 1.8472873409243133, + "grad_norm": 1.9082133817337257, + "learning_rate": 3.258072838234294e-06, + "loss": 1.2669, + "step": 2758 + }, + { + "epoch": 1.8479571332886806, + "grad_norm": 1.717740903903729, + "learning_rate": 3.256680040766048e-06, + "loss": 1.0729, + "step": 2759 + }, + { + "epoch": 1.8486269256530474, + "grad_norm": 1.6410104288299343, + "learning_rate": 3.255286984704847e-06, + "loss": 1.1641, + "step": 2760 + }, + { + "epoch": 1.8492967180174147, + "grad_norm": 1.6707508401527111, + "learning_rate": 3.2538936705267643e-06, + "loss": 1.2113, + "step": 2761 + }, + { + "epoch": 1.8499665103817815, + "grad_norm": 1.7893477345606692, + "learning_rate": 3.2525000987079615e-06, + "loss": 1.2046, + "step": 2762 + }, + { + "epoch": 1.8506363027461488, + "grad_norm": 3.0304761458753853, + "learning_rate": 3.2511062697246853e-06, + "loss": 1.2625, + "step": 2763 + }, + { + "epoch": 1.8513060951105156, + "grad_norm": 1.7767967910174767, + "learning_rate": 3.2497121840532734e-06, + "loss": 1.2146, + "step": 2764 + }, + { + "epoch": 1.8519758874748828, + "grad_norm": 1.6990310482592779, + "learning_rate": 3.2483178421701507e-06, + "loss": 1.3044, + "step": 2765 + }, + { + "epoch": 1.8526456798392499, + "grad_norm": 1.7141817000128259, + "learning_rate": 3.2469232445518274e-06, + "loss": 1.165, + "step": 2766 + }, + { + "epoch": 1.853315472203617, + "grad_norm": 1.7886265182365775, + "learning_rate": 3.2455283916749034e-06, + "loss": 1.3289, + "step": 2767 + }, + { + "epoch": 1.853985264567984, + "grad_norm": 1.7765933484636842, + "learning_rate": 3.2441332840160665e-06, + "loss": 1.3659, + "step": 2768 + }, + { + "epoch": 1.854655056932351, + "grad_norm": 1.703769456976103, + "learning_rate": 3.2427379220520883e-06, + "loss": 1.16, + "step": 2769 + }, + { + "epoch": 1.855324849296718, + "grad_norm": 1.7648790586232854, + "learning_rate": 3.2413423062598296e-06, + "loss": 1.2456, + "step": 2770 + }, + { + "epoch": 1.855994641661085, + "grad_norm": 1.55916647888934, + "learning_rate": 3.2399464371162382e-06, + "loss": 1.2057, + "step": 2771 + }, + { + "epoch": 1.8566644340254521, + "grad_norm": 1.6845079228735655, + "learning_rate": 3.238550315098348e-06, + "loss": 1.3301, + "step": 2772 + }, + { + "epoch": 1.8573342263898192, + "grad_norm": 2.810747358086013, + "learning_rate": 3.237153940683278e-06, + "loss": 1.2571, + "step": 2773 + }, + { + "epoch": 1.8580040187541862, + "grad_norm": 2.594901648581402, + "learning_rate": 3.2357573143482364e-06, + "loss": 1.1589, + "step": 2774 + }, + { + "epoch": 1.8586738111185532, + "grad_norm": 1.5354257364725132, + "learning_rate": 3.234360436570514e-06, + "loss": 1.1976, + "step": 2775 + }, + { + "epoch": 1.8593436034829203, + "grad_norm": 2.298358958669852, + "learning_rate": 3.2329633078274904e-06, + "loss": 1.1831, + "step": 2776 + }, + { + "epoch": 1.8600133958472873, + "grad_norm": 1.9634569192247329, + "learning_rate": 3.231565928596629e-06, + "loss": 1.137, + "step": 2777 + }, + { + "epoch": 1.8606831882116543, + "grad_norm": 3.570813350501334, + "learning_rate": 3.2301682993554806e-06, + "loss": 1.2544, + "step": 2778 + }, + { + "epoch": 1.8613529805760214, + "grad_norm": 2.56150222372982, + 
"learning_rate": 3.2287704205816793e-06, + "loss": 1.149, + "step": 2779 + }, + { + "epoch": 1.8620227729403884, + "grad_norm": 3.306324847707308, + "learning_rate": 3.227372292752947e-06, + "loss": 1.2391, + "step": 2780 + }, + { + "epoch": 1.8626925653047555, + "grad_norm": 1.844720342560049, + "learning_rate": 3.2259739163470878e-06, + "loss": 1.1991, + "step": 2781 + }, + { + "epoch": 1.8633623576691227, + "grad_norm": 1.5508513738808065, + "learning_rate": 3.2245752918419942e-06, + "loss": 1.0254, + "step": 2782 + }, + { + "epoch": 1.8640321500334895, + "grad_norm": 1.6554918447619003, + "learning_rate": 3.223176419715639e-06, + "loss": 1.4232, + "step": 2783 + }, + { + "epoch": 1.8647019423978568, + "grad_norm": 2.456856969391546, + "learning_rate": 3.221777300446085e-06, + "loss": 1.2019, + "step": 2784 + }, + { + "epoch": 1.8653717347622236, + "grad_norm": 2.262991300908164, + "learning_rate": 3.220377934511475e-06, + "loss": 1.0693, + "step": 2785 + }, + { + "epoch": 1.8660415271265909, + "grad_norm": 1.8918935978996765, + "learning_rate": 3.2189783223900383e-06, + "loss": 1.2987, + "step": 2786 + }, + { + "epoch": 1.8667113194909577, + "grad_norm": 1.7296819380759856, + "learning_rate": 3.2175784645600884e-06, + "loss": 1.2071, + "step": 2787 + }, + { + "epoch": 1.867381111855325, + "grad_norm": 3.4072826509678875, + "learning_rate": 3.2161783615000205e-06, + "loss": 1.1671, + "step": 2788 + }, + { + "epoch": 1.8680509042196918, + "grad_norm": 2.436580855211188, + "learning_rate": 3.2147780136883166e-06, + "loss": 1.3018, + "step": 2789 + }, + { + "epoch": 1.868720696584059, + "grad_norm": 2.979857984805404, + "learning_rate": 3.2133774216035408e-06, + "loss": 1.1086, + "step": 2790 + }, + { + "epoch": 1.8693904889484259, + "grad_norm": 2.281486239717992, + "learning_rate": 3.2119765857243406e-06, + "loss": 1.0695, + "step": 2791 + }, + { + "epoch": 1.8700602813127931, + "grad_norm": 1.9178833291459116, + "learning_rate": 3.210575506529448e-06, + "loss": 1.4003, + "step": 2792 + }, + { + "epoch": 1.87073007367716, + "grad_norm": 1.8543624467318938, + "learning_rate": 3.209174184497676e-06, + "loss": 0.9277, + "step": 2793 + }, + { + "epoch": 1.8713998660415272, + "grad_norm": 2.988441152756929, + "learning_rate": 3.207772620107923e-06, + "loss": 1.3216, + "step": 2794 + }, + { + "epoch": 1.872069658405894, + "grad_norm": 1.7216922804642028, + "learning_rate": 3.206370813839168e-06, + "loss": 1.2706, + "step": 2795 + }, + { + "epoch": 1.8727394507702613, + "grad_norm": 1.7646091098000853, + "learning_rate": 3.204968766170475e-06, + "loss": 1.0359, + "step": 2796 + }, + { + "epoch": 1.8734092431346283, + "grad_norm": 1.7221409888231929, + "learning_rate": 3.2035664775809883e-06, + "loss": 1.2164, + "step": 2797 + }, + { + "epoch": 1.8740790354989953, + "grad_norm": 2.1958363900791458, + "learning_rate": 3.202163948549936e-06, + "loss": 1.1481, + "step": 2798 + }, + { + "epoch": 1.8747488278633624, + "grad_norm": 1.7442409526620857, + "learning_rate": 3.2007611795566275e-06, + "loss": 1.3437, + "step": 2799 + }, + { + "epoch": 1.8754186202277294, + "grad_norm": 1.7452356781268357, + "learning_rate": 3.1993581710804544e-06, + "loss": 1.1924, + "step": 2800 + }, + { + "epoch": 1.8760884125920965, + "grad_norm": 2.832891265010711, + "learning_rate": 3.1979549236008905e-06, + "loss": 1.3226, + "step": 2801 + }, + { + "epoch": 1.8767582049564635, + "grad_norm": 1.6602826492092106, + "learning_rate": 3.1965514375974915e-06, + "loss": 1.1348, + "step": 2802 + }, + { + "epoch": 
1.8774279973208305, + "grad_norm": 2.5096767220555862, + "learning_rate": 3.195147713549894e-06, + "loss": 1.1804, + "step": 2803 + }, + { + "epoch": 1.8780977896851976, + "grad_norm": 2.2651937815030125, + "learning_rate": 3.1937437519378163e-06, + "loss": 1.0289, + "step": 2804 + }, + { + "epoch": 1.8787675820495646, + "grad_norm": 1.6630546496590874, + "learning_rate": 3.1923395532410572e-06, + "loss": 1.1949, + "step": 2805 + }, + { + "epoch": 1.8794373744139317, + "grad_norm": 2.688512096287731, + "learning_rate": 3.1909351179394972e-06, + "loss": 1.1837, + "step": 2806 + }, + { + "epoch": 1.8801071667782987, + "grad_norm": 1.8999437841833042, + "learning_rate": 3.1895304465130976e-06, + "loss": 1.2779, + "step": 2807 + }, + { + "epoch": 1.8807769591426657, + "grad_norm": 1.9552914365900442, + "learning_rate": 3.188125539441901e-06, + "loss": 1.0159, + "step": 2808 + }, + { + "epoch": 1.8814467515070328, + "grad_norm": 1.8040862633026116, + "learning_rate": 3.1867203972060286e-06, + "loss": 1.1996, + "step": 2809 + }, + { + "epoch": 1.8821165438713998, + "grad_norm": 4.453197124120067, + "learning_rate": 3.1853150202856835e-06, + "loss": 1.0573, + "step": 2810 + }, + { + "epoch": 1.882786336235767, + "grad_norm": 1.856204203565365, + "learning_rate": 3.1839094091611496e-06, + "loss": 1.3002, + "step": 2811 + }, + { + "epoch": 1.883456128600134, + "grad_norm": 1.7495481801603219, + "learning_rate": 3.1825035643127888e-06, + "loss": 1.2796, + "step": 2812 + }, + { + "epoch": 1.8841259209645012, + "grad_norm": 2.0805317665311813, + "learning_rate": 3.1810974862210454e-06, + "loss": 1.18, + "step": 2813 + }, + { + "epoch": 1.884795713328868, + "grad_norm": 2.136818499405698, + "learning_rate": 3.1796911753664405e-06, + "loss": 1.2476, + "step": 2814 + }, + { + "epoch": 1.8854655056932352, + "grad_norm": 3.4014392684448485, + "learning_rate": 3.178284632229578e-06, + "loss": 1.1725, + "step": 2815 + }, + { + "epoch": 1.886135298057602, + "grad_norm": 2.9141705467693733, + "learning_rate": 3.1768778572911376e-06, + "loss": 1.1835, + "step": 2816 + }, + { + "epoch": 1.8868050904219693, + "grad_norm": 2.196234125375589, + "learning_rate": 3.1754708510318812e-06, + "loss": 1.186, + "step": 2817 + }, + { + "epoch": 1.8874748827863361, + "grad_norm": 1.8491190835591833, + "learning_rate": 3.174063613932648e-06, + "loss": 1.2124, + "step": 2818 + }, + { + "epoch": 1.8881446751507034, + "grad_norm": 3.23272233271744, + "learning_rate": 3.1726561464743584e-06, + "loss": 1.1445, + "step": 2819 + }, + { + "epoch": 1.8888144675150702, + "grad_norm": 1.7445110447546734, + "learning_rate": 3.1712484491380068e-06, + "loss": 1.2658, + "step": 2820 + }, + { + "epoch": 1.8894842598794375, + "grad_norm": 2.5056435324041773, + "learning_rate": 3.1698405224046725e-06, + "loss": 1.2136, + "step": 2821 + }, + { + "epoch": 1.8901540522438043, + "grad_norm": 2.0594073992110515, + "learning_rate": 3.168432366755507e-06, + "loss": 1.0309, + "step": 2822 + }, + { + "epoch": 1.8908238446081715, + "grad_norm": 1.6500000763604887, + "learning_rate": 3.1670239826717453e-06, + "loss": 1.259, + "step": 2823 + }, + { + "epoch": 1.8914936369725384, + "grad_norm": 2.8603270973444834, + "learning_rate": 3.165615370634697e-06, + "loss": 1.3534, + "step": 2824 + }, + { + "epoch": 1.8921634293369056, + "grad_norm": 1.9180404935495876, + "learning_rate": 3.1642065311257507e-06, + "loss": 1.0469, + "step": 2825 + }, + { + "epoch": 1.8928332217012724, + "grad_norm": 1.6171343337132391, + "learning_rate": 3.162797464626373e-06, + 
"loss": 1.1018, + "step": 2826 + }, + { + "epoch": 1.8935030140656397, + "grad_norm": 1.9204353028536463, + "learning_rate": 3.1613881716181078e-06, + "loss": 1.1789, + "step": 2827 + }, + { + "epoch": 1.8941728064300067, + "grad_norm": 1.7332330138976144, + "learning_rate": 3.159978652582576e-06, + "loss": 1.089, + "step": 2828 + }, + { + "epoch": 1.8948425987943738, + "grad_norm": 2.286105578919553, + "learning_rate": 3.1585689080014764e-06, + "loss": 1.1982, + "step": 2829 + }, + { + "epoch": 1.8955123911587408, + "grad_norm": 1.6978444033585107, + "learning_rate": 3.1571589383565847e-06, + "loss": 1.2122, + "step": 2830 + }, + { + "epoch": 1.8961821835231079, + "grad_norm": 2.363012499771346, + "learning_rate": 3.155748744129754e-06, + "loss": 1.0594, + "step": 2831 + }, + { + "epoch": 1.896851975887475, + "grad_norm": 1.8304414731555219, + "learning_rate": 3.1543383258029127e-06, + "loss": 1.2167, + "step": 2832 + }, + { + "epoch": 1.897521768251842, + "grad_norm": 1.9274316233516797, + "learning_rate": 3.152927683858067e-06, + "loss": 1.1832, + "step": 2833 + }, + { + "epoch": 1.898191560616209, + "grad_norm": 2.842844456374344, + "learning_rate": 3.1515168187772993e-06, + "loss": 1.3068, + "step": 2834 + }, + { + "epoch": 1.898861352980576, + "grad_norm": 1.61609558320872, + "learning_rate": 3.150105731042768e-06, + "loss": 1.2229, + "step": 2835 + }, + { + "epoch": 1.899531145344943, + "grad_norm": 2.2927140339499927, + "learning_rate": 3.148694421136708e-06, + "loss": 1.2593, + "step": 2836 + }, + { + "epoch": 1.90020093770931, + "grad_norm": 1.598598984512902, + "learning_rate": 3.1472828895414302e-06, + "loss": 1.3024, + "step": 2837 + }, + { + "epoch": 1.9008707300736771, + "grad_norm": 1.6671857883720744, + "learning_rate": 3.14587113673932e-06, + "loss": 1.227, + "step": 2838 + }, + { + "epoch": 1.9015405224380442, + "grad_norm": 2.3001079822222943, + "learning_rate": 3.1444591632128407e-06, + "loss": 1.4052, + "step": 2839 + }, + { + "epoch": 1.9022103148024112, + "grad_norm": 1.8703478963319284, + "learning_rate": 3.1430469694445286e-06, + "loss": 1.131, + "step": 2840 + }, + { + "epoch": 1.9028801071667782, + "grad_norm": 2.817077187686105, + "learning_rate": 3.1416345559169964e-06, + "loss": 1.1921, + "step": 2841 + }, + { + "epoch": 1.9035498995311455, + "grad_norm": 1.628116434032232, + "learning_rate": 3.140221923112932e-06, + "loss": 1.2221, + "step": 2842 + }, + { + "epoch": 1.9042196918955123, + "grad_norm": 2.433322786285593, + "learning_rate": 3.138809071515099e-06, + "loss": 1.1676, + "step": 2843 + }, + { + "epoch": 1.9048894842598796, + "grad_norm": 1.9340160706643026, + "learning_rate": 3.1373960016063333e-06, + "loss": 1.177, + "step": 2844 + }, + { + "epoch": 1.9055592766242464, + "grad_norm": 2.466410618347692, + "learning_rate": 3.1359827138695475e-06, + "loss": 1.3098, + "step": 2845 + }, + { + "epoch": 1.9062290689886137, + "grad_norm": 3.4371465122112514, + "learning_rate": 3.134569208787729e-06, + "loss": 1.0318, + "step": 2846 + }, + { + "epoch": 1.9068988613529805, + "grad_norm": 13.171279949599269, + "learning_rate": 3.1331554868439375e-06, + "loss": 1.3149, + "step": 2847 + }, + { + "epoch": 1.9075686537173477, + "grad_norm": 1.7709996512172752, + "learning_rate": 3.131741548521309e-06, + "loss": 1.2848, + "step": 2848 + }, + { + "epoch": 1.9082384460817146, + "grad_norm": 1.87631905063299, + "learning_rate": 3.130327394303051e-06, + "loss": 1.2364, + "step": 2849 + }, + { + "epoch": 1.9089082384460818, + "grad_norm": 1.5156658758855794, + 
"learning_rate": 3.1289130246724464e-06, + "loss": 1.3207, + "step": 2850 + }, + { + "epoch": 1.9095780308104486, + "grad_norm": 1.6023165952073157, + "learning_rate": 3.1274984401128527e-06, + "loss": 1.2856, + "step": 2851 + }, + { + "epoch": 1.910247823174816, + "grad_norm": 1.5662519016545555, + "learning_rate": 3.126083641107698e-06, + "loss": 1.1127, + "step": 2852 + }, + { + "epoch": 1.9109176155391827, + "grad_norm": 2.437937400925389, + "learning_rate": 3.1246686281404864e-06, + "loss": 1.3769, + "step": 2853 + }, + { + "epoch": 1.91158740790355, + "grad_norm": 1.892791747404528, + "learning_rate": 3.1232534016947937e-06, + "loss": 1.146, + "step": 2854 + }, + { + "epoch": 1.9122572002679168, + "grad_norm": 2.03502392985062, + "learning_rate": 3.1218379622542685e-06, + "loss": 1.1797, + "step": 2855 + }, + { + "epoch": 1.912926992632284, + "grad_norm": 2.3382666195842146, + "learning_rate": 3.1204223103026326e-06, + "loss": 1.3848, + "step": 2856 + }, + { + "epoch": 1.913596784996651, + "grad_norm": 1.8936358599730918, + "learning_rate": 3.1190064463236818e-06, + "loss": 1.0858, + "step": 2857 + }, + { + "epoch": 1.9142665773610181, + "grad_norm": 2.2241002299471178, + "learning_rate": 3.1175903708012813e-06, + "loss": 1.2812, + "step": 2858 + }, + { + "epoch": 1.9149363697253852, + "grad_norm": 1.720654974656694, + "learning_rate": 3.116174084219372e-06, + "loss": 1.4308, + "step": 2859 + }, + { + "epoch": 1.9156061620897522, + "grad_norm": 2.824097495976214, + "learning_rate": 3.1147575870619632e-06, + "loss": 1.2109, + "step": 2860 + }, + { + "epoch": 1.9162759544541192, + "grad_norm": 1.6859688505130583, + "learning_rate": 3.1133408798131403e-06, + "loss": 1.1802, + "step": 2861 + }, + { + "epoch": 1.9169457468184863, + "grad_norm": 1.767470381542964, + "learning_rate": 3.1119239629570576e-06, + "loss": 1.2652, + "step": 2862 + }, + { + "epoch": 1.9176155391828533, + "grad_norm": 3.3748129549592822, + "learning_rate": 3.1105068369779414e-06, + "loss": 1.2478, + "step": 2863 + }, + { + "epoch": 1.9182853315472204, + "grad_norm": 1.7793453435546127, + "learning_rate": 3.1090895023600896e-06, + "loss": 1.2253, + "step": 2864 + }, + { + "epoch": 1.9189551239115874, + "grad_norm": 2.000808822801129, + "learning_rate": 3.1076719595878734e-06, + "loss": 1.2036, + "step": 2865 + }, + { + "epoch": 1.9196249162759544, + "grad_norm": 1.712772748018447, + "learning_rate": 3.106254209145732e-06, + "loss": 1.0949, + "step": 2866 + }, + { + "epoch": 1.9202947086403215, + "grad_norm": 1.950628970994897, + "learning_rate": 3.1048362515181777e-06, + "loss": 1.2198, + "step": 2867 + }, + { + "epoch": 1.9209645010046885, + "grad_norm": 2.1392807816296027, + "learning_rate": 3.103418087189793e-06, + "loss": 1.2383, + "step": 2868 + }, + { + "epoch": 1.9216342933690556, + "grad_norm": 3.2398187667733738, + "learning_rate": 3.10199971664523e-06, + "loss": 1.0628, + "step": 2869 + }, + { + "epoch": 1.9223040857334226, + "grad_norm": 1.5711987698579313, + "learning_rate": 3.1005811403692134e-06, + "loss": 1.2333, + "step": 2870 + }, + { + "epoch": 1.9229738780977896, + "grad_norm": 3.157194900329438, + "learning_rate": 3.0991623588465365e-06, + "loss": 1.2466, + "step": 2871 + }, + { + "epoch": 1.9236436704621567, + "grad_norm": 1.7630905582135727, + "learning_rate": 3.0977433725620633e-06, + "loss": 1.0682, + "step": 2872 + }, + { + "epoch": 1.924313462826524, + "grad_norm": 2.3980424956066435, + "learning_rate": 3.0963241820007273e-06, + "loss": 1.1576, + "step": 2873 + }, + { + "epoch": 
1.9249832551908908, + "grad_norm": 2.197030678232685, + "learning_rate": 3.094904787647533e-06, + "loss": 1.3977, + "step": 2874 + }, + { + "epoch": 1.925653047555258, + "grad_norm": 2.9833726172171726, + "learning_rate": 3.093485189987554e-06, + "loss": 1.3438, + "step": 2875 + }, + { + "epoch": 1.9263228399196248, + "grad_norm": 1.63111378587586, + "learning_rate": 3.092065389505933e-06, + "loss": 1.3614, + "step": 2876 + }, + { + "epoch": 1.926992632283992, + "grad_norm": 1.7158409904423861, + "learning_rate": 3.090645386687881e-06, + "loss": 1.1161, + "step": 2877 + }, + { + "epoch": 1.927662424648359, + "grad_norm": 2.0310363244325202, + "learning_rate": 3.0892251820186814e-06, + "loss": 0.953, + "step": 2878 + }, + { + "epoch": 1.9283322170127262, + "grad_norm": 2.4188462934222024, + "learning_rate": 3.087804775983683e-06, + "loss": 1.2014, + "step": 2879 + }, + { + "epoch": 1.929002009377093, + "grad_norm": 1.7465323132211346, + "learning_rate": 3.0863841690683054e-06, + "loss": 1.2067, + "step": 2880 + }, + { + "epoch": 1.9296718017414602, + "grad_norm": 1.9789194391830116, + "learning_rate": 3.084963361758037e-06, + "loss": 1.1864, + "step": 2881 + }, + { + "epoch": 1.930341594105827, + "grad_norm": 1.6882113188599457, + "learning_rate": 3.0835423545384333e-06, + "loss": 1.2278, + "step": 2882 + }, + { + "epoch": 1.9310113864701943, + "grad_norm": 2.8767935350340585, + "learning_rate": 3.0821211478951197e-06, + "loss": 1.1988, + "step": 2883 + }, + { + "epoch": 1.9316811788345611, + "grad_norm": 1.8912293458887874, + "learning_rate": 3.0806997423137886e-06, + "loss": 1.1996, + "step": 2884 + }, + { + "epoch": 1.9323509711989284, + "grad_norm": 6.206581089138813, + "learning_rate": 3.0792781382802e-06, + "loss": 1.1994, + "step": 2885 + }, + { + "epoch": 1.9330207635632952, + "grad_norm": 2.6707286716340004, + "learning_rate": 3.0778563362801835e-06, + "loss": 1.4059, + "step": 2886 + }, + { + "epoch": 1.9336905559276625, + "grad_norm": 2.0659423346085393, + "learning_rate": 3.0764343367996346e-06, + "loss": 1.1962, + "step": 2887 + }, + { + "epoch": 1.9343603482920295, + "grad_norm": 1.6844590816236011, + "learning_rate": 3.075012140324518e-06, + "loss": 1.252, + "step": 2888 + }, + { + "epoch": 1.9350301406563966, + "grad_norm": 1.6619378564027458, + "learning_rate": 3.0735897473408637e-06, + "loss": 1.2745, + "step": 2889 + }, + { + "epoch": 1.9356999330207636, + "grad_norm": 1.7292686663894496, + "learning_rate": 3.072167158334771e-06, + "loss": 1.2069, + "step": 2890 + }, + { + "epoch": 1.9363697253851306, + "grad_norm": 2.6012747884700973, + "learning_rate": 3.070744373792403e-06, + "loss": 1.1128, + "step": 2891 + }, + { + "epoch": 1.9370395177494977, + "grad_norm": 1.6640529346621031, + "learning_rate": 3.0693213941999945e-06, + "loss": 1.2649, + "step": 2892 + }, + { + "epoch": 1.9377093101138647, + "grad_norm": 5.710860058287882, + "learning_rate": 3.0678982200438418e-06, + "loss": 1.3036, + "step": 2893 + }, + { + "epoch": 1.9383791024782318, + "grad_norm": 2.2779790211626496, + "learning_rate": 3.066474851810312e-06, + "loss": 1.1979, + "step": 2894 + }, + { + "epoch": 1.9390488948425988, + "grad_norm": 2.026655490970141, + "learning_rate": 3.065051289985835e-06, + "loss": 1.3357, + "step": 2895 + }, + { + "epoch": 1.9397186872069658, + "grad_norm": 1.7806401769192848, + "learning_rate": 3.0636275350569094e-06, + "loss": 1.2535, + "step": 2896 + }, + { + "epoch": 1.9403884795713329, + "grad_norm": 1.5278625985881842, + "learning_rate": 3.062203587510098e-06, + "loss": 
1.1608, + "step": 2897 + }, + { + "epoch": 1.9410582719357, + "grad_norm": 1.6656476055072704, + "learning_rate": 3.0607794478320316e-06, + "loss": 1.244, + "step": 2898 + }, + { + "epoch": 1.941728064300067, + "grad_norm": 1.5571091776122281, + "learning_rate": 3.0593551165094036e-06, + "loss": 1.2013, + "step": 2899 + }, + { + "epoch": 1.942397856664434, + "grad_norm": 2.677249082763471, + "learning_rate": 3.0579305940289768e-06, + "loss": 1.1887, + "step": 2900 + }, + { + "epoch": 1.943067649028801, + "grad_norm": 6.63386845122831, + "learning_rate": 3.0565058808775744e-06, + "loss": 1.1273, + "step": 2901 + }, + { + "epoch": 1.943737441393168, + "grad_norm": 2.166712132391252, + "learning_rate": 3.05508097754209e-06, + "loss": 0.9728, + "step": 2902 + }, + { + "epoch": 1.944407233757535, + "grad_norm": 1.9886959920244915, + "learning_rate": 3.053655884509478e-06, + "loss": 1.2301, + "step": 2903 + }, + { + "epoch": 1.9450770261219024, + "grad_norm": 2.0051749510883896, + "learning_rate": 3.052230602266761e-06, + "loss": 1.2151, + "step": 2904 + }, + { + "epoch": 1.9457468184862692, + "grad_norm": 2.7001674000207236, + "learning_rate": 3.050805131301023e-06, + "loss": 1.2667, + "step": 2905 + }, + { + "epoch": 1.9464166108506364, + "grad_norm": 4.473603364783255, + "learning_rate": 3.049379472099414e-06, + "loss": 1.1557, + "step": 2906 + }, + { + "epoch": 1.9470864032150033, + "grad_norm": 1.6660904876597407, + "learning_rate": 3.0479536251491503e-06, + "loss": 1.2713, + "step": 2907 + }, + { + "epoch": 1.9477561955793705, + "grad_norm": 1.6325780395591853, + "learning_rate": 3.0465275909375087e-06, + "loss": 1.2792, + "step": 2908 + }, + { + "epoch": 1.9484259879437373, + "grad_norm": 2.325218757925146, + "learning_rate": 3.045101369951832e-06, + "loss": 1.0292, + "step": 2909 + }, + { + "epoch": 1.9490957803081046, + "grad_norm": 2.1846690293640685, + "learning_rate": 3.043674962679528e-06, + "loss": 1.118, + "step": 2910 + }, + { + "epoch": 1.9497655726724714, + "grad_norm": 4.030784284520901, + "learning_rate": 3.042248369608065e-06, + "loss": 1.0878, + "step": 2911 + }, + { + "epoch": 1.9504353650368387, + "grad_norm": 1.7540141143103416, + "learning_rate": 3.0408215912249777e-06, + "loss": 1.2507, + "step": 2912 + }, + { + "epoch": 1.9511051574012055, + "grad_norm": 1.9749656269482854, + "learning_rate": 3.0393946280178623e-06, + "loss": 1.268, + "step": 2913 + }, + { + "epoch": 1.9517749497655728, + "grad_norm": 1.6501977626905397, + "learning_rate": 3.0379674804743793e-06, + "loss": 1.3837, + "step": 2914 + }, + { + "epoch": 1.9524447421299396, + "grad_norm": 1.567098358971834, + "learning_rate": 3.036540149082251e-06, + "loss": 1.1275, + "step": 2915 + }, + { + "epoch": 1.9531145344943068, + "grad_norm": 1.7993970032279631, + "learning_rate": 3.035112634329265e-06, + "loss": 1.2988, + "step": 2916 + }, + { + "epoch": 1.9537843268586736, + "grad_norm": 1.808528245216127, + "learning_rate": 3.033684936703268e-06, + "loss": 1.1028, + "step": 2917 + }, + { + "epoch": 1.954454119223041, + "grad_norm": 1.9560086738433007, + "learning_rate": 3.0322570566921714e-06, + "loss": 1.1762, + "step": 2918 + }, + { + "epoch": 1.955123911587408, + "grad_norm": 1.692070760025948, + "learning_rate": 3.03082899478395e-06, + "loss": 1.1436, + "step": 2919 + }, + { + "epoch": 1.955793703951775, + "grad_norm": 2.5063584408683153, + "learning_rate": 3.0294007514666373e-06, + "loss": 1.192, + "step": 2920 + }, + { + "epoch": 1.956463496316142, + "grad_norm": 2.790919708028313, + "learning_rate": 
3.0279723272283323e-06, + "loss": 1.3125, + "step": 2921 + }, + { + "epoch": 1.957133288680509, + "grad_norm": 4.708841183311881, + "learning_rate": 3.0265437225571937e-06, + "loss": 1.2571, + "step": 2922 + }, + { + "epoch": 1.957803081044876, + "grad_norm": 1.8926598815113718, + "learning_rate": 3.025114937941443e-06, + "loss": 1.2407, + "step": 2923 + }, + { + "epoch": 1.9584728734092431, + "grad_norm": 2.277785911248552, + "learning_rate": 3.023685973869362e-06, + "loss": 1.1074, + "step": 2924 + }, + { + "epoch": 1.9591426657736102, + "grad_norm": 1.6530491440663817, + "learning_rate": 3.022256830829295e-06, + "loss": 1.3535, + "step": 2925 + }, + { + "epoch": 1.9598124581379772, + "grad_norm": 2.5840193497680857, + "learning_rate": 3.0208275093096474e-06, + "loss": 1.1195, + "step": 2926 + }, + { + "epoch": 1.9604822505023443, + "grad_norm": 1.8819753992814456, + "learning_rate": 3.019398009798884e-06, + "loss": 1.2085, + "step": 2927 + }, + { + "epoch": 1.9611520428667113, + "grad_norm": 2.6336564521869206, + "learning_rate": 3.0179683327855325e-06, + "loss": 1.138, + "step": 2928 + }, + { + "epoch": 1.9618218352310783, + "grad_norm": 2.8530826534501474, + "learning_rate": 3.0165384787581807e-06, + "loss": 1.1484, + "step": 2929 + }, + { + "epoch": 1.9624916275954454, + "grad_norm": 2.0773670503484243, + "learning_rate": 3.0151084482054752e-06, + "loss": 1.051, + "step": 2930 + }, + { + "epoch": 1.9631614199598124, + "grad_norm": 1.906526051911988, + "learning_rate": 3.013678241616127e-06, + "loss": 1.3579, + "step": 2931 + }, + { + "epoch": 1.9638312123241795, + "grad_norm": 1.9495714980053636, + "learning_rate": 3.012247859478901e-06, + "loss": 1.0342, + "step": 2932 + }, + { + "epoch": 1.9645010046885467, + "grad_norm": 1.84720838940551, + "learning_rate": 3.0108173022826294e-06, + "loss": 1.313, + "step": 2933 + }, + { + "epoch": 1.9651707970529135, + "grad_norm": 1.7799262159734264, + "learning_rate": 3.009386570516197e-06, + "loss": 1.1751, + "step": 2934 + }, + { + "epoch": 1.9658405894172808, + "grad_norm": 6.5327913184328485, + "learning_rate": 3.007955664668554e-06, + "loss": 1.2842, + "step": 2935 + }, + { + "epoch": 1.9665103817816476, + "grad_norm": 3.0284723984739026, + "learning_rate": 3.0065245852287066e-06, + "loss": 1.1163, + "step": 2936 + }, + { + "epoch": 1.9671801741460149, + "grad_norm": 2.2875614707349907, + "learning_rate": 3.0050933326857233e-06, + "loss": 1.3335, + "step": 2937 + }, + { + "epoch": 1.9678499665103817, + "grad_norm": 1.8385658388097637, + "learning_rate": 3.003661907528728e-06, + "loss": 1.1371, + "step": 2938 + }, + { + "epoch": 1.968519758874749, + "grad_norm": 1.8480346293349497, + "learning_rate": 3.0022303102469074e-06, + "loss": 1.256, + "step": 2939 + }, + { + "epoch": 1.9691895512391158, + "grad_norm": 3.200846102091416, + "learning_rate": 3.000798541329503e-06, + "loss": 1.1949, + "step": 2940 + }, + { + "epoch": 1.969859343603483, + "grad_norm": 2.7075556897891087, + "learning_rate": 2.9993666012658196e-06, + "loss": 1.0696, + "step": 2941 + }, + { + "epoch": 1.9705291359678498, + "grad_norm": 1.6714880314568703, + "learning_rate": 2.9979344905452157e-06, + "loss": 1.2223, + "step": 2942 + }, + { + "epoch": 1.971198928332217, + "grad_norm": 1.7473736136672189, + "learning_rate": 2.9965022096571132e-06, + "loss": 1.148, + "step": 2943 + }, + { + "epoch": 1.971868720696584, + "grad_norm": 1.723813068348718, + "learning_rate": 2.995069759090987e-06, + "loss": 1.1532, + "step": 2944 + }, + { + "epoch": 1.9725385130609512, + 
"grad_norm": 1.6803584336541073, + "learning_rate": 2.993637139336375e-06, + "loss": 1.1797, + "step": 2945 + }, + { + "epoch": 1.973208305425318, + "grad_norm": 2.6101501359636354, + "learning_rate": 2.992204350882867e-06, + "loss": 1.2741, + "step": 2946 + }, + { + "epoch": 1.9738780977896853, + "grad_norm": 1.7551945985805015, + "learning_rate": 2.990771394220117e-06, + "loss": 1.1502, + "step": 2947 + }, + { + "epoch": 1.9745478901540523, + "grad_norm": 2.2826026212982895, + "learning_rate": 2.989338269837831e-06, + "loss": 1.2076, + "step": 2948 + }, + { + "epoch": 1.9752176825184193, + "grad_norm": 1.8178215134310722, + "learning_rate": 2.9879049782257765e-06, + "loss": 1.3679, + "step": 2949 + }, + { + "epoch": 1.9758874748827864, + "grad_norm": 1.968165054973045, + "learning_rate": 2.986471519873775e-06, + "loss": 1.1182, + "step": 2950 + }, + { + "epoch": 1.9765572672471534, + "grad_norm": 2.53940336809904, + "learning_rate": 2.9850378952717063e-06, + "loss": 1.3069, + "step": 2951 + }, + { + "epoch": 1.9772270596115205, + "grad_norm": 1.74863620558012, + "learning_rate": 2.9836041049095073e-06, + "loss": 1.2382, + "step": 2952 + }, + { + "epoch": 1.9778968519758875, + "grad_norm": 1.8369165396569072, + "learning_rate": 2.9821701492771717e-06, + "loss": 1.2291, + "step": 2953 + }, + { + "epoch": 1.9785666443402545, + "grad_norm": 1.7304199059257621, + "learning_rate": 2.980736028864748e-06, + "loss": 1.3001, + "step": 2954 + }, + { + "epoch": 1.9792364367046216, + "grad_norm": 2.0115112326296223, + "learning_rate": 2.9793017441623436e-06, + "loss": 1.116, + "step": 2955 + }, + { + "epoch": 1.9799062290689886, + "grad_norm": 2.3577553061313816, + "learning_rate": 2.9778672956601194e-06, + "loss": 1.0798, + "step": 2956 + }, + { + "epoch": 1.9805760214333556, + "grad_norm": 1.7239971159341518, + "learning_rate": 2.9764326838482943e-06, + "loss": 1.0574, + "step": 2957 + }, + { + "epoch": 1.9812458137977227, + "grad_norm": 3.2756805666724023, + "learning_rate": 2.9749979092171422e-06, + "loss": 1.0394, + "step": 2958 + }, + { + "epoch": 1.9819156061620897, + "grad_norm": 2.54231352224185, + "learning_rate": 2.973562972256992e-06, + "loss": 1.2847, + "step": 2959 + }, + { + "epoch": 1.9825853985264568, + "grad_norm": 1.9427458255972223, + "learning_rate": 2.9721278734582297e-06, + "loss": 1.4506, + "step": 2960 + }, + { + "epoch": 1.9832551908908238, + "grad_norm": 1.85894944663237, + "learning_rate": 2.970692613311295e-06, + "loss": 1.1039, + "step": 2961 + }, + { + "epoch": 1.9839249832551908, + "grad_norm": 2.838551064236198, + "learning_rate": 2.9692571923066838e-06, + "loss": 1.2021, + "step": 2962 + }, + { + "epoch": 1.9845947756195579, + "grad_norm": 1.669418339901989, + "learning_rate": 2.9678216109349465e-06, + "loss": 1.0713, + "step": 2963 + }, + { + "epoch": 1.9852645679839251, + "grad_norm": 2.369344381712399, + "learning_rate": 2.9663858696866883e-06, + "loss": 0.9746, + "step": 2964 + }, + { + "epoch": 1.985934360348292, + "grad_norm": 1.7956743826926092, + "learning_rate": 2.9649499690525686e-06, + "loss": 1.3156, + "step": 2965 + }, + { + "epoch": 1.9866041527126592, + "grad_norm": 1.9923780973276148, + "learning_rate": 2.963513909523303e-06, + "loss": 1.1673, + "step": 2966 + }, + { + "epoch": 1.987273945077026, + "grad_norm": 1.8565476419911788, + "learning_rate": 2.9620776915896587e-06, + "loss": 1.2083, + "step": 2967 + }, + { + "epoch": 1.9879437374413933, + "grad_norm": 2.23611548902057, + "learning_rate": 2.9606413157424605e-06, + "loss": 1.1861, + "step": 2968 
+ }, + { + "epoch": 1.9886135298057601, + "grad_norm": 1.8002711879154458, + "learning_rate": 2.9592047824725827e-06, + "loss": 1.2867, + "step": 2969 + }, + { + "epoch": 1.9892833221701274, + "grad_norm": 1.829032012108952, + "learning_rate": 2.9577680922709584e-06, + "loss": 1.2346, + "step": 2970 + }, + { + "epoch": 1.9899531145344942, + "grad_norm": 1.6749165628469833, + "learning_rate": 2.9563312456285697e-06, + "loss": 1.1705, + "step": 2971 + }, + { + "epoch": 1.9906229068988615, + "grad_norm": 2.2089827546172534, + "learning_rate": 2.954894243036457e-06, + "loss": 1.0757, + "step": 2972 + }, + { + "epoch": 1.9912926992632283, + "grad_norm": 2.308618571449297, + "learning_rate": 2.9534570849857074e-06, + "loss": 1.13, + "step": 2973 + }, + { + "epoch": 1.9919624916275955, + "grad_norm": 2.0662596298597475, + "learning_rate": 2.952019771967469e-06, + "loss": 1.3095, + "step": 2974 + }, + { + "epoch": 1.9926322839919623, + "grad_norm": 1.9252231974411096, + "learning_rate": 2.9505823044729353e-06, + "loss": 1.0643, + "step": 2975 + }, + { + "epoch": 1.9933020763563296, + "grad_norm": 1.807315440843745, + "learning_rate": 2.949144682993359e-06, + "loss": 1.3523, + "step": 2976 + }, + { + "epoch": 1.9939718687206964, + "grad_norm": 2.3917542084739316, + "learning_rate": 2.9477069080200406e-06, + "loss": 1.3226, + "step": 2977 + }, + { + "epoch": 1.9946416610850637, + "grad_norm": 1.701536180030945, + "learning_rate": 2.946268980044337e-06, + "loss": 1.2093, + "step": 2978 + }, + { + "epoch": 1.9953114534494307, + "grad_norm": 1.8098952592150879, + "learning_rate": 2.944830899557653e-06, + "loss": 1.099, + "step": 2979 + }, + { + "epoch": 1.9959812458137978, + "grad_norm": 1.5369417099826728, + "learning_rate": 2.9433926670514507e-06, + "loss": 1.2888, + "step": 2980 + }, + { + "epoch": 1.9966510381781648, + "grad_norm": 2.6653788028460967, + "learning_rate": 2.9419542830172386e-06, + "loss": 1.2194, + "step": 2981 + }, + { + "epoch": 1.9973208305425318, + "grad_norm": 1.8727433859498457, + "learning_rate": 2.9405157479465823e-06, + "loss": 1.2755, + "step": 2982 + }, + { + "epoch": 1.9979906229068989, + "grad_norm": 1.5616524016361444, + "learning_rate": 2.939077062331095e-06, + "loss": 1.2059, + "step": 2983 + }, + { + "epoch": 1.998660415271266, + "grad_norm": 1.7270288733765249, + "learning_rate": 2.9376382266624437e-06, + "loss": 1.4505, + "step": 2984 + }, + { + "epoch": 1.999330207635633, + "grad_norm": 7.093821524639807, + "learning_rate": 2.9361992414323443e-06, + "loss": 1.1788, + "step": 2985 + }, + { + "epoch": 2.0, + "grad_norm": 1.9894146184502592, + "learning_rate": 2.934760107132567e-06, + "loss": 1.2787, + "step": 2986 + }, + { + "epoch": 2.0006697923643673, + "grad_norm": 1.898195819877312, + "learning_rate": 2.9333208242549303e-06, + "loss": 1.179, + "step": 2987 + }, + { + "epoch": 2.001339584728734, + "grad_norm": 2.127920664039895, + "learning_rate": 2.931881393291306e-06, + "loss": 1.0109, + "step": 2988 + }, + { + "epoch": 2.0020093770931013, + "grad_norm": 2.000272761079844, + "learning_rate": 2.930441814733612e-06, + "loss": 1.0568, + "step": 2989 + }, + { + "epoch": 2.002679169457468, + "grad_norm": 2.0689739340353555, + "learning_rate": 2.929002089073823e-06, + "loss": 1.0641, + "step": 2990 + }, + { + "epoch": 2.0033489618218354, + "grad_norm": 1.8176305031910107, + "learning_rate": 2.9275622168039582e-06, + "loss": 1.2127, + "step": 2991 + }, + { + "epoch": 2.0040187541862022, + "grad_norm": 2.321806103521041, + "learning_rate": 2.926122198416091e-06, + 
"loss": 1.164, + "step": 2992 + }, + { + "epoch": 2.0046885465505695, + "grad_norm": 2.450623316698051, + "learning_rate": 2.9246820344023406e-06, + "loss": 1.1765, + "step": 2993 + }, + { + "epoch": 2.0053583389149363, + "grad_norm": 2.003997649199961, + "learning_rate": 2.923241725254881e-06, + "loss": 1.0606, + "step": 2994 + }, + { + "epoch": 2.0060281312793036, + "grad_norm": 1.7414127985369736, + "learning_rate": 2.9218012714659323e-06, + "loss": 1.1762, + "step": 2995 + }, + { + "epoch": 2.0066979236436704, + "grad_norm": 2.0568264372239935, + "learning_rate": 2.9203606735277638e-06, + "loss": 1.1651, + "step": 2996 + }, + { + "epoch": 2.0073677160080377, + "grad_norm": 2.775317614977734, + "learning_rate": 2.9189199319326965e-06, + "loss": 0.9531, + "step": 2997 + }, + { + "epoch": 2.0080375083724045, + "grad_norm": 2.2321121380867397, + "learning_rate": 2.917479047173099e-06, + "loss": 1.1272, + "step": 2998 + }, + { + "epoch": 2.0087073007367717, + "grad_norm": 5.730630832972478, + "learning_rate": 2.916038019741389e-06, + "loss": 0.809, + "step": 2999 + }, + { + "epoch": 2.0093770931011385, + "grad_norm": 1.9357087860206612, + "learning_rate": 2.9145968501300325e-06, + "loss": 1.1198, + "step": 3000 + }, + { + "epoch": 2.010046885465506, + "grad_norm": 1.9482688625923943, + "learning_rate": 2.9131555388315447e-06, + "loss": 1.0864, + "step": 3001 + }, + { + "epoch": 2.0107166778298726, + "grad_norm": 2.1214859813712716, + "learning_rate": 2.9117140863384893e-06, + "loss": 1.0917, + "step": 3002 + }, + { + "epoch": 2.01138647019424, + "grad_norm": 1.8288838090018917, + "learning_rate": 2.910272493143478e-06, + "loss": 1.1623, + "step": 3003 + }, + { + "epoch": 2.0120562625586067, + "grad_norm": 1.896078255599894, + "learning_rate": 2.9088307597391695e-06, + "loss": 1.0343, + "step": 3004 + }, + { + "epoch": 2.012726054922974, + "grad_norm": 1.7436743367019543, + "learning_rate": 2.9073888866182735e-06, + "loss": 1.0768, + "step": 3005 + }, + { + "epoch": 2.013395847287341, + "grad_norm": 1.8165155515624125, + "learning_rate": 2.905946874273544e-06, + "loss": 1.0829, + "step": 3006 + }, + { + "epoch": 2.014065639651708, + "grad_norm": 1.8215650488999595, + "learning_rate": 2.9045047231977853e-06, + "loss": 0.9165, + "step": 3007 + }, + { + "epoch": 2.014735432016075, + "grad_norm": 1.6558134226632732, + "learning_rate": 2.9030624338838465e-06, + "loss": 1.1612, + "step": 3008 + }, + { + "epoch": 2.015405224380442, + "grad_norm": 2.0723973608945365, + "learning_rate": 2.9016200068246258e-06, + "loss": 1.1479, + "step": 3009 + }, + { + "epoch": 2.016075016744809, + "grad_norm": 1.9842389949748447, + "learning_rate": 2.900177442513068e-06, + "loss": 0.9815, + "step": 3010 + }, + { + "epoch": 2.016744809109176, + "grad_norm": 2.269996690319249, + "learning_rate": 2.8987347414421656e-06, + "loss": 1.2571, + "step": 3011 + }, + { + "epoch": 2.017414601473543, + "grad_norm": 1.6557663559455134, + "learning_rate": 2.897291904104955e-06, + "loss": 0.8693, + "step": 3012 + }, + { + "epoch": 2.0180843938379103, + "grad_norm": 1.871284585893852, + "learning_rate": 2.8958489309945235e-06, + "loss": 1.0456, + "step": 3013 + }, + { + "epoch": 2.018754186202277, + "grad_norm": 1.8263002342482328, + "learning_rate": 2.8944058226040013e-06, + "loss": 1.1341, + "step": 3014 + }, + { + "epoch": 2.0194239785666444, + "grad_norm": 1.9302973941911774, + "learning_rate": 2.8929625794265666e-06, + "loss": 1.1856, + "step": 3015 + }, + { + "epoch": 2.020093770931011, + "grad_norm": 1.7632180101273127, + 
"learning_rate": 2.8915192019554416e-06, + "loss": 0.8906, + "step": 3016 + }, + { + "epoch": 2.0207635632953784, + "grad_norm": 1.83235368387011, + "learning_rate": 2.890075690683898e-06, + "loss": 0.9688, + "step": 3017 + }, + { + "epoch": 2.0214333556597457, + "grad_norm": 2.65316856313035, + "learning_rate": 2.8886320461052487e-06, + "loss": 1.0106, + "step": 3018 + }, + { + "epoch": 2.0221031480241125, + "grad_norm": 2.694923276325767, + "learning_rate": 2.8871882687128573e-06, + "loss": 1.1168, + "step": 3019 + }, + { + "epoch": 2.0227729403884798, + "grad_norm": 2.308836006775428, + "learning_rate": 2.8857443590001277e-06, + "loss": 0.7981, + "step": 3020 + }, + { + "epoch": 2.0234427327528466, + "grad_norm": 1.8975531972010435, + "learning_rate": 2.8843003174605126e-06, + "loss": 1.1479, + "step": 3021 + }, + { + "epoch": 2.024112525117214, + "grad_norm": 2.0155462548981866, + "learning_rate": 2.882856144587508e-06, + "loss": 1.0585, + "step": 3022 + }, + { + "epoch": 2.0247823174815807, + "grad_norm": 1.7223690975192503, + "learning_rate": 2.8814118408746566e-06, + "loss": 1.046, + "step": 3023 + }, + { + "epoch": 2.025452109845948, + "grad_norm": 1.9765798664729952, + "learning_rate": 2.879967406815542e-06, + "loss": 1.1997, + "step": 3024 + }, + { + "epoch": 2.0261219022103147, + "grad_norm": 2.6096824472468296, + "learning_rate": 2.8785228429037974e-06, + "loss": 1.0074, + "step": 3025 + }, + { + "epoch": 2.026791694574682, + "grad_norm": 2.0035172906730607, + "learning_rate": 2.8770781496330963e-06, + "loss": 0.986, + "step": 3026 + }, + { + "epoch": 2.027461486939049, + "grad_norm": 4.030021903439522, + "learning_rate": 2.8756333274971587e-06, + "loss": 0.9293, + "step": 3027 + }, + { + "epoch": 2.028131279303416, + "grad_norm": 1.6571298190425106, + "learning_rate": 2.874188376989747e-06, + "loss": 1.0175, + "step": 3028 + }, + { + "epoch": 2.028801071667783, + "grad_norm": 1.7747149279345067, + "learning_rate": 2.87274329860467e-06, + "loss": 1.1455, + "step": 3029 + }, + { + "epoch": 2.02947086403215, + "grad_norm": 1.6933124193253617, + "learning_rate": 2.8712980928357765e-06, + "loss": 0.9034, + "step": 3030 + }, + { + "epoch": 2.030140656396517, + "grad_norm": 1.9326584035660002, + "learning_rate": 2.8698527601769625e-06, + "loss": 1.0054, + "step": 3031 + }, + { + "epoch": 2.0308104487608842, + "grad_norm": 2.210958580342452, + "learning_rate": 2.8684073011221646e-06, + "loss": 1.0615, + "step": 3032 + }, + { + "epoch": 2.031480241125251, + "grad_norm": 1.7802834881910545, + "learning_rate": 2.8669617161653653e-06, + "loss": 0.9663, + "step": 3033 + }, + { + "epoch": 2.0321500334896183, + "grad_norm": 1.7460389640027583, + "learning_rate": 2.865516005800586e-06, + "loss": 1.1076, + "step": 3034 + }, + { + "epoch": 2.032819825853985, + "grad_norm": 2.391997201993147, + "learning_rate": 2.8640701705218966e-06, + "loss": 1.2313, + "step": 3035 + }, + { + "epoch": 2.0334896182183524, + "grad_norm": 3.3916029942785344, + "learning_rate": 2.8626242108234044e-06, + "loss": 0.9044, + "step": 3036 + }, + { + "epoch": 2.034159410582719, + "grad_norm": 1.7042805852863356, + "learning_rate": 2.861178127199262e-06, + "loss": 1.1336, + "step": 3037 + }, + { + "epoch": 2.0348292029470865, + "grad_norm": 1.9955127931592322, + "learning_rate": 2.8597319201436645e-06, + "loss": 1.1496, + "step": 3038 + }, + { + "epoch": 2.0354989953114533, + "grad_norm": 2.3923041329749752, + "learning_rate": 2.8582855901508484e-06, + "loss": 1.0796, + "step": 3039 + }, + { + "epoch": 
2.0361687876758205, + "grad_norm": 2.4429185127896065, + "learning_rate": 2.856839137715091e-06, + "loss": 1.1259, + "step": 3040 + }, + { + "epoch": 2.0368385800401874, + "grad_norm": 1.8170434008500096, + "learning_rate": 2.8553925633307143e-06, + "loss": 1.0946, + "step": 3041 + }, + { + "epoch": 2.0375083724045546, + "grad_norm": 2.432111331168653, + "learning_rate": 2.8539458674920795e-06, + "loss": 0.9969, + "step": 3042 + }, + { + "epoch": 2.0381781647689214, + "grad_norm": 8.679357486629952, + "learning_rate": 2.85249905069359e-06, + "loss": 1.2142, + "step": 3043 + }, + { + "epoch": 2.0388479571332887, + "grad_norm": 1.948241149112838, + "learning_rate": 2.851052113429692e-06, + "loss": 1.1696, + "step": 3044 + }, + { + "epoch": 2.0395177494976555, + "grad_norm": 2.5374778820842, + "learning_rate": 2.84960505619487e-06, + "loss": 0.9326, + "step": 3045 + }, + { + "epoch": 2.040187541862023, + "grad_norm": 2.039357028635505, + "learning_rate": 2.8481578794836523e-06, + "loss": 1.0312, + "step": 3046 + }, + { + "epoch": 2.0408573342263896, + "grad_norm": 1.9193692692553637, + "learning_rate": 2.8467105837906067e-06, + "loss": 1.089, + "step": 3047 + }, + { + "epoch": 2.041527126590757, + "grad_norm": 2.1259659213759696, + "learning_rate": 2.8452631696103417e-06, + "loss": 1.0464, + "step": 3048 + }, + { + "epoch": 2.042196918955124, + "grad_norm": 1.9814791337290543, + "learning_rate": 2.8438156374375057e-06, + "loss": 1.1218, + "step": 3049 + }, + { + "epoch": 2.042866711319491, + "grad_norm": 2.332027157424269, + "learning_rate": 2.8423679877667894e-06, + "loss": 1.1863, + "step": 3050 + }, + { + "epoch": 2.043536503683858, + "grad_norm": 2.7214179566039376, + "learning_rate": 2.840920221092921e-06, + "loss": 0.9033, + "step": 3051 + }, + { + "epoch": 2.044206296048225, + "grad_norm": 1.9677673839968863, + "learning_rate": 2.8394723379106714e-06, + "loss": 0.9581, + "step": 3052 + }, + { + "epoch": 2.0448760884125923, + "grad_norm": 2.4540117135224904, + "learning_rate": 2.8380243387148494e-06, + "loss": 1.048, + "step": 3053 + }, + { + "epoch": 2.045545880776959, + "grad_norm": 1.7416790822162862, + "learning_rate": 2.8365762240003043e-06, + "loss": 1.1023, + "step": 3054 + }, + { + "epoch": 2.0462156731413264, + "grad_norm": 1.9769080580478913, + "learning_rate": 2.8351279942619232e-06, + "loss": 1.1367, + "step": 3055 + }, + { + "epoch": 2.046885465505693, + "grad_norm": 1.960143389861285, + "learning_rate": 2.8336796499946363e-06, + "loss": 1.0821, + "step": 3056 + }, + { + "epoch": 2.0475552578700604, + "grad_norm": 2.102912752943972, + "learning_rate": 2.8322311916934086e-06, + "loss": 1.2675, + "step": 3057 + }, + { + "epoch": 2.0482250502344272, + "grad_norm": 2.5493848553571317, + "learning_rate": 2.8307826198532474e-06, + "loss": 1.1399, + "step": 3058 + }, + { + "epoch": 2.0488948425987945, + "grad_norm": 1.8659841063637903, + "learning_rate": 2.829333934969196e-06, + "loss": 0.9942, + "step": 3059 + }, + { + "epoch": 2.0495646349631613, + "grad_norm": 1.8302500336818197, + "learning_rate": 2.82788513753634e-06, + "loss": 1.0763, + "step": 3060 + }, + { + "epoch": 2.0502344273275286, + "grad_norm": 1.8772873339359615, + "learning_rate": 2.826436228049798e-06, + "loss": 1.0176, + "step": 3061 + }, + { + "epoch": 2.0509042196918954, + "grad_norm": 3.597657285596898, + "learning_rate": 2.824987207004734e-06, + "loss": 1.0981, + "step": 3062 + }, + { + "epoch": 2.0515740120562627, + "grad_norm": 1.7601021676987043, + "learning_rate": 2.8235380748963425e-06, + "loss": 
1.2786, + "step": 3063 + }, + { + "epoch": 2.0522438044206295, + "grad_norm": 1.7701573390652872, + "learning_rate": 2.8220888322198624e-06, + "loss": 1.0207, + "step": 3064 + }, + { + "epoch": 2.0529135967849967, + "grad_norm": 1.8385118082515735, + "learning_rate": 2.820639479470566e-06, + "loss": 1.0879, + "step": 3065 + }, + { + "epoch": 2.0535833891493636, + "grad_norm": 1.7312384841796506, + "learning_rate": 2.819190017143766e-06, + "loss": 1.1385, + "step": 3066 + }, + { + "epoch": 2.054253181513731, + "grad_norm": 3.000972787561828, + "learning_rate": 2.817740445734811e-06, + "loss": 0.9708, + "step": 3067 + }, + { + "epoch": 2.0549229738780976, + "grad_norm": 1.8792930336113667, + "learning_rate": 2.8162907657390882e-06, + "loss": 1.1977, + "step": 3068 + }, + { + "epoch": 2.055592766242465, + "grad_norm": 1.8932875198170074, + "learning_rate": 2.8148409776520185e-06, + "loss": 1.2794, + "step": 3069 + }, + { + "epoch": 2.0562625586068317, + "grad_norm": 2.9364088916745406, + "learning_rate": 2.8133910819690656e-06, + "loss": 0.9715, + "step": 3070 + }, + { + "epoch": 2.056932350971199, + "grad_norm": 1.9530273768845443, + "learning_rate": 2.8119410791857244e-06, + "loss": 0.9518, + "step": 3071 + }, + { + "epoch": 2.057602143335566, + "grad_norm": 2.2826802716594723, + "learning_rate": 2.810490969797529e-06, + "loss": 1.1304, + "step": 3072 + }, + { + "epoch": 2.058271935699933, + "grad_norm": 2.091879242266409, + "learning_rate": 2.8090407543000504e-06, + "loss": 1.1516, + "step": 3073 + }, + { + "epoch": 2.0589417280643, + "grad_norm": 1.8054241017889263, + "learning_rate": 2.8075904331888946e-06, + "loss": 1.1431, + "step": 3074 + }, + { + "epoch": 2.059611520428667, + "grad_norm": 1.7235753266833125, + "learning_rate": 2.8061400069597046e-06, + "loss": 1.203, + "step": 3075 + }, + { + "epoch": 2.060281312793034, + "grad_norm": 3.2825774350643186, + "learning_rate": 2.804689476108157e-06, + "loss": 1.022, + "step": 3076 + }, + { + "epoch": 2.060951105157401, + "grad_norm": 2.5705683198196603, + "learning_rate": 2.8032388411299686e-06, + "loss": 1.1883, + "step": 3077 + }, + { + "epoch": 2.0616208975217685, + "grad_norm": 2.255420782120136, + "learning_rate": 2.8017881025208883e-06, + "loss": 0.8418, + "step": 3078 + }, + { + "epoch": 2.0622906898861353, + "grad_norm": 2.1926490747283136, + "learning_rate": 2.8003372607766997e-06, + "loss": 1.0845, + "step": 3079 + }, + { + "epoch": 2.0629604822505025, + "grad_norm": 1.8445931906327362, + "learning_rate": 2.7988863163932256e-06, + "loss": 1.0363, + "step": 3080 + }, + { + "epoch": 2.0636302746148694, + "grad_norm": 1.9555156732684156, + "learning_rate": 2.7974352698663203e-06, + "loss": 1.116, + "step": 3081 + }, + { + "epoch": 2.0643000669792366, + "grad_norm": 2.3505922115110596, + "learning_rate": 2.795984121691875e-06, + "loss": 1.1311, + "step": 3082 + }, + { + "epoch": 2.0649698593436034, + "grad_norm": 4.2561971872937185, + "learning_rate": 2.794532872365814e-06, + "loss": 1.0498, + "step": 3083 + }, + { + "epoch": 2.0656396517079707, + "grad_norm": 1.8716674201500703, + "learning_rate": 2.7930815223840973e-06, + "loss": 0.9489, + "step": 3084 + }, + { + "epoch": 2.0663094440723375, + "grad_norm": 1.8674350841331067, + "learning_rate": 2.7916300722427193e-06, + "loss": 1.0702, + "step": 3085 + }, + { + "epoch": 2.066979236436705, + "grad_norm": 1.8894757896671681, + "learning_rate": 2.7901785224377083e-06, + "loss": 1.1903, + "step": 3086 + }, + { + "epoch": 2.0676490288010716, + "grad_norm": 1.9594944398787004, + 
"learning_rate": 2.788726873465127e-06, + "loss": 1.2522, + "step": 3087 + }, + { + "epoch": 2.068318821165439, + "grad_norm": 1.7795832086560046, + "learning_rate": 2.7872751258210712e-06, + "loss": 1.0474, + "step": 3088 + }, + { + "epoch": 2.0689886135298057, + "grad_norm": 1.9881677300894667, + "learning_rate": 2.7858232800016714e-06, + "loss": 1.0793, + "step": 3089 + }, + { + "epoch": 2.069658405894173, + "grad_norm": 1.98966288891229, + "learning_rate": 2.784371336503091e-06, + "loss": 1.0831, + "step": 3090 + }, + { + "epoch": 2.0703281982585398, + "grad_norm": 1.872198292092948, + "learning_rate": 2.7829192958215274e-06, + "loss": 1.0333, + "step": 3091 + }, + { + "epoch": 2.070997990622907, + "grad_norm": 2.2530715441846447, + "learning_rate": 2.78146715845321e-06, + "loss": 1.1011, + "step": 3092 + }, + { + "epoch": 2.071667782987274, + "grad_norm": 1.9954683300547293, + "learning_rate": 2.7800149248944024e-06, + "loss": 1.0547, + "step": 3093 + }, + { + "epoch": 2.072337575351641, + "grad_norm": 2.095841423919162, + "learning_rate": 2.7785625956414008e-06, + "loss": 1.2109, + "step": 3094 + }, + { + "epoch": 2.073007367716008, + "grad_norm": 2.6622404484454796, + "learning_rate": 2.7771101711905348e-06, + "loss": 1.0315, + "step": 3095 + }, + { + "epoch": 2.073677160080375, + "grad_norm": 3.7037899351492864, + "learning_rate": 2.775657652038164e-06, + "loss": 0.9835, + "step": 3096 + }, + { + "epoch": 2.074346952444742, + "grad_norm": 2.0618830210034544, + "learning_rate": 2.7742050386806836e-06, + "loss": 1.1115, + "step": 3097 + }, + { + "epoch": 2.0750167448091092, + "grad_norm": 1.8796096824311033, + "learning_rate": 2.772752331614519e-06, + "loss": 1.1268, + "step": 3098 + }, + { + "epoch": 2.075686537173476, + "grad_norm": 3.893652184122403, + "learning_rate": 2.771299531336128e-06, + "loss": 1.0387, + "step": 3099 + }, + { + "epoch": 2.0763563295378433, + "grad_norm": 2.099668421498892, + "learning_rate": 2.7698466383420003e-06, + "loss": 0.9191, + "step": 3100 + }, + { + "epoch": 2.07702612190221, + "grad_norm": 2.0221393869800743, + "learning_rate": 2.768393653128658e-06, + "loss": 1.1066, + "step": 3101 + }, + { + "epoch": 2.0776959142665774, + "grad_norm": 1.9100603237854805, + "learning_rate": 2.7669405761926534e-06, + "loss": 0.9341, + "step": 3102 + }, + { + "epoch": 2.078365706630944, + "grad_norm": 2.4226464922567574, + "learning_rate": 2.765487408030572e-06, + "loss": 1.2144, + "step": 3103 + }, + { + "epoch": 2.0790354989953115, + "grad_norm": 2.166035323893145, + "learning_rate": 2.764034149139027e-06, + "loss": 0.9661, + "step": 3104 + }, + { + "epoch": 2.0797052913596783, + "grad_norm": 2.215905936434789, + "learning_rate": 2.762580800014668e-06, + "loss": 1.2427, + "step": 3105 + }, + { + "epoch": 2.0803750837240456, + "grad_norm": 1.8658834506174466, + "learning_rate": 2.7611273611541695e-06, + "loss": 0.9872, + "step": 3106 + }, + { + "epoch": 2.081044876088413, + "grad_norm": 1.966269070671071, + "learning_rate": 2.759673833054241e-06, + "loss": 1.2176, + "step": 3107 + }, + { + "epoch": 2.0817146684527796, + "grad_norm": 2.874890050446965, + "learning_rate": 2.758220216211621e-06, + "loss": 1.0079, + "step": 3108 + }, + { + "epoch": 2.082384460817147, + "grad_norm": 3.920030892534173, + "learning_rate": 2.7567665111230783e-06, + "loss": 1.1421, + "step": 3109 + }, + { + "epoch": 2.0830542531815137, + "grad_norm": 2.017014645546175, + "learning_rate": 2.755312718285412e-06, + "loss": 1.0322, + "step": 3110 + }, + { + "epoch": 2.083724045545881, + 
"grad_norm": 1.8802531303099268, + "learning_rate": 2.75385883819545e-06, + "loss": 1.0843, + "step": 3111 + }, + { + "epoch": 2.084393837910248, + "grad_norm": 1.979089978915766, + "learning_rate": 2.752404871350052e-06, + "loss": 1.0277, + "step": 3112 + }, + { + "epoch": 2.085063630274615, + "grad_norm": 3.177172706786513, + "learning_rate": 2.7509508182461076e-06, + "loss": 1.1559, + "step": 3113 + }, + { + "epoch": 2.085733422638982, + "grad_norm": 2.4057936857516484, + "learning_rate": 2.749496679380532e-06, + "loss": 1.1666, + "step": 3114 + }, + { + "epoch": 2.086403215003349, + "grad_norm": 2.1387486019670963, + "learning_rate": 2.7480424552502755e-06, + "loss": 1.0441, + "step": 3115 + }, + { + "epoch": 2.087073007367716, + "grad_norm": 1.7409698857385163, + "learning_rate": 2.746588146352312e-06, + "loss": 1.0822, + "step": 3116 + }, + { + "epoch": 2.087742799732083, + "grad_norm": 2.2840582670086, + "learning_rate": 2.745133753183648e-06, + "loss": 1.1089, + "step": 3117 + }, + { + "epoch": 2.08841259209645, + "grad_norm": 8.452016451601814, + "learning_rate": 2.743679276241318e-06, + "loss": 1.0922, + "step": 3118 + }, + { + "epoch": 2.0890823844608173, + "grad_norm": 2.0682514568331514, + "learning_rate": 2.7422247160223838e-06, + "loss": 0.8402, + "step": 3119 + }, + { + "epoch": 2.089752176825184, + "grad_norm": 1.8829847695346023, + "learning_rate": 2.7407700730239374e-06, + "loss": 0.9069, + "step": 3120 + }, + { + "epoch": 2.0904219691895514, + "grad_norm": 1.7808876742084407, + "learning_rate": 2.739315347743098e-06, + "loss": 1.1931, + "step": 3121 + }, + { + "epoch": 2.091091761553918, + "grad_norm": 3.8831787674788463, + "learning_rate": 2.737860540677013e-06, + "loss": 0.875, + "step": 3122 + }, + { + "epoch": 2.0917615539182854, + "grad_norm": 1.8854876152644553, + "learning_rate": 2.736405652322859e-06, + "loss": 1.1508, + "step": 3123 + }, + { + "epoch": 2.0924313462826523, + "grad_norm": 2.099468184146658, + "learning_rate": 2.7349506831778375e-06, + "loss": 0.8544, + "step": 3124 + }, + { + "epoch": 2.0931011386470195, + "grad_norm": 2.0712324930426065, + "learning_rate": 2.7334956337391823e-06, + "loss": 1.2529, + "step": 3125 + }, + { + "epoch": 2.0937709310113863, + "grad_norm": 2.577477184618326, + "learning_rate": 2.732040504504149e-06, + "loss": 1.1804, + "step": 3126 + }, + { + "epoch": 2.0944407233757536, + "grad_norm": 2.027484628359175, + "learning_rate": 2.7305852959700254e-06, + "loss": 1.0129, + "step": 3127 + }, + { + "epoch": 2.0951105157401204, + "grad_norm": 2.641868913353579, + "learning_rate": 2.7291300086341226e-06, + "loss": 0.938, + "step": 3128 + }, + { + "epoch": 2.0957803081044877, + "grad_norm": 2.1506478519331655, + "learning_rate": 2.7276746429937817e-06, + "loss": 1.0439, + "step": 3129 + }, + { + "epoch": 2.0964501004688545, + "grad_norm": 2.564218225819112, + "learning_rate": 2.726219199546369e-06, + "loss": 0.9007, + "step": 3130 + }, + { + "epoch": 2.0971198928332218, + "grad_norm": 1.9230722561628528, + "learning_rate": 2.7247636787892766e-06, + "loss": 0.9067, + "step": 3131 + }, + { + "epoch": 2.0977896851975886, + "grad_norm": 2.075554991639794, + "learning_rate": 2.723308081219925e-06, + "loss": 1.1388, + "step": 3132 + }, + { + "epoch": 2.098459477561956, + "grad_norm": 3.153120745724425, + "learning_rate": 2.72185240733576e-06, + "loss": 1.1233, + "step": 3133 + }, + { + "epoch": 2.0991292699263226, + "grad_norm": 1.967022731307546, + "learning_rate": 2.7203966576342526e-06, + "loss": 1.0107, + "step": 3134 + }, + { + 
"epoch": 2.09979906229069, + "grad_norm": 3.351359466862036, + "learning_rate": 2.7189408326129014e-06, + "loss": 0.967, + "step": 3135 + }, + { + "epoch": 2.1004688546550567, + "grad_norm": 2.2268362639189876, + "learning_rate": 2.717484932769229e-06, + "loss": 1.1242, + "step": 3136 + }, + { + "epoch": 2.101138647019424, + "grad_norm": 1.8125078145034772, + "learning_rate": 2.7160289586007854e-06, + "loss": 1.0575, + "step": 3137 + }, + { + "epoch": 2.101808439383791, + "grad_norm": 2.0119093868543234, + "learning_rate": 2.7145729106051445e-06, + "loss": 1.0633, + "step": 3138 + }, + { + "epoch": 2.102478231748158, + "grad_norm": 2.1535385707363224, + "learning_rate": 2.713116789279906e-06, + "loss": 1.1113, + "step": 3139 + }, + { + "epoch": 2.1031480241125253, + "grad_norm": 10.95680876123397, + "learning_rate": 2.711660595122695e-06, + "loss": 1.0229, + "step": 3140 + }, + { + "epoch": 2.103817816476892, + "grad_norm": 1.707345668159806, + "learning_rate": 2.71020432863116e-06, + "loss": 1.0167, + "step": 3141 + }, + { + "epoch": 2.1044876088412594, + "grad_norm": 3.443343017548517, + "learning_rate": 2.7087479903029767e-06, + "loss": 1.1235, + "step": 3142 + }, + { + "epoch": 2.105157401205626, + "grad_norm": 1.8883954764430881, + "learning_rate": 2.7072915806358446e-06, + "loss": 1.1513, + "step": 3143 + }, + { + "epoch": 2.1058271935699935, + "grad_norm": 1.9856917454063543, + "learning_rate": 2.7058351001274847e-06, + "loss": 1.0833, + "step": 3144 + }, + { + "epoch": 2.1064969859343603, + "grad_norm": 2.0774675611499296, + "learning_rate": 2.7043785492756464e-06, + "loss": 1.1693, + "step": 3145 + }, + { + "epoch": 2.1071667782987276, + "grad_norm": 1.875032145667421, + "learning_rate": 2.7029219285781e-06, + "loss": 1.0482, + "step": 3146 + }, + { + "epoch": 2.1078365706630944, + "grad_norm": 2.0817396328889557, + "learning_rate": 2.7014652385326425e-06, + "loss": 1.0805, + "step": 3147 + }, + { + "epoch": 2.1085063630274616, + "grad_norm": 2.043432887830379, + "learning_rate": 2.700008479637091e-06, + "loss": 1.1688, + "step": 3148 + }, + { + "epoch": 2.1091761553918285, + "grad_norm": 1.9538489499167122, + "learning_rate": 2.69855165238929e-06, + "loss": 1.0565, + "step": 3149 + }, + { + "epoch": 2.1098459477561957, + "grad_norm": 2.7780706989705872, + "learning_rate": 2.6970947572871043e-06, + "loss": 1.1316, + "step": 3150 + }, + { + "epoch": 2.1105157401205625, + "grad_norm": 2.225520740728046, + "learning_rate": 2.695637794828423e-06, + "loss": 1.0989, + "step": 3151 + }, + { + "epoch": 2.11118553248493, + "grad_norm": 1.8726013164501216, + "learning_rate": 2.694180765511159e-06, + "loss": 0.9775, + "step": 3152 + }, + { + "epoch": 2.1118553248492966, + "grad_norm": 1.9383181480043645, + "learning_rate": 2.692723669833247e-06, + "loss": 1.0141, + "step": 3153 + }, + { + "epoch": 2.112525117213664, + "grad_norm": 2.158873152483856, + "learning_rate": 2.6912665082926452e-06, + "loss": 1.0179, + "step": 3154 + }, + { + "epoch": 2.1131949095780307, + "grad_norm": 1.9602502172312346, + "learning_rate": 2.6898092813873335e-06, + "loss": 1.0844, + "step": 3155 + }, + { + "epoch": 2.113864701942398, + "grad_norm": 2.4652594610210543, + "learning_rate": 2.6883519896153137e-06, + "loss": 1.0983, + "step": 3156 + }, + { + "epoch": 2.1145344943067648, + "grad_norm": 1.8993643318142037, + "learning_rate": 2.686894633474612e-06, + "loss": 1.098, + "step": 3157 + }, + { + "epoch": 2.115204286671132, + "grad_norm": 2.0048748036006807, + "learning_rate": 2.685437213463274e-06, + "loss": 
1.0654, + "step": 3158 + }, + { + "epoch": 2.115874079035499, + "grad_norm": 2.149099185330886, + "learning_rate": 2.683979730079369e-06, + "loss": 0.9286, + "step": 3159 + }, + { + "epoch": 2.116543871399866, + "grad_norm": 1.9028137756042338, + "learning_rate": 2.6825221838209865e-06, + "loss": 1.0398, + "step": 3160 + }, + { + "epoch": 2.117213663764233, + "grad_norm": 1.731458712584097, + "learning_rate": 2.6810645751862392e-06, + "loss": 1.1521, + "step": 3161 + }, + { + "epoch": 2.1178834561286, + "grad_norm": 3.4650986521656164, + "learning_rate": 2.679606904673259e-06, + "loss": 0.8615, + "step": 3162 + }, + { + "epoch": 2.118553248492967, + "grad_norm": 2.902028405740626, + "learning_rate": 2.6781491727802007e-06, + "loss": 0.9263, + "step": 3163 + }, + { + "epoch": 2.1192230408573343, + "grad_norm": 1.8016383109059932, + "learning_rate": 2.6766913800052395e-06, + "loss": 1.0536, + "step": 3164 + }, + { + "epoch": 2.119892833221701, + "grad_norm": 2.252273410869708, + "learning_rate": 2.6752335268465714e-06, + "loss": 1.1674, + "step": 3165 + }, + { + "epoch": 2.1205626255860683, + "grad_norm": 1.9379086063158328, + "learning_rate": 2.673775613802412e-06, + "loss": 1.0462, + "step": 3166 + }, + { + "epoch": 2.121232417950435, + "grad_norm": 2.124353028903355, + "learning_rate": 2.6723176413710006e-06, + "loss": 0.7872, + "step": 3167 + }, + { + "epoch": 2.1219022103148024, + "grad_norm": 2.010984641338529, + "learning_rate": 2.6708596100505924e-06, + "loss": 1.219, + "step": 3168 + }, + { + "epoch": 2.1225720026791697, + "grad_norm": 2.008377728175274, + "learning_rate": 2.6694015203394656e-06, + "loss": 0.941, + "step": 3169 + }, + { + "epoch": 2.1232417950435365, + "grad_norm": 2.1417394906596146, + "learning_rate": 2.6679433727359176e-06, + "loss": 1.1337, + "step": 3170 + }, + { + "epoch": 2.1239115874079038, + "grad_norm": 1.9043937222771792, + "learning_rate": 2.6664851677382658e-06, + "loss": 1.2054, + "step": 3171 + }, + { + "epoch": 2.1245813797722706, + "grad_norm": 2.532074988651399, + "learning_rate": 2.6650269058448476e-06, + "loss": 0.9282, + "step": 3172 + }, + { + "epoch": 2.125251172136638, + "grad_norm": 2.189532101347387, + "learning_rate": 2.663568587554018e-06, + "loss": 1.1319, + "step": 3173 + }, + { + "epoch": 2.1259209645010047, + "grad_norm": 1.8103786939600772, + "learning_rate": 2.662110213364153e-06, + "loss": 1.1192, + "step": 3174 + }, + { + "epoch": 2.126590756865372, + "grad_norm": 1.7618059517256879, + "learning_rate": 2.660651783773648e-06, + "loss": 1.1189, + "step": 3175 + }, + { + "epoch": 2.1272605492297387, + "grad_norm": 2.0442337150460337, + "learning_rate": 2.6591932992809153e-06, + "loss": 1.0544, + "step": 3176 + }, + { + "epoch": 2.127930341594106, + "grad_norm": 2.1472490573885086, + "learning_rate": 2.6577347603843885e-06, + "loss": 1.0406, + "step": 3177 + }, + { + "epoch": 2.128600133958473, + "grad_norm": 2.26051911390189, + "learning_rate": 2.6562761675825185e-06, + "loss": 1.0603, + "step": 3178 + }, + { + "epoch": 2.12926992632284, + "grad_norm": 1.9454430805188936, + "learning_rate": 2.6548175213737727e-06, + "loss": 1.1745, + "step": 3179 + }, + { + "epoch": 2.129939718687207, + "grad_norm": 1.9540211347196068, + "learning_rate": 2.6533588222566415e-06, + "loss": 1.1043, + "step": 3180 + }, + { + "epoch": 2.130609511051574, + "grad_norm": 1.882355771600466, + "learning_rate": 2.6519000707296283e-06, + "loss": 1.0333, + "step": 3181 + }, + { + "epoch": 2.131279303415941, + "grad_norm": 2.323816403801207, + "learning_rate": 
2.6504412672912594e-06, + "loss": 0.9915, + "step": 3182 + }, + { + "epoch": 2.1319490957803082, + "grad_norm": 1.9759498593917189, + "learning_rate": 2.6489824124400727e-06, + "loss": 1.1443, + "step": 3183 + }, + { + "epoch": 2.132618888144675, + "grad_norm": 1.9466620792891383, + "learning_rate": 2.6475235066746304e-06, + "loss": 1.0305, + "step": 3184 + }, + { + "epoch": 2.1332886805090423, + "grad_norm": 1.7878047158787456, + "learning_rate": 2.6460645504935063e-06, + "loss": 0.9117, + "step": 3185 + }, + { + "epoch": 2.133958472873409, + "grad_norm": 1.6221653878649267, + "learning_rate": 2.6446055443952957e-06, + "loss": 0.8995, + "step": 3186 + }, + { + "epoch": 2.1346282652377764, + "grad_norm": 3.501951497336738, + "learning_rate": 2.643146488878607e-06, + "loss": 1.2311, + "step": 3187 + }, + { + "epoch": 2.135298057602143, + "grad_norm": 2.0211007859104666, + "learning_rate": 2.6416873844420703e-06, + "loss": 0.9941, + "step": 3188 + }, + { + "epoch": 2.1359678499665105, + "grad_norm": 1.8556408200409127, + "learning_rate": 2.640228231584329e-06, + "loss": 0.9502, + "step": 3189 + }, + { + "epoch": 2.1366376423308773, + "grad_norm": 2.055029635511449, + "learning_rate": 2.638769030804043e-06, + "loss": 1.1471, + "step": 3190 + }, + { + "epoch": 2.1373074346952445, + "grad_norm": 1.8952254257831531, + "learning_rate": 2.6373097825998906e-06, + "loss": 0.9378, + "step": 3191 + }, + { + "epoch": 2.1379772270596114, + "grad_norm": 1.978771300172139, + "learning_rate": 2.6358504874705638e-06, + "loss": 1.2115, + "step": 3192 + }, + { + "epoch": 2.1386470194239786, + "grad_norm": 1.8350987116142896, + "learning_rate": 2.6343911459147732e-06, + "loss": 0.8955, + "step": 3193 + }, + { + "epoch": 2.1393168117883454, + "grad_norm": 2.160486490046842, + "learning_rate": 2.6329317584312436e-06, + "loss": 1.2099, + "step": 3194 + }, + { + "epoch": 2.1399866041527127, + "grad_norm": 2.435112878701795, + "learning_rate": 2.6314723255187158e-06, + "loss": 1.0815, + "step": 3195 + }, + { + "epoch": 2.1406563965170795, + "grad_norm": 2.8197977780807824, + "learning_rate": 2.6300128476759466e-06, + "loss": 0.8405, + "step": 3196 + }, + { + "epoch": 2.1413261888814468, + "grad_norm": 2.147679846029514, + "learning_rate": 2.6285533254017076e-06, + "loss": 0.9817, + "step": 3197 + }, + { + "epoch": 2.141995981245814, + "grad_norm": 3.5531152574786034, + "learning_rate": 2.6270937591947855e-06, + "loss": 1.0661, + "step": 3198 + }, + { + "epoch": 2.142665773610181, + "grad_norm": 1.9903916138129853, + "learning_rate": 2.6256341495539833e-06, + "loss": 1.1003, + "step": 3199 + }, + { + "epoch": 2.1433355659745477, + "grad_norm": 2.495752709549543, + "learning_rate": 2.624174496978117e-06, + "loss": 0.9952, + "step": 3200 + }, + { + "epoch": 2.144005358338915, + "grad_norm": 2.0888610627744786, + "learning_rate": 2.6227148019660183e-06, + "loss": 0.8793, + "step": 3201 + }, + { + "epoch": 2.144675150703282, + "grad_norm": 2.1836683586795385, + "learning_rate": 2.621255065016533e-06, + "loss": 1.0567, + "step": 3202 + }, + { + "epoch": 2.145344943067649, + "grad_norm": 2.2251593047008225, + "learning_rate": 2.619795286628522e-06, + "loss": 1.2113, + "step": 3203 + }, + { + "epoch": 2.1460147354320163, + "grad_norm": 2.9614097227518017, + "learning_rate": 2.6183354673008597e-06, + "loss": 0.872, + "step": 3204 + }, + { + "epoch": 2.146684527796383, + "grad_norm": 2.0828780744152304, + "learning_rate": 2.6168756075324348e-06, + "loss": 1.0105, + "step": 3205 + }, + { + "epoch": 2.1473543201607503, + 
"grad_norm": 1.8709102231493433, + "learning_rate": 2.615415707822149e-06, + "loss": 0.9639, + "step": 3206 + }, + { + "epoch": 2.148024112525117, + "grad_norm": 1.8905210686485034, + "learning_rate": 2.6139557686689192e-06, + "loss": 1.0305, + "step": 3207 + }, + { + "epoch": 2.1486939048894844, + "grad_norm": 3.281444387835858, + "learning_rate": 2.6124957905716746e-06, + "loss": 1.0345, + "step": 3208 + }, + { + "epoch": 2.1493636972538512, + "grad_norm": 2.342213723165655, + "learning_rate": 2.6110357740293574e-06, + "loss": 1.0056, + "step": 3209 + }, + { + "epoch": 2.1500334896182185, + "grad_norm": 2.583871586106714, + "learning_rate": 2.609575719540924e-06, + "loss": 0.951, + "step": 3210 + }, + { + "epoch": 2.1507032819825853, + "grad_norm": 2.082661558669091, + "learning_rate": 2.6081156276053432e-06, + "loss": 1.252, + "step": 3211 + }, + { + "epoch": 2.1513730743469526, + "grad_norm": 2.9114797233041267, + "learning_rate": 2.6066554987215964e-06, + "loss": 0.9936, + "step": 3212 + }, + { + "epoch": 2.1520428667113194, + "grad_norm": 2.4034402105062105, + "learning_rate": 2.605195333388679e-06, + "loss": 0.7748, + "step": 3213 + }, + { + "epoch": 2.1527126590756867, + "grad_norm": 2.2860698194737563, + "learning_rate": 2.603735132105596e-06, + "loss": 1.0438, + "step": 3214 + }, + { + "epoch": 2.1533824514400535, + "grad_norm": 3.1815681948734875, + "learning_rate": 2.602274895371368e-06, + "loss": 0.9886, + "step": 3215 + }, + { + "epoch": 2.1540522438044207, + "grad_norm": 1.9607547229928044, + "learning_rate": 2.600814623685025e-06, + "loss": 0.9596, + "step": 3216 + }, + { + "epoch": 2.1547220361687875, + "grad_norm": 1.8740383446742723, + "learning_rate": 2.599354317545612e-06, + "loss": 0.9701, + "step": 3217 + }, + { + "epoch": 2.155391828533155, + "grad_norm": 2.1596952707958743, + "learning_rate": 2.5978939774521816e-06, + "loss": 0.8381, + "step": 3218 + }, + { + "epoch": 2.1560616208975216, + "grad_norm": 2.223488640742381, + "learning_rate": 2.5964336039038022e-06, + "loss": 1.0882, + "step": 3219 + }, + { + "epoch": 2.156731413261889, + "grad_norm": 2.6209351991322083, + "learning_rate": 2.5949731973995503e-06, + "loss": 0.8352, + "step": 3220 + }, + { + "epoch": 2.1574012056262557, + "grad_norm": 1.766304761155313, + "learning_rate": 2.5935127584385163e-06, + "loss": 1.0504, + "step": 3221 + }, + { + "epoch": 2.158070997990623, + "grad_norm": 3.33387739679668, + "learning_rate": 2.5920522875197982e-06, + "loss": 1.2372, + "step": 3222 + }, + { + "epoch": 2.15874079035499, + "grad_norm": 1.9910896014310118, + "learning_rate": 2.590591785142511e-06, + "loss": 1.154, + "step": 3223 + }, + { + "epoch": 2.159410582719357, + "grad_norm": 2.26685707550873, + "learning_rate": 2.589131251805773e-06, + "loss": 1.1173, + "step": 3224 + }, + { + "epoch": 2.160080375083724, + "grad_norm": 2.813311656285963, + "learning_rate": 2.587670688008719e-06, + "loss": 0.9219, + "step": 3225 + }, + { + "epoch": 2.160750167448091, + "grad_norm": 2.6576883971757694, + "learning_rate": 2.586210094250491e-06, + "loss": 0.8843, + "step": 3226 + }, + { + "epoch": 2.161419959812458, + "grad_norm": 2.0269583608481816, + "learning_rate": 2.5847494710302433e-06, + "loss": 1.0111, + "step": 3227 + }, + { + "epoch": 2.162089752176825, + "grad_norm": 1.8637349823985576, + "learning_rate": 2.5832888188471365e-06, + "loss": 1.1541, + "step": 3228 + }, + { + "epoch": 2.162759544541192, + "grad_norm": 1.891619569550931, + "learning_rate": 2.5818281382003472e-06, + "loss": 1.0264, + "step": 3229 + }, + { + 
"epoch": 2.1634293369055593, + "grad_norm": 1.966642722200216, + "learning_rate": 2.5803674295890553e-06, + "loss": 1.0497, + "step": 3230 + }, + { + "epoch": 2.1640991292699265, + "grad_norm": 2.110446307829133, + "learning_rate": 2.578906693512456e-06, + "loss": 1.1775, + "step": 3231 + }, + { + "epoch": 2.1647689216342934, + "grad_norm": 1.9972918099006407, + "learning_rate": 2.577445930469748e-06, + "loss": 0.9713, + "step": 3232 + }, + { + "epoch": 2.1654387139986606, + "grad_norm": 1.7829387972570234, + "learning_rate": 2.5759851409601447e-06, + "loss": 1.2219, + "step": 3233 + }, + { + "epoch": 2.1661085063630274, + "grad_norm": 2.040708176436989, + "learning_rate": 2.5745243254828657e-06, + "loss": 1.2019, + "step": 3234 + }, + { + "epoch": 2.1667782987273947, + "grad_norm": 1.9468149493729936, + "learning_rate": 2.5730634845371392e-06, + "loss": 1.177, + "step": 3235 + }, + { + "epoch": 2.1674480910917615, + "grad_norm": 2.1440441166773683, + "learning_rate": 2.571602618622204e-06, + "loss": 1.0297, + "step": 3236 + }, + { + "epoch": 2.1681178834561288, + "grad_norm": 2.4257368085315005, + "learning_rate": 2.570141728237306e-06, + "loss": 1.188, + "step": 3237 + }, + { + "epoch": 2.1687876758204956, + "grad_norm": 2.870823375696447, + "learning_rate": 2.568680813881699e-06, + "loss": 1.0872, + "step": 3238 + }, + { + "epoch": 2.169457468184863, + "grad_norm": 2.1713593387532777, + "learning_rate": 2.567219876054646e-06, + "loss": 1.0115, + "step": 3239 + }, + { + "epoch": 2.1701272605492297, + "grad_norm": 3.18626483349508, + "learning_rate": 2.5657589152554183e-06, + "loss": 1.0466, + "step": 3240 + }, + { + "epoch": 2.170797052913597, + "grad_norm": 3.751087655003462, + "learning_rate": 2.5642979319832943e-06, + "loss": 0.9405, + "step": 3241 + }, + { + "epoch": 2.1714668452779637, + "grad_norm": 2.2318208084889513, + "learning_rate": 2.56283692673756e-06, + "loss": 1.266, + "step": 3242 + }, + { + "epoch": 2.172136637642331, + "grad_norm": 1.855112713918352, + "learning_rate": 2.56137590001751e-06, + "loss": 1.058, + "step": 3243 + }, + { + "epoch": 2.172806430006698, + "grad_norm": 3.3961604139548376, + "learning_rate": 2.5599148523224438e-06, + "loss": 1.0599, + "step": 3244 + }, + { + "epoch": 2.173476222371065, + "grad_norm": 1.9522251650732776, + "learning_rate": 2.5584537841516716e-06, + "loss": 0.7653, + "step": 3245 + }, + { + "epoch": 2.174146014735432, + "grad_norm": 1.939075568830834, + "learning_rate": 2.556992696004508e-06, + "loss": 0.9774, + "step": 3246 + }, + { + "epoch": 2.174815807099799, + "grad_norm": 2.169987218353444, + "learning_rate": 2.5555315883802743e-06, + "loss": 0.9919, + "step": 3247 + }, + { + "epoch": 2.175485599464166, + "grad_norm": 2.2093684580936594, + "learning_rate": 2.5540704617783006e-06, + "loss": 0.9808, + "step": 3248 + }, + { + "epoch": 2.1761553918285332, + "grad_norm": 3.015848416811986, + "learning_rate": 2.5526093166979215e-06, + "loss": 1.0032, + "step": 3249 + }, + { + "epoch": 2.1768251841929, + "grad_norm": 2.481613223972553, + "learning_rate": 2.551148153638479e-06, + "loss": 1.1166, + "step": 3250 + }, + { + "epoch": 2.1774949765572673, + "grad_norm": 2.716066765306912, + "learning_rate": 2.54968697309932e-06, + "loss": 0.9712, + "step": 3251 + }, + { + "epoch": 2.178164768921634, + "grad_norm": 2.4227052116989394, + "learning_rate": 2.5482257755797996e-06, + "loss": 0.9497, + "step": 3252 + }, + { + "epoch": 2.1788345612860014, + "grad_norm": 2.6455235247212565, + "learning_rate": 2.5467645615792757e-06, + "loss": 0.7361, 
+ "step": 3253 + }, + { + "epoch": 2.179504353650368, + "grad_norm": 2.2270366613251915, + "learning_rate": 2.5453033315971154e-06, + "loss": 0.8923, + "step": 3254 + }, + { + "epoch": 2.1801741460147355, + "grad_norm": 2.162445424392753, + "learning_rate": 2.543842086132687e-06, + "loss": 1.1243, + "step": 3255 + }, + { + "epoch": 2.1808439383791023, + "grad_norm": 2.0939602310410512, + "learning_rate": 2.542380825685369e-06, + "loss": 1.1097, + "step": 3256 + }, + { + "epoch": 2.1815137307434695, + "grad_norm": 2.0360340206045358, + "learning_rate": 2.540919550754539e-06, + "loss": 1.0489, + "step": 3257 + }, + { + "epoch": 2.1821835231078364, + "grad_norm": 3.035870371031061, + "learning_rate": 2.5394582618395868e-06, + "loss": 1.1493, + "step": 3258 + }, + { + "epoch": 2.1828533154722036, + "grad_norm": 2.700033068977369, + "learning_rate": 2.537996959439901e-06, + "loss": 1.1346, + "step": 3259 + }, + { + "epoch": 2.183523107836571, + "grad_norm": 2.5910456011285476, + "learning_rate": 2.536535644054877e-06, + "loss": 0.8923, + "step": 3260 + }, + { + "epoch": 2.1841929002009377, + "grad_norm": 2.32979779045637, + "learning_rate": 2.5350743161839147e-06, + "loss": 1.1528, + "step": 3261 + }, + { + "epoch": 2.1848626925653045, + "grad_norm": 2.0638044854788706, + "learning_rate": 2.5336129763264196e-06, + "loss": 0.9129, + "step": 3262 + }, + { + "epoch": 2.185532484929672, + "grad_norm": 2.007954610446181, + "learning_rate": 2.5321516249817967e-06, + "loss": 1.0787, + "step": 3263 + }, + { + "epoch": 2.186202277294039, + "grad_norm": 2.935603698291652, + "learning_rate": 2.5306902626494623e-06, + "loss": 1.2344, + "step": 3264 + }, + { + "epoch": 2.186872069658406, + "grad_norm": 2.182698036169385, + "learning_rate": 2.529228889828828e-06, + "loss": 1.061, + "step": 3265 + }, + { + "epoch": 2.187541862022773, + "grad_norm": 2.6430809125485712, + "learning_rate": 2.5277675070193167e-06, + "loss": 1.0648, + "step": 3266 + }, + { + "epoch": 2.18821165438714, + "grad_norm": 1.8568009818094584, + "learning_rate": 2.526306114720349e-06, + "loss": 1.2521, + "step": 3267 + }, + { + "epoch": 2.188881446751507, + "grad_norm": 3.984374915423613, + "learning_rate": 2.5248447134313526e-06, + "loss": 0.9335, + "step": 3268 + }, + { + "epoch": 2.189551239115874, + "grad_norm": 1.7305585556147862, + "learning_rate": 2.523383303651754e-06, + "loss": 1.0806, + "step": 3269 + }, + { + "epoch": 2.1902210314802413, + "grad_norm": 2.3278492176226253, + "learning_rate": 2.5219218858809893e-06, + "loss": 0.9535, + "step": 3270 + }, + { + "epoch": 2.190890823844608, + "grad_norm": 2.7411838889109554, + "learning_rate": 2.520460460618489e-06, + "loss": 0.9181, + "step": 3271 + }, + { + "epoch": 2.1915606162089754, + "grad_norm": 2.3488905710325376, + "learning_rate": 2.5189990283636946e-06, + "loss": 1.1277, + "step": 3272 + }, + { + "epoch": 2.192230408573342, + "grad_norm": 2.637991922420916, + "learning_rate": 2.517537589616042e-06, + "loss": 1.1914, + "step": 3273 + }, + { + "epoch": 2.1929002009377094, + "grad_norm": 1.9593852723294296, + "learning_rate": 2.5160761448749764e-06, + "loss": 0.9929, + "step": 3274 + }, + { + "epoch": 2.1935699933020762, + "grad_norm": 1.8699167784935349, + "learning_rate": 2.514614694639939e-06, + "loss": 0.8958, + "step": 3275 + }, + { + "epoch": 2.1942397856664435, + "grad_norm": 3.629807762324002, + "learning_rate": 2.513153239410378e-06, + "loss": 1.0221, + "step": 3276 + }, + { + "epoch": 2.1949095780308103, + "grad_norm": 1.7403662646344986, + "learning_rate": 
2.511691779685739e-06, + "loss": 0.9841, + "step": 3277 + }, + { + "epoch": 2.1955793703951776, + "grad_norm": 4.780874986003107, + "learning_rate": 2.510230315965473e-06, + "loss": 1.1677, + "step": 3278 + }, + { + "epoch": 2.1962491627595444, + "grad_norm": 1.7646600257016511, + "learning_rate": 2.5087688487490296e-06, + "loss": 1.0852, + "step": 3279 + }, + { + "epoch": 2.1969189551239117, + "grad_norm": 1.798079244293413, + "learning_rate": 2.5073073785358606e-06, + "loss": 1.1484, + "step": 3280 + }, + { + "epoch": 2.1975887474882785, + "grad_norm": 2.2367508070505613, + "learning_rate": 2.505845905825418e-06, + "loss": 1.0744, + "step": 3281 + }, + { + "epoch": 2.1982585398526457, + "grad_norm": 2.346443229243735, + "learning_rate": 2.5043844311171566e-06, + "loss": 1.0846, + "step": 3282 + }, + { + "epoch": 2.1989283322170126, + "grad_norm": 1.919327240395832, + "learning_rate": 2.5029229549105298e-06, + "loss": 1.0506, + "step": 3283 + }, + { + "epoch": 2.19959812458138, + "grad_norm": 2.369466547146716, + "learning_rate": 2.5014614777049927e-06, + "loss": 1.1255, + "step": 3284 + }, + { + "epoch": 2.2002679169457466, + "grad_norm": 2.3930519420279452, + "learning_rate": 2.5e-06, + "loss": 1.0567, + "step": 3285 + }, + { + "epoch": 2.200937709310114, + "grad_norm": 1.980378500278285, + "learning_rate": 2.4985385222950077e-06, + "loss": 1.0693, + "step": 3286 + }, + { + "epoch": 2.2016075016744807, + "grad_norm": 2.113122688230368, + "learning_rate": 2.4970770450894707e-06, + "loss": 1.1764, + "step": 3287 + }, + { + "epoch": 2.202277294038848, + "grad_norm": 2.161825313652524, + "learning_rate": 2.4956155688828442e-06, + "loss": 1.1351, + "step": 3288 + }, + { + "epoch": 2.2029470864032152, + "grad_norm": 2.529114640645506, + "learning_rate": 2.4941540941745827e-06, + "loss": 1.0087, + "step": 3289 + }, + { + "epoch": 2.203616878767582, + "grad_norm": 2.6676762009389132, + "learning_rate": 2.4926926214641407e-06, + "loss": 1.1324, + "step": 3290 + }, + { + "epoch": 2.204286671131949, + "grad_norm": 2.2957636073012266, + "learning_rate": 2.4912311512509708e-06, + "loss": 1.0134, + "step": 3291 + }, + { + "epoch": 2.204956463496316, + "grad_norm": 2.0425620568290843, + "learning_rate": 2.4897696840345276e-06, + "loss": 1.013, + "step": 3292 + }, + { + "epoch": 2.2056262558606834, + "grad_norm": 1.8890506874285222, + "learning_rate": 2.4883082203142612e-06, + "loss": 0.9976, + "step": 3293 + }, + { + "epoch": 2.20629604822505, + "grad_norm": 2.1362992514423906, + "learning_rate": 2.4868467605896225e-06, + "loss": 1.1488, + "step": 3294 + }, + { + "epoch": 2.2069658405894175, + "grad_norm": 2.3829244359529955, + "learning_rate": 2.4853853053600613e-06, + "loss": 1.1191, + "step": 3295 + }, + { + "epoch": 2.2076356329537843, + "grad_norm": 1.8710927349581343, + "learning_rate": 2.483923855125025e-06, + "loss": 1.0696, + "step": 3296 + }, + { + "epoch": 2.2083054253181515, + "grad_norm": 4.384251494016155, + "learning_rate": 2.4824624103839585e-06, + "loss": 0.9155, + "step": 3297 + }, + { + "epoch": 2.2089752176825184, + "grad_norm": 2.591238493972614, + "learning_rate": 2.4810009716363063e-06, + "loss": 1.0256, + "step": 3298 + }, + { + "epoch": 2.2096450100468856, + "grad_norm": 2.2469116119941708, + "learning_rate": 2.479539539381511e-06, + "loss": 0.9671, + "step": 3299 + }, + { + "epoch": 2.2103148024112524, + "grad_norm": 2.241590284502151, + "learning_rate": 2.478078114119012e-06, + "loss": 1.1211, + "step": 3300 + }, + { + "epoch": 2.2109845947756197, + "grad_norm": 
2.253279396194031, + "learning_rate": 2.4766166963482463e-06, + "loss": 0.8339, + "step": 3301 + }, + { + "epoch": 2.2116543871399865, + "grad_norm": 2.1934560362745747, + "learning_rate": 2.4751552865686486e-06, + "loss": 0.867, + "step": 3302 + }, + { + "epoch": 2.212324179504354, + "grad_norm": 2.409304376965969, + "learning_rate": 2.473693885279651e-06, + "loss": 1.1477, + "step": 3303 + }, + { + "epoch": 2.2129939718687206, + "grad_norm": 2.54667478012424, + "learning_rate": 2.4722324929806845e-06, + "loss": 1.0942, + "step": 3304 + }, + { + "epoch": 2.213663764233088, + "grad_norm": 2.0752734260031818, + "learning_rate": 2.470771110171173e-06, + "loss": 1.0768, + "step": 3305 + }, + { + "epoch": 2.2143335565974547, + "grad_norm": 2.604311695167349, + "learning_rate": 2.4693097373505385e-06, + "loss": 0.9577, + "step": 3306 + }, + { + "epoch": 2.215003348961822, + "grad_norm": 1.986981755112157, + "learning_rate": 2.467848375018203e-06, + "loss": 1.1504, + "step": 3307 + }, + { + "epoch": 2.2156731413261888, + "grad_norm": 1.9884174277299238, + "learning_rate": 2.466387023673582e-06, + "loss": 1.1437, + "step": 3308 + }, + { + "epoch": 2.216342933690556, + "grad_norm": 2.7049871370004572, + "learning_rate": 2.4649256838160857e-06, + "loss": 1.0159, + "step": 3309 + }, + { + "epoch": 2.217012726054923, + "grad_norm": 5.359668727342027, + "learning_rate": 2.4634643559451234e-06, + "loss": 1.2869, + "step": 3310 + }, + { + "epoch": 2.21768251841929, + "grad_norm": 2.0756693876854926, + "learning_rate": 2.4620030405600996e-06, + "loss": 1.1344, + "step": 3311 + }, + { + "epoch": 2.218352310783657, + "grad_norm": 2.079937392488247, + "learning_rate": 2.460541738160414e-06, + "loss": 0.8826, + "step": 3312 + }, + { + "epoch": 2.219022103148024, + "grad_norm": 1.8985575575758804, + "learning_rate": 2.4590804492454613e-06, + "loss": 1.1418, + "step": 3313 + }, + { + "epoch": 2.219691895512391, + "grad_norm": 2.243678705807565, + "learning_rate": 2.457619174314632e-06, + "loss": 1.1855, + "step": 3314 + }, + { + "epoch": 2.2203616878767582, + "grad_norm": 2.4314718500470724, + "learning_rate": 2.4561579138673134e-06, + "loss": 0.9592, + "step": 3315 + }, + { + "epoch": 2.221031480241125, + "grad_norm": 2.0420316030792782, + "learning_rate": 2.4546966684028863e-06, + "loss": 0.9749, + "step": 3316 + }, + { + "epoch": 2.2217012726054923, + "grad_norm": 2.074701688078119, + "learning_rate": 2.4532354384207247e-06, + "loss": 1.0394, + "step": 3317 + }, + { + "epoch": 2.222371064969859, + "grad_norm": 2.182035609880066, + "learning_rate": 2.4517742244202013e-06, + "loss": 1.15, + "step": 3318 + }, + { + "epoch": 2.2230408573342264, + "grad_norm": 1.9217903159143628, + "learning_rate": 2.4503130269006803e-06, + "loss": 1.1108, + "step": 3319 + }, + { + "epoch": 2.2237106496985932, + "grad_norm": 1.7574647427357752, + "learning_rate": 2.4488518463615225e-06, + "loss": 1.1402, + "step": 3320 + }, + { + "epoch": 2.2243804420629605, + "grad_norm": 2.208725249999245, + "learning_rate": 2.4473906833020797e-06, + "loss": 1.1465, + "step": 3321 + }, + { + "epoch": 2.2250502344273277, + "grad_norm": 2.118116226596013, + "learning_rate": 2.4459295382217e-06, + "loss": 0.8904, + "step": 3322 + }, + { + "epoch": 2.2257200267916946, + "grad_norm": 1.9772580606954142, + "learning_rate": 2.444468411619726e-06, + "loss": 0.9124, + "step": 3323 + }, + { + "epoch": 2.226389819156062, + "grad_norm": 2.5681849575498115, + "learning_rate": 2.4430073039954925e-06, + "loss": 1.041, + "step": 3324 + }, + { + "epoch": 
2.2270596115204286, + "grad_norm": 3.6797418196410936, + "learning_rate": 2.441546215848329e-06, + "loss": 1.0431, + "step": 3325 + }, + { + "epoch": 2.227729403884796, + "grad_norm": 2.7621160732535524, + "learning_rate": 2.4400851476775566e-06, + "loss": 1.0702, + "step": 3326 + }, + { + "epoch": 2.2283991962491627, + "grad_norm": 2.1065584879287877, + "learning_rate": 2.438624099982491e-06, + "loss": 1.0402, + "step": 3327 + }, + { + "epoch": 2.22906898861353, + "grad_norm": 3.2734457748715533, + "learning_rate": 2.4371630732624404e-06, + "loss": 1.1636, + "step": 3328 + }, + { + "epoch": 2.229738780977897, + "grad_norm": 2.2966235655462817, + "learning_rate": 2.4357020680167066e-06, + "loss": 1.1608, + "step": 3329 + }, + { + "epoch": 2.230408573342264, + "grad_norm": 2.118033306122902, + "learning_rate": 2.4342410847445825e-06, + "loss": 1.1646, + "step": 3330 + }, + { + "epoch": 2.231078365706631, + "grad_norm": 2.2067738115766917, + "learning_rate": 2.4327801239453547e-06, + "loss": 1.0138, + "step": 3331 + }, + { + "epoch": 2.231748158070998, + "grad_norm": 2.1673858085470603, + "learning_rate": 2.4313191861183015e-06, + "loss": 1.054, + "step": 3332 + }, + { + "epoch": 2.232417950435365, + "grad_norm": 2.081084085781838, + "learning_rate": 2.429858271762695e-06, + "loss": 0.9889, + "step": 3333 + }, + { + "epoch": 2.233087742799732, + "grad_norm": 1.8151107761741023, + "learning_rate": 2.4283973813777962e-06, + "loss": 1.06, + "step": 3334 + }, + { + "epoch": 2.233757535164099, + "grad_norm": 2.8304591091957487, + "learning_rate": 2.426936515462861e-06, + "loss": 0.9439, + "step": 3335 + }, + { + "epoch": 2.2344273275284663, + "grad_norm": 2.2950286745524315, + "learning_rate": 2.4254756745171348e-06, + "loss": 1.1754, + "step": 3336 + }, + { + "epoch": 2.235097119892833, + "grad_norm": 3.0214291638159616, + "learning_rate": 2.424014859039856e-06, + "loss": 1.0783, + "step": 3337 + }, + { + "epoch": 2.2357669122572004, + "grad_norm": 2.2699604874144113, + "learning_rate": 2.422554069530253e-06, + "loss": 1.1609, + "step": 3338 + }, + { + "epoch": 2.236436704621567, + "grad_norm": 1.9063264119222938, + "learning_rate": 2.421093306487545e-06, + "loss": 1.0248, + "step": 3339 + }, + { + "epoch": 2.2371064969859344, + "grad_norm": 1.9247291115616263, + "learning_rate": 2.4196325704109447e-06, + "loss": 1.1054, + "step": 3340 + }, + { + "epoch": 2.2377762893503013, + "grad_norm": 2.1621655863929745, + "learning_rate": 2.418171861799654e-06, + "loss": 1.041, + "step": 3341 + }, + { + "epoch": 2.2384460817146685, + "grad_norm": 2.444228524291366, + "learning_rate": 2.416711181152864e-06, + "loss": 0.9006, + "step": 3342 + }, + { + "epoch": 2.2391158740790353, + "grad_norm": 1.755819025551005, + "learning_rate": 2.4152505289697575e-06, + "loss": 1.0036, + "step": 3343 + }, + { + "epoch": 2.2397856664434026, + "grad_norm": 2.759550097007207, + "learning_rate": 2.413789905749509e-06, + "loss": 1.1273, + "step": 3344 + }, + { + "epoch": 2.2404554588077694, + "grad_norm": 2.0116077308434495, + "learning_rate": 2.4123293119912817e-06, + "loss": 1.1619, + "step": 3345 + }, + { + "epoch": 2.2411252511721367, + "grad_norm": 2.0873120377430845, + "learning_rate": 2.4108687481942274e-06, + "loss": 1.1207, + "step": 3346 + }, + { + "epoch": 2.2417950435365035, + "grad_norm": 2.429230997323037, + "learning_rate": 2.4094082148574897e-06, + "loss": 0.9962, + "step": 3347 + }, + { + "epoch": 2.2424648359008708, + "grad_norm": 2.1671418837749585, + "learning_rate": 2.4079477124802013e-06, + "loss": 
0.7932, + "step": 3348 + }, + { + "epoch": 2.2431346282652376, + "grad_norm": 2.16686050480446, + "learning_rate": 2.406487241561485e-06, + "loss": 1.0343, + "step": 3349 + }, + { + "epoch": 2.243804420629605, + "grad_norm": 2.5231810415666396, + "learning_rate": 2.4050268026004505e-06, + "loss": 1.1871, + "step": 3350 + }, + { + "epoch": 2.244474212993972, + "grad_norm": 2.293641068062449, + "learning_rate": 2.403566396096198e-06, + "loss": 1.2816, + "step": 3351 + }, + { + "epoch": 2.245144005358339, + "grad_norm": 2.1280388282894442, + "learning_rate": 2.4021060225478184e-06, + "loss": 1.1015, + "step": 3352 + }, + { + "epoch": 2.2458137977227057, + "grad_norm": 1.903697169932585, + "learning_rate": 2.400645682454389e-06, + "loss": 1.0176, + "step": 3353 + }, + { + "epoch": 2.246483590087073, + "grad_norm": 1.840716270979947, + "learning_rate": 2.3991853763149757e-06, + "loss": 1.16, + "step": 3354 + }, + { + "epoch": 2.2471533824514403, + "grad_norm": 2.145572527743755, + "learning_rate": 2.3977251046286326e-06, + "loss": 0.8836, + "step": 3355 + }, + { + "epoch": 2.247823174815807, + "grad_norm": 1.7580545978667488, + "learning_rate": 2.3962648678944045e-06, + "loss": 1.0036, + "step": 3356 + }, + { + "epoch": 2.2484929671801743, + "grad_norm": 1.8998916589917676, + "learning_rate": 2.3948046666113216e-06, + "loss": 0.9355, + "step": 3357 + }, + { + "epoch": 2.249162759544541, + "grad_norm": 2.4012204532057133, + "learning_rate": 2.3933445012784045e-06, + "loss": 1.0477, + "step": 3358 + }, + { + "epoch": 2.2498325519089084, + "grad_norm": 2.903015522709372, + "learning_rate": 2.3918843723946576e-06, + "loss": 1.0536, + "step": 3359 + }, + { + "epoch": 2.2505023442732752, + "grad_norm": 4.631251503576025, + "learning_rate": 2.390424280459077e-06, + "loss": 0.9765, + "step": 3360 + }, + { + "epoch": 2.2511721366376425, + "grad_norm": 2.3598788815226017, + "learning_rate": 2.3889642259706426e-06, + "loss": 1.0858, + "step": 3361 + }, + { + "epoch": 2.2518419290020093, + "grad_norm": 2.4886475489372866, + "learning_rate": 2.3875042094283267e-06, + "loss": 1.165, + "step": 3362 + }, + { + "epoch": 2.2525117213663766, + "grad_norm": 2.146973691720366, + "learning_rate": 2.386044231331081e-06, + "loss": 0.9762, + "step": 3363 + }, + { + "epoch": 2.2531815137307434, + "grad_norm": 2.406466215946071, + "learning_rate": 2.3845842921778513e-06, + "loss": 1.0842, + "step": 3364 + }, + { + "epoch": 2.2538513060951106, + "grad_norm": 2.8243856004891854, + "learning_rate": 2.3831243924675652e-06, + "loss": 1.0848, + "step": 3365 + }, + { + "epoch": 2.2545210984594775, + "grad_norm": 2.082457421002856, + "learning_rate": 2.3816645326991415e-06, + "loss": 1.1065, + "step": 3366 + }, + { + "epoch": 2.2551908908238447, + "grad_norm": 1.9200668881758043, + "learning_rate": 2.3802047133714783e-06, + "loss": 0.9801, + "step": 3367 + }, + { + "epoch": 2.2558606831882115, + "grad_norm": 1.8979423863291798, + "learning_rate": 2.3787449349834674e-06, + "loss": 1.1362, + "step": 3368 + }, + { + "epoch": 2.256530475552579, + "grad_norm": 2.0286429759701456, + "learning_rate": 2.377285198033982e-06, + "loss": 0.9895, + "step": 3369 + }, + { + "epoch": 2.2572002679169456, + "grad_norm": 2.106927596678285, + "learning_rate": 2.375825503021884e-06, + "loss": 1.1367, + "step": 3370 + }, + { + "epoch": 2.257870060281313, + "grad_norm": 2.0160831337343756, + "learning_rate": 2.3743658504460176e-06, + "loss": 1.0674, + "step": 3371 + }, + { + "epoch": 2.2585398526456797, + "grad_norm": 1.8425988995554845, + 
"learning_rate": 2.372906240805215e-06, + "loss": 0.9292, + "step": 3372 + }, + { + "epoch": 2.259209645010047, + "grad_norm": 2.1198929678584206, + "learning_rate": 2.371446674598293e-06, + "loss": 1.0181, + "step": 3373 + }, + { + "epoch": 2.2598794373744138, + "grad_norm": 3.0698176371208965, + "learning_rate": 2.3699871523240542e-06, + "loss": 1.1222, + "step": 3374 + }, + { + "epoch": 2.260549229738781, + "grad_norm": 3.0440776035632764, + "learning_rate": 2.368527674481285e-06, + "loss": 0.9133, + "step": 3375 + }, + { + "epoch": 2.261219022103148, + "grad_norm": 2.172505336352903, + "learning_rate": 2.3670682415687573e-06, + "loss": 1.0204, + "step": 3376 + }, + { + "epoch": 2.261888814467515, + "grad_norm": 2.194395087855196, + "learning_rate": 2.3656088540852268e-06, + "loss": 1.2351, + "step": 3377 + }, + { + "epoch": 2.262558606831882, + "grad_norm": 1.9854440839244567, + "learning_rate": 2.3641495125294367e-06, + "loss": 1.1107, + "step": 3378 + }, + { + "epoch": 2.263228399196249, + "grad_norm": 2.5378039384715763, + "learning_rate": 2.3626902174001102e-06, + "loss": 0.9428, + "step": 3379 + }, + { + "epoch": 2.2638981915606164, + "grad_norm": 2.0789965031135247, + "learning_rate": 2.3612309691959576e-06, + "loss": 1.1595, + "step": 3380 + }, + { + "epoch": 2.2645679839249833, + "grad_norm": 1.9707245518847898, + "learning_rate": 2.359771768415671e-06, + "loss": 1.0584, + "step": 3381 + }, + { + "epoch": 2.26523777628935, + "grad_norm": 2.4531772608753113, + "learning_rate": 2.35831261555793e-06, + "loss": 1.0366, + "step": 3382 + }, + { + "epoch": 2.2659075686537173, + "grad_norm": 2.9058388743596146, + "learning_rate": 2.3568535111213932e-06, + "loss": 0.97, + "step": 3383 + }, + { + "epoch": 2.2665773610180846, + "grad_norm": 1.8300309694155934, + "learning_rate": 2.3553944556047055e-06, + "loss": 1.1986, + "step": 3384 + }, + { + "epoch": 2.2672471533824514, + "grad_norm": 1.9924502026391104, + "learning_rate": 2.353935449506494e-06, + "loss": 1.0138, + "step": 3385 + }, + { + "epoch": 2.2679169457468187, + "grad_norm": 2.162673810953629, + "learning_rate": 2.352476493325371e-06, + "loss": 1.1828, + "step": 3386 + }, + { + "epoch": 2.2685867381111855, + "grad_norm": 2.070499244927028, + "learning_rate": 2.351017587559928e-06, + "loss": 0.945, + "step": 3387 + }, + { + "epoch": 2.2692565304755528, + "grad_norm": 3.9727099955962664, + "learning_rate": 2.3495587327087414e-06, + "loss": 1.2323, + "step": 3388 + }, + { + "epoch": 2.2699263228399196, + "grad_norm": 6.538829970809265, + "learning_rate": 2.3480999292703717e-06, + "loss": 1.0299, + "step": 3389 + }, + { + "epoch": 2.270596115204287, + "grad_norm": 2.333610827195841, + "learning_rate": 2.3466411777433594e-06, + "loss": 0.8651, + "step": 3390 + }, + { + "epoch": 2.2712659075686537, + "grad_norm": 2.4881345678596594, + "learning_rate": 2.345182478626228e-06, + "loss": 0.9698, + "step": 3391 + }, + { + "epoch": 2.271935699933021, + "grad_norm": 1.9818569267084432, + "learning_rate": 2.343723832417483e-06, + "loss": 0.925, + "step": 3392 + }, + { + "epoch": 2.2726054922973877, + "grad_norm": 1.9825097028257206, + "learning_rate": 2.342265239615612e-06, + "loss": 1.2452, + "step": 3393 + }, + { + "epoch": 2.273275284661755, + "grad_norm": 2.2676142165204713, + "learning_rate": 2.340806700719085e-06, + "loss": 0.9925, + "step": 3394 + }, + { + "epoch": 2.273945077026122, + "grad_norm": 2.3388349368459496, + "learning_rate": 2.3393482162263537e-06, + "loss": 1.1155, + "step": 3395 + }, + { + "epoch": 2.274614869390489, + 
"grad_norm": 2.3643551128986533, + "learning_rate": 2.337889786635848e-06, + "loss": 0.9741, + "step": 3396 + }, + { + "epoch": 2.275284661754856, + "grad_norm": 2.1764576936330844, + "learning_rate": 2.336431412445983e-06, + "loss": 1.1425, + "step": 3397 + }, + { + "epoch": 2.275954454119223, + "grad_norm": 3.330329999436905, + "learning_rate": 2.334973094155153e-06, + "loss": 1.0685, + "step": 3398 + }, + { + "epoch": 2.27662424648359, + "grad_norm": 2.01181039921724, + "learning_rate": 2.333514832261735e-06, + "loss": 1.1041, + "step": 3399 + }, + { + "epoch": 2.2772940388479572, + "grad_norm": 2.2938167103787426, + "learning_rate": 2.332056627264083e-06, + "loss": 1.0725, + "step": 3400 + }, + { + "epoch": 2.277963831212324, + "grad_norm": 1.9084551847502207, + "learning_rate": 2.330598479660535e-06, + "loss": 1.018, + "step": 3401 + }, + { + "epoch": 2.2786336235766913, + "grad_norm": 2.5799621424947574, + "learning_rate": 2.329140389949408e-06, + "loss": 1.123, + "step": 3402 + }, + { + "epoch": 2.279303415941058, + "grad_norm": 2.7605192482505427, + "learning_rate": 2.327682358629001e-06, + "loss": 1.0192, + "step": 3403 + }, + { + "epoch": 2.2799732083054254, + "grad_norm": 2.2999112970696887, + "learning_rate": 2.3262243861975882e-06, + "loss": 0.8391, + "step": 3404 + }, + { + "epoch": 2.280643000669792, + "grad_norm": 1.9789790166990093, + "learning_rate": 2.3247664731534294e-06, + "loss": 1.1728, + "step": 3405 + }, + { + "epoch": 2.2813127930341595, + "grad_norm": 2.02751608170775, + "learning_rate": 2.323308619994761e-06, + "loss": 1.0049, + "step": 3406 + }, + { + "epoch": 2.2819825853985263, + "grad_norm": 2.4753345172605385, + "learning_rate": 2.3218508272198006e-06, + "loss": 1.1058, + "step": 3407 + }, + { + "epoch": 2.2826523777628935, + "grad_norm": 2.479330742800924, + "learning_rate": 2.320393095326742e-06, + "loss": 0.9167, + "step": 3408 + }, + { + "epoch": 2.283322170127261, + "grad_norm": 2.1047189492483884, + "learning_rate": 2.318935424813762e-06, + "loss": 1.1486, + "step": 3409 + }, + { + "epoch": 2.2839919624916276, + "grad_norm": 1.8613259856829274, + "learning_rate": 2.317477816179014e-06, + "loss": 1.0376, + "step": 3410 + }, + { + "epoch": 2.2846617548559944, + "grad_norm": 1.9774252487545358, + "learning_rate": 2.3160202699206326e-06, + "loss": 1.1617, + "step": 3411 + }, + { + "epoch": 2.2853315472203617, + "grad_norm": 2.3574469555098077, + "learning_rate": 2.314562786536727e-06, + "loss": 1.0915, + "step": 3412 + }, + { + "epoch": 2.286001339584729, + "grad_norm": 8.59313557233553, + "learning_rate": 2.313105366525389e-06, + "loss": 0.8465, + "step": 3413 + }, + { + "epoch": 2.2866711319490958, + "grad_norm": 2.115550067812702, + "learning_rate": 2.3116480103846863e-06, + "loss": 1.2029, + "step": 3414 + }, + { + "epoch": 2.2873409243134626, + "grad_norm": 1.9678067989734958, + "learning_rate": 2.3101907186126673e-06, + "loss": 0.8781, + "step": 3415 + }, + { + "epoch": 2.28801071667783, + "grad_norm": 2.494724626322386, + "learning_rate": 2.308733491707355e-06, + "loss": 1.0753, + "step": 3416 + }, + { + "epoch": 2.288680509042197, + "grad_norm": 1.9147053023769542, + "learning_rate": 2.3072763301667535e-06, + "loss": 0.9141, + "step": 3417 + }, + { + "epoch": 2.289350301406564, + "grad_norm": 2.3002090101785573, + "learning_rate": 2.305819234488841e-06, + "loss": 0.9811, + "step": 3418 + }, + { + "epoch": 2.290020093770931, + "grad_norm": 2.4781771615144077, + "learning_rate": 2.304362205171578e-06, + "loss": 1.1755, + "step": 3419 + }, + { + 
"epoch": 2.290689886135298, + "grad_norm": 2.264234432464779, + "learning_rate": 2.3029052427128966e-06, + "loss": 1.1595, + "step": 3420 + }, + { + "epoch": 2.2913596784996653, + "grad_norm": 2.419160071286964, + "learning_rate": 2.301448347610711e-06, + "loss": 0.9684, + "step": 3421 + }, + { + "epoch": 2.292029470864032, + "grad_norm": 2.59651553475098, + "learning_rate": 2.299991520362909e-06, + "loss": 0.8408, + "step": 3422 + }, + { + "epoch": 2.2926992632283993, + "grad_norm": 3.4348342112680177, + "learning_rate": 2.2985347614673588e-06, + "loss": 0.9486, + "step": 3423 + }, + { + "epoch": 2.293369055592766, + "grad_norm": 2.967485380505878, + "learning_rate": 2.2970780714219005e-06, + "loss": 0.9287, + "step": 3424 + }, + { + "epoch": 2.2940388479571334, + "grad_norm": 2.3058060483354685, + "learning_rate": 2.295621450724355e-06, + "loss": 1.0313, + "step": 3425 + }, + { + "epoch": 2.2947086403215002, + "grad_norm": 3.6999354810495375, + "learning_rate": 2.2941648998725153e-06, + "loss": 1.0411, + "step": 3426 + }, + { + "epoch": 2.2953784326858675, + "grad_norm": 2.0007066227961254, + "learning_rate": 2.2927084193641566e-06, + "loss": 1.1801, + "step": 3427 + }, + { + "epoch": 2.2960482250502343, + "grad_norm": 2.3303324920332518, + "learning_rate": 2.291252009697024e-06, + "loss": 1.1757, + "step": 3428 + }, + { + "epoch": 2.2967180174146016, + "grad_norm": 2.313010907689344, + "learning_rate": 2.2897956713688402e-06, + "loss": 0.9714, + "step": 3429 + }, + { + "epoch": 2.2973878097789684, + "grad_norm": 2.3192036964681932, + "learning_rate": 2.2883394048773063e-06, + "loss": 0.9644, + "step": 3430 + }, + { + "epoch": 2.2980576021433357, + "grad_norm": 1.9051664704064584, + "learning_rate": 2.286883210720095e-06, + "loss": 0.978, + "step": 3431 + }, + { + "epoch": 2.2987273945077025, + "grad_norm": 2.2243312774486834, + "learning_rate": 2.2854270893948568e-06, + "loss": 0.9345, + "step": 3432 + }, + { + "epoch": 2.2993971868720697, + "grad_norm": 3.2739286918551858, + "learning_rate": 2.2839710413992155e-06, + "loss": 0.9798, + "step": 3433 + }, + { + "epoch": 2.3000669792364365, + "grad_norm": 3.1076620791355474, + "learning_rate": 2.2825150672307715e-06, + "loss": 1.1864, + "step": 3434 + }, + { + "epoch": 2.300736771600804, + "grad_norm": 2.2058639862578766, + "learning_rate": 2.2810591673871e-06, + "loss": 0.8495, + "step": 3435 + }, + { + "epoch": 2.3014065639651706, + "grad_norm": 2.2503313833256096, + "learning_rate": 2.2796033423657486e-06, + "loss": 1.039, + "step": 3436 + }, + { + "epoch": 2.302076356329538, + "grad_norm": 2.8356778384380608, + "learning_rate": 2.278147592664241e-06, + "loss": 0.9773, + "step": 3437 + }, + { + "epoch": 2.3027461486939047, + "grad_norm": 2.473836760941889, + "learning_rate": 2.2766919187800753e-06, + "loss": 1.0809, + "step": 3438 + }, + { + "epoch": 2.303415941058272, + "grad_norm": 2.6930417829191917, + "learning_rate": 2.2752363212107238e-06, + "loss": 0.9469, + "step": 3439 + }, + { + "epoch": 2.304085733422639, + "grad_norm": 2.2420264560903793, + "learning_rate": 2.2737808004536323e-06, + "loss": 1.2595, + "step": 3440 + }, + { + "epoch": 2.304755525787006, + "grad_norm": 1.951744398335047, + "learning_rate": 2.2723253570062187e-06, + "loss": 0.9957, + "step": 3441 + }, + { + "epoch": 2.3054253181513733, + "grad_norm": 2.2148918202893877, + "learning_rate": 2.270869991365878e-06, + "loss": 1.0889, + "step": 3442 + }, + { + "epoch": 2.30609511051574, + "grad_norm": 2.110999039269792, + "learning_rate": 2.269414704029975e-06, + 
"loss": 1.1674, + "step": 3443 + }, + { + "epoch": 2.306764902880107, + "grad_norm": 1.962659557876729, + "learning_rate": 2.267959495495852e-06, + "loss": 1.1449, + "step": 3444 + }, + { + "epoch": 2.307434695244474, + "grad_norm": 3.311807278378278, + "learning_rate": 2.266504366260819e-06, + "loss": 1.0135, + "step": 3445 + }, + { + "epoch": 2.3081044876088415, + "grad_norm": 1.6738082487772086, + "learning_rate": 2.265049316822163e-06, + "loss": 0.8813, + "step": 3446 + }, + { + "epoch": 2.3087742799732083, + "grad_norm": 2.2088400955908396, + "learning_rate": 2.2635943476771416e-06, + "loss": 1.1391, + "step": 3447 + }, + { + "epoch": 2.3094440723375755, + "grad_norm": 2.0445764497804415, + "learning_rate": 2.2621394593229885e-06, + "loss": 1.1258, + "step": 3448 + }, + { + "epoch": 2.3101138647019424, + "grad_norm": 2.542625830193306, + "learning_rate": 2.260684652256903e-06, + "loss": 0.9341, + "step": 3449 + }, + { + "epoch": 2.3107836570663096, + "grad_norm": 3.2051324519667124, + "learning_rate": 2.2592299269760634e-06, + "loss": 1.0579, + "step": 3450 + }, + { + "epoch": 2.3114534494306764, + "grad_norm": 1.9969414990516945, + "learning_rate": 2.2577752839776167e-06, + "loss": 0.9518, + "step": 3451 + }, + { + "epoch": 2.3121232417950437, + "grad_norm": 2.031352701402326, + "learning_rate": 2.2563207237586834e-06, + "loss": 1.1121, + "step": 3452 + }, + { + "epoch": 2.3127930341594105, + "grad_norm": 2.24419078445787, + "learning_rate": 2.2548662468163525e-06, + "loss": 0.9762, + "step": 3453 + }, + { + "epoch": 2.3134628265237778, + "grad_norm": 2.3215803573716336, + "learning_rate": 2.253411853647689e-06, + "loss": 1.0521, + "step": 3454 + }, + { + "epoch": 2.3141326188881446, + "grad_norm": 2.003296409813259, + "learning_rate": 2.251957544749725e-06, + "loss": 1.1367, + "step": 3455 + }, + { + "epoch": 2.314802411252512, + "grad_norm": 2.0806052232806618, + "learning_rate": 2.2505033206194683e-06, + "loss": 0.9055, + "step": 3456 + }, + { + "epoch": 2.3154722036168787, + "grad_norm": 2.173585959883375, + "learning_rate": 2.2490491817538936e-06, + "loss": 1.0869, + "step": 3457 + }, + { + "epoch": 2.316141995981246, + "grad_norm": 2.6699178070179306, + "learning_rate": 2.2475951286499483e-06, + "loss": 1.013, + "step": 3458 + }, + { + "epoch": 2.3168117883456127, + "grad_norm": 2.0873994604260084, + "learning_rate": 2.24614116180455e-06, + "loss": 1.1854, + "step": 3459 + }, + { + "epoch": 2.31748158070998, + "grad_norm": 2.4489352616564015, + "learning_rate": 2.244687281714589e-06, + "loss": 1.0535, + "step": 3460 + }, + { + "epoch": 2.318151373074347, + "grad_norm": 1.949988400772031, + "learning_rate": 2.2432334888769225e-06, + "loss": 0.975, + "step": 3461 + }, + { + "epoch": 2.318821165438714, + "grad_norm": 2.013722069916682, + "learning_rate": 2.2417797837883796e-06, + "loss": 1.0651, + "step": 3462 + }, + { + "epoch": 2.319490957803081, + "grad_norm": 2.229705980249108, + "learning_rate": 2.240326166945759e-06, + "loss": 1.1505, + "step": 3463 + }, + { + "epoch": 2.320160750167448, + "grad_norm": 2.263207265697346, + "learning_rate": 2.2388726388458313e-06, + "loss": 0.9687, + "step": 3464 + }, + { + "epoch": 2.320830542531815, + "grad_norm": 2.47289178730125, + "learning_rate": 2.2374191999853333e-06, + "loss": 1.0823, + "step": 3465 + }, + { + "epoch": 2.3215003348961822, + "grad_norm": 2.0460218892985305, + "learning_rate": 2.2359658508609737e-06, + "loss": 1.1856, + "step": 3466 + }, + { + "epoch": 2.322170127260549, + "grad_norm": 2.0627656834488923, + 
"learning_rate": 2.2345125919694288e-06, + "loss": 1.1582, + "step": 3467 + }, + { + "epoch": 2.3228399196249163, + "grad_norm": 2.224193619859297, + "learning_rate": 2.233059423807347e-06, + "loss": 0.9993, + "step": 3468 + }, + { + "epoch": 2.323509711989283, + "grad_norm": 2.8550115182577316, + "learning_rate": 2.231606346871343e-06, + "loss": 0.7436, + "step": 3469 + }, + { + "epoch": 2.3241795043536504, + "grad_norm": 2.13248869408282, + "learning_rate": 2.2301533616580006e-06, + "loss": 0.8912, + "step": 3470 + }, + { + "epoch": 2.3248492967180177, + "grad_norm": 2.0386741479001533, + "learning_rate": 2.2287004686638726e-06, + "loss": 1.1811, + "step": 3471 + }, + { + "epoch": 2.3255190890823845, + "grad_norm": 2.1515228555592523, + "learning_rate": 2.227247668385482e-06, + "loss": 0.9945, + "step": 3472 + }, + { + "epoch": 2.3261888814467513, + "grad_norm": 2.171892494869318, + "learning_rate": 2.2257949613193176e-06, + "loss": 1.0473, + "step": 3473 + }, + { + "epoch": 2.3268586738111185, + "grad_norm": 2.2485623572040896, + "learning_rate": 2.224342347961837e-06, + "loss": 1.0424, + "step": 3474 + }, + { + "epoch": 2.327528466175486, + "grad_norm": 3.8833093718214466, + "learning_rate": 2.2228898288094665e-06, + "loss": 0.9151, + "step": 3475 + }, + { + "epoch": 2.3281982585398526, + "grad_norm": 2.2548297781628723, + "learning_rate": 2.2214374043585996e-06, + "loss": 0.9461, + "step": 3476 + }, + { + "epoch": 2.3288680509042194, + "grad_norm": 2.269343318688533, + "learning_rate": 2.219985075105599e-06, + "loss": 1.0838, + "step": 3477 + }, + { + "epoch": 2.3295378432685867, + "grad_norm": 2.1661642950881443, + "learning_rate": 2.218532841546791e-06, + "loss": 0.8525, + "step": 3478 + }, + { + "epoch": 2.330207635632954, + "grad_norm": 2.9618951993451486, + "learning_rate": 2.2170807041784735e-06, + "loss": 0.8985, + "step": 3479 + }, + { + "epoch": 2.330877427997321, + "grad_norm": 2.774584060276532, + "learning_rate": 2.2156286634969097e-06, + "loss": 0.9963, + "step": 3480 + }, + { + "epoch": 2.331547220361688, + "grad_norm": 2.3625038577918662, + "learning_rate": 2.21417671999833e-06, + "loss": 1.0257, + "step": 3481 + }, + { + "epoch": 2.332217012726055, + "grad_norm": 1.9354284350775823, + "learning_rate": 2.2127248741789296e-06, + "loss": 0.9448, + "step": 3482 + }, + { + "epoch": 2.332886805090422, + "grad_norm": 2.0512624260600463, + "learning_rate": 2.211273126534874e-06, + "loss": 1.1801, + "step": 3483 + }, + { + "epoch": 2.333556597454789, + "grad_norm": 1.9429538445689172, + "learning_rate": 2.209821477562292e-06, + "loss": 1.0855, + "step": 3484 + }, + { + "epoch": 2.334226389819156, + "grad_norm": 1.9753287520039606, + "learning_rate": 2.208369927757282e-06, + "loss": 1.1066, + "step": 3485 + }, + { + "epoch": 2.334896182183523, + "grad_norm": 2.236142988998061, + "learning_rate": 2.2069184776159035e-06, + "loss": 1.0847, + "step": 3486 + }, + { + "epoch": 2.3355659745478903, + "grad_norm": 2.5332704305142095, + "learning_rate": 2.205467127634187e-06, + "loss": 1.1795, + "step": 3487 + }, + { + "epoch": 2.336235766912257, + "grad_norm": 2.243134232341826, + "learning_rate": 2.2040158783081254e-06, + "loss": 1.0377, + "step": 3488 + }, + { + "epoch": 2.3369055592766244, + "grad_norm": 2.6764105370972504, + "learning_rate": 2.202564730133681e-06, + "loss": 0.8679, + "step": 3489 + }, + { + "epoch": 2.337575351640991, + "grad_norm": 2.2652152495754247, + "learning_rate": 2.2011136836067748e-06, + "loss": 0.9198, + "step": 3490 + }, + { + "epoch": 2.3382451440053584, 
+ "grad_norm": 2.1249414334280767, + "learning_rate": 2.1996627392233007e-06, + "loss": 0.9997, + "step": 3491 + }, + { + "epoch": 2.3389149363697253, + "grad_norm": 1.9519330451631278, + "learning_rate": 2.198211897479112e-06, + "loss": 1.0534, + "step": 3492 + }, + { + "epoch": 2.3395847287340925, + "grad_norm": 2.237300233290503, + "learning_rate": 2.1967611588700326e-06, + "loss": 1.1313, + "step": 3493 + }, + { + "epoch": 2.3402545210984593, + "grad_norm": 1.9955522671858583, + "learning_rate": 2.1953105238918432e-06, + "loss": 1.1267, + "step": 3494 + }, + { + "epoch": 2.3409243134628266, + "grad_norm": 2.245311362658278, + "learning_rate": 2.1938599930402966e-06, + "loss": 1.1691, + "step": 3495 + }, + { + "epoch": 2.3415941058271934, + "grad_norm": 2.127573683352282, + "learning_rate": 2.1924095668111058e-06, + "loss": 1.0888, + "step": 3496 + }, + { + "epoch": 2.3422638981915607, + "grad_norm": 1.9959578553524966, + "learning_rate": 2.1909592456999505e-06, + "loss": 1.0368, + "step": 3497 + }, + { + "epoch": 2.3429336905559275, + "grad_norm": 1.967394076267625, + "learning_rate": 2.1895090302024717e-06, + "loss": 1.0831, + "step": 3498 + }, + { + "epoch": 2.3436034829202947, + "grad_norm": 2.1598090961192358, + "learning_rate": 2.188058920814277e-06, + "loss": 1.1863, + "step": 3499 + }, + { + "epoch": 2.3442732752846616, + "grad_norm": 3.379062500065968, + "learning_rate": 2.186608918030935e-06, + "loss": 1.1293, + "step": 3500 + }, + { + "epoch": 2.344943067649029, + "grad_norm": 2.7038551677111795, + "learning_rate": 2.1851590223479815e-06, + "loss": 1.0089, + "step": 3501 + }, + { + "epoch": 2.3456128600133956, + "grad_norm": 2.2930928594794686, + "learning_rate": 2.1837092342609134e-06, + "loss": 1.1172, + "step": 3502 + }, + { + "epoch": 2.346282652377763, + "grad_norm": 2.7974482751130374, + "learning_rate": 2.18225955426519e-06, + "loss": 0.9783, + "step": 3503 + }, + { + "epoch": 2.34695244474213, + "grad_norm": 2.344217510206406, + "learning_rate": 2.180809982856234e-06, + "loss": 1.0095, + "step": 3504 + }, + { + "epoch": 2.347622237106497, + "grad_norm": 2.0438129648782377, + "learning_rate": 2.179360520529434e-06, + "loss": 1.0344, + "step": 3505 + }, + { + "epoch": 2.348292029470864, + "grad_norm": 2.503661846240352, + "learning_rate": 2.177911167780139e-06, + "loss": 1.1222, + "step": 3506 + }, + { + "epoch": 2.348961821835231, + "grad_norm": 2.215341308030822, + "learning_rate": 2.1764619251036583e-06, + "loss": 1.0078, + "step": 3507 + }, + { + "epoch": 2.3496316141995983, + "grad_norm": 2.0241381831032434, + "learning_rate": 2.175012792995267e-06, + "loss": 0.8153, + "step": 3508 + }, + { + "epoch": 2.350301406563965, + "grad_norm": 1.8298172422620236, + "learning_rate": 2.1735637719502018e-06, + "loss": 0.9783, + "step": 3509 + }, + { + "epoch": 2.3509711989283324, + "grad_norm": 2.8090097873829305, + "learning_rate": 2.172114862463661e-06, + "loss": 1.1016, + "step": 3510 + }, + { + "epoch": 2.351640991292699, + "grad_norm": 2.410234488694755, + "learning_rate": 2.1706660650308043e-06, + "loss": 1.0593, + "step": 3511 + }, + { + "epoch": 2.3523107836570665, + "grad_norm": 2.0054986313446075, + "learning_rate": 2.169217380146753e-06, + "loss": 1.0592, + "step": 3512 + }, + { + "epoch": 2.3529805760214333, + "grad_norm": 2.531287902390262, + "learning_rate": 2.1677688083065918e-06, + "loss": 1.0517, + "step": 3513 + }, + { + "epoch": 2.3536503683858006, + "grad_norm": 2.2245832178992457, + "learning_rate": 2.166320350005365e-06, + "loss": 1.0687, + "step": 3514 + 
}, + { + "epoch": 2.3543201607501674, + "grad_norm": 2.81201691041167, + "learning_rate": 2.1648720057380776e-06, + "loss": 0.8966, + "step": 3515 + }, + { + "epoch": 2.3549899531145346, + "grad_norm": 2.7638870317496473, + "learning_rate": 2.163423775999697e-06, + "loss": 1.0612, + "step": 3516 + }, + { + "epoch": 2.3556597454789014, + "grad_norm": 2.285758671149403, + "learning_rate": 2.161975661285151e-06, + "loss": 1.0412, + "step": 3517 + }, + { + "epoch": 2.3563295378432687, + "grad_norm": 2.279652353495909, + "learning_rate": 2.16052766208933e-06, + "loss": 0.9426, + "step": 3518 + }, + { + "epoch": 2.3569993302076355, + "grad_norm": 3.656186984988689, + "learning_rate": 2.1590797789070795e-06, + "loss": 1.0515, + "step": 3519 + }, + { + "epoch": 2.357669122572003, + "grad_norm": 1.7510941785974017, + "learning_rate": 2.157632012233212e-06, + "loss": 0.9239, + "step": 3520 + }, + { + "epoch": 2.3583389149363696, + "grad_norm": 2.945309725816186, + "learning_rate": 2.156184362562495e-06, + "loss": 1.0727, + "step": 3521 + }, + { + "epoch": 2.359008707300737, + "grad_norm": 2.4018752932162304, + "learning_rate": 2.15473683038966e-06, + "loss": 1.0991, + "step": 3522 + }, + { + "epoch": 2.3596784996651037, + "grad_norm": 2.07171377897086, + "learning_rate": 2.153289416209394e-06, + "loss": 1.064, + "step": 3523 + }, + { + "epoch": 2.360348292029471, + "grad_norm": 2.7410326063957333, + "learning_rate": 2.1518421205163485e-06, + "loss": 1.2448, + "step": 3524 + }, + { + "epoch": 2.3610180843938378, + "grad_norm": 2.0377002772959925, + "learning_rate": 2.1503949438051304e-06, + "loss": 1.0958, + "step": 3525 + }, + { + "epoch": 2.361687876758205, + "grad_norm": 2.085410533706084, + "learning_rate": 2.1489478865703096e-06, + "loss": 1.1037, + "step": 3526 + }, + { + "epoch": 2.362357669122572, + "grad_norm": 2.8192746060646177, + "learning_rate": 2.1475009493064105e-06, + "loss": 1.0229, + "step": 3527 + }, + { + "epoch": 2.363027461486939, + "grad_norm": 2.6116836252020375, + "learning_rate": 2.1460541325079213e-06, + "loss": 1.2289, + "step": 3528 + }, + { + "epoch": 2.363697253851306, + "grad_norm": 2.45730811852349, + "learning_rate": 2.1446074366692865e-06, + "loss": 1.136, + "step": 3529 + }, + { + "epoch": 2.364367046215673, + "grad_norm": 2.231836493238112, + "learning_rate": 2.14316086228491e-06, + "loss": 0.9052, + "step": 3530 + }, + { + "epoch": 2.36503683858004, + "grad_norm": 3.0241343609314995, + "learning_rate": 2.1417144098491525e-06, + "loss": 1.0837, + "step": 3531 + }, + { + "epoch": 2.3657066309444073, + "grad_norm": 2.2781815186664467, + "learning_rate": 2.140268079856336e-06, + "loss": 0.9646, + "step": 3532 + }, + { + "epoch": 2.3663764233087745, + "grad_norm": 2.5162244298248186, + "learning_rate": 2.138821872800738e-06, + "loss": 0.9628, + "step": 3533 + }, + { + "epoch": 2.3670462156731413, + "grad_norm": 2.1839665064132734, + "learning_rate": 2.137375789176597e-06, + "loss": 1.0691, + "step": 3534 + }, + { + "epoch": 2.367716008037508, + "grad_norm": 2.2680447860749813, + "learning_rate": 2.1359298294781046e-06, + "loss": 1.0082, + "step": 3535 + }, + { + "epoch": 2.3683858004018754, + "grad_norm": 2.192933639037605, + "learning_rate": 2.1344839941994142e-06, + "loss": 0.9398, + "step": 3536 + }, + { + "epoch": 2.3690555927662427, + "grad_norm": 2.6586573378528966, + "learning_rate": 2.1330382838346356e-06, + "loss": 0.9569, + "step": 3537 + }, + { + "epoch": 2.3697253851306095, + "grad_norm": 2.264716204967781, + "learning_rate": 2.1315926988778358e-06, + 
"loss": 1.1165, + "step": 3538 + }, + { + "epoch": 2.3703951774949767, + "grad_norm": 2.641354707926192, + "learning_rate": 2.1301472398230387e-06, + "loss": 0.8531, + "step": 3539 + }, + { + "epoch": 2.3710649698593436, + "grad_norm": 3.8165556810819874, + "learning_rate": 2.1287019071642247e-06, + "loss": 0.9644, + "step": 3540 + }, + { + "epoch": 2.371734762223711, + "grad_norm": 3.4318868744773683, + "learning_rate": 2.127256701395331e-06, + "loss": 1.0652, + "step": 3541 + }, + { + "epoch": 2.3724045545880776, + "grad_norm": 3.0143177105574033, + "learning_rate": 2.1258116230102533e-06, + "loss": 1.1017, + "step": 3542 + }, + { + "epoch": 2.373074346952445, + "grad_norm": 4.524844122980754, + "learning_rate": 2.124366672502842e-06, + "loss": 0.9788, + "step": 3543 + }, + { + "epoch": 2.3737441393168117, + "grad_norm": 2.797025868087265, + "learning_rate": 2.1229218503669045e-06, + "loss": 1.0737, + "step": 3544 + }, + { + "epoch": 2.374413931681179, + "grad_norm": 2.1199719439712275, + "learning_rate": 2.121477157096203e-06, + "loss": 1.0586, + "step": 3545 + }, + { + "epoch": 2.375083724045546, + "grad_norm": 2.0463170169305704, + "learning_rate": 2.120032593184458e-06, + "loss": 1.0088, + "step": 3546 + }, + { + "epoch": 2.375753516409913, + "grad_norm": 1.897688404085759, + "learning_rate": 2.118588159125345e-06, + "loss": 1.1067, + "step": 3547 + }, + { + "epoch": 2.37642330877428, + "grad_norm": 2.190422917546267, + "learning_rate": 2.1171438554124927e-06, + "loss": 0.9603, + "step": 3548 + }, + { + "epoch": 2.377093101138647, + "grad_norm": 2.1774557411142004, + "learning_rate": 2.115699682539488e-06, + "loss": 0.923, + "step": 3549 + }, + { + "epoch": 2.377762893503014, + "grad_norm": 3.0392307575642983, + "learning_rate": 2.1142556409998727e-06, + "loss": 1.0906, + "step": 3550 + }, + { + "epoch": 2.378432685867381, + "grad_norm": 7.037105419138557, + "learning_rate": 2.112811731287144e-06, + "loss": 0.9493, + "step": 3551 + }, + { + "epoch": 2.379102478231748, + "grad_norm": 2.263892958912265, + "learning_rate": 2.111367953894752e-06, + "loss": 1.0925, + "step": 3552 + }, + { + "epoch": 2.3797722705961153, + "grad_norm": 2.537137562056707, + "learning_rate": 2.109924309316103e-06, + "loss": 0.9278, + "step": 3553 + }, + { + "epoch": 2.380442062960482, + "grad_norm": 1.8844827286853107, + "learning_rate": 2.108480798044559e-06, + "loss": 1.042, + "step": 3554 + }, + { + "epoch": 2.3811118553248494, + "grad_norm": 1.8958791219055238, + "learning_rate": 2.1070374205734346e-06, + "loss": 1.1192, + "step": 3555 + }, + { + "epoch": 2.381781647689216, + "grad_norm": 1.9290758920690554, + "learning_rate": 2.105594177396e-06, + "loss": 0.9531, + "step": 3556 + }, + { + "epoch": 2.3824514400535834, + "grad_norm": 2.342739992078838, + "learning_rate": 2.104151069005477e-06, + "loss": 1.1056, + "step": 3557 + }, + { + "epoch": 2.3831212324179503, + "grad_norm": 2.274212465538981, + "learning_rate": 2.1027080958950454e-06, + "loss": 0.9886, + "step": 3558 + }, + { + "epoch": 2.3837910247823175, + "grad_norm": 4.969479747349757, + "learning_rate": 2.1012652585578356e-06, + "loss": 0.9717, + "step": 3559 + }, + { + "epoch": 2.3844608171466843, + "grad_norm": 2.6846304680062385, + "learning_rate": 2.099822557486933e-06, + "loss": 1.1602, + "step": 3560 + }, + { + "epoch": 2.3851306095110516, + "grad_norm": 2.8760471994375947, + "learning_rate": 2.0983799931753746e-06, + "loss": 1.1883, + "step": 3561 + }, + { + "epoch": 2.385800401875419, + "grad_norm": 2.318404060998357, + "learning_rate": 
2.0969375661161544e-06, + "loss": 0.9266, + "step": 3562 + }, + { + "epoch": 2.3864701942397857, + "grad_norm": 2.2149761920007043, + "learning_rate": 2.095495276802216e-06, + "loss": 0.9161, + "step": 3563 + }, + { + "epoch": 2.3871399866041525, + "grad_norm": 2.040157426719743, + "learning_rate": 2.0940531257264562e-06, + "loss": 1.0215, + "step": 3564 + }, + { + "epoch": 2.3878097789685198, + "grad_norm": 2.524802522117938, + "learning_rate": 2.092611113381727e-06, + "loss": 1.0107, + "step": 3565 + }, + { + "epoch": 2.388479571332887, + "grad_norm": 2.0739010416076464, + "learning_rate": 2.091169240260831e-06, + "loss": 1.0797, + "step": 3566 + }, + { + "epoch": 2.389149363697254, + "grad_norm": 2.757039298462352, + "learning_rate": 2.0897275068565237e-06, + "loss": 0.7348, + "step": 3567 + }, + { + "epoch": 2.3898191560616207, + "grad_norm": 2.2379511774914973, + "learning_rate": 2.0882859136615116e-06, + "loss": 1.118, + "step": 3568 + }, + { + "epoch": 2.390488948425988, + "grad_norm": 2.4623194165612956, + "learning_rate": 2.086844461168456e-06, + "loss": 1.1103, + "step": 3569 + }, + { + "epoch": 2.391158740790355, + "grad_norm": 4.381908396774005, + "learning_rate": 2.0854031498699683e-06, + "loss": 0.9503, + "step": 3570 + }, + { + "epoch": 2.391828533154722, + "grad_norm": 2.192659048449183, + "learning_rate": 2.0839619802586123e-06, + "loss": 0.8555, + "step": 3571 + }, + { + "epoch": 2.3924983255190893, + "grad_norm": 2.5371591711227675, + "learning_rate": 2.0825209528269013e-06, + "loss": 1.1966, + "step": 3572 + }, + { + "epoch": 2.393168117883456, + "grad_norm": 2.9416244361816366, + "learning_rate": 2.081080068067304e-06, + "loss": 1.0439, + "step": 3573 + }, + { + "epoch": 2.3938379102478233, + "grad_norm": 1.9166164172750655, + "learning_rate": 2.0796393264722366e-06, + "loss": 0.9047, + "step": 3574 + }, + { + "epoch": 2.39450770261219, + "grad_norm": 1.9276295641040937, + "learning_rate": 2.078198728534068e-06, + "loss": 1.0988, + "step": 3575 + }, + { + "epoch": 2.3951774949765574, + "grad_norm": 2.729998186844243, + "learning_rate": 2.07675827474512e-06, + "loss": 1.0135, + "step": 3576 + }, + { + "epoch": 2.3958472873409242, + "grad_norm": 2.4567726540314543, + "learning_rate": 2.0753179655976598e-06, + "loss": 1.0387, + "step": 3577 + }, + { + "epoch": 2.3965170797052915, + "grad_norm": 1.9967937467463523, + "learning_rate": 2.07387780158391e-06, + "loss": 0.9966, + "step": 3578 + }, + { + "epoch": 2.3971868720696583, + "grad_norm": 2.1086042834323666, + "learning_rate": 2.072437783196042e-06, + "loss": 1.0411, + "step": 3579 + }, + { + "epoch": 2.3978566644340256, + "grad_norm": 1.8993010225302291, + "learning_rate": 2.070997910926178e-06, + "loss": 0.9349, + "step": 3580 + }, + { + "epoch": 2.3985264567983924, + "grad_norm": 2.118963288148819, + "learning_rate": 2.0695581852663884e-06, + "loss": 1.0211, + "step": 3581 + }, + { + "epoch": 2.3991962491627596, + "grad_norm": 2.2733960166291127, + "learning_rate": 2.068118606708695e-06, + "loss": 1.0673, + "step": 3582 + }, + { + "epoch": 2.3998660415271265, + "grad_norm": 2.440974285457788, + "learning_rate": 2.06667917574507e-06, + "loss": 1.093, + "step": 3583 + }, + { + "epoch": 2.4005358338914937, + "grad_norm": 2.03636363190238, + "learning_rate": 2.065239892867434e-06, + "loss": 1.1032, + "step": 3584 + }, + { + "epoch": 2.4012056262558605, + "grad_norm": 2.8128339231079695, + "learning_rate": 2.0638007585676565e-06, + "loss": 1.0764, + "step": 3585 + }, + { + "epoch": 2.401875418620228, + "grad_norm": 
3.010492233331094, + "learning_rate": 2.062361773337557e-06, + "loss": 0.9291, + "step": 3586 + }, + { + "epoch": 2.4025452109845946, + "grad_norm": 2.278853713319876, + "learning_rate": 2.0609229376689054e-06, + "loss": 1.0445, + "step": 3587 + }, + { + "epoch": 2.403215003348962, + "grad_norm": 2.685185691434871, + "learning_rate": 2.0594842520534185e-06, + "loss": 1.0884, + "step": 3588 + }, + { + "epoch": 2.4038847957133287, + "grad_norm": 2.807374278689218, + "learning_rate": 2.0580457169827618e-06, + "loss": 1.1721, + "step": 3589 + }, + { + "epoch": 2.404554588077696, + "grad_norm": 2.417331824816086, + "learning_rate": 2.05660733294855e-06, + "loss": 1.057, + "step": 3590 + }, + { + "epoch": 2.4052243804420628, + "grad_norm": 2.0247777967732055, + "learning_rate": 2.0551691004423473e-06, + "loss": 1.0028, + "step": 3591 + }, + { + "epoch": 2.40589417280643, + "grad_norm": 2.1682500790191592, + "learning_rate": 2.0537310199556644e-06, + "loss": 1.1546, + "step": 3592 + }, + { + "epoch": 2.406563965170797, + "grad_norm": 2.2054839574060843, + "learning_rate": 2.0522930919799602e-06, + "loss": 1.0473, + "step": 3593 + }, + { + "epoch": 2.407233757535164, + "grad_norm": 2.3197996171346666, + "learning_rate": 2.0508553170066417e-06, + "loss": 0.9668, + "step": 3594 + }, + { + "epoch": 2.4079035498995314, + "grad_norm": 2.977449446859137, + "learning_rate": 2.049417695527065e-06, + "loss": 0.893, + "step": 3595 + }, + { + "epoch": 2.408573342263898, + "grad_norm": 2.359421575583374, + "learning_rate": 2.047980228032533e-06, + "loss": 0.9229, + "step": 3596 + }, + { + "epoch": 2.409243134628265, + "grad_norm": 2.0089726848446254, + "learning_rate": 2.0465429150142934e-06, + "loss": 1.0902, + "step": 3597 + }, + { + "epoch": 2.4099129269926323, + "grad_norm": 2.332414898569824, + "learning_rate": 2.0451057569635443e-06, + "loss": 1.1927, + "step": 3598 + }, + { + "epoch": 2.4105827193569995, + "grad_norm": 2.1448393922138687, + "learning_rate": 2.0436687543714303e-06, + "loss": 0.8692, + "step": 3599 + }, + { + "epoch": 2.4112525117213663, + "grad_norm": 2.4189935532745217, + "learning_rate": 2.0422319077290424e-06, + "loss": 1.1539, + "step": 3600 + }, + { + "epoch": 2.4119223040857336, + "grad_norm": 2.1116350465852807, + "learning_rate": 2.0407952175274177e-06, + "loss": 0.8706, + "step": 3601 + }, + { + "epoch": 2.4125920964501004, + "grad_norm": 2.0848848271241076, + "learning_rate": 2.0393586842575403e-06, + "loss": 0.89, + "step": 3602 + }, + { + "epoch": 2.4132618888144677, + "grad_norm": 2.1877987140193866, + "learning_rate": 2.0379223084103413e-06, + "loss": 1.1433, + "step": 3603 + }, + { + "epoch": 2.4139316811788345, + "grad_norm": 2.188410486829667, + "learning_rate": 2.036486090476698e-06, + "loss": 1.1071, + "step": 3604 + }, + { + "epoch": 2.4146014735432018, + "grad_norm": 2.276971540710018, + "learning_rate": 2.0350500309474326e-06, + "loss": 1.0741, + "step": 3605 + }, + { + "epoch": 2.4152712659075686, + "grad_norm": 2.4292475019829753, + "learning_rate": 2.0336141303133125e-06, + "loss": 1.1704, + "step": 3606 + }, + { + "epoch": 2.415941058271936, + "grad_norm": 2.8626081448306873, + "learning_rate": 2.0321783890650543e-06, + "loss": 0.9954, + "step": 3607 + }, + { + "epoch": 2.4166108506363027, + "grad_norm": 2.118150822134771, + "learning_rate": 2.0307428076933162e-06, + "loss": 1.0978, + "step": 3608 + }, + { + "epoch": 2.41728064300067, + "grad_norm": 2.02064717207695, + "learning_rate": 2.0293073866887057e-06, + "loss": 1.0996, + "step": 3609 + }, + { + "epoch": 
2.4179504353650367, + "grad_norm": 2.030871510572248, + "learning_rate": 2.027872126541771e-06, + "loss": 1.0957, + "step": 3610 + }, + { + "epoch": 2.418620227729404, + "grad_norm": 2.20577638656127, + "learning_rate": 2.0264370277430083e-06, + "loss": 0.9277, + "step": 3611 + }, + { + "epoch": 2.419290020093771, + "grad_norm": 2.3777302524103088, + "learning_rate": 2.025002090782858e-06, + "loss": 1.1386, + "step": 3612 + }, + { + "epoch": 2.419959812458138, + "grad_norm": 2.246374719027557, + "learning_rate": 2.0235673161517066e-06, + "loss": 1.1973, + "step": 3613 + }, + { + "epoch": 2.420629604822505, + "grad_norm": 2.1560142210986895, + "learning_rate": 2.0221327043398814e-06, + "loss": 1.0925, + "step": 3614 + }, + { + "epoch": 2.421299397186872, + "grad_norm": 2.8204080826241653, + "learning_rate": 2.0206982558376573e-06, + "loss": 1.0618, + "step": 3615 + }, + { + "epoch": 2.421969189551239, + "grad_norm": 2.20192823110747, + "learning_rate": 2.0192639711352523e-06, + "loss": 0.8815, + "step": 3616 + }, + { + "epoch": 2.4226389819156062, + "grad_norm": 2.3008890361390346, + "learning_rate": 2.0178298507228296e-06, + "loss": 1.0902, + "step": 3617 + }, + { + "epoch": 2.423308774279973, + "grad_norm": 2.2386450025333393, + "learning_rate": 2.016395895090493e-06, + "loss": 0.9945, + "step": 3618 + }, + { + "epoch": 2.4239785666443403, + "grad_norm": 2.6416595432453134, + "learning_rate": 2.0149621047282945e-06, + "loss": 1.0006, + "step": 3619 + }, + { + "epoch": 2.424648359008707, + "grad_norm": 2.2721668752925623, + "learning_rate": 2.0135284801262255e-06, + "loss": 0.973, + "step": 3620 + }, + { + "epoch": 2.4253181513730744, + "grad_norm": 3.233755909698846, + "learning_rate": 2.0120950217742248e-06, + "loss": 0.9225, + "step": 3621 + }, + { + "epoch": 2.425987943737441, + "grad_norm": 3.5163344741071563, + "learning_rate": 2.01066173016217e-06, + "loss": 0.8831, + "step": 3622 + }, + { + "epoch": 2.4266577361018085, + "grad_norm": 2.0216185064124645, + "learning_rate": 2.0092286057798836e-06, + "loss": 0.9341, + "step": 3623 + }, + { + "epoch": 2.4273275284661757, + "grad_norm": 1.9858377566034364, + "learning_rate": 2.007795649117133e-06, + "loss": 0.958, + "step": 3624 + }, + { + "epoch": 2.4279973208305425, + "grad_norm": 2.5309698245011103, + "learning_rate": 2.0063628606636265e-06, + "loss": 0.9157, + "step": 3625 + }, + { + "epoch": 2.4286671131949094, + "grad_norm": 2.430354429459674, + "learning_rate": 2.0049302409090134e-06, + "loss": 0.9543, + "step": 3626 + }, + { + "epoch": 2.4293369055592766, + "grad_norm": 2.164842441901636, + "learning_rate": 2.003497790342887e-06, + "loss": 0.9398, + "step": 3627 + }, + { + "epoch": 2.430006697923644, + "grad_norm": 2.3463262109645604, + "learning_rate": 2.0020655094547843e-06, + "loss": 1.0676, + "step": 3628 + }, + { + "epoch": 2.4306764902880107, + "grad_norm": 4.355667096799756, + "learning_rate": 2.0006333987341817e-06, + "loss": 1.183, + "step": 3629 + }, + { + "epoch": 2.431346282652378, + "grad_norm": 2.6833350483662017, + "learning_rate": 1.9992014586704976e-06, + "loss": 0.9429, + "step": 3630 + }, + { + "epoch": 2.4320160750167448, + "grad_norm": 2.2552192157612687, + "learning_rate": 1.9977696897530934e-06, + "loss": 1.1783, + "step": 3631 + }, + { + "epoch": 2.432685867381112, + "grad_norm": 2.6048164330207775, + "learning_rate": 1.996338092471272e-06, + "loss": 1.046, + "step": 3632 + }, + { + "epoch": 2.433355659745479, + "grad_norm": 1.8771280885986532, + "learning_rate": 1.994906667314278e-06, + "loss": 0.9373, + 
"step": 3633 + }, + { + "epoch": 2.434025452109846, + "grad_norm": 1.9803038395465666, + "learning_rate": 1.993475414771294e-06, + "loss": 0.8661, + "step": 3634 + }, + { + "epoch": 2.434695244474213, + "grad_norm": 2.10614597526592, + "learning_rate": 1.9920443353314463e-06, + "loss": 0.9368, + "step": 3635 + }, + { + "epoch": 2.43536503683858, + "grad_norm": 2.144052446414599, + "learning_rate": 1.9906134294838036e-06, + "loss": 0.9515, + "step": 3636 + }, + { + "epoch": 2.436034829202947, + "grad_norm": 2.1384587694647093, + "learning_rate": 1.989182697717372e-06, + "loss": 1.0272, + "step": 3637 + }, + { + "epoch": 2.4367046215673143, + "grad_norm": 2.143062508069492, + "learning_rate": 1.9877521405210996e-06, + "loss": 1.1141, + "step": 3638 + }, + { + "epoch": 2.437374413931681, + "grad_norm": 2.341281361832891, + "learning_rate": 1.9863217583838735e-06, + "loss": 0.937, + "step": 3639 + }, + { + "epoch": 2.4380442062960483, + "grad_norm": 2.264013641159891, + "learning_rate": 1.9848915517945247e-06, + "loss": 1.0739, + "step": 3640 + }, + { + "epoch": 2.438713998660415, + "grad_norm": 2.1270427757536923, + "learning_rate": 1.9834615212418205e-06, + "loss": 0.9806, + "step": 3641 + }, + { + "epoch": 2.4393837910247824, + "grad_norm": 2.017188356854731, + "learning_rate": 1.9820316672144684e-06, + "loss": 0.8907, + "step": 3642 + }, + { + "epoch": 2.4400535833891492, + "grad_norm": 2.0139043093382925, + "learning_rate": 1.980601990201117e-06, + "loss": 1.0119, + "step": 3643 + }, + { + "epoch": 2.4407233757535165, + "grad_norm": 2.225634995921476, + "learning_rate": 1.979172490690354e-06, + "loss": 0.9599, + "step": 3644 + }, + { + "epoch": 2.4413931681178833, + "grad_norm": 2.2081602052643023, + "learning_rate": 1.9777431691707054e-06, + "loss": 1.0267, + "step": 3645 + }, + { + "epoch": 2.4420629604822506, + "grad_norm": 2.153487448299082, + "learning_rate": 1.9763140261306395e-06, + "loss": 1.0682, + "step": 3646 + }, + { + "epoch": 2.4427327528466174, + "grad_norm": 2.5459985776359293, + "learning_rate": 1.974885062058558e-06, + "loss": 1.1785, + "step": 3647 + }, + { + "epoch": 2.4434025452109847, + "grad_norm": 2.5842427488176956, + "learning_rate": 1.973456277442807e-06, + "loss": 1.0902, + "step": 3648 + }, + { + "epoch": 2.4440723375753515, + "grad_norm": 2.2724328435657375, + "learning_rate": 1.9720276727716677e-06, + "loss": 1.0095, + "step": 3649 + }, + { + "epoch": 2.4447421299397187, + "grad_norm": 2.630403037171045, + "learning_rate": 1.970599248533364e-06, + "loss": 0.995, + "step": 3650 + }, + { + "epoch": 2.4454119223040856, + "grad_norm": 2.1248774749754515, + "learning_rate": 1.969171005216051e-06, + "loss": 0.9516, + "step": 3651 + }, + { + "epoch": 2.446081714668453, + "grad_norm": 2.083548692628587, + "learning_rate": 1.967742943307829e-06, + "loss": 1.0576, + "step": 3652 + }, + { + "epoch": 2.44675150703282, + "grad_norm": 1.9884309581296984, + "learning_rate": 1.9663150632967324e-06, + "loss": 0.9701, + "step": 3653 + }, + { + "epoch": 2.447421299397187, + "grad_norm": 2.444653229080442, + "learning_rate": 1.964887365670736e-06, + "loss": 0.9464, + "step": 3654 + }, + { + "epoch": 2.4480910917615537, + "grad_norm": 2.3417298677678207, + "learning_rate": 1.9634598509177494e-06, + "loss": 1.0176, + "step": 3655 + }, + { + "epoch": 2.448760884125921, + "grad_norm": 3.088987706580126, + "learning_rate": 1.9620325195256216e-06, + "loss": 1.1308, + "step": 3656 + }, + { + "epoch": 2.4494306764902882, + "grad_norm": 2.455547925013959, + "learning_rate": 
1.960605371982138e-06, + "loss": 0.9349, + "step": 3657 + }, + { + "epoch": 2.450100468854655, + "grad_norm": 3.3147212832525716, + "learning_rate": 1.959178408775023e-06, + "loss": 1.2036, + "step": 3658 + }, + { + "epoch": 2.450770261219022, + "grad_norm": 2.0995824985624902, + "learning_rate": 1.9577516303919355e-06, + "loss": 0.9869, + "step": 3659 + }, + { + "epoch": 2.451440053583389, + "grad_norm": 2.034766232614628, + "learning_rate": 1.9563250373204727e-06, + "loss": 0.9668, + "step": 3660 + }, + { + "epoch": 2.4521098459477564, + "grad_norm": 2.1965264519856147, + "learning_rate": 1.9548986300481674e-06, + "loss": 1.0625, + "step": 3661 + }, + { + "epoch": 2.452779638312123, + "grad_norm": 3.2475529449312197, + "learning_rate": 1.953472409062492e-06, + "loss": 1.1014, + "step": 3662 + }, + { + "epoch": 2.4534494306764905, + "grad_norm": 2.357728929822871, + "learning_rate": 1.9520463748508505e-06, + "loss": 1.1027, + "step": 3663 + }, + { + "epoch": 2.4541192230408573, + "grad_norm": 2.639560558011007, + "learning_rate": 1.9506205279005862e-06, + "loss": 0.9795, + "step": 3664 + }, + { + "epoch": 2.4547890154052245, + "grad_norm": 2.1134058598763565, + "learning_rate": 1.9491948686989774e-06, + "loss": 1.0955, + "step": 3665 + }, + { + "epoch": 2.4554588077695914, + "grad_norm": 9.877871118276529, + "learning_rate": 1.94776939773324e-06, + "loss": 0.9327, + "step": 3666 + }, + { + "epoch": 2.4561286001339586, + "grad_norm": 2.955781230807848, + "learning_rate": 1.9463441154905222e-06, + "loss": 1.0247, + "step": 3667 + }, + { + "epoch": 2.4567983924983254, + "grad_norm": 2.583295409257951, + "learning_rate": 1.9449190224579102e-06, + "loss": 1.0279, + "step": 3668 + }, + { + "epoch": 2.4574681848626927, + "grad_norm": 2.3603859601796233, + "learning_rate": 1.9434941191224255e-06, + "loss": 0.9211, + "step": 3669 + }, + { + "epoch": 2.4581379772270595, + "grad_norm": 2.2732194209735104, + "learning_rate": 1.9420694059710245e-06, + "loss": 0.8869, + "step": 3670 + }, + { + "epoch": 2.4588077695914268, + "grad_norm": 2.1167690977111864, + "learning_rate": 1.940644883490597e-06, + "loss": 1.1699, + "step": 3671 + }, + { + "epoch": 2.4594775619557936, + "grad_norm": 2.271009482734072, + "learning_rate": 1.9392205521679688e-06, + "loss": 1.0623, + "step": 3672 + }, + { + "epoch": 2.460147354320161, + "grad_norm": 2.4074645650425723, + "learning_rate": 1.9377964124899023e-06, + "loss": 0.9886, + "step": 3673 + }, + { + "epoch": 2.4608171466845277, + "grad_norm": 2.2612899620922957, + "learning_rate": 1.936372464943092e-06, + "loss": 1.027, + "step": 3674 + }, + { + "epoch": 2.461486939048895, + "grad_norm": 2.2397164584371168, + "learning_rate": 1.934948710014166e-06, + "loss": 1.0417, + "step": 3675 + }, + { + "epoch": 2.4621567314132617, + "grad_norm": 2.710349067430339, + "learning_rate": 1.9335251481896887e-06, + "loss": 0.9973, + "step": 3676 + }, + { + "epoch": 2.462826523777629, + "grad_norm": 2.47991024845447, + "learning_rate": 1.932101779956158e-06, + "loss": 0.9235, + "step": 3677 + }, + { + "epoch": 2.463496316141996, + "grad_norm": 1.8190669963465467, + "learning_rate": 1.9306786058000067e-06, + "loss": 0.9089, + "step": 3678 + }, + { + "epoch": 2.464166108506363, + "grad_norm": 2.9708223664005002, + "learning_rate": 1.9292556262075977e-06, + "loss": 1.0483, + "step": 3679 + }, + { + "epoch": 2.46483590087073, + "grad_norm": 2.4239502048829653, + "learning_rate": 1.9278328416652305e-06, + "loss": 1.0429, + "step": 3680 + }, + { + "epoch": 2.465505693235097, + "grad_norm": 
2.480048057024108, + "learning_rate": 1.9264102526591367e-06, + "loss": 0.9524, + "step": 3681 + }, + { + "epoch": 2.466175485599464, + "grad_norm": 4.7815604582242965, + "learning_rate": 1.9249878596754824e-06, + "loss": 0.9289, + "step": 3682 + }, + { + "epoch": 2.4668452779638312, + "grad_norm": 2.7876651800469734, + "learning_rate": 1.9235656632003662e-06, + "loss": 1.0071, + "step": 3683 + }, + { + "epoch": 2.467515070328198, + "grad_norm": 2.2965307343696213, + "learning_rate": 1.9221436637198174e-06, + "loss": 1.0437, + "step": 3684 + }, + { + "epoch": 2.4681848626925653, + "grad_norm": 2.6444851334206736, + "learning_rate": 1.9207218617198006e-06, + "loss": 1.1065, + "step": 3685 + }, + { + "epoch": 2.4688546550569326, + "grad_norm": 2.1165381074140117, + "learning_rate": 1.919300257686212e-06, + "loss": 0.9611, + "step": 3686 + }, + { + "epoch": 2.4695244474212994, + "grad_norm": 2.449944090264969, + "learning_rate": 1.917878852104881e-06, + "loss": 1.0476, + "step": 3687 + }, + { + "epoch": 2.470194239785666, + "grad_norm": 2.4293597816777357, + "learning_rate": 1.916457645461567e-06, + "loss": 1.0666, + "step": 3688 + }, + { + "epoch": 2.4708640321500335, + "grad_norm": 2.2251033876479074, + "learning_rate": 1.9150366382419634e-06, + "loss": 1.0128, + "step": 3689 + }, + { + "epoch": 2.4715338245144007, + "grad_norm": 2.2346006930845106, + "learning_rate": 1.9136158309316945e-06, + "loss": 1.1521, + "step": 3690 + }, + { + "epoch": 2.4722036168787676, + "grad_norm": 2.491838023020171, + "learning_rate": 1.9121952240163182e-06, + "loss": 0.9764, + "step": 3691 + }, + { + "epoch": 2.472873409243135, + "grad_norm": 2.4463866438832205, + "learning_rate": 1.9107748179813194e-06, + "loss": 0.9869, + "step": 3692 + }, + { + "epoch": 2.4735432016075016, + "grad_norm": 2.052232833131281, + "learning_rate": 1.9093546133121197e-06, + "loss": 0.9719, + "step": 3693 + }, + { + "epoch": 2.474212993971869, + "grad_norm": 3.575521126963573, + "learning_rate": 1.9079346104940675e-06, + "loss": 1.0218, + "step": 3694 + }, + { + "epoch": 2.4748827863362357, + "grad_norm": 2.450235728328863, + "learning_rate": 1.906514810012447e-06, + "loss": 1.0554, + "step": 3695 + }, + { + "epoch": 2.475552578700603, + "grad_norm": 2.237875222586011, + "learning_rate": 1.9050952123524677e-06, + "loss": 1.0784, + "step": 3696 + }, + { + "epoch": 2.47622237106497, + "grad_norm": 2.2052799315510225, + "learning_rate": 1.9036758179992735e-06, + "loss": 1.1053, + "step": 3697 + }, + { + "epoch": 2.476892163429337, + "grad_norm": 2.07670406131727, + "learning_rate": 1.9022566274379373e-06, + "loss": 1.1169, + "step": 3698 + }, + { + "epoch": 2.477561955793704, + "grad_norm": 2.5293651699379893, + "learning_rate": 1.9008376411534645e-06, + "loss": 0.9193, + "step": 3699 + }, + { + "epoch": 2.478231748158071, + "grad_norm": 2.4623053442870875, + "learning_rate": 1.8994188596307874e-06, + "loss": 1.0936, + "step": 3700 + }, + { + "epoch": 2.478901540522438, + "grad_norm": 2.165072935080342, + "learning_rate": 1.8980002833547706e-06, + "loss": 0.9548, + "step": 3701 + }, + { + "epoch": 2.479571332886805, + "grad_norm": 2.004042431282537, + "learning_rate": 1.8965819128102076e-06, + "loss": 1.0507, + "step": 3702 + }, + { + "epoch": 2.480241125251172, + "grad_norm": 3.2062404335335377, + "learning_rate": 1.8951637484818227e-06, + "loss": 0.9383, + "step": 3703 + }, + { + "epoch": 2.4809109176155393, + "grad_norm": 2.411700142454752, + "learning_rate": 1.8937457908542684e-06, + "loss": 1.0, + "step": 3704 + }, + { + 
"epoch": 2.481580709979906, + "grad_norm": 2.3034240128430747, + "learning_rate": 1.8923280404121272e-06, + "loss": 1.0939, + "step": 3705 + }, + { + "epoch": 2.4822505023442734, + "grad_norm": 2.2143835425048968, + "learning_rate": 1.8909104976399101e-06, + "loss": 1.0008, + "step": 3706 + }, + { + "epoch": 2.48292029470864, + "grad_norm": 2.2737718223089716, + "learning_rate": 1.8894931630220599e-06, + "loss": 0.9889, + "step": 3707 + }, + { + "epoch": 2.4835900870730074, + "grad_norm": 2.391834627537677, + "learning_rate": 1.8880760370429435e-06, + "loss": 0.8579, + "step": 3708 + }, + { + "epoch": 2.4842598794373743, + "grad_norm": 2.257374185449709, + "learning_rate": 1.8866591201868606e-06, + "loss": 1.03, + "step": 3709 + }, + { + "epoch": 2.4849296718017415, + "grad_norm": 2.7064410088003883, + "learning_rate": 1.885242412938037e-06, + "loss": 1.0771, + "step": 3710 + }, + { + "epoch": 2.4855994641661083, + "grad_norm": 2.3768478300046745, + "learning_rate": 1.8838259157806293e-06, + "loss": 0.9517, + "step": 3711 + }, + { + "epoch": 2.4862692565304756, + "grad_norm": 2.190111171114654, + "learning_rate": 1.8824096291987195e-06, + "loss": 1.0571, + "step": 3712 + }, + { + "epoch": 2.4869390488948424, + "grad_norm": 2.311400645213739, + "learning_rate": 1.8809935536763188e-06, + "loss": 0.9039, + "step": 3713 + }, + { + "epoch": 2.4876088412592097, + "grad_norm": 2.7911193002641457, + "learning_rate": 1.8795776896973678e-06, + "loss": 1.078, + "step": 3714 + }, + { + "epoch": 2.488278633623577, + "grad_norm": 2.0635079272473806, + "learning_rate": 1.8781620377457322e-06, + "loss": 1.1454, + "step": 3715 + }, + { + "epoch": 2.4889484259879437, + "grad_norm": 2.3379472907562686, + "learning_rate": 1.8767465983052078e-06, + "loss": 1.0929, + "step": 3716 + }, + { + "epoch": 2.4896182183523106, + "grad_norm": 2.279470386891103, + "learning_rate": 1.8753313718595142e-06, + "loss": 1.0352, + "step": 3717 + }, + { + "epoch": 2.490288010716678, + "grad_norm": 2.1863062220233327, + "learning_rate": 1.8739163588923026e-06, + "loss": 1.0566, + "step": 3718 + }, + { + "epoch": 2.490957803081045, + "grad_norm": 2.2674930389283867, + "learning_rate": 1.8725015598871481e-06, + "loss": 1.1077, + "step": 3719 + }, + { + "epoch": 2.491627595445412, + "grad_norm": 2.4038316079743414, + "learning_rate": 1.8710869753275545e-06, + "loss": 1.0771, + "step": 3720 + }, + { + "epoch": 2.4922973878097787, + "grad_norm": 2.172604784698197, + "learning_rate": 1.86967260569695e-06, + "loss": 1.1366, + "step": 3721 + }, + { + "epoch": 2.492967180174146, + "grad_norm": 2.211419336076391, + "learning_rate": 1.8682584514786923e-06, + "loss": 1.0836, + "step": 3722 + }, + { + "epoch": 2.4936369725385132, + "grad_norm": 3.1775264848120712, + "learning_rate": 1.8668445131560631e-06, + "loss": 0.9012, + "step": 3723 + }, + { + "epoch": 2.49430676490288, + "grad_norm": 2.440234296762419, + "learning_rate": 1.8654307912122721e-06, + "loss": 1.0173, + "step": 3724 + }, + { + "epoch": 2.4949765572672473, + "grad_norm": 2.5012800046113655, + "learning_rate": 1.864017286130453e-06, + "loss": 1.1579, + "step": 3725 + }, + { + "epoch": 2.495646349631614, + "grad_norm": 2.3299865467535334, + "learning_rate": 1.8626039983936675e-06, + "loss": 0.6305, + "step": 3726 + }, + { + "epoch": 2.4963161419959814, + "grad_norm": 2.218897776435059, + "learning_rate": 1.8611909284849018e-06, + "loss": 0.9153, + "step": 3727 + }, + { + "epoch": 2.496985934360348, + "grad_norm": 2.914881604534748, + "learning_rate": 1.859778076887069e-06, + 
"loss": 1.0383, + "step": 3728 + }, + { + "epoch": 2.4976557267247155, + "grad_norm": 3.2066546487386134, + "learning_rate": 1.8583654440830042e-06, + "loss": 1.0972, + "step": 3729 + }, + { + "epoch": 2.4983255190890823, + "grad_norm": 2.458894974148377, + "learning_rate": 1.8569530305554722e-06, + "loss": 1.1325, + "step": 3730 + }, + { + "epoch": 2.4989953114534496, + "grad_norm": 2.9199382149028836, + "learning_rate": 1.8555408367871598e-06, + "loss": 1.0594, + "step": 3731 + }, + { + "epoch": 2.4996651038178164, + "grad_norm": 2.1500426980034573, + "learning_rate": 1.8541288632606807e-06, + "loss": 0.7133, + "step": 3732 + }, + { + "epoch": 2.5003348961821836, + "grad_norm": 3.6300020923497045, + "learning_rate": 1.8527171104585706e-06, + "loss": 0.885, + "step": 3733 + }, + { + "epoch": 2.5010046885465504, + "grad_norm": 2.1623399071048093, + "learning_rate": 1.8513055788632925e-06, + "loss": 1.0792, + "step": 3734 + }, + { + "epoch": 2.5016744809109177, + "grad_norm": 1.9892013158393975, + "learning_rate": 1.849894268957232e-06, + "loss": 0.9502, + "step": 3735 + }, + { + "epoch": 2.5023442732752845, + "grad_norm": 2.623884579141214, + "learning_rate": 1.8484831812227022e-06, + "loss": 1.0398, + "step": 3736 + }, + { + "epoch": 2.503014065639652, + "grad_norm": 3.2926272305333613, + "learning_rate": 1.8470723161419342e-06, + "loss": 0.7827, + "step": 3737 + }, + { + "epoch": 2.5036838580040186, + "grad_norm": 2.9601651959497013, + "learning_rate": 1.8456616741970883e-06, + "loss": 1.0322, + "step": 3738 + }, + { + "epoch": 2.504353650368386, + "grad_norm": 2.376790713431633, + "learning_rate": 1.8442512558702463e-06, + "loss": 1.0174, + "step": 3739 + }, + { + "epoch": 2.5050234427327527, + "grad_norm": 3.8860110225903766, + "learning_rate": 1.842841061643416e-06, + "loss": 0.9298, + "step": 3740 + }, + { + "epoch": 2.50569323509712, + "grad_norm": 2.633203382584112, + "learning_rate": 1.8414310919985242e-06, + "loss": 0.859, + "step": 3741 + }, + { + "epoch": 2.5063630274614868, + "grad_norm": 2.208289691328573, + "learning_rate": 1.8400213474174248e-06, + "loss": 0.9847, + "step": 3742 + }, + { + "epoch": 2.507032819825854, + "grad_norm": 2.8225799719205993, + "learning_rate": 1.8386118283818926e-06, + "loss": 0.879, + "step": 3743 + }, + { + "epoch": 2.5077026121902213, + "grad_norm": 2.3653595782789796, + "learning_rate": 1.8372025353736279e-06, + "loss": 1.0989, + "step": 3744 + }, + { + "epoch": 2.508372404554588, + "grad_norm": 2.09119929088143, + "learning_rate": 1.83579346887425e-06, + "loss": 0.9466, + "step": 3745 + }, + { + "epoch": 2.509042196918955, + "grad_norm": 1.8840136581232976, + "learning_rate": 1.8343846293653033e-06, + "loss": 0.9659, + "step": 3746 + }, + { + "epoch": 2.509711989283322, + "grad_norm": 2.6753255559003404, + "learning_rate": 1.8329760173282545e-06, + "loss": 1.0185, + "step": 3747 + }, + { + "epoch": 2.5103817816476894, + "grad_norm": 2.7868246163372974, + "learning_rate": 1.831567633244493e-06, + "loss": 0.9067, + "step": 3748 + }, + { + "epoch": 2.5110515740120563, + "grad_norm": 2.4908415533144725, + "learning_rate": 1.8301594775953286e-06, + "loss": 0.9901, + "step": 3749 + }, + { + "epoch": 2.511721366376423, + "grad_norm": 5.04239340795835, + "learning_rate": 1.8287515508619934e-06, + "loss": 0.7682, + "step": 3750 + }, + { + "epoch": 2.5123911587407903, + "grad_norm": 2.6082388777069596, + "learning_rate": 1.8273438535256426e-06, + "loss": 0.9815, + "step": 3751 + }, + { + "epoch": 2.5130609511051576, + "grad_norm": 2.692271279130895, + 
"learning_rate": 1.825936386067352e-06, + "loss": 1.12, + "step": 3752 + }, + { + "epoch": 2.5137307434695244, + "grad_norm": 2.836413068123142, + "learning_rate": 1.82452914896812e-06, + "loss": 0.9908, + "step": 3753 + }, + { + "epoch": 2.5144005358338912, + "grad_norm": 3.487694423340039, + "learning_rate": 1.8231221427088635e-06, + "loss": 0.878, + "step": 3754 + }, + { + "epoch": 2.5150703281982585, + "grad_norm": 2.3910062279939246, + "learning_rate": 1.821715367770423e-06, + "loss": 1.1002, + "step": 3755 + }, + { + "epoch": 2.5157401205626257, + "grad_norm": 2.7988402065026965, + "learning_rate": 1.8203088246335599e-06, + "loss": 1.1331, + "step": 3756 + }, + { + "epoch": 2.5164099129269926, + "grad_norm": 2.520557156655269, + "learning_rate": 1.8189025137789556e-06, + "loss": 1.0267, + "step": 3757 + }, + { + "epoch": 2.51707970529136, + "grad_norm": 2.655206705620487, + "learning_rate": 1.8174964356872116e-06, + "loss": 0.8794, + "step": 3758 + }, + { + "epoch": 2.5177494976557266, + "grad_norm": 2.404578370223322, + "learning_rate": 1.816090590838851e-06, + "loss": 1.0728, + "step": 3759 + }, + { + "epoch": 2.518419290020094, + "grad_norm": 2.240341096750367, + "learning_rate": 1.814684979714317e-06, + "loss": 1.0651, + "step": 3760 + }, + { + "epoch": 2.5190890823844607, + "grad_norm": 2.5292193053705603, + "learning_rate": 1.813279602793973e-06, + "loss": 1.0155, + "step": 3761 + }, + { + "epoch": 2.519758874748828, + "grad_norm": 2.62187928928308, + "learning_rate": 1.8118744605581001e-06, + "loss": 1.0984, + "step": 3762 + }, + { + "epoch": 2.520428667113195, + "grad_norm": 2.3832336919059816, + "learning_rate": 1.8104695534869032e-06, + "loss": 1.0075, + "step": 3763 + }, + { + "epoch": 2.521098459477562, + "grad_norm": 2.853118387922772, + "learning_rate": 1.8090648820605036e-06, + "loss": 0.9505, + "step": 3764 + }, + { + "epoch": 2.521768251841929, + "grad_norm": 1.924288985468908, + "learning_rate": 1.8076604467589442e-06, + "loss": 0.9976, + "step": 3765 + }, + { + "epoch": 2.522438044206296, + "grad_norm": 4.889170193274946, + "learning_rate": 1.8062562480621846e-06, + "loss": 1.003, + "step": 3766 + }, + { + "epoch": 2.523107836570663, + "grad_norm": 2.565736016520625, + "learning_rate": 1.8048522864501067e-06, + "loss": 0.757, + "step": 3767 + }, + { + "epoch": 2.52377762893503, + "grad_norm": 3.161749504326135, + "learning_rate": 1.803448562402509e-06, + "loss": 0.9977, + "step": 3768 + }, + { + "epoch": 2.524447421299397, + "grad_norm": 2.947315801188572, + "learning_rate": 1.8020450763991104e-06, + "loss": 0.8028, + "step": 3769 + }, + { + "epoch": 2.5251172136637643, + "grad_norm": 2.962941372117988, + "learning_rate": 1.8006418289195465e-06, + "loss": 1.1197, + "step": 3770 + }, + { + "epoch": 2.525787006028131, + "grad_norm": 4.890769162876316, + "learning_rate": 1.7992388204433735e-06, + "loss": 1.0112, + "step": 3771 + }, + { + "epoch": 2.5264567983924984, + "grad_norm": 2.2778629877186165, + "learning_rate": 1.7978360514500646e-06, + "loss": 1.128, + "step": 3772 + }, + { + "epoch": 2.5271265907568656, + "grad_norm": 2.4733835288718056, + "learning_rate": 1.7964335224190133e-06, + "loss": 0.9802, + "step": 3773 + }, + { + "epoch": 2.5277963831212324, + "grad_norm": 4.022241103731157, + "learning_rate": 1.795031233829526e-06, + "loss": 0.8857, + "step": 3774 + }, + { + "epoch": 2.5284661754855993, + "grad_norm": 2.1177413186828953, + "learning_rate": 1.7936291861608324e-06, + "loss": 1.123, + "step": 3775 + }, + { + "epoch": 2.5291359678499665, + 
"grad_norm": 2.148211668152938, + "learning_rate": 1.7922273798920776e-06, + "loss": 1.147, + "step": 3776 + }, + { + "epoch": 2.529805760214334, + "grad_norm": 2.7710196213380214, + "learning_rate": 1.790825815502325e-06, + "loss": 0.8512, + "step": 3777 + }, + { + "epoch": 2.5304755525787006, + "grad_norm": 8.53408441401046, + "learning_rate": 1.7894244934705528e-06, + "loss": 1.0378, + "step": 3778 + }, + { + "epoch": 2.5311453449430674, + "grad_norm": 2.1896370007506287, + "learning_rate": 1.7880234142756598e-06, + "loss": 0.9531, + "step": 3779 + }, + { + "epoch": 2.5318151373074347, + "grad_norm": 1.940924935224158, + "learning_rate": 1.7866225783964592e-06, + "loss": 0.9039, + "step": 3780 + }, + { + "epoch": 2.532484929671802, + "grad_norm": 2.1450740923042777, + "learning_rate": 1.7852219863116846e-06, + "loss": 0.8545, + "step": 3781 + }, + { + "epoch": 2.5331547220361688, + "grad_norm": 2.1994992037790078, + "learning_rate": 1.7838216384999802e-06, + "loss": 0.9156, + "step": 3782 + }, + { + "epoch": 2.5338245144005356, + "grad_norm": 2.5323164833480067, + "learning_rate": 1.7824215354399127e-06, + "loss": 1.0946, + "step": 3783 + }, + { + "epoch": 2.534494306764903, + "grad_norm": 3.5962509259662307, + "learning_rate": 1.7810216776099615e-06, + "loss": 1.2146, + "step": 3784 + }, + { + "epoch": 2.53516409912927, + "grad_norm": 3.362909257806918, + "learning_rate": 1.7796220654885254e-06, + "loss": 0.91, + "step": 3785 + }, + { + "epoch": 2.535833891493637, + "grad_norm": 2.225574032288798, + "learning_rate": 1.7782226995539155e-06, + "loss": 1.0455, + "step": 3786 + }, + { + "epoch": 2.536503683858004, + "grad_norm": 2.244549870381731, + "learning_rate": 1.7768235802843615e-06, + "loss": 1.0338, + "step": 3787 + }, + { + "epoch": 2.537173476222371, + "grad_norm": 2.322333014180399, + "learning_rate": 1.7754247081580066e-06, + "loss": 1.019, + "step": 3788 + }, + { + "epoch": 2.5378432685867383, + "grad_norm": 2.167160621443656, + "learning_rate": 1.7740260836529124e-06, + "loss": 0.9644, + "step": 3789 + }, + { + "epoch": 2.538513060951105, + "grad_norm": 2.563732512027881, + "learning_rate": 1.7726277072470543e-06, + "loss": 1.0276, + "step": 3790 + }, + { + "epoch": 2.5391828533154723, + "grad_norm": 2.514910548027099, + "learning_rate": 1.7712295794183215e-06, + "loss": 1.0081, + "step": 3791 + }, + { + "epoch": 2.539852645679839, + "grad_norm": 2.671222714409986, + "learning_rate": 1.76983170064452e-06, + "loss": 0.8621, + "step": 3792 + }, + { + "epoch": 2.5405224380442064, + "grad_norm": 2.948943048080216, + "learning_rate": 1.7684340714033716e-06, + "loss": 0.8808, + "step": 3793 + }, + { + "epoch": 2.5411922304085732, + "grad_norm": 2.607091304072216, + "learning_rate": 1.7670366921725106e-06, + "loss": 1.2031, + "step": 3794 + }, + { + "epoch": 2.5418620227729405, + "grad_norm": 2.0144129428968136, + "learning_rate": 1.7656395634294871e-06, + "loss": 0.799, + "step": 3795 + }, + { + "epoch": 2.5425318151373073, + "grad_norm": 2.4138780046853374, + "learning_rate": 1.7642426856517645e-06, + "loss": 1.1594, + "step": 3796 + }, + { + "epoch": 2.5432016075016746, + "grad_norm": 2.172848074613778, + "learning_rate": 1.7628460593167223e-06, + "loss": 1.1611, + "step": 3797 + }, + { + "epoch": 2.5438713998660414, + "grad_norm": 2.2338140168539606, + "learning_rate": 1.7614496849016532e-06, + "loss": 1.1443, + "step": 3798 + }, + { + "epoch": 2.5445411922304086, + "grad_norm": 3.22273868615959, + "learning_rate": 1.7600535628837628e-06, + "loss": 1.0017, + "step": 3799 + }, + 
{ + "epoch": 2.5452109845947755, + "grad_norm": 2.1752886991321554, + "learning_rate": 1.7586576937401712e-06, + "loss": 0.867, + "step": 3800 + }, + { + "epoch": 2.5458807769591427, + "grad_norm": 2.0082101949815483, + "learning_rate": 1.7572620779479127e-06, + "loss": 0.8413, + "step": 3801 + }, + { + "epoch": 2.54655056932351, + "grad_norm": 2.327850223853056, + "learning_rate": 1.755866715983935e-06, + "loss": 1.022, + "step": 3802 + }, + { + "epoch": 2.547220361687877, + "grad_norm": 2.798953469032794, + "learning_rate": 1.7544716083250968e-06, + "loss": 1.1158, + "step": 3803 + }, + { + "epoch": 2.5478901540522436, + "grad_norm": 2.354529537035325, + "learning_rate": 1.753076755448173e-06, + "loss": 0.9273, + "step": 3804 + }, + { + "epoch": 2.548559946416611, + "grad_norm": 2.410966751174238, + "learning_rate": 1.75168215782985e-06, + "loss": 0.9735, + "step": 3805 + }, + { + "epoch": 2.549229738780978, + "grad_norm": 2.56114470763061, + "learning_rate": 1.7502878159467274e-06, + "loss": 1.0565, + "step": 3806 + }, + { + "epoch": 2.549899531145345, + "grad_norm": 2.2548829414368416, + "learning_rate": 1.7488937302753154e-06, + "loss": 1.0127, + "step": 3807 + }, + { + "epoch": 2.5505693235097118, + "grad_norm": 2.5441889871773573, + "learning_rate": 1.7474999012920396e-06, + "loss": 1.0152, + "step": 3808 + }, + { + "epoch": 2.551239115874079, + "grad_norm": 8.74067663575576, + "learning_rate": 1.7461063294732361e-06, + "loss": 0.9081, + "step": 3809 + }, + { + "epoch": 2.5519089082384463, + "grad_norm": 2.4464416590919082, + "learning_rate": 1.7447130152951542e-06, + "loss": 1.0471, + "step": 3810 + }, + { + "epoch": 2.552578700602813, + "grad_norm": 2.625543319471503, + "learning_rate": 1.743319959233953e-06, + "loss": 0.9519, + "step": 3811 + }, + { + "epoch": 2.55324849296718, + "grad_norm": 2.37051281972943, + "learning_rate": 1.7419271617657065e-06, + "loss": 1.1115, + "step": 3812 + }, + { + "epoch": 2.553918285331547, + "grad_norm": 2.1592969443506123, + "learning_rate": 1.740534623366398e-06, + "loss": 1.0351, + "step": 3813 + }, + { + "epoch": 2.5545880776959144, + "grad_norm": 7.915962047670154, + "learning_rate": 1.7391423445119237e-06, + "loss": 0.9583, + "step": 3814 + }, + { + "epoch": 2.5552578700602813, + "grad_norm": 2.279223263818769, + "learning_rate": 1.7377503256780886e-06, + "loss": 0.8209, + "step": 3815 + }, + { + "epoch": 2.555927662424648, + "grad_norm": 3.3740701126778916, + "learning_rate": 1.7363585673406124e-06, + "loss": 1.0312, + "step": 3816 + }, + { + "epoch": 2.5565974547890153, + "grad_norm": 2.671924273983587, + "learning_rate": 1.7349670699751225e-06, + "loss": 0.8955, + "step": 3817 + }, + { + "epoch": 2.5572672471533826, + "grad_norm": 2.433055974405034, + "learning_rate": 1.7335758340571618e-06, + "loss": 0.9614, + "step": 3818 + }, + { + "epoch": 2.5579370395177494, + "grad_norm": 2.7253408429180395, + "learning_rate": 1.7321848600621763e-06, + "loss": 1.0738, + "step": 3819 + }, + { + "epoch": 2.5586068318821167, + "grad_norm": 2.8545529806998147, + "learning_rate": 1.7307941484655297e-06, + "loss": 0.9511, + "step": 3820 + }, + { + "epoch": 2.5592766242464835, + "grad_norm": 3.5270595319998934, + "learning_rate": 1.7294036997424918e-06, + "loss": 1.0362, + "step": 3821 + }, + { + "epoch": 2.5599464166108508, + "grad_norm": 2.5557598082884097, + "learning_rate": 1.728013514368246e-06, + "loss": 0.9735, + "step": 3822 + }, + { + "epoch": 2.5606162089752176, + "grad_norm": 2.500451565963482, + "learning_rate": 1.726623592817881e-06, + 
"loss": 1.0602, + "step": 3823 + }, + { + "epoch": 2.561286001339585, + "grad_norm": 2.417729079442435, + "learning_rate": 1.7252339355663997e-06, + "loss": 0.9892, + "step": 3824 + }, + { + "epoch": 2.5619557937039517, + "grad_norm": 2.439649307483057, + "learning_rate": 1.723844543088712e-06, + "loss": 1.0783, + "step": 3825 + }, + { + "epoch": 2.562625586068319, + "grad_norm": 2.2746076971620472, + "learning_rate": 1.722455415859639e-06, + "loss": 1.0569, + "step": 3826 + }, + { + "epoch": 2.5632953784326857, + "grad_norm": 2.0376206631962765, + "learning_rate": 1.721066554353911e-06, + "loss": 0.8972, + "step": 3827 + }, + { + "epoch": 2.563965170797053, + "grad_norm": 2.2402595744153557, + "learning_rate": 1.7196779590461654e-06, + "loss": 1.0087, + "step": 3828 + }, + { + "epoch": 2.56463496316142, + "grad_norm": 2.1266362526705613, + "learning_rate": 1.718289630410951e-06, + "loss": 1.0499, + "step": 3829 + }, + { + "epoch": 2.565304755525787, + "grad_norm": 2.521011343940099, + "learning_rate": 1.7169015689227242e-06, + "loss": 0.9965, + "step": 3830 + }, + { + "epoch": 2.565974547890154, + "grad_norm": 2.208209618140351, + "learning_rate": 1.7155137750558517e-06, + "loss": 1.0436, + "step": 3831 + }, + { + "epoch": 2.566644340254521, + "grad_norm": 2.1407673090700223, + "learning_rate": 1.714126249284606e-06, + "loss": 0.8743, + "step": 3832 + }, + { + "epoch": 2.567314132618888, + "grad_norm": 2.3291343132140714, + "learning_rate": 1.7127389920831699e-06, + "loss": 1.1151, + "step": 3833 + }, + { + "epoch": 2.5679839249832552, + "grad_norm": 2.3253702952413944, + "learning_rate": 1.7113520039256347e-06, + "loss": 1.0002, + "step": 3834 + }, + { + "epoch": 2.5686537173476225, + "grad_norm": 2.4074632536387637, + "learning_rate": 1.7099652852859997e-06, + "loss": 0.9677, + "step": 3835 + }, + { + "epoch": 2.5693235097119893, + "grad_norm": 4.0547048646201125, + "learning_rate": 1.7085788366381698e-06, + "loss": 0.9889, + "step": 3836 + }, + { + "epoch": 2.569993302076356, + "grad_norm": 3.579600634398379, + "learning_rate": 1.7071926584559595e-06, + "loss": 0.9699, + "step": 3837 + }, + { + "epoch": 2.5706630944407234, + "grad_norm": 2.2271408696467656, + "learning_rate": 1.7058067512130922e-06, + "loss": 0.8625, + "step": 3838 + }, + { + "epoch": 2.5713328868050906, + "grad_norm": 2.1301585923877955, + "learning_rate": 1.7044211153831966e-06, + "loss": 0.9977, + "step": 3839 + }, + { + "epoch": 2.5720026791694575, + "grad_norm": 2.5233071235711906, + "learning_rate": 1.7030357514398085e-06, + "loss": 1.0297, + "step": 3840 + }, + { + "epoch": 2.5726724715338243, + "grad_norm": 2.390134366500939, + "learning_rate": 1.7016506598563715e-06, + "loss": 0.9783, + "step": 3841 + }, + { + "epoch": 2.5733422638981915, + "grad_norm": 2.394308599110479, + "learning_rate": 1.7002658411062373e-06, + "loss": 0.8965, + "step": 3842 + }, + { + "epoch": 2.574012056262559, + "grad_norm": 2.366211751210963, + "learning_rate": 1.6988812956626633e-06, + "loss": 0.9329, + "step": 3843 + }, + { + "epoch": 2.5746818486269256, + "grad_norm": 3.0635929709971395, + "learning_rate": 1.697497023998812e-06, + "loss": 0.891, + "step": 3844 + }, + { + "epoch": 2.5753516409912924, + "grad_norm": 2.1841978541874223, + "learning_rate": 1.6961130265877542e-06, + "loss": 1.0329, + "step": 3845 + }, + { + "epoch": 2.5760214333556597, + "grad_norm": 1.8806076139844283, + "learning_rate": 1.6947293039024676e-06, + "loss": 0.909, + "step": 3846 + }, + { + "epoch": 2.576691225720027, + "grad_norm": 2.12551812203319, + 
"learning_rate": 1.6933458564158342e-06, + "loss": 0.9873, + "step": 3847 + }, + { + "epoch": 2.5773610180843938, + "grad_norm": 2.3225132184976136, + "learning_rate": 1.691962684600642e-06, + "loss": 1.0503, + "step": 3848 + }, + { + "epoch": 2.578030810448761, + "grad_norm": 2.56463820406851, + "learning_rate": 1.6905797889295866e-06, + "loss": 0.806, + "step": 3849 + }, + { + "epoch": 2.578700602813128, + "grad_norm": 2.8870368981465546, + "learning_rate": 1.6891971698752676e-06, + "loss": 0.8658, + "step": 3850 + }, + { + "epoch": 2.579370395177495, + "grad_norm": 2.628961365706689, + "learning_rate": 1.6878148279101914e-06, + "loss": 0.8844, + "step": 3851 + }, + { + "epoch": 2.580040187541862, + "grad_norm": 2.8628735592120838, + "learning_rate": 1.6864327635067673e-06, + "loss": 0.9512, + "step": 3852 + }, + { + "epoch": 2.580709979906229, + "grad_norm": 2.3969251776515605, + "learning_rate": 1.6850509771373124e-06, + "loss": 1.0272, + "step": 3853 + }, + { + "epoch": 2.581379772270596, + "grad_norm": 2.6070266731476934, + "learning_rate": 1.6836694692740477e-06, + "loss": 1.0955, + "step": 3854 + }, + { + "epoch": 2.5820495646349633, + "grad_norm": 2.60817654660172, + "learning_rate": 1.6822882403890995e-06, + "loss": 0.9907, + "step": 3855 + }, + { + "epoch": 2.58271935699933, + "grad_norm": 2.302707970528729, + "learning_rate": 1.6809072909544965e-06, + "loss": 1.0749, + "step": 3856 + }, + { + "epoch": 2.5833891493636973, + "grad_norm": 2.8433464962004793, + "learning_rate": 1.6795266214421752e-06, + "loss": 1.0309, + "step": 3857 + }, + { + "epoch": 2.584058941728064, + "grad_norm": 2.7394291707315075, + "learning_rate": 1.678146232323975e-06, + "loss": 0.8347, + "step": 3858 + }, + { + "epoch": 2.5847287340924314, + "grad_norm": 2.2476781312067136, + "learning_rate": 1.6767661240716381e-06, + "loss": 0.923, + "step": 3859 + }, + { + "epoch": 2.5853985264567982, + "grad_norm": 2.3084179566768115, + "learning_rate": 1.6753862971568144e-06, + "loss": 0.9621, + "step": 3860 + }, + { + "epoch": 2.5860683188211655, + "grad_norm": 3.0585697816863058, + "learning_rate": 1.6740067520510533e-06, + "loss": 0.8466, + "step": 3861 + }, + { + "epoch": 2.5867381111855323, + "grad_norm": 2.447055055214135, + "learning_rate": 1.6726274892258096e-06, + "loss": 0.7984, + "step": 3862 + }, + { + "epoch": 2.5874079035498996, + "grad_norm": 2.432100087202097, + "learning_rate": 1.6712485091524434e-06, + "loss": 1.0299, + "step": 3863 + }, + { + "epoch": 2.588077695914267, + "grad_norm": 2.3952886509514655, + "learning_rate": 1.6698698123022168e-06, + "loss": 1.1313, + "step": 3864 + }, + { + "epoch": 2.5887474882786337, + "grad_norm": 2.3702111556926577, + "learning_rate": 1.6684913991462932e-06, + "loss": 0.8961, + "step": 3865 + }, + { + "epoch": 2.5894172806430005, + "grad_norm": 3.999102986705367, + "learning_rate": 1.6671132701557414e-06, + "loss": 0.9165, + "step": 3866 + }, + { + "epoch": 2.5900870730073677, + "grad_norm": 2.411297361187964, + "learning_rate": 1.6657354258015334e-06, + "loss": 1.0294, + "step": 3867 + }, + { + "epoch": 2.590756865371735, + "grad_norm": 2.065470721419269, + "learning_rate": 1.6643578665545429e-06, + "loss": 0.9161, + "step": 3868 + }, + { + "epoch": 2.591426657736102, + "grad_norm": 2.147058243468339, + "learning_rate": 1.6629805928855457e-06, + "loss": 1.0022, + "step": 3869 + }, + { + "epoch": 2.5920964501004686, + "grad_norm": 2.502297255070757, + "learning_rate": 1.6616036052652208e-06, + "loss": 0.849, + "step": 3870 + }, + { + "epoch": 
2.592766242464836, + "grad_norm": 3.0865951657127897, + "learning_rate": 1.6602269041641495e-06, + "loss": 1.0716, + "step": 3871 + }, + { + "epoch": 2.593436034829203, + "grad_norm": 2.634535460218479, + "learning_rate": 1.6588504900528151e-06, + "loss": 1.1288, + "step": 3872 + }, + { + "epoch": 2.59410582719357, + "grad_norm": 2.632295497514308, + "learning_rate": 1.6574743634016021e-06, + "loss": 0.9707, + "step": 3873 + }, + { + "epoch": 2.594775619557937, + "grad_norm": 2.286227843243726, + "learning_rate": 1.656098524680797e-06, + "loss": 1.0249, + "step": 3874 + }, + { + "epoch": 2.595445411922304, + "grad_norm": 2.5399553991293136, + "learning_rate": 1.6547229743605897e-06, + "loss": 0.9751, + "step": 3875 + }, + { + "epoch": 2.5961152042866713, + "grad_norm": 2.5653150565746397, + "learning_rate": 1.6533477129110693e-06, + "loss": 0.926, + "step": 3876 + }, + { + "epoch": 2.596784996651038, + "grad_norm": 2.420393013983515, + "learning_rate": 1.6519727408022262e-06, + "loss": 1.0437, + "step": 3877 + }, + { + "epoch": 2.5974547890154054, + "grad_norm": 2.0992396606068393, + "learning_rate": 1.6505980585039533e-06, + "loss": 1.0406, + "step": 3878 + }, + { + "epoch": 2.598124581379772, + "grad_norm": 2.078540609164457, + "learning_rate": 1.649223666486044e-06, + "loss": 1.0385, + "step": 3879 + }, + { + "epoch": 2.5987943737441395, + "grad_norm": 2.329426351233497, + "learning_rate": 1.647849565218193e-06, + "loss": 0.9578, + "step": 3880 + }, + { + "epoch": 2.5994641661085063, + "grad_norm": 2.5384662783139254, + "learning_rate": 1.6464757551699934e-06, + "loss": 0.9617, + "step": 3881 + }, + { + "epoch": 2.6001339584728735, + "grad_norm": 2.8878607669841796, + "learning_rate": 1.6451022368109404e-06, + "loss": 0.9999, + "step": 3882 + }, + { + "epoch": 2.6008037508372404, + "grad_norm": 2.8031496537744274, + "learning_rate": 1.6437290106104308e-06, + "loss": 0.9593, + "step": 3883 + }, + { + "epoch": 2.6014735432016076, + "grad_norm": 2.5332628337290823, + "learning_rate": 1.64235607703776e-06, + "loss": 1.0982, + "step": 3884 + }, + { + "epoch": 2.6021433355659744, + "grad_norm": 2.519068349896441, + "learning_rate": 1.6409834365621225e-06, + "loss": 0.9661, + "step": 3885 + }, + { + "epoch": 2.6028131279303417, + "grad_norm": 2.6042486898001544, + "learning_rate": 1.6396110896526141e-06, + "loss": 0.8952, + "step": 3886 + }, + { + "epoch": 2.6034829202947085, + "grad_norm": 2.8568427115442545, + "learning_rate": 1.6382390367782303e-06, + "loss": 0.9725, + "step": 3887 + }, + { + "epoch": 2.6041527126590758, + "grad_norm": 2.2069113939308758, + "learning_rate": 1.6368672784078665e-06, + "loss": 1.0189, + "step": 3888 + }, + { + "epoch": 2.6048225050234426, + "grad_norm": 2.5020301378742875, + "learning_rate": 1.635495815010315e-06, + "loss": 1.1669, + "step": 3889 + }, + { + "epoch": 2.60549229738781, + "grad_norm": 2.542120358144574, + "learning_rate": 1.6341246470542693e-06, + "loss": 0.9574, + "step": 3890 + }, + { + "epoch": 2.6061620897521767, + "grad_norm": 1.8977342008878226, + "learning_rate": 1.6327537750083227e-06, + "loss": 0.8712, + "step": 3891 + }, + { + "epoch": 2.606831882116544, + "grad_norm": 1.8966531350270914, + "learning_rate": 1.6313831993409662e-06, + "loss": 0.8389, + "step": 3892 + }, + { + "epoch": 2.607501674480911, + "grad_norm": 2.450715531734622, + "learning_rate": 1.6300129205205879e-06, + "loss": 0.9571, + "step": 3893 + }, + { + "epoch": 2.608171466845278, + "grad_norm": 2.43116484786611, + "learning_rate": 1.6286429390154778e-06, + "loss": 
0.9648, + "step": 3894 + }, + { + "epoch": 2.608841259209645, + "grad_norm": 2.3691242910704133, + "learning_rate": 1.627273255293822e-06, + "loss": 0.9758, + "step": 3895 + }, + { + "epoch": 2.609511051574012, + "grad_norm": 2.1482488257706267, + "learning_rate": 1.6259038698237054e-06, + "loss": 1.0105, + "step": 3896 + }, + { + "epoch": 2.6101808439383793, + "grad_norm": 2.0941812633250714, + "learning_rate": 1.6245347830731122e-06, + "loss": 1.0972, + "step": 3897 + }, + { + "epoch": 2.610850636302746, + "grad_norm": 2.2284105534305962, + "learning_rate": 1.6231659955099219e-06, + "loss": 0.894, + "step": 3898 + }, + { + "epoch": 2.611520428667113, + "grad_norm": 3.4071562174625423, + "learning_rate": 1.6217975076019137e-06, + "loss": 0.9092, + "step": 3899 + }, + { + "epoch": 2.6121902210314802, + "grad_norm": 7.57604101519971, + "learning_rate": 1.6204293198167637e-06, + "loss": 1.0426, + "step": 3900 + }, + { + "epoch": 2.6128600133958475, + "grad_norm": 2.3975326259003427, + "learning_rate": 1.6190614326220465e-06, + "loss": 0.9587, + "step": 3901 + }, + { + "epoch": 2.6135298057602143, + "grad_norm": 2.603339866471303, + "learning_rate": 1.617693846485232e-06, + "loss": 0.9971, + "step": 3902 + }, + { + "epoch": 2.614199598124581, + "grad_norm": 2.41019684345505, + "learning_rate": 1.6163265618736895e-06, + "loss": 1.0415, + "step": 3903 + }, + { + "epoch": 2.6148693904889484, + "grad_norm": 2.5345388297657196, + "learning_rate": 1.6149595792546825e-06, + "loss": 1.1901, + "step": 3904 + }, + { + "epoch": 2.6155391828533157, + "grad_norm": 2.3937142926364077, + "learning_rate": 1.6135928990953747e-06, + "loss": 0.8473, + "step": 3905 + }, + { + "epoch": 2.6162089752176825, + "grad_norm": 2.372783132911802, + "learning_rate": 1.6122265218628235e-06, + "loss": 0.8249, + "step": 3906 + }, + { + "epoch": 2.6168787675820493, + "grad_norm": 3.235245662118242, + "learning_rate": 1.6108604480239837e-06, + "loss": 0.9072, + "step": 3907 + }, + { + "epoch": 2.6175485599464166, + "grad_norm": 3.3867668766341295, + "learning_rate": 1.6094946780457071e-06, + "loss": 0.8765, + "step": 3908 + }, + { + "epoch": 2.618218352310784, + "grad_norm": 2.395138570248278, + "learning_rate": 1.6081292123947423e-06, + "loss": 0.7894, + "step": 3909 + }, + { + "epoch": 2.6188881446751506, + "grad_norm": 3.127481459749196, + "learning_rate": 1.6067640515377308e-06, + "loss": 1.0339, + "step": 3910 + }, + { + "epoch": 2.619557937039518, + "grad_norm": 2.7833663203444288, + "learning_rate": 1.605399195941212e-06, + "loss": 0.9616, + "step": 3911 + }, + { + "epoch": 2.6202277294038847, + "grad_norm": 2.57103760749395, + "learning_rate": 1.6040346460716223e-06, + "loss": 1.0113, + "step": 3912 + }, + { + "epoch": 2.620897521768252, + "grad_norm": 2.1351724518656336, + "learning_rate": 1.6026704023952922e-06, + "loss": 0.8347, + "step": 3913 + }, + { + "epoch": 2.621567314132619, + "grad_norm": 2.3899503321207805, + "learning_rate": 1.601306465378446e-06, + "loss": 0.9356, + "step": 3914 + }, + { + "epoch": 2.622237106496986, + "grad_norm": 2.3737028011432604, + "learning_rate": 1.599942835487205e-06, + "loss": 1.0213, + "step": 3915 + }, + { + "epoch": 2.622906898861353, + "grad_norm": 2.2407179689855394, + "learning_rate": 1.5985795131875866e-06, + "loss": 1.0827, + "step": 3916 + }, + { + "epoch": 2.62357669122572, + "grad_norm": 2.3403092209598206, + "learning_rate": 1.5972164989455016e-06, + "loss": 0.8981, + "step": 3917 + }, + { + "epoch": 2.624246483590087, + "grad_norm": 2.1573963082493908, + 
"learning_rate": 1.5958537932267548e-06, + "loss": 1.0663, + "step": 3918 + }, + { + "epoch": 2.624916275954454, + "grad_norm": 2.2330039177754646, + "learning_rate": 1.594491396497046e-06, + "loss": 0.9607, + "step": 3919 + }, + { + "epoch": 2.625586068318821, + "grad_norm": 2.6162309974300357, + "learning_rate": 1.5931293092219715e-06, + "loss": 1.0361, + "step": 3920 + }, + { + "epoch": 2.6262558606831883, + "grad_norm": 3.138477829573083, + "learning_rate": 1.59176753186702e-06, + "loss": 0.989, + "step": 3921 + }, + { + "epoch": 2.626925653047555, + "grad_norm": 2.7337684092053642, + "learning_rate": 1.5904060648975733e-06, + "loss": 0.983, + "step": 3922 + }, + { + "epoch": 2.6275954454119224, + "grad_norm": 2.255645324146266, + "learning_rate": 1.5890449087789084e-06, + "loss": 1.2276, + "step": 3923 + }, + { + "epoch": 2.628265237776289, + "grad_norm": 2.2877672633158945, + "learning_rate": 1.5876840639761974e-06, + "loss": 1.0367, + "step": 3924 + }, + { + "epoch": 2.6289350301406564, + "grad_norm": 2.670737800133979, + "learning_rate": 1.5863235309545038e-06, + "loss": 1.028, + "step": 3925 + }, + { + "epoch": 2.6296048225050237, + "grad_norm": 2.1436452247510864, + "learning_rate": 1.5849633101787853e-06, + "loss": 0.9974, + "step": 3926 + }, + { + "epoch": 2.6302746148693905, + "grad_norm": 2.3248424732020467, + "learning_rate": 1.583603402113892e-06, + "loss": 1.1075, + "step": 3927 + }, + { + "epoch": 2.6309444072337573, + "grad_norm": 5.157307503846941, + "learning_rate": 1.5822438072245699e-06, + "loss": 0.9006, + "step": 3928 + }, + { + "epoch": 2.6316141995981246, + "grad_norm": 2.179105456303383, + "learning_rate": 1.5808845259754557e-06, + "loss": 0.8338, + "step": 3929 + }, + { + "epoch": 2.632283991962492, + "grad_norm": 2.100892684771544, + "learning_rate": 1.5795255588310787e-06, + "loss": 0.8818, + "step": 3930 + }, + { + "epoch": 2.6329537843268587, + "grad_norm": 2.5972667277891635, + "learning_rate": 1.5781669062558607e-06, + "loss": 0.9458, + "step": 3931 + }, + { + "epoch": 2.6336235766912255, + "grad_norm": 3.4780124416149008, + "learning_rate": 1.576808568714119e-06, + "loss": 1.0276, + "step": 3932 + }, + { + "epoch": 2.6342933690555927, + "grad_norm": 2.237953021344195, + "learning_rate": 1.5754505466700593e-06, + "loss": 1.1002, + "step": 3933 + }, + { + "epoch": 2.63496316141996, + "grad_norm": 2.8336412435894256, + "learning_rate": 1.5740928405877828e-06, + "loss": 1.0316, + "step": 3934 + }, + { + "epoch": 2.635632953784327, + "grad_norm": 2.408571778678402, + "learning_rate": 1.5727354509312797e-06, + "loss": 1.0414, + "step": 3935 + }, + { + "epoch": 2.6363027461486936, + "grad_norm": 2.3224876622593706, + "learning_rate": 1.5713783781644343e-06, + "loss": 0.9344, + "step": 3936 + }, + { + "epoch": 2.636972538513061, + "grad_norm": 2.2699261470453034, + "learning_rate": 1.5700216227510212e-06, + "loss": 0.9883, + "step": 3937 + }, + { + "epoch": 2.637642330877428, + "grad_norm": 2.9621416902559856, + "learning_rate": 1.5686651851547084e-06, + "loss": 1.0289, + "step": 3938 + }, + { + "epoch": 2.638312123241795, + "grad_norm": 2.706644174598605, + "learning_rate": 1.5673090658390528e-06, + "loss": 1.1959, + "step": 3939 + }, + { + "epoch": 2.6389819156061622, + "grad_norm": 2.1988848665091836, + "learning_rate": 1.5659532652675041e-06, + "loss": 0.8702, + "step": 3940 + }, + { + "epoch": 2.639651707970529, + "grad_norm": 2.6121582480067613, + "learning_rate": 1.564597783903402e-06, + "loss": 0.9435, + "step": 3941 + }, + { + "epoch": 
2.6403215003348963, + "grad_norm": 2.6800555744432777, + "learning_rate": 1.5632426222099807e-06, + "loss": 0.9112, + "step": 3942 + }, + { + "epoch": 2.640991292699263, + "grad_norm": 2.6240621241419086, + "learning_rate": 1.561887780650359e-06, + "loss": 0.9852, + "step": 3943 + }, + { + "epoch": 2.6416610850636304, + "grad_norm": 2.311515533346326, + "learning_rate": 1.5605332596875514e-06, + "loss": 1.1077, + "step": 3944 + }, + { + "epoch": 2.642330877427997, + "grad_norm": 3.3071504690508413, + "learning_rate": 1.55917905978446e-06, + "loss": 0.9829, + "step": 3945 + }, + { + "epoch": 2.6430006697923645, + "grad_norm": 2.4603671614579055, + "learning_rate": 1.55782518140388e-06, + "loss": 0.9647, + "step": 3946 + }, + { + "epoch": 2.6436704621567313, + "grad_norm": 2.323576649408838, + "learning_rate": 1.5564716250084933e-06, + "loss": 0.8263, + "step": 3947 + }, + { + "epoch": 2.6443402545210986, + "grad_norm": 2.291089732633084, + "learning_rate": 1.5551183910608738e-06, + "loss": 0.9936, + "step": 3948 + }, + { + "epoch": 2.6450100468854654, + "grad_norm": 2.448469048243617, + "learning_rate": 1.5537654800234846e-06, + "loss": 0.8672, + "step": 3949 + }, + { + "epoch": 2.6456798392498326, + "grad_norm": 2.6410032568855404, + "learning_rate": 1.55241289235868e-06, + "loss": 0.9461, + "step": 3950 + }, + { + "epoch": 2.6463496316141994, + "grad_norm": 3.0772546814437303, + "learning_rate": 1.5510606285287008e-06, + "loss": 1.0484, + "step": 3951 + }, + { + "epoch": 2.6470194239785667, + "grad_norm": 2.5347480399025173, + "learning_rate": 1.5497086889956791e-06, + "loss": 0.9242, + "step": 3952 + }, + { + "epoch": 2.6476892163429335, + "grad_norm": 2.8806552317867262, + "learning_rate": 1.548357074221637e-06, + "loss": 0.896, + "step": 3953 + }, + { + "epoch": 2.648359008707301, + "grad_norm": 3.9479019764943417, + "learning_rate": 1.547005784668484e-06, + "loss": 0.9129, + "step": 3954 + }, + { + "epoch": 2.649028801071668, + "grad_norm": 2.5382066485239805, + "learning_rate": 1.5456548207980176e-06, + "loss": 0.9957, + "step": 3955 + }, + { + "epoch": 2.649698593436035, + "grad_norm": 2.680258870688721, + "learning_rate": 1.5443041830719262e-06, + "loss": 0.6899, + "step": 3956 + }, + { + "epoch": 2.6503683858004017, + "grad_norm": 2.7617635868136383, + "learning_rate": 1.5429538719517867e-06, + "loss": 0.8814, + "step": 3957 + }, + { + "epoch": 2.651038178164769, + "grad_norm": 2.763537716655671, + "learning_rate": 1.5416038878990636e-06, + "loss": 0.8758, + "step": 3958 + }, + { + "epoch": 2.651707970529136, + "grad_norm": 2.5560150088997196, + "learning_rate": 1.5402542313751079e-06, + "loss": 0.9884, + "step": 3959 + }, + { + "epoch": 2.652377762893503, + "grad_norm": 2.0417889154021744, + "learning_rate": 1.5389049028411613e-06, + "loss": 0.9434, + "step": 3960 + }, + { + "epoch": 2.65304755525787, + "grad_norm": 3.1813768791913906, + "learning_rate": 1.5375559027583524e-06, + "loss": 0.9286, + "step": 3961 + }, + { + "epoch": 2.653717347622237, + "grad_norm": 2.307195865130105, + "learning_rate": 1.5362072315876986e-06, + "loss": 0.9695, + "step": 3962 + }, + { + "epoch": 2.6543871399866044, + "grad_norm": 2.534297292683669, + "learning_rate": 1.5348588897901021e-06, + "loss": 1.0059, + "step": 3963 + }, + { + "epoch": 2.655056932350971, + "grad_norm": 2.502566520925561, + "learning_rate": 1.533510877826355e-06, + "loss": 0.9991, + "step": 3964 + }, + { + "epoch": 2.655726724715338, + "grad_norm": 2.1997282882419427, + "learning_rate": 1.5321631961571365e-06, + "loss": 
0.8703, + "step": 3965 + }, + { + "epoch": 2.6563965170797053, + "grad_norm": 2.2788734283533487, + "learning_rate": 1.5308158452430119e-06, + "loss": 1.0386, + "step": 3966 + }, + { + "epoch": 2.6570663094440725, + "grad_norm": 2.0492310107031733, + "learning_rate": 1.5294688255444357e-06, + "loss": 0.8959, + "step": 3967 + }, + { + "epoch": 2.6577361018084393, + "grad_norm": 2.4576540511019003, + "learning_rate": 1.5281221375217447e-06, + "loss": 0.9136, + "step": 3968 + }, + { + "epoch": 2.6584058941728066, + "grad_norm": 3.020511052795364, + "learning_rate": 1.5267757816351672e-06, + "loss": 0.8552, + "step": 3969 + }, + { + "epoch": 2.6590756865371734, + "grad_norm": 2.4675226126389305, + "learning_rate": 1.5254297583448146e-06, + "loss": 0.9822, + "step": 3970 + }, + { + "epoch": 2.6597454789015407, + "grad_norm": 2.8641229442651457, + "learning_rate": 1.5240840681106885e-06, + "loss": 0.842, + "step": 3971 + }, + { + "epoch": 2.6604152712659075, + "grad_norm": 3.7773194354535833, + "learning_rate": 1.5227387113926709e-06, + "loss": 0.8291, + "step": 3972 + }, + { + "epoch": 2.6610850636302747, + "grad_norm": 2.6283020849577845, + "learning_rate": 1.5213936886505352e-06, + "loss": 1.0549, + "step": 3973 + }, + { + "epoch": 2.6617548559946416, + "grad_norm": 2.356603763499872, + "learning_rate": 1.5200490003439372e-06, + "loss": 0.8405, + "step": 3974 + }, + { + "epoch": 2.662424648359009, + "grad_norm": 2.6074576405086916, + "learning_rate": 1.5187046469324224e-06, + "loss": 1.0647, + "step": 3975 + }, + { + "epoch": 2.6630944407233756, + "grad_norm": 2.1864967706192413, + "learning_rate": 1.5173606288754162e-06, + "loss": 1.0732, + "step": 3976 + }, + { + "epoch": 2.663764233087743, + "grad_norm": 2.399827762571335, + "learning_rate": 1.5160169466322333e-06, + "loss": 0.9784, + "step": 3977 + }, + { + "epoch": 2.6644340254521097, + "grad_norm": 4.173900523656434, + "learning_rate": 1.5146736006620732e-06, + "loss": 1.1093, + "step": 3978 + }, + { + "epoch": 2.665103817816477, + "grad_norm": 2.5113068220912056, + "learning_rate": 1.513330591424021e-06, + "loss": 1.0107, + "step": 3979 + }, + { + "epoch": 2.665773610180844, + "grad_norm": 2.4809198949795297, + "learning_rate": 1.5119879193770438e-06, + "loss": 0.9659, + "step": 3980 + }, + { + "epoch": 2.666443402545211, + "grad_norm": 2.8729456512314693, + "learning_rate": 1.5106455849799968e-06, + "loss": 0.8909, + "step": 3981 + }, + { + "epoch": 2.667113194909578, + "grad_norm": 2.03526997600701, + "learning_rate": 1.5093035886916168e-06, + "loss": 0.7151, + "step": 3982 + }, + { + "epoch": 2.667782987273945, + "grad_norm": 2.2288790867981985, + "learning_rate": 1.5079619309705293e-06, + "loss": 1.0034, + "step": 3983 + }, + { + "epoch": 2.6684527796383124, + "grad_norm": 2.277376580637135, + "learning_rate": 1.5066206122752395e-06, + "loss": 1.0729, + "step": 3984 + }, + { + "epoch": 2.669122572002679, + "grad_norm": 4.67877442336952, + "learning_rate": 1.505279633064139e-06, + "loss": 1.0019, + "step": 3985 + }, + { + "epoch": 2.669792364367046, + "grad_norm": 2.059998457155522, + "learning_rate": 1.5039389937955029e-06, + "loss": 0.8519, + "step": 3986 + }, + { + "epoch": 2.6704621567314133, + "grad_norm": 2.8129701267846805, + "learning_rate": 1.5025986949274921e-06, + "loss": 0.9262, + "step": 3987 + }, + { + "epoch": 2.6711319490957806, + "grad_norm": 2.0672560900215244, + "learning_rate": 1.5012587369181472e-06, + "loss": 0.8405, + "step": 3988 + }, + { + "epoch": 2.6718017414601474, + "grad_norm": 5.543113837489373, + 
"learning_rate": 1.4999191202253954e-06, + "loss": 0.8572, + "step": 3989 + }, + { + "epoch": 2.672471533824514, + "grad_norm": 2.564210373913769, + "learning_rate": 1.498579845307046e-06, + "loss": 1.1137, + "step": 3990 + }, + { + "epoch": 2.6731413261888815, + "grad_norm": 2.5019915946009603, + "learning_rate": 1.4972409126207932e-06, + "loss": 1.0267, + "step": 3991 + }, + { + "epoch": 2.6738111185532487, + "grad_norm": 2.5261188657340234, + "learning_rate": 1.4959023226242115e-06, + "loss": 1.0694, + "step": 3992 + }, + { + "epoch": 2.6744809109176155, + "grad_norm": 2.690622207899595, + "learning_rate": 1.4945640757747601e-06, + "loss": 1.0449, + "step": 3993 + }, + { + "epoch": 2.6751507032819823, + "grad_norm": 2.6346805599426872, + "learning_rate": 1.4932261725297797e-06, + "loss": 1.0446, + "step": 3994 + }, + { + "epoch": 2.6758204956463496, + "grad_norm": 2.8376283331620664, + "learning_rate": 1.4918886133464966e-06, + "loss": 0.9492, + "step": 3995 + }, + { + "epoch": 2.676490288010717, + "grad_norm": 2.582138628593843, + "learning_rate": 1.4905513986820155e-06, + "loss": 0.9918, + "step": 3996 + }, + { + "epoch": 2.6771600803750837, + "grad_norm": 2.518733928658582, + "learning_rate": 1.4892145289933251e-06, + "loss": 0.9846, + "step": 3997 + }, + { + "epoch": 2.6778298727394505, + "grad_norm": 2.6167824679464187, + "learning_rate": 1.4878780047372973e-06, + "loss": 1.0554, + "step": 3998 + }, + { + "epoch": 2.6784996651038178, + "grad_norm": 2.392953068045487, + "learning_rate": 1.4865418263706849e-06, + "loss": 1.071, + "step": 3999 + }, + { + "epoch": 2.679169457468185, + "grad_norm": 2.2199473950814483, + "learning_rate": 1.4852059943501218e-06, + "loss": 0.9238, + "step": 4000 + }, + { + "epoch": 2.679839249832552, + "grad_norm": 2.9584496002143585, + "learning_rate": 1.483870509132124e-06, + "loss": 0.8977, + "step": 4001 + }, + { + "epoch": 2.680509042196919, + "grad_norm": 2.1140501985361544, + "learning_rate": 1.4825353711730906e-06, + "loss": 1.0037, + "step": 4002 + }, + { + "epoch": 2.681178834561286, + "grad_norm": 2.3300708199586, + "learning_rate": 1.4812005809292994e-06, + "loss": 0.9649, + "step": 4003 + }, + { + "epoch": 2.681848626925653, + "grad_norm": 3.1791351339904823, + "learning_rate": 1.4798661388569119e-06, + "loss": 0.8205, + "step": 4004 + }, + { + "epoch": 2.68251841929002, + "grad_norm": 2.272649258717913, + "learning_rate": 1.4785320454119676e-06, + "loss": 1.0089, + "step": 4005 + }, + { + "epoch": 2.6831882116543873, + "grad_norm": 2.880280235284979, + "learning_rate": 1.4771983010503899e-06, + "loss": 0.9358, + "step": 4006 + }, + { + "epoch": 2.683858004018754, + "grad_norm": 2.6638604207998964, + "learning_rate": 1.4758649062279818e-06, + "loss": 0.99, + "step": 4007 + }, + { + "epoch": 2.6845277963831213, + "grad_norm": 2.2638463756306106, + "learning_rate": 1.4745318614004264e-06, + "loss": 0.7079, + "step": 4008 + }, + { + "epoch": 2.685197588747488, + "grad_norm": 3.4914316584616114, + "learning_rate": 1.4731991670232862e-06, + "loss": 1.0429, + "step": 4009 + }, + { + "epoch": 2.6858673811118554, + "grad_norm": 2.505630568155371, + "learning_rate": 1.4718668235520068e-06, + "loss": 1.03, + "step": 4010 + }, + { + "epoch": 2.6865371734762222, + "grad_norm": 2.2117232847503328, + "learning_rate": 1.4705348314419115e-06, + "loss": 0.8833, + "step": 4011 + }, + { + "epoch": 2.6872069658405895, + "grad_norm": 3.0644079203924846, + "learning_rate": 1.4692031911482057e-06, + "loss": 1.043, + "step": 4012 + }, + { + "epoch": 
2.6878767582049563, + "grad_norm": 2.23705675897874, + "learning_rate": 1.4678719031259703e-06, + "loss": 1.0114, + "step": 4013 + }, + { + "epoch": 2.6885465505693236, + "grad_norm": 3.396919763002264, + "learning_rate": 1.4665409678301712e-06, + "loss": 1.0224, + "step": 4014 + }, + { + "epoch": 2.6892163429336904, + "grad_norm": 2.2054273823423944, + "learning_rate": 1.4652103857156497e-06, + "loss": 0.9497, + "step": 4015 + }, + { + "epoch": 2.6898861352980576, + "grad_norm": 2.4809778339432773, + "learning_rate": 1.4638801572371303e-06, + "loss": 0.892, + "step": 4016 + }, + { + "epoch": 2.690555927662425, + "grad_norm": 2.287701472251954, + "learning_rate": 1.4625502828492107e-06, + "loss": 1.0118, + "step": 4017 + }, + { + "epoch": 2.6912257200267917, + "grad_norm": 2.512923291712094, + "learning_rate": 1.4612207630063738e-06, + "loss": 1.0055, + "step": 4018 + }, + { + "epoch": 2.6918955123911585, + "grad_norm": 2.4815188555983254, + "learning_rate": 1.4598915981629774e-06, + "loss": 0.8707, + "step": 4019 + }, + { + "epoch": 2.692565304755526, + "grad_norm": 2.4404763627937305, + "learning_rate": 1.4585627887732606e-06, + "loss": 0.6723, + "step": 4020 + }, + { + "epoch": 2.693235097119893, + "grad_norm": 2.593789423929429, + "learning_rate": 1.4572343352913388e-06, + "loss": 0.9321, + "step": 4021 + }, + { + "epoch": 2.69390488948426, + "grad_norm": 3.215621715191415, + "learning_rate": 1.4559062381712066e-06, + "loss": 0.8849, + "step": 4022 + }, + { + "epoch": 2.6945746818486267, + "grad_norm": 2.989306372002308, + "learning_rate": 1.454578497866737e-06, + "loss": 0.9723, + "step": 4023 + }, + { + "epoch": 2.695244474212994, + "grad_norm": 2.7903770884836905, + "learning_rate": 1.4532511148316814e-06, + "loss": 0.9285, + "step": 4024 + }, + { + "epoch": 2.695914266577361, + "grad_norm": 2.9344143590850686, + "learning_rate": 1.451924089519668e-06, + "loss": 1.0093, + "step": 4025 + }, + { + "epoch": 2.696584058941728, + "grad_norm": 2.392762890995635, + "learning_rate": 1.4505974223842037e-06, + "loss": 0.9278, + "step": 4026 + }, + { + "epoch": 2.697253851306095, + "grad_norm": 2.1498383316324072, + "learning_rate": 1.4492711138786728e-06, + "loss": 0.9211, + "step": 4027 + }, + { + "epoch": 2.697923643670462, + "grad_norm": 2.3061722508718487, + "learning_rate": 1.4479451644563367e-06, + "loss": 0.9615, + "step": 4028 + }, + { + "epoch": 2.6985934360348294, + "grad_norm": 2.565424152550157, + "learning_rate": 1.4466195745703345e-06, + "loss": 0.784, + "step": 4029 + }, + { + "epoch": 2.699263228399196, + "grad_norm": 2.9921512627821483, + "learning_rate": 1.4452943446736823e-06, + "loss": 0.9989, + "step": 4030 + }, + { + "epoch": 2.6999330207635635, + "grad_norm": 2.6300751313657327, + "learning_rate": 1.4439694752192729e-06, + "loss": 0.8994, + "step": 4031 + }, + { + "epoch": 2.7006028131279303, + "grad_norm": 2.6861148434916933, + "learning_rate": 1.4426449666598764e-06, + "loss": 0.9788, + "step": 4032 + }, + { + "epoch": 2.7012726054922975, + "grad_norm": 2.5585357005157556, + "learning_rate": 1.4413208194481392e-06, + "loss": 0.9832, + "step": 4033 + }, + { + "epoch": 2.7019423978566643, + "grad_norm": 2.684838568285819, + "learning_rate": 1.4399970340365842e-06, + "loss": 1.1105, + "step": 4034 + }, + { + "epoch": 2.7026121902210316, + "grad_norm": 2.252012340358557, + "learning_rate": 1.4386736108776111e-06, + "loss": 0.8992, + "step": 4035 + }, + { + "epoch": 2.7032819825853984, + "grad_norm": 2.3671957725186052, + "learning_rate": 1.4373505504234953e-06, + "loss": 
0.9736, + "step": 4036 + }, + { + "epoch": 2.7039517749497657, + "grad_norm": 3.3993705307951885, + "learning_rate": 1.4360278531263883e-06, + "loss": 0.9636, + "step": 4037 + }, + { + "epoch": 2.7046215673141325, + "grad_norm": 2.745924978415913, + "learning_rate": 1.4347055194383175e-06, + "loss": 1.0285, + "step": 4038 + }, + { + "epoch": 2.7052913596784998, + "grad_norm": 2.5711325208495297, + "learning_rate": 1.4333835498111868e-06, + "loss": 0.9939, + "step": 4039 + }, + { + "epoch": 2.7059611520428666, + "grad_norm": 2.954150765961732, + "learning_rate": 1.4320619446967732e-06, + "loss": 1.1922, + "step": 4040 + }, + { + "epoch": 2.706630944407234, + "grad_norm": 2.564232682058493, + "learning_rate": 1.4307407045467348e-06, + "loss": 0.8666, + "step": 4041 + }, + { + "epoch": 2.7073007367716007, + "grad_norm": 2.642308701500394, + "learning_rate": 1.429419829812597e-06, + "loss": 1.0554, + "step": 4042 + }, + { + "epoch": 2.707970529135968, + "grad_norm": 2.36158367550851, + "learning_rate": 1.4280993209457671e-06, + "loss": 0.8403, + "step": 4043 + }, + { + "epoch": 2.7086403215003347, + "grad_norm": 2.3774978548640497, + "learning_rate": 1.4267791783975224e-06, + "loss": 0.9361, + "step": 4044 + }, + { + "epoch": 2.709310113864702, + "grad_norm": 2.810809346180731, + "learning_rate": 1.4254594026190208e-06, + "loss": 0.8549, + "step": 4045 + }, + { + "epoch": 2.7099799062290693, + "grad_norm": 1.9609948528591756, + "learning_rate": 1.4241399940612882e-06, + "loss": 0.8988, + "step": 4046 + }, + { + "epoch": 2.710649698593436, + "grad_norm": 2.518195653487045, + "learning_rate": 1.4228209531752285e-06, + "loss": 0.9574, + "step": 4047 + }, + { + "epoch": 2.711319490957803, + "grad_norm": 2.177147224674761, + "learning_rate": 1.4215022804116219e-06, + "loss": 0.9893, + "step": 4048 + }, + { + "epoch": 2.71198928332217, + "grad_norm": 2.3098260624214166, + "learning_rate": 1.4201839762211197e-06, + "loss": 0.7491, + "step": 4049 + }, + { + "epoch": 2.7126590756865374, + "grad_norm": 3.1271816913283974, + "learning_rate": 1.4188660410542466e-06, + "loss": 1.0047, + "step": 4050 + }, + { + "epoch": 2.7133288680509042, + "grad_norm": 2.369932853866952, + "learning_rate": 1.4175484753614027e-06, + "loss": 0.8339, + "step": 4051 + }, + { + "epoch": 2.713998660415271, + "grad_norm": 2.5436423576816063, + "learning_rate": 1.416231279592864e-06, + "loss": 0.9777, + "step": 4052 + }, + { + "epoch": 2.7146684527796383, + "grad_norm": 2.462779189745867, + "learning_rate": 1.4149144541987772e-06, + "loss": 0.944, + "step": 4053 + }, + { + "epoch": 2.7153382451440056, + "grad_norm": 3.238793032247908, + "learning_rate": 1.4135979996291618e-06, + "loss": 1.1374, + "step": 4054 + }, + { + "epoch": 2.7160080375083724, + "grad_norm": 2.7656597154358167, + "learning_rate": 1.4122819163339117e-06, + "loss": 1.0917, + "step": 4055 + }, + { + "epoch": 2.716677829872739, + "grad_norm": 2.754302402494247, + "learning_rate": 1.4109662047627959e-06, + "loss": 0.8123, + "step": 4056 + }, + { + "epoch": 2.7173476222371065, + "grad_norm": 2.824946513138882, + "learning_rate": 1.4096508653654545e-06, + "loss": 0.9303, + "step": 4057 + }, + { + "epoch": 2.7180174146014737, + "grad_norm": 2.5260748505652044, + "learning_rate": 1.4083358985913992e-06, + "loss": 0.9271, + "step": 4058 + }, + { + "epoch": 2.7186872069658405, + "grad_norm": 2.3091959447683625, + "learning_rate": 1.4070213048900146e-06, + "loss": 1.0489, + "step": 4059 + }, + { + "epoch": 2.7193569993302074, + "grad_norm": 3.885484230867651, + 
"learning_rate": 1.405707084710562e-06, + "loss": 1.1131, + "step": 4060 + }, + { + "epoch": 2.7200267916945746, + "grad_norm": 2.677436253350919, + "learning_rate": 1.4043932385021708e-06, + "loss": 0.9332, + "step": 4061 + }, + { + "epoch": 2.720696584058942, + "grad_norm": 2.5963880945960844, + "learning_rate": 1.4030797667138421e-06, + "loss": 1.04, + "step": 4062 + }, + { + "epoch": 2.7213663764233087, + "grad_norm": 2.7635721016218056, + "learning_rate": 1.4017666697944506e-06, + "loss": 1.0041, + "step": 4063 + }, + { + "epoch": 2.722036168787676, + "grad_norm": 2.4142260387258534, + "learning_rate": 1.4004539481927453e-06, + "loss": 0.9019, + "step": 4064 + }, + { + "epoch": 2.7227059611520428, + "grad_norm": 2.691976043224127, + "learning_rate": 1.3991416023573442e-06, + "loss": 0.8777, + "step": 4065 + }, + { + "epoch": 2.72337575351641, + "grad_norm": 2.4358496799246105, + "learning_rate": 1.3978296327367352e-06, + "loss": 1.1529, + "step": 4066 + }, + { + "epoch": 2.724045545880777, + "grad_norm": 2.584537129878208, + "learning_rate": 1.3965180397792798e-06, + "loss": 0.9798, + "step": 4067 + }, + { + "epoch": 2.724715338245144, + "grad_norm": 2.4131388436775385, + "learning_rate": 1.395206823933213e-06, + "loss": 1.0558, + "step": 4068 + }, + { + "epoch": 2.725385130609511, + "grad_norm": 2.4821721899190696, + "learning_rate": 1.3938959856466377e-06, + "loss": 1.1437, + "step": 4069 + }, + { + "epoch": 2.726054922973878, + "grad_norm": 4.102738116759603, + "learning_rate": 1.392585525367527e-06, + "loss": 0.8777, + "step": 4070 + }, + { + "epoch": 2.726724715338245, + "grad_norm": 3.0246284914378068, + "learning_rate": 1.391275443543727e-06, + "loss": 0.9196, + "step": 4071 + }, + { + "epoch": 2.7273945077026123, + "grad_norm": 2.399820778573006, + "learning_rate": 1.3899657406229555e-06, + "loss": 0.789, + "step": 4072 + }, + { + "epoch": 2.728064300066979, + "grad_norm": 2.3071987928557416, + "learning_rate": 1.3886564170527989e-06, + "loss": 0.9401, + "step": 4073 + }, + { + "epoch": 2.7287340924313463, + "grad_norm": 2.384850593740421, + "learning_rate": 1.387347473280713e-06, + "loss": 0.7567, + "step": 4074 + }, + { + "epoch": 2.729403884795713, + "grad_norm": 2.4698883000079443, + "learning_rate": 1.3860389097540244e-06, + "loss": 0.9915, + "step": 4075 + }, + { + "epoch": 2.7300736771600804, + "grad_norm": 2.2729358726444877, + "learning_rate": 1.3847307269199328e-06, + "loss": 0.7347, + "step": 4076 + }, + { + "epoch": 2.7307434695244472, + "grad_norm": 2.245333045312695, + "learning_rate": 1.3834229252255047e-06, + "loss": 1.0545, + "step": 4077 + }, + { + "epoch": 2.7314132618888145, + "grad_norm": 2.3447219320692043, + "learning_rate": 1.3821155051176765e-06, + "loss": 0.9271, + "step": 4078 + }, + { + "epoch": 2.7320830542531818, + "grad_norm": 2.7191174256752504, + "learning_rate": 1.3808084670432553e-06, + "loss": 1.0173, + "step": 4079 + }, + { + "epoch": 2.7327528466175486, + "grad_norm": 2.5188552395428925, + "learning_rate": 1.379501811448917e-06, + "loss": 0.9631, + "step": 4080 + }, + { + "epoch": 2.7334226389819154, + "grad_norm": 2.5163813074128027, + "learning_rate": 1.3781955387812067e-06, + "loss": 0.9933, + "step": 4081 + }, + { + "epoch": 2.7340924313462827, + "grad_norm": 2.816634589922145, + "learning_rate": 1.376889649486539e-06, + "loss": 1.1526, + "step": 4082 + }, + { + "epoch": 2.73476222371065, + "grad_norm": 2.4880164298459126, + "learning_rate": 1.3755841440111974e-06, + "loss": 0.9352, + "step": 4083 + }, + { + "epoch": 
2.7354320160750167, + "grad_norm": 2.4082525054649477, + "learning_rate": 1.374279022801334e-06, + "loss": 1.0539, + "step": 4084 + }, + { + "epoch": 2.7361018084393836, + "grad_norm": 2.3285207957682936, + "learning_rate": 1.3729742863029703e-06, + "loss": 1.0917, + "step": 4085 + }, + { + "epoch": 2.736771600803751, + "grad_norm": 3.892078992152074, + "learning_rate": 1.371669934961995e-06, + "loss": 0.8126, + "step": 4086 + }, + { + "epoch": 2.737441393168118, + "grad_norm": 3.1747165259787313, + "learning_rate": 1.3703659692241662e-06, + "loss": 0.9109, + "step": 4087 + }, + { + "epoch": 2.738111185532485, + "grad_norm": 3.1156903749643434, + "learning_rate": 1.36906238953511e-06, + "loss": 0.771, + "step": 4088 + }, + { + "epoch": 2.7387809778968517, + "grad_norm": 2.4942676123232443, + "learning_rate": 1.3677591963403207e-06, + "loss": 1.0157, + "step": 4089 + }, + { + "epoch": 2.739450770261219, + "grad_norm": 2.509557348012697, + "learning_rate": 1.3664563900851604e-06, + "loss": 1.0667, + "step": 4090 + }, + { + "epoch": 2.7401205626255862, + "grad_norm": 2.9454262795040225, + "learning_rate": 1.3651539712148587e-06, + "loss": 0.9612, + "step": 4091 + }, + { + "epoch": 2.740790354989953, + "grad_norm": 1.97344502055109, + "learning_rate": 1.3638519401745133e-06, + "loss": 0.732, + "step": 4092 + }, + { + "epoch": 2.7414601473543203, + "grad_norm": 2.609092438174115, + "learning_rate": 1.3625502974090884e-06, + "loss": 0.8735, + "step": 4093 + }, + { + "epoch": 2.742129939718687, + "grad_norm": 2.8247941939800287, + "learning_rate": 1.361249043363417e-06, + "loss": 0.9462, + "step": 4094 + }, + { + "epoch": 2.7427997320830544, + "grad_norm": 2.503021884450556, + "learning_rate": 1.3599481784821982e-06, + "loss": 0.9087, + "step": 4095 + }, + { + "epoch": 2.743469524447421, + "grad_norm": 2.26166239282225, + "learning_rate": 1.3586477032099982e-06, + "loss": 1.048, + "step": 4096 + }, + { + "epoch": 2.7441393168117885, + "grad_norm": 3.397787808079386, + "learning_rate": 1.3573476179912504e-06, + "loss": 0.9083, + "step": 4097 + }, + { + "epoch": 2.7448091091761553, + "grad_norm": 2.019099076616485, + "learning_rate": 1.3560479232702544e-06, + "loss": 1.1308, + "step": 4098 + }, + { + "epoch": 2.7454789015405225, + "grad_norm": 2.3959190482851747, + "learning_rate": 1.3547486194911775e-06, + "loss": 0.984, + "step": 4099 + }, + { + "epoch": 2.7461486939048894, + "grad_norm": 2.8432954970124373, + "learning_rate": 1.3534497070980513e-06, + "loss": 0.8629, + "step": 4100 + }, + { + "epoch": 2.7468184862692566, + "grad_norm": 4.67741224194736, + "learning_rate": 1.3521511865347758e-06, + "loss": 0.919, + "step": 4101 + }, + { + "epoch": 2.7474882786336234, + "grad_norm": 2.397618530556752, + "learning_rate": 1.3508530582451163e-06, + "loss": 0.9762, + "step": 4102 + }, + { + "epoch": 2.7481580709979907, + "grad_norm": 2.5608686074204154, + "learning_rate": 1.349555322672703e-06, + "loss": 1.0962, + "step": 4103 + }, + { + "epoch": 2.7488278633623575, + "grad_norm": 2.3572771219464035, + "learning_rate": 1.3482579802610339e-06, + "loss": 0.9636, + "step": 4104 + }, + { + "epoch": 2.7494976557267248, + "grad_norm": 2.4736281023167495, + "learning_rate": 1.346961031453471e-06, + "loss": 0.931, + "step": 4105 + }, + { + "epoch": 2.7501674480910916, + "grad_norm": 2.767710773964373, + "learning_rate": 1.3456644766932422e-06, + "loss": 0.9475, + "step": 4106 + }, + { + "epoch": 2.750837240455459, + "grad_norm": 2.432349521332796, + "learning_rate": 1.3443683164234413e-06, + "loss": 0.9434, + 
"step": 4107 + }, + { + "epoch": 2.751507032819826, + "grad_norm": 2.4197491155122015, + "learning_rate": 1.3430725510870269e-06, + "loss": 0.9599, + "step": 4108 + }, + { + "epoch": 2.752176825184193, + "grad_norm": 2.1384808813235536, + "learning_rate": 1.3417771811268224e-06, + "loss": 0.933, + "step": 4109 + }, + { + "epoch": 2.7528466175485597, + "grad_norm": 2.9214026699052704, + "learning_rate": 1.340482206985515e-06, + "loss": 1.1866, + "step": 4110 + }, + { + "epoch": 2.753516409912927, + "grad_norm": 2.6963523082533785, + "learning_rate": 1.3391876291056616e-06, + "loss": 0.844, + "step": 4111 + }, + { + "epoch": 2.7541862022772943, + "grad_norm": 3.1512051694347805, + "learning_rate": 1.3378934479296769e-06, + "loss": 0.8891, + "step": 4112 + }, + { + "epoch": 2.754855994641661, + "grad_norm": 2.153339633130431, + "learning_rate": 1.3365996638998433e-06, + "loss": 0.8534, + "step": 4113 + }, + { + "epoch": 2.755525787006028, + "grad_norm": 2.217685447784221, + "learning_rate": 1.335306277458307e-06, + "loss": 0.834, + "step": 4114 + }, + { + "epoch": 2.756195579370395, + "grad_norm": 2.3604057350875323, + "learning_rate": 1.3340132890470816e-06, + "loss": 0.695, + "step": 4115 + }, + { + "epoch": 2.7568653717347624, + "grad_norm": 3.1114145830733366, + "learning_rate": 1.3327206991080388e-06, + "loss": 0.8441, + "step": 4116 + }, + { + "epoch": 2.7575351640991292, + "grad_norm": 2.470143509852878, + "learning_rate": 1.3314285080829175e-06, + "loss": 0.8896, + "step": 4117 + }, + { + "epoch": 2.758204956463496, + "grad_norm": 2.815880878595863, + "learning_rate": 1.3301367164133194e-06, + "loss": 0.9813, + "step": 4118 + }, + { + "epoch": 2.7588747488278633, + "grad_norm": 2.207706233493034, + "learning_rate": 1.3288453245407126e-06, + "loss": 0.9061, + "step": 4119 + }, + { + "epoch": 2.7595445411922306, + "grad_norm": 2.5607089242550356, + "learning_rate": 1.3275543329064237e-06, + "loss": 0.9408, + "step": 4120 + }, + { + "epoch": 2.7602143335565974, + "grad_norm": 2.3451495355965397, + "learning_rate": 1.3262637419516453e-06, + "loss": 0.9097, + "step": 4121 + }, + { + "epoch": 2.7608841259209647, + "grad_norm": 2.355134400241877, + "learning_rate": 1.3249735521174318e-06, + "loss": 0.9976, + "step": 4122 + }, + { + "epoch": 2.7615539182853315, + "grad_norm": 2.3232838650458656, + "learning_rate": 1.3236837638447047e-06, + "loss": 0.9975, + "step": 4123 + }, + { + "epoch": 2.7622237106496987, + "grad_norm": 2.5886855024053017, + "learning_rate": 1.322394377574242e-06, + "loss": 0.9119, + "step": 4124 + }, + { + "epoch": 2.7628935030140656, + "grad_norm": 2.3124631987582207, + "learning_rate": 1.3211053937466878e-06, + "loss": 0.9357, + "step": 4125 + }, + { + "epoch": 2.763563295378433, + "grad_norm": 2.8736729263539034, + "learning_rate": 1.3198168128025477e-06, + "loss": 1.0579, + "step": 4126 + }, + { + "epoch": 2.7642330877427996, + "grad_norm": 2.5628182037644436, + "learning_rate": 1.3185286351821925e-06, + "loss": 0.9699, + "step": 4127 + }, + { + "epoch": 2.764902880107167, + "grad_norm": 2.254024140683922, + "learning_rate": 1.3172408613258504e-06, + "loss": 0.854, + "step": 4128 + }, + { + "epoch": 2.7655726724715337, + "grad_norm": 2.3926901787551764, + "learning_rate": 1.3159534916736143e-06, + "loss": 0.8334, + "step": 4129 + }, + { + "epoch": 2.766242464835901, + "grad_norm": 3.427361296394928, + "learning_rate": 1.314666526665438e-06, + "loss": 0.9699, + "step": 4130 + }, + { + "epoch": 2.766912257200268, + "grad_norm": 4.427737007552725, + "learning_rate": 
1.3133799667411404e-06, + "loss": 1.068, + "step": 4131 + }, + { + "epoch": 2.767582049564635, + "grad_norm": 2.9362779797825205, + "learning_rate": 1.3120938123403964e-06, + "loss": 1.0102, + "step": 4132 + }, + { + "epoch": 2.768251841929002, + "grad_norm": 2.2511463874012816, + "learning_rate": 1.310808063902746e-06, + "loss": 0.8677, + "step": 4133 + }, + { + "epoch": 2.768921634293369, + "grad_norm": 2.9435031381045205, + "learning_rate": 1.3095227218675886e-06, + "loss": 0.904, + "step": 4134 + }, + { + "epoch": 2.769591426657736, + "grad_norm": 3.013219844671699, + "learning_rate": 1.3082377866741886e-06, + "loss": 1.0719, + "step": 4135 + }, + { + "epoch": 2.770261219022103, + "grad_norm": 2.5000186622316534, + "learning_rate": 1.3069532587616654e-06, + "loss": 0.9536, + "step": 4136 + }, + { + "epoch": 2.7709310113864705, + "grad_norm": 2.546049722788574, + "learning_rate": 1.3056691385690023e-06, + "loss": 0.9458, + "step": 4137 + }, + { + "epoch": 2.7716008037508373, + "grad_norm": 2.722762282961137, + "learning_rate": 1.3043854265350455e-06, + "loss": 1.0886, + "step": 4138 + }, + { + "epoch": 2.772270596115204, + "grad_norm": 2.6246010920889726, + "learning_rate": 1.3031021230984992e-06, + "loss": 1.0152, + "step": 4139 + }, + { + "epoch": 2.7729403884795714, + "grad_norm": 2.325460350418188, + "learning_rate": 1.301819228697926e-06, + "loss": 0.902, + "step": 4140 + }, + { + "epoch": 2.7736101808439386, + "grad_norm": 2.5610323719370602, + "learning_rate": 1.3005367437717515e-06, + "loss": 1.0267, + "step": 4141 + }, + { + "epoch": 2.7742799732083054, + "grad_norm": 2.409407980416856, + "learning_rate": 1.2992546687582619e-06, + "loss": 1.0569, + "step": 4142 + }, + { + "epoch": 2.7749497655726723, + "grad_norm": 2.3201514955487634, + "learning_rate": 1.297973004095603e-06, + "loss": 0.9037, + "step": 4143 + }, + { + "epoch": 2.7756195579370395, + "grad_norm": 2.493472762531808, + "learning_rate": 1.2966917502217769e-06, + "loss": 1.0918, + "step": 4144 + }, + { + "epoch": 2.7762893503014068, + "grad_norm": 3.931785619771495, + "learning_rate": 1.2954109075746485e-06, + "loss": 0.982, + "step": 4145 + }, + { + "epoch": 2.7769591426657736, + "grad_norm": 2.8749595773579455, + "learning_rate": 1.2941304765919433e-06, + "loss": 0.972, + "step": 4146 + }, + { + "epoch": 2.7776289350301404, + "grad_norm": 2.2763326734295517, + "learning_rate": 1.2928504577112433e-06, + "loss": 0.7665, + "step": 4147 + }, + { + "epoch": 2.7782987273945077, + "grad_norm": 2.1604235764332795, + "learning_rate": 1.2915708513699923e-06, + "loss": 0.9108, + "step": 4148 + }, + { + "epoch": 2.778968519758875, + "grad_norm": 3.6205665988304045, + "learning_rate": 1.2902916580054883e-06, + "loss": 0.9848, + "step": 4149 + }, + { + "epoch": 2.7796383121232418, + "grad_norm": 2.796141623058434, + "learning_rate": 1.2890128780548947e-06, + "loss": 0.9975, + "step": 4150 + }, + { + "epoch": 2.7803081044876086, + "grad_norm": 2.4291134365421847, + "learning_rate": 1.2877345119552292e-06, + "loss": 0.7037, + "step": 4151 + }, + { + "epoch": 2.780977896851976, + "grad_norm": 5.441396095341086, + "learning_rate": 1.2864565601433703e-06, + "loss": 1.1386, + "step": 4152 + }, + { + "epoch": 2.781647689216343, + "grad_norm": 3.454333752637834, + "learning_rate": 1.285179023056051e-06, + "loss": 0.8627, + "step": 4153 + }, + { + "epoch": 2.78231748158071, + "grad_norm": 2.711445021029276, + "learning_rate": 1.2839019011298685e-06, + "loss": 0.9715, + "step": 4154 + }, + { + "epoch": 2.782987273945077, + "grad_norm": 
2.1310348913881865, + "learning_rate": 1.2826251948012736e-06, + "loss": 0.9815, + "step": 4155 + }, + { + "epoch": 2.783657066309444, + "grad_norm": 2.5351307709250173, + "learning_rate": 1.2813489045065783e-06, + "loss": 0.9223, + "step": 4156 + }, + { + "epoch": 2.7843268586738112, + "grad_norm": 2.673923243265036, + "learning_rate": 1.2800730306819473e-06, + "loss": 1.0017, + "step": 4157 + }, + { + "epoch": 2.784996651038178, + "grad_norm": 2.7016844625073513, + "learning_rate": 1.2787975737634093e-06, + "loss": 1.0805, + "step": 4158 + }, + { + "epoch": 2.7856664434025453, + "grad_norm": 2.1425269158902087, + "learning_rate": 1.2775225341868466e-06, + "loss": 0.8941, + "step": 4159 + }, + { + "epoch": 2.786336235766912, + "grad_norm": 2.5240280529312176, + "learning_rate": 1.276247912388001e-06, + "loss": 0.901, + "step": 4160 + }, + { + "epoch": 2.7870060281312794, + "grad_norm": 2.383399501918495, + "learning_rate": 1.2749737088024671e-06, + "loss": 1.0324, + "step": 4161 + }, + { + "epoch": 2.787675820495646, + "grad_norm": 3.3108249610410305, + "learning_rate": 1.273699923865703e-06, + "loss": 0.9211, + "step": 4162 + }, + { + "epoch": 2.7883456128600135, + "grad_norm": 2.7082089234211715, + "learning_rate": 1.2724265580130196e-06, + "loss": 0.8146, + "step": 4163 + }, + { + "epoch": 2.7890154052243803, + "grad_norm": 3.0229045875517726, + "learning_rate": 1.2711536116795866e-06, + "loss": 0.8442, + "step": 4164 + }, + { + "epoch": 2.7896851975887476, + "grad_norm": 2.8690558976267826, + "learning_rate": 1.2698810853004263e-06, + "loss": 1.0775, + "step": 4165 + }, + { + "epoch": 2.7903549899531144, + "grad_norm": 3.129402549743399, + "learning_rate": 1.2686089793104235e-06, + "loss": 0.9258, + "step": 4166 + }, + { + "epoch": 2.7910247823174816, + "grad_norm": 3.189462775786612, + "learning_rate": 1.2673372941443148e-06, + "loss": 0.9137, + "step": 4167 + }, + { + "epoch": 2.7916945746818485, + "grad_norm": 2.621665805785726, + "learning_rate": 1.2660660302366954e-06, + "loss": 0.7917, + "step": 4168 + }, + { + "epoch": 2.7923643670462157, + "grad_norm": 2.543216809774486, + "learning_rate": 1.2647951880220147e-06, + "loss": 1.0491, + "step": 4169 + }, + { + "epoch": 2.793034159410583, + "grad_norm": 2.522434488132489, + "learning_rate": 1.2635247679345797e-06, + "loss": 0.9893, + "step": 4170 + }, + { + "epoch": 2.79370395177495, + "grad_norm": 4.120652190438387, + "learning_rate": 1.2622547704085513e-06, + "loss": 0.9193, + "step": 4171 + }, + { + "epoch": 2.7943737441393166, + "grad_norm": 2.3715369861964497, + "learning_rate": 1.2609851958779484e-06, + "loss": 1.02, + "step": 4172 + }, + { + "epoch": 2.795043536503684, + "grad_norm": 2.4806984918093895, + "learning_rate": 1.2597160447766432e-06, + "loss": 1.0115, + "step": 4173 + }, + { + "epoch": 2.795713328868051, + "grad_norm": 2.3608735940721695, + "learning_rate": 1.2584473175383639e-06, + "loss": 0.9049, + "step": 4174 + }, + { + "epoch": 2.796383121232418, + "grad_norm": 2.6700000277729976, + "learning_rate": 1.2571790145966944e-06, + "loss": 1.0927, + "step": 4175 + }, + { + "epoch": 2.7970529135967848, + "grad_norm": 2.2974906427164314, + "learning_rate": 1.2559111363850723e-06, + "loss": 0.8899, + "step": 4176 + }, + { + "epoch": 2.797722705961152, + "grad_norm": 2.977143536626135, + "learning_rate": 1.254643683336792e-06, + "loss": 1.1385, + "step": 4177 + }, + { + "epoch": 2.7983924983255193, + "grad_norm": 2.5681183088829656, + "learning_rate": 1.2533766558850008e-06, + "loss": 1.0387, + "step": 4178 + }, + { + 
"epoch": 2.799062290689886, + "grad_norm": 2.4231106586538664, + "learning_rate": 1.252110054462701e-06, + "loss": 0.9164, + "step": 4179 + }, + { + "epoch": 2.799732083054253, + "grad_norm": 2.618627543113876, + "learning_rate": 1.25084387950275e-06, + "loss": 1.0804, + "step": 4180 + }, + { + "epoch": 2.80040187541862, + "grad_norm": 2.630796653088375, + "learning_rate": 1.249578131437859e-06, + "loss": 0.8298, + "step": 4181 + }, + { + "epoch": 2.8010716677829874, + "grad_norm": 2.504535006114694, + "learning_rate": 1.2483128107005929e-06, + "loss": 1.0394, + "step": 4182 + }, + { + "epoch": 2.8017414601473543, + "grad_norm": 2.549254806624666, + "learning_rate": 1.2470479177233714e-06, + "loss": 0.9916, + "step": 4183 + }, + { + "epoch": 2.8024112525117215, + "grad_norm": 2.6419242486040626, + "learning_rate": 1.2457834529384675e-06, + "loss": 0.8944, + "step": 4184 + }, + { + "epoch": 2.8030810448760883, + "grad_norm": 2.4426583135907456, + "learning_rate": 1.2445194167780076e-06, + "loss": 0.8968, + "step": 4185 + }, + { + "epoch": 2.8037508372404556, + "grad_norm": 2.3191109084929282, + "learning_rate": 1.2432558096739722e-06, + "loss": 0.7475, + "step": 4186 + }, + { + "epoch": 2.8044206296048224, + "grad_norm": 6.213996389242259, + "learning_rate": 1.2419926320581949e-06, + "loss": 1.1143, + "step": 4187 + }, + { + "epoch": 2.8050904219691897, + "grad_norm": 2.4521472709234438, + "learning_rate": 1.2407298843623628e-06, + "loss": 0.8415, + "step": 4188 + }, + { + "epoch": 2.8057602143335565, + "grad_norm": 3.873729592849145, + "learning_rate": 1.2394675670180157e-06, + "loss": 1.1317, + "step": 4189 + }, + { + "epoch": 2.8064300066979238, + "grad_norm": 2.4300150381034786, + "learning_rate": 1.2382056804565462e-06, + "loss": 0.9295, + "step": 4190 + }, + { + "epoch": 2.8070997990622906, + "grad_norm": 2.7497349572448657, + "learning_rate": 1.2369442251092001e-06, + "loss": 0.6578, + "step": 4191 + }, + { + "epoch": 2.807769591426658, + "grad_norm": 2.3718319768949496, + "learning_rate": 1.235683201407076e-06, + "loss": 0.9525, + "step": 4192 + }, + { + "epoch": 2.8084393837910246, + "grad_norm": 2.574814235652322, + "learning_rate": 1.2344226097811244e-06, + "loss": 1.1809, + "step": 4193 + }, + { + "epoch": 2.809109176155392, + "grad_norm": 3.3076556208514765, + "learning_rate": 1.2331624506621482e-06, + "loss": 0.9944, + "step": 4194 + }, + { + "epoch": 2.8097789685197587, + "grad_norm": 2.3899749179459384, + "learning_rate": 1.231902724480803e-06, + "loss": 0.879, + "step": 4195 + }, + { + "epoch": 2.810448760884126, + "grad_norm": 2.4379998452075555, + "learning_rate": 1.2306434316675958e-06, + "loss": 0.8603, + "step": 4196 + }, + { + "epoch": 2.811118553248493, + "grad_norm": 2.6839027077075706, + "learning_rate": 1.2293845726528864e-06, + "loss": 0.8529, + "step": 4197 + }, + { + "epoch": 2.81178834561286, + "grad_norm": 2.780915482302145, + "learning_rate": 1.2281261478668855e-06, + "loss": 0.9186, + "step": 4198 + }, + { + "epoch": 2.8124581379772273, + "grad_norm": 2.1282951177086984, + "learning_rate": 1.2268681577396555e-06, + "loss": 0.9617, + "step": 4199 + }, + { + "epoch": 2.813127930341594, + "grad_norm": 2.413435821665958, + "learning_rate": 1.2256106027011098e-06, + "loss": 0.9977, + "step": 4200 + }, + { + "epoch": 2.813797722705961, + "grad_norm": 3.1253439346539094, + "learning_rate": 1.2243534831810165e-06, + "loss": 1.136, + "step": 4201 + }, + { + "epoch": 2.814467515070328, + "grad_norm": 2.9611933152417906, + "learning_rate": 1.2230967996089893e-06, + 
"loss": 0.822, + "step": 4202 + }, + { + "epoch": 2.8151373074346955, + "grad_norm": 3.1888309145081633, + "learning_rate": 1.2218405524144967e-06, + "loss": 0.9945, + "step": 4203 + }, + { + "epoch": 2.8158070997990623, + "grad_norm": 2.3805764022161284, + "learning_rate": 1.2205847420268557e-06, + "loss": 1.0107, + "step": 4204 + }, + { + "epoch": 2.816476892163429, + "grad_norm": 2.836238948478945, + "learning_rate": 1.219329368875239e-06, + "loss": 0.8054, + "step": 4205 + }, + { + "epoch": 2.8171466845277964, + "grad_norm": 3.1386337718565698, + "learning_rate": 1.2180744333886635e-06, + "loss": 0.908, + "step": 4206 + }, + { + "epoch": 2.8178164768921636, + "grad_norm": 2.848982531524412, + "learning_rate": 1.2168199359959995e-06, + "loss": 0.9839, + "step": 4207 + }, + { + "epoch": 2.8184862692565305, + "grad_norm": 2.5138083660614288, + "learning_rate": 1.2155658771259674e-06, + "loss": 0.8654, + "step": 4208 + }, + { + "epoch": 2.8191560616208973, + "grad_norm": 2.305945926011326, + "learning_rate": 1.2143122572071398e-06, + "loss": 1.0771, + "step": 4209 + }, + { + "epoch": 2.8198258539852645, + "grad_norm": 2.5067279051423657, + "learning_rate": 1.2130590766679345e-06, + "loss": 1.0008, + "step": 4210 + }, + { + "epoch": 2.820495646349632, + "grad_norm": 3.4619994624690014, + "learning_rate": 1.2118063359366236e-06, + "loss": 1.1596, + "step": 4211 + }, + { + "epoch": 2.8211654387139986, + "grad_norm": 2.794620413581337, + "learning_rate": 1.2105540354413253e-06, + "loss": 0.8456, + "step": 4212 + }, + { + "epoch": 2.821835231078366, + "grad_norm": 2.360102388369449, + "learning_rate": 1.209302175610013e-06, + "loss": 1.028, + "step": 4213 + }, + { + "epoch": 2.8225050234427327, + "grad_norm": 2.4914921981639053, + "learning_rate": 1.208050756870502e-06, + "loss": 0.8271, + "step": 4214 + }, + { + "epoch": 2.8231748158071, + "grad_norm": 2.4533257875855066, + "learning_rate": 1.2067997796504622e-06, + "loss": 0.9664, + "step": 4215 + }, + { + "epoch": 2.8238446081714668, + "grad_norm": 2.4569490679004202, + "learning_rate": 1.2055492443774093e-06, + "loss": 0.891, + "step": 4216 + }, + { + "epoch": 2.824514400535834, + "grad_norm": 2.5386912519555747, + "learning_rate": 1.2042991514787128e-06, + "loss": 0.9932, + "step": 4217 + }, + { + "epoch": 2.825184192900201, + "grad_norm": 2.359484310103382, + "learning_rate": 1.203049501381587e-06, + "loss": 0.8995, + "step": 4218 + }, + { + "epoch": 2.825853985264568, + "grad_norm": 2.2636957023282265, + "learning_rate": 1.201800294513094e-06, + "loss": 0.747, + "step": 4219 + }, + { + "epoch": 2.826523777628935, + "grad_norm": 3.984578233660487, + "learning_rate": 1.2005515313001462e-06, + "loss": 0.9897, + "step": 4220 + }, + { + "epoch": 2.827193569993302, + "grad_norm": 2.4250940829742507, + "learning_rate": 1.1993032121695064e-06, + "loss": 0.9211, + "step": 4221 + }, + { + "epoch": 2.827863362357669, + "grad_norm": 3.254931703080602, + "learning_rate": 1.1980553375477835e-06, + "loss": 0.8603, + "step": 4222 + }, + { + "epoch": 2.8285331547220363, + "grad_norm": 2.6238558842294135, + "learning_rate": 1.1968079078614328e-06, + "loss": 0.8478, + "step": 4223 + }, + { + "epoch": 2.829202947086403, + "grad_norm": 2.6845243264193464, + "learning_rate": 1.1955609235367595e-06, + "loss": 1.0697, + "step": 4224 + }, + { + "epoch": 2.8298727394507703, + "grad_norm": 2.7508779408633774, + "learning_rate": 1.1943143849999184e-06, + "loss": 1.0516, + "step": 4225 + }, + { + "epoch": 2.830542531815137, + "grad_norm": 2.696800531506545, + 
"learning_rate": 1.1930682926769094e-06, + "loss": 0.8791, + "step": 4226 + }, + { + "epoch": 2.8312123241795044, + "grad_norm": 3.020471067304188, + "learning_rate": 1.1918226469935785e-06, + "loss": 0.7679, + "step": 4227 + }, + { + "epoch": 2.8318821165438717, + "grad_norm": 2.9819208234038665, + "learning_rate": 1.1905774483756233e-06, + "loss": 0.9661, + "step": 4228 + }, + { + "epoch": 2.8325519089082385, + "grad_norm": 2.3878212025477725, + "learning_rate": 1.1893326972485854e-06, + "loss": 0.7335, + "step": 4229 + }, + { + "epoch": 2.8332217012726053, + "grad_norm": 3.0591920404174022, + "learning_rate": 1.1880883940378562e-06, + "loss": 1.1137, + "step": 4230 + }, + { + "epoch": 2.8338914936369726, + "grad_norm": 2.4301010148508273, + "learning_rate": 1.1868445391686687e-06, + "loss": 0.9199, + "step": 4231 + }, + { + "epoch": 2.83456128600134, + "grad_norm": 2.598561137118955, + "learning_rate": 1.1856011330661093e-06, + "loss": 0.9243, + "step": 4232 + }, + { + "epoch": 2.8352310783657066, + "grad_norm": 2.203568089211758, + "learning_rate": 1.1843581761551071e-06, + "loss": 0.8947, + "step": 4233 + }, + { + "epoch": 2.8359008707300735, + "grad_norm": 2.6376254348784074, + "learning_rate": 1.1831156688604394e-06, + "loss": 0.7491, + "step": 4234 + }, + { + "epoch": 2.8365706630944407, + "grad_norm": 2.8570708312375195, + "learning_rate": 1.1818736116067267e-06, + "loss": 1.0241, + "step": 4235 + }, + { + "epoch": 2.837240455458808, + "grad_norm": 2.4571008896046393, + "learning_rate": 1.1806320048184405e-06, + "loss": 0.9535, + "step": 4236 + }, + { + "epoch": 2.837910247823175, + "grad_norm": 2.174378759285535, + "learning_rate": 1.1793908489198949e-06, + "loss": 0.9767, + "step": 4237 + }, + { + "epoch": 2.8385800401875416, + "grad_norm": 2.9099863810789413, + "learning_rate": 1.178150144335252e-06, + "loss": 1.0474, + "step": 4238 + }, + { + "epoch": 2.839249832551909, + "grad_norm": 3.056058944128126, + "learning_rate": 1.1769098914885156e-06, + "loss": 0.8133, + "step": 4239 + }, + { + "epoch": 2.839919624916276, + "grad_norm": 2.9950348128047426, + "learning_rate": 1.175670090803541e-06, + "loss": 0.9136, + "step": 4240 + }, + { + "epoch": 2.840589417280643, + "grad_norm": 3.140339110798694, + "learning_rate": 1.1744307427040253e-06, + "loss": 1.1485, + "step": 4241 + }, + { + "epoch": 2.8412592096450098, + "grad_norm": 3.53020597921857, + "learning_rate": 1.1731918476135123e-06, + "loss": 1.0503, + "step": 4242 + }, + { + "epoch": 2.841929002009377, + "grad_norm": 2.394917971474957, + "learning_rate": 1.1719534059553878e-06, + "loss": 0.9722, + "step": 4243 + }, + { + "epoch": 2.8425987943737443, + "grad_norm": 2.641724722571813, + "learning_rate": 1.1707154181528883e-06, + "loss": 0.8958, + "step": 4244 + }, + { + "epoch": 2.843268586738111, + "grad_norm": 3.700560405873337, + "learning_rate": 1.1694778846290905e-06, + "loss": 0.9564, + "step": 4245 + }, + { + "epoch": 2.8439383791024784, + "grad_norm": 2.861737957463428, + "learning_rate": 1.168240805806919e-06, + "loss": 1.1162, + "step": 4246 + }, + { + "epoch": 2.844608171466845, + "grad_norm": 2.6851847295244022, + "learning_rate": 1.1670041821091382e-06, + "loss": 0.8511, + "step": 4247 + }, + { + "epoch": 2.8452779638312125, + "grad_norm": 2.6002185411070187, + "learning_rate": 1.1657680139583636e-06, + "loss": 1.0152, + "step": 4248 + }, + { + "epoch": 2.8459477561955793, + "grad_norm": 2.470685281578574, + "learning_rate": 1.1645323017770504e-06, + "loss": 1.0846, + "step": 4249 + }, + { + "epoch": 
2.8466175485599465, + "grad_norm": 2.3749038477101774, + "learning_rate": 1.1632970459875001e-06, + "loss": 0.985, + "step": 4250 + }, + { + "epoch": 2.8472873409243133, + "grad_norm": 2.70956431625878, + "learning_rate": 1.162062247011855e-06, + "loss": 1.0058, + "step": 4251 + }, + { + "epoch": 2.8479571332886806, + "grad_norm": 2.3727589854968367, + "learning_rate": 1.1608279052721058e-06, + "loss": 0.9984, + "step": 4252 + }, + { + "epoch": 2.8486269256530474, + "grad_norm": 2.6526661867233083, + "learning_rate": 1.1595940211900845e-06, + "loss": 0.9489, + "step": 4253 + }, + { + "epoch": 2.8492967180174147, + "grad_norm": 2.2485755085095565, + "learning_rate": 1.1583605951874663e-06, + "loss": 0.845, + "step": 4254 + }, + { + "epoch": 2.8499665103817815, + "grad_norm": 2.921080730755735, + "learning_rate": 1.1571276276857713e-06, + "loss": 1.0766, + "step": 4255 + }, + { + "epoch": 2.8506363027461488, + "grad_norm": 2.856033214044784, + "learning_rate": 1.1558951191063617e-06, + "loss": 0.7731, + "step": 4256 + }, + { + "epoch": 2.8513060951105156, + "grad_norm": 2.690711977792029, + "learning_rate": 1.1546630698704434e-06, + "loss": 1.0493, + "step": 4257 + }, + { + "epoch": 2.851975887474883, + "grad_norm": 2.5279896888648206, + "learning_rate": 1.153431480399065e-06, + "loss": 0.9808, + "step": 4258 + }, + { + "epoch": 2.8526456798392497, + "grad_norm": 2.3541897061964097, + "learning_rate": 1.1522003511131188e-06, + "loss": 0.8088, + "step": 4259 + }, + { + "epoch": 2.853315472203617, + "grad_norm": 2.666321809120324, + "learning_rate": 1.1509696824333387e-06, + "loss": 0.913, + "step": 4260 + }, + { + "epoch": 2.853985264567984, + "grad_norm": 2.6116908973743755, + "learning_rate": 1.149739474780302e-06, + "loss": 0.9599, + "step": 4261 + }, + { + "epoch": 2.854655056932351, + "grad_norm": 2.6053468796397676, + "learning_rate": 1.1485097285744281e-06, + "loss": 0.7529, + "step": 4262 + }, + { + "epoch": 2.855324849296718, + "grad_norm": 2.5793468907187873, + "learning_rate": 1.1472804442359785e-06, + "loss": 1.0369, + "step": 4263 + }, + { + "epoch": 2.855994641661085, + "grad_norm": 2.3971439047999366, + "learning_rate": 1.1460516221850578e-06, + "loss": 0.8785, + "step": 4264 + }, + { + "epoch": 2.8566644340254523, + "grad_norm": 2.5110586422908265, + "learning_rate": 1.1448232628416116e-06, + "loss": 1.0346, + "step": 4265 + }, + { + "epoch": 2.857334226389819, + "grad_norm": 2.9242120507226517, + "learning_rate": 1.1435953666254278e-06, + "loss": 1.0551, + "step": 4266 + }, + { + "epoch": 2.858004018754186, + "grad_norm": 2.825312018360935, + "learning_rate": 1.1423679339561358e-06, + "loss": 1.0279, + "step": 4267 + }, + { + "epoch": 2.8586738111185532, + "grad_norm": 2.2558303141823544, + "learning_rate": 1.1411409652532068e-06, + "loss": 0.9327, + "step": 4268 + }, + { + "epoch": 2.8593436034829205, + "grad_norm": 2.445721402709104, + "learning_rate": 1.1399144609359532e-06, + "loss": 1.036, + "step": 4269 + }, + { + "epoch": 2.8600133958472873, + "grad_norm": 2.952643244302531, + "learning_rate": 1.1386884214235294e-06, + "loss": 0.869, + "step": 4270 + }, + { + "epoch": 2.860683188211654, + "grad_norm": 2.5613161117364722, + "learning_rate": 1.1374628471349304e-06, + "loss": 1.012, + "step": 4271 + }, + { + "epoch": 2.8613529805760214, + "grad_norm": 2.9161448384308954, + "learning_rate": 1.1362377384889914e-06, + "loss": 0.9901, + "step": 4272 + }, + { + "epoch": 2.8620227729403886, + "grad_norm": 2.5681522638718115, + "learning_rate": 1.13501309590439e-06, + "loss": 
0.8607, + "step": 4273 + }, + { + "epoch": 2.8626925653047555, + "grad_norm": 3.2802052886491024, + "learning_rate": 1.1337889197996438e-06, + "loss": 0.9876, + "step": 4274 + }, + { + "epoch": 2.8633623576691227, + "grad_norm": 2.49184814832872, + "learning_rate": 1.1325652105931113e-06, + "loss": 0.9908, + "step": 4275 + }, + { + "epoch": 2.8640321500334895, + "grad_norm": 3.1381196429198193, + "learning_rate": 1.1313419687029902e-06, + "loss": 1.0206, + "step": 4276 + }, + { + "epoch": 2.864701942397857, + "grad_norm": 3.1087177403348667, + "learning_rate": 1.1301191945473202e-06, + "loss": 0.9583, + "step": 4277 + }, + { + "epoch": 2.8653717347622236, + "grad_norm": 2.6720771229053817, + "learning_rate": 1.1288968885439805e-06, + "loss": 0.8414, + "step": 4278 + }, + { + "epoch": 2.866041527126591, + "grad_norm": 3.0041563688405284, + "learning_rate": 1.1276750511106899e-06, + "loss": 0.7188, + "step": 4279 + }, + { + "epoch": 2.8667113194909577, + "grad_norm": 2.1856783243464157, + "learning_rate": 1.1264536826650075e-06, + "loss": 0.8916, + "step": 4280 + }, + { + "epoch": 2.867381111855325, + "grad_norm": 3.1148950807586346, + "learning_rate": 1.1252327836243317e-06, + "loss": 0.9315, + "step": 4281 + }, + { + "epoch": 2.8680509042196918, + "grad_norm": 2.707321119709392, + "learning_rate": 1.1240123544059015e-06, + "loss": 0.7514, + "step": 4282 + }, + { + "epoch": 2.868720696584059, + "grad_norm": 2.9601544146053693, + "learning_rate": 1.1227923954267938e-06, + "loss": 0.9782, + "step": 4283 + }, + { + "epoch": 2.869390488948426, + "grad_norm": 2.5593570887938357, + "learning_rate": 1.1215729071039264e-06, + "loss": 1.0393, + "step": 4284 + }, + { + "epoch": 2.870060281312793, + "grad_norm": 3.495917171712384, + "learning_rate": 1.120353889854055e-06, + "loss": 0.7186, + "step": 4285 + }, + { + "epoch": 2.87073007367716, + "grad_norm": 2.2895961078619056, + "learning_rate": 1.119135344093774e-06, + "loss": 1.0681, + "step": 4286 + }, + { + "epoch": 2.871399866041527, + "grad_norm": 2.405151547577995, + "learning_rate": 1.1179172702395203e-06, + "loss": 1.0449, + "step": 4287 + }, + { + "epoch": 2.872069658405894, + "grad_norm": 5.5773252680552, + "learning_rate": 1.1166996687075638e-06, + "loss": 0.9721, + "step": 4288 + }, + { + "epoch": 2.8727394507702613, + "grad_norm": 3.0827871080915465, + "learning_rate": 1.1154825399140168e-06, + "loss": 0.9122, + "step": 4289 + }, + { + "epoch": 2.8734092431346285, + "grad_norm": 2.805400617349217, + "learning_rate": 1.1142658842748285e-06, + "loss": 0.9594, + "step": 4290 + }, + { + "epoch": 2.8740790354989953, + "grad_norm": 3.70774490552198, + "learning_rate": 1.1130497022057884e-06, + "loss": 0.8716, + "step": 4291 + }, + { + "epoch": 2.874748827863362, + "grad_norm": 2.51454958836588, + "learning_rate": 1.111833994122523e-06, + "loss": 0.9354, + "step": 4292 + }, + { + "epoch": 2.8754186202277294, + "grad_norm": 2.528720872580279, + "learning_rate": 1.110618760440495e-06, + "loss": 0.9179, + "step": 4293 + }, + { + "epoch": 2.8760884125920967, + "grad_norm": 2.173097346177421, + "learning_rate": 1.1094040015750063e-06, + "loss": 0.7915, + "step": 4294 + }, + { + "epoch": 2.8767582049564635, + "grad_norm": 2.1466776460590453, + "learning_rate": 1.1081897179411988e-06, + "loss": 0.9352, + "step": 4295 + }, + { + "epoch": 2.8774279973208303, + "grad_norm": 2.3144568658595626, + "learning_rate": 1.1069759099540497e-06, + "loss": 1.0084, + "step": 4296 + }, + { + "epoch": 2.8780977896851976, + "grad_norm": 2.7087069081799378, + 
"learning_rate": 1.1057625780283727e-06, + "loss": 0.9686, + "step": 4297 + }, + { + "epoch": 2.878767582049565, + "grad_norm": 2.111742401938065, + "learning_rate": 1.1045497225788192e-06, + "loss": 0.7984, + "step": 4298 + }, + { + "epoch": 2.8794373744139317, + "grad_norm": 2.520359361196617, + "learning_rate": 1.1033373440198814e-06, + "loss": 0.8689, + "step": 4299 + }, + { + "epoch": 2.8801071667782985, + "grad_norm": 4.883677302573098, + "learning_rate": 1.102125442765885e-06, + "loss": 0.9475, + "step": 4300 + }, + { + "epoch": 2.8807769591426657, + "grad_norm": 2.4104031972036792, + "learning_rate": 1.1009140192309919e-06, + "loss": 0.9423, + "step": 4301 + }, + { + "epoch": 2.881446751507033, + "grad_norm": 2.5786194098514894, + "learning_rate": 1.099703073829202e-06, + "loss": 0.9487, + "step": 4302 + }, + { + "epoch": 2.8821165438714, + "grad_norm": 2.4912857915349598, + "learning_rate": 1.0984926069743537e-06, + "loss": 1.0007, + "step": 4303 + }, + { + "epoch": 2.882786336235767, + "grad_norm": 2.4473615997026057, + "learning_rate": 1.0972826190801206e-06, + "loss": 1.0047, + "step": 4304 + }, + { + "epoch": 2.883456128600134, + "grad_norm": 2.828280936480052, + "learning_rate": 1.0960731105600097e-06, + "loss": 0.8269, + "step": 4305 + }, + { + "epoch": 2.884125920964501, + "grad_norm": 2.861869073791989, + "learning_rate": 1.0948640818273673e-06, + "loss": 1.0856, + "step": 4306 + }, + { + "epoch": 2.884795713328868, + "grad_norm": 2.49868362301603, + "learning_rate": 1.0936555332953762e-06, + "loss": 0.9851, + "step": 4307 + }, + { + "epoch": 2.8854655056932352, + "grad_norm": 2.491217992844978, + "learning_rate": 1.0924474653770544e-06, + "loss": 0.9338, + "step": 4308 + }, + { + "epoch": 2.886135298057602, + "grad_norm": 2.689068234149185, + "learning_rate": 1.0912398784852532e-06, + "loss": 0.8123, + "step": 4309 + }, + { + "epoch": 2.8868050904219693, + "grad_norm": 3.386317243421871, + "learning_rate": 1.0900327730326615e-06, + "loss": 0.8273, + "step": 4310 + }, + { + "epoch": 2.887474882786336, + "grad_norm": 2.712462201060696, + "learning_rate": 1.0888261494318057e-06, + "loss": 1.0209, + "step": 4311 + }, + { + "epoch": 2.8881446751507034, + "grad_norm": 2.6322486958139764, + "learning_rate": 1.0876200080950453e-06, + "loss": 0.9092, + "step": 4312 + }, + { + "epoch": 2.88881446751507, + "grad_norm": 2.714521541791465, + "learning_rate": 1.0864143494345734e-06, + "loss": 0.9867, + "step": 4313 + }, + { + "epoch": 2.8894842598794375, + "grad_norm": 9.389319225623018, + "learning_rate": 1.0852091738624197e-06, + "loss": 1.0847, + "step": 4314 + }, + { + "epoch": 2.8901540522438043, + "grad_norm": 2.4367009945503453, + "learning_rate": 1.0840044817904513e-06, + "loss": 0.9811, + "step": 4315 + }, + { + "epoch": 2.8908238446081715, + "grad_norm": 2.8897272637371927, + "learning_rate": 1.0828002736303677e-06, + "loss": 0.7548, + "step": 4316 + }, + { + "epoch": 2.8914936369725384, + "grad_norm": 2.662606856498661, + "learning_rate": 1.0815965497937006e-06, + "loss": 1.0431, + "step": 4317 + }, + { + "epoch": 2.8921634293369056, + "grad_norm": 2.7445468741554104, + "learning_rate": 1.080393310691821e-06, + "loss": 0.8601, + "step": 4318 + }, + { + "epoch": 2.8928332217012724, + "grad_norm": 3.27549304517038, + "learning_rate": 1.0791905567359313e-06, + "loss": 0.877, + "step": 4319 + }, + { + "epoch": 2.8935030140656397, + "grad_norm": 2.7731062499242256, + "learning_rate": 1.0779882883370695e-06, + "loss": 0.9699, + "step": 4320 + }, + { + "epoch": 2.8941728064300065, 
+ "grad_norm": 2.6095869511989185, + "learning_rate": 1.0767865059061045e-06, + "loss": 1.0289, + "step": 4321 + }, + { + "epoch": 2.894842598794374, + "grad_norm": 2.41165533814414, + "learning_rate": 1.0755852098537441e-06, + "loss": 0.8744, + "step": 4322 + }, + { + "epoch": 2.895512391158741, + "grad_norm": 4.348322998673113, + "learning_rate": 1.0743844005905266e-06, + "loss": 0.932, + "step": 4323 + }, + { + "epoch": 2.896182183523108, + "grad_norm": 2.9617434341643185, + "learning_rate": 1.0731840785268246e-06, + "loss": 0.8386, + "step": 4324 + }, + { + "epoch": 2.8968519758874747, + "grad_norm": 2.366581070049593, + "learning_rate": 1.0719842440728439e-06, + "loss": 0.7422, + "step": 4325 + }, + { + "epoch": 2.897521768251842, + "grad_norm": 2.429517394447186, + "learning_rate": 1.0707848976386249e-06, + "loss": 0.9939, + "step": 4326 + }, + { + "epoch": 2.898191560616209, + "grad_norm": 2.8571522380055443, + "learning_rate": 1.0695860396340393e-06, + "loss": 0.9986, + "step": 4327 + }, + { + "epoch": 2.898861352980576, + "grad_norm": 2.115209182004409, + "learning_rate": 1.0683876704687937e-06, + "loss": 1.0843, + "step": 4328 + }, + { + "epoch": 2.899531145344943, + "grad_norm": 4.684028719630919, + "learning_rate": 1.0671897905524268e-06, + "loss": 0.8967, + "step": 4329 + }, + { + "epoch": 2.90020093770931, + "grad_norm": 4.713914430974561, + "learning_rate": 1.0659924002943099e-06, + "loss": 0.9639, + "step": 4330 + }, + { + "epoch": 2.9008707300736774, + "grad_norm": 2.679810806517034, + "learning_rate": 1.0647955001036473e-06, + "loss": 1.0082, + "step": 4331 + }, + { + "epoch": 2.901540522438044, + "grad_norm": 2.534249310290995, + "learning_rate": 1.0635990903894759e-06, + "loss": 0.881, + "step": 4332 + }, + { + "epoch": 2.902210314802411, + "grad_norm": 2.3349685770475115, + "learning_rate": 1.0624031715606648e-06, + "loss": 0.8738, + "step": 4333 + }, + { + "epoch": 2.9028801071667782, + "grad_norm": 2.7184033428957393, + "learning_rate": 1.0612077440259152e-06, + "loss": 0.9046, + "step": 4334 + }, + { + "epoch": 2.9035498995311455, + "grad_norm": 9.021681199244881, + "learning_rate": 1.060012808193761e-06, + "loss": 1.1, + "step": 4335 + }, + { + "epoch": 2.9042196918955123, + "grad_norm": 2.276785854014877, + "learning_rate": 1.0588183644725676e-06, + "loss": 0.956, + "step": 4336 + }, + { + "epoch": 2.9048894842598796, + "grad_norm": 3.459072895022146, + "learning_rate": 1.057624413270532e-06, + "loss": 0.994, + "step": 4337 + }, + { + "epoch": 2.9055592766242464, + "grad_norm": 4.145596550093857, + "learning_rate": 1.056430954995683e-06, + "loss": 0.8743, + "step": 4338 + }, + { + "epoch": 2.9062290689886137, + "grad_norm": 2.651300254977568, + "learning_rate": 1.0552379900558815e-06, + "loss": 0.912, + "step": 4339 + }, + { + "epoch": 2.9068988613529805, + "grad_norm": 2.690308083128112, + "learning_rate": 1.0540455188588191e-06, + "loss": 0.8803, + "step": 4340 + }, + { + "epoch": 2.9075686537173477, + "grad_norm": 3.527012248147003, + "learning_rate": 1.0528535418120192e-06, + "loss": 0.8, + "step": 4341 + }, + { + "epoch": 2.9082384460817146, + "grad_norm": 2.255799268490439, + "learning_rate": 1.0516620593228364e-06, + "loss": 0.9687, + "step": 4342 + }, + { + "epoch": 2.908908238446082, + "grad_norm": 2.4845714464677577, + "learning_rate": 1.0504710717984551e-06, + "loss": 0.9534, + "step": 4343 + }, + { + "epoch": 2.9095780308104486, + "grad_norm": 2.872896086614608, + "learning_rate": 1.049280579645892e-06, + "loss": 1.2231, + "step": 4344 + }, + { + 
"epoch": 2.910247823174816, + "grad_norm": 2.547018585242746, + "learning_rate": 1.0480905832719944e-06, + "loss": 1.1103, + "step": 4345 + }, + { + "epoch": 2.9109176155391827, + "grad_norm": 2.580846015307458, + "learning_rate": 1.0469010830834386e-06, + "loss": 0.939, + "step": 4346 + }, + { + "epoch": 2.91158740790355, + "grad_norm": 2.8852742327179826, + "learning_rate": 1.0457120794867334e-06, + "loss": 0.9385, + "step": 4347 + }, + { + "epoch": 2.912257200267917, + "grad_norm": 2.5825096437118433, + "learning_rate": 1.0445235728882164e-06, + "loss": 0.905, + "step": 4348 + }, + { + "epoch": 2.912926992632284, + "grad_norm": 2.3150005624550483, + "learning_rate": 1.043335563694056e-06, + "loss": 0.8645, + "step": 4349 + }, + { + "epoch": 2.913596784996651, + "grad_norm": 2.984788236334221, + "learning_rate": 1.0421480523102506e-06, + "loss": 1.0311, + "step": 4350 + }, + { + "epoch": 2.914266577361018, + "grad_norm": 2.9154973422361334, + "learning_rate": 1.0409610391426284e-06, + "loss": 0.9425, + "step": 4351 + }, + { + "epoch": 2.9149363697253854, + "grad_norm": 3.198185463768341, + "learning_rate": 1.039774524596847e-06, + "loss": 0.968, + "step": 4352 + }, + { + "epoch": 2.915606162089752, + "grad_norm": 2.6931779966694007, + "learning_rate": 1.0385885090783942e-06, + "loss": 0.8792, + "step": 4353 + }, + { + "epoch": 2.916275954454119, + "grad_norm": 2.691954417233148, + "learning_rate": 1.037402992992587e-06, + "loss": 1.0192, + "step": 4354 + }, + { + "epoch": 2.9169457468184863, + "grad_norm": 2.583376135960862, + "learning_rate": 1.0362179767445715e-06, + "loss": 1.1162, + "step": 4355 + }, + { + "epoch": 2.9176155391828535, + "grad_norm": 2.391548053511492, + "learning_rate": 1.035033460739323e-06, + "loss": 0.8754, + "step": 4356 + }, + { + "epoch": 2.9182853315472204, + "grad_norm": 3.7602546590349633, + "learning_rate": 1.0338494453816466e-06, + "loss": 0.8797, + "step": 4357 + }, + { + "epoch": 2.918955123911587, + "grad_norm": 2.3831760567386078, + "learning_rate": 1.0326659310761752e-06, + "loss": 0.8585, + "step": 4358 + }, + { + "epoch": 2.9196249162759544, + "grad_norm": 2.635733502887533, + "learning_rate": 1.031482918227371e-06, + "loss": 0.9801, + "step": 4359 + }, + { + "epoch": 2.9202947086403217, + "grad_norm": 2.4936517692121245, + "learning_rate": 1.0303004072395252e-06, + "loss": 1.1207, + "step": 4360 + }, + { + "epoch": 2.9209645010046885, + "grad_norm": 2.309661072604383, + "learning_rate": 1.029118398516756e-06, + "loss": 1.0115, + "step": 4361 + }, + { + "epoch": 2.9216342933690553, + "grad_norm": 2.801784982563354, + "learning_rate": 1.0279368924630133e-06, + "loss": 0.906, + "step": 4362 + }, + { + "epoch": 2.9223040857334226, + "grad_norm": 5.376748119823271, + "learning_rate": 1.0267558894820713e-06, + "loss": 0.9565, + "step": 4363 + }, + { + "epoch": 2.92297387809779, + "grad_norm": 3.6225238314167925, + "learning_rate": 1.0255753899775336e-06, + "loss": 1.0388, + "step": 4364 + }, + { + "epoch": 2.9236436704621567, + "grad_norm": 2.7724586596255505, + "learning_rate": 1.0243953943528321e-06, + "loss": 0.8586, + "step": 4365 + }, + { + "epoch": 2.924313462826524, + "grad_norm": 2.792930697028255, + "learning_rate": 1.0232159030112287e-06, + "loss": 1.0167, + "step": 4366 + }, + { + "epoch": 2.9249832551908908, + "grad_norm": 2.5374369323250763, + "learning_rate": 1.0220369163558083e-06, + "loss": 0.9557, + "step": 4367 + }, + { + "epoch": 2.925653047555258, + "grad_norm": 3.031034383816651, + "learning_rate": 1.0208584347894868e-06, + "loss": 
0.8811, + "step": 4368 + }, + { + "epoch": 2.926322839919625, + "grad_norm": 2.2659207810414808, + "learning_rate": 1.0196804587150053e-06, + "loss": 0.8898, + "step": 4369 + }, + { + "epoch": 2.926992632283992, + "grad_norm": 2.9158984176971265, + "learning_rate": 1.018502988534936e-06, + "loss": 0.9035, + "step": 4370 + }, + { + "epoch": 2.927662424648359, + "grad_norm": 2.6053302026021723, + "learning_rate": 1.0173260246516728e-06, + "loss": 0.8654, + "step": 4371 + }, + { + "epoch": 2.928332217012726, + "grad_norm": 2.6771055580820393, + "learning_rate": 1.0161495674674404e-06, + "loss": 0.9348, + "step": 4372 + }, + { + "epoch": 2.929002009377093, + "grad_norm": 2.615429640641252, + "learning_rate": 1.0149736173842884e-06, + "loss": 0.9489, + "step": 4373 + }, + { + "epoch": 2.9296718017414602, + "grad_norm": 3.1394633118104154, + "learning_rate": 1.0137981748040963e-06, + "loss": 0.9433, + "step": 4374 + }, + { + "epoch": 2.930341594105827, + "grad_norm": 2.476400621853657, + "learning_rate": 1.012623240128565e-06, + "loss": 1.002, + "step": 4375 + }, + { + "epoch": 2.9310113864701943, + "grad_norm": 2.9574691286337886, + "learning_rate": 1.0114488137592252e-06, + "loss": 0.9553, + "step": 4376 + }, + { + "epoch": 2.931681178834561, + "grad_norm": 2.8557931623156008, + "learning_rate": 1.0102748960974345e-06, + "loss": 0.8411, + "step": 4377 + }, + { + "epoch": 2.9323509711989284, + "grad_norm": 3.0770108963016733, + "learning_rate": 1.009101487544376e-06, + "loss": 0.8948, + "step": 4378 + }, + { + "epoch": 2.933020763563295, + "grad_norm": 3.106343727460445, + "learning_rate": 1.0079285885010563e-06, + "loss": 0.8389, + "step": 4379 + }, + { + "epoch": 2.9336905559276625, + "grad_norm": 2.5160397667451533, + "learning_rate": 1.0067561993683097e-06, + "loss": 0.959, + "step": 4380 + }, + { + "epoch": 2.9343603482920297, + "grad_norm": 2.9338588820073634, + "learning_rate": 1.005584320546798e-06, + "loss": 0.883, + "step": 4381 + }, + { + "epoch": 2.9350301406563966, + "grad_norm": 2.794279916084805, + "learning_rate": 1.0044129524370074e-06, + "loss": 1.0869, + "step": 4382 + }, + { + "epoch": 2.9356999330207634, + "grad_norm": 2.601910239384603, + "learning_rate": 1.0032420954392475e-06, + "loss": 0.9428, + "step": 4383 + }, + { + "epoch": 2.9363697253851306, + "grad_norm": 2.7618015408847305, + "learning_rate": 1.0020717499536543e-06, + "loss": 0.8884, + "step": 4384 + }, + { + "epoch": 2.937039517749498, + "grad_norm": 3.262771587626672, + "learning_rate": 1.0009019163801918e-06, + "loss": 1.0558, + "step": 4385 + }, + { + "epoch": 2.9377093101138647, + "grad_norm": 2.488933967937767, + "learning_rate": 9.997325951186468e-07, + "loss": 0.9407, + "step": 4386 + }, + { + "epoch": 2.9383791024782315, + "grad_norm": 2.247242574334787, + "learning_rate": 9.98563786568629e-07, + "loss": 0.9315, + "step": 4387 + }, + { + "epoch": 2.939048894842599, + "grad_norm": 2.8789461094453954, + "learning_rate": 9.973954911295749e-07, + "loss": 1.018, + "step": 4388 + }, + { + "epoch": 2.939718687206966, + "grad_norm": 2.7262068314056953, + "learning_rate": 9.96227709200748e-07, + "loss": 1.1001, + "step": 4389 + }, + { + "epoch": 2.940388479571333, + "grad_norm": 2.769538564552995, + "learning_rate": 9.950604411812337e-07, + "loss": 0.8462, + "step": 4390 + }, + { + "epoch": 2.9410582719356997, + "grad_norm": 3.7542038958964725, + "learning_rate": 9.938936874699404e-07, + "loss": 0.7655, + "step": 4391 + }, + { + "epoch": 2.941728064300067, + "grad_norm": 2.6505357176209636, + "learning_rate": 
9.927274484656018e-07, + "loss": 0.9586, + "step": 4392 + }, + { + "epoch": 2.942397856664434, + "grad_norm": 2.5194930238379736, + "learning_rate": 9.915617245667792e-07, + "loss": 0.8795, + "step": 4393 + }, + { + "epoch": 2.943067649028801, + "grad_norm": 2.369884551454104, + "learning_rate": 9.903965161718538e-07, + "loss": 1.0063, + "step": 4394 + }, + { + "epoch": 2.943737441393168, + "grad_norm": 2.7771043843015195, + "learning_rate": 9.892318236790307e-07, + "loss": 0.9206, + "step": 4395 + }, + { + "epoch": 2.944407233757535, + "grad_norm": 3.0805320385012203, + "learning_rate": 9.880676474863396e-07, + "loss": 1.1146, + "step": 4396 + }, + { + "epoch": 2.9450770261219024, + "grad_norm": 2.905443966455606, + "learning_rate": 9.869039879916356e-07, + "loss": 1.0193, + "step": 4397 + }, + { + "epoch": 2.945746818486269, + "grad_norm": 2.5504438485776504, + "learning_rate": 9.857408455925952e-07, + "loss": 0.9537, + "step": 4398 + }, + { + "epoch": 2.9464166108506364, + "grad_norm": 2.644085220960265, + "learning_rate": 9.845782206867189e-07, + "loss": 0.9285, + "step": 4399 + }, + { + "epoch": 2.9470864032150033, + "grad_norm": 3.8734553302156263, + "learning_rate": 9.83416113671327e-07, + "loss": 0.9578, + "step": 4400 + }, + { + "epoch": 2.9477561955793705, + "grad_norm": 3.121404209191545, + "learning_rate": 9.822545249435696e-07, + "loss": 0.9414, + "step": 4401 + }, + { + "epoch": 2.9484259879437373, + "grad_norm": 2.516546960641608, + "learning_rate": 9.810934549004142e-07, + "loss": 0.8255, + "step": 4402 + }, + { + "epoch": 2.9490957803081046, + "grad_norm": 2.9168681504638863, + "learning_rate": 9.799329039386537e-07, + "loss": 0.8944, + "step": 4403 + }, + { + "epoch": 2.9497655726724714, + "grad_norm": 2.769457775184325, + "learning_rate": 9.787728724549002e-07, + "loss": 0.8629, + "step": 4404 + }, + { + "epoch": 2.9504353650368387, + "grad_norm": 2.331566435343739, + "learning_rate": 9.776133608455931e-07, + "loss": 0.9267, + "step": 4405 + }, + { + "epoch": 2.9511051574012055, + "grad_norm": 2.6961133162235695, + "learning_rate": 9.764543695069912e-07, + "loss": 0.8774, + "step": 4406 + }, + { + "epoch": 2.9517749497655728, + "grad_norm": 2.520557303368286, + "learning_rate": 9.752958988351757e-07, + "loss": 0.9728, + "step": 4407 + }, + { + "epoch": 2.9524447421299396, + "grad_norm": 2.429563287617059, + "learning_rate": 9.741379492260503e-07, + "loss": 0.8021, + "step": 4408 + }, + { + "epoch": 2.953114534494307, + "grad_norm": 3.6883579808599833, + "learning_rate": 9.729805210753404e-07, + "loss": 0.9494, + "step": 4409 + }, + { + "epoch": 2.9537843268586736, + "grad_norm": 2.68868894257387, + "learning_rate": 9.718236147785934e-07, + "loss": 0.8795, + "step": 4410 + }, + { + "epoch": 2.954454119223041, + "grad_norm": 2.5095410445786652, + "learning_rate": 9.706672307311785e-07, + "loss": 0.8314, + "step": 4411 + }, + { + "epoch": 2.9551239115874077, + "grad_norm": 2.651582115108444, + "learning_rate": 9.695113693282863e-07, + "loss": 0.9832, + "step": 4412 + }, + { + "epoch": 2.955793703951775, + "grad_norm": 2.3635012613617183, + "learning_rate": 9.683560309649281e-07, + "loss": 0.9718, + "step": 4413 + }, + { + "epoch": 2.9564634963161422, + "grad_norm": 2.3476981578008584, + "learning_rate": 9.672012160359373e-07, + "loss": 0.9925, + "step": 4414 + }, + { + "epoch": 2.957133288680509, + "grad_norm": 2.4757054832069, + "learning_rate": 9.660469249359686e-07, + "loss": 0.8731, + "step": 4415 + }, + { + "epoch": 2.957803081044876, + "grad_norm": 2.3847944356156856, 
+ "learning_rate": 9.648931580594967e-07, + "loss": 0.9669, + "step": 4416 + }, + { + "epoch": 2.958472873409243, + "grad_norm": 2.6298507954616137, + "learning_rate": 9.637399158008178e-07, + "loss": 0.9301, + "step": 4417 + }, + { + "epoch": 2.9591426657736104, + "grad_norm": 2.643106128283847, + "learning_rate": 9.62587198554049e-07, + "loss": 0.9369, + "step": 4418 + }, + { + "epoch": 2.959812458137977, + "grad_norm": 2.650743335458646, + "learning_rate": 9.614350067131274e-07, + "loss": 0.9152, + "step": 4419 + }, + { + "epoch": 2.960482250502344, + "grad_norm": 2.913478055619303, + "learning_rate": 9.60283340671811e-07, + "loss": 0.8555, + "step": 4420 + }, + { + "epoch": 2.9611520428667113, + "grad_norm": 2.8610039835549945, + "learning_rate": 9.591322008236779e-07, + "loss": 1.1167, + "step": 4421 + }, + { + "epoch": 2.9618218352310786, + "grad_norm": 3.0959216982909123, + "learning_rate": 9.57981587562126e-07, + "loss": 0.9062, + "step": 4422 + }, + { + "epoch": 2.9624916275954454, + "grad_norm": 2.956735406916858, + "learning_rate": 9.568315012803744e-07, + "loss": 0.8536, + "step": 4423 + }, + { + "epoch": 2.963161419959812, + "grad_norm": 2.39833612659861, + "learning_rate": 9.556819423714607e-07, + "loss": 0.9255, + "step": 4424 + }, + { + "epoch": 2.9638312123241795, + "grad_norm": 2.7149471641922296, + "learning_rate": 9.545329112282436e-07, + "loss": 0.8597, + "step": 4425 + }, + { + "epoch": 2.9645010046885467, + "grad_norm": 2.575310007159469, + "learning_rate": 9.533844082434001e-07, + "loss": 0.913, + "step": 4426 + }, + { + "epoch": 2.9651707970529135, + "grad_norm": 2.287420918850998, + "learning_rate": 9.522364338094278e-07, + "loss": 0.9335, + "step": 4427 + }, + { + "epoch": 2.965840589417281, + "grad_norm": 2.9164563654772104, + "learning_rate": 9.510889883186431e-07, + "loss": 0.9747, + "step": 4428 + }, + { + "epoch": 2.9665103817816476, + "grad_norm": 2.303200957030166, + "learning_rate": 9.499420721631819e-07, + "loss": 0.9709, + "step": 4429 + }, + { + "epoch": 2.967180174146015, + "grad_norm": 2.572325144505513, + "learning_rate": 9.487956857349989e-07, + "loss": 1.036, + "step": 4430 + }, + { + "epoch": 2.9678499665103817, + "grad_norm": 3.15024187957532, + "learning_rate": 9.476498294258682e-07, + "loss": 0.8784, + "step": 4431 + }, + { + "epoch": 2.968519758874749, + "grad_norm": 2.676348728254368, + "learning_rate": 9.46504503627382e-07, + "loss": 0.8659, + "step": 4432 + }, + { + "epoch": 2.9691895512391158, + "grad_norm": 2.66240819280471, + "learning_rate": 9.453597087309521e-07, + "loss": 0.9444, + "step": 4433 + }, + { + "epoch": 2.969859343603483, + "grad_norm": 2.52150643916364, + "learning_rate": 9.442154451278085e-07, + "loss": 0.8875, + "step": 4434 + }, + { + "epoch": 2.97052913596785, + "grad_norm": 3.5075068327541485, + "learning_rate": 9.430717132089995e-07, + "loss": 0.8731, + "step": 4435 + }, + { + "epoch": 2.971198928332217, + "grad_norm": 2.467035200529858, + "learning_rate": 9.419285133653916e-07, + "loss": 0.9346, + "step": 4436 + }, + { + "epoch": 2.971868720696584, + "grad_norm": 3.060338785018009, + "learning_rate": 9.407858459876698e-07, + "loss": 1.0273, + "step": 4437 + }, + { + "epoch": 2.972538513060951, + "grad_norm": 2.5109498498992764, + "learning_rate": 9.396437114663368e-07, + "loss": 0.9491, + "step": 4438 + }, + { + "epoch": 2.973208305425318, + "grad_norm": 2.5222848353383656, + "learning_rate": 9.385021101917127e-07, + "loss": 0.9486, + "step": 4439 + }, + { + "epoch": 2.9738780977896853, + "grad_norm": 
2.541721530094765, + "learning_rate": 9.373610425539386e-07, + "loss": 0.991, + "step": 4440 + }, + { + "epoch": 2.974547890154052, + "grad_norm": 2.6739391996120507, + "learning_rate": 9.362205089429682e-07, + "loss": 1.039, + "step": 4441 + }, + { + "epoch": 2.9752176825184193, + "grad_norm": 2.8450042907546482, + "learning_rate": 9.35080509748576e-07, + "loss": 0.8505, + "step": 4442 + }, + { + "epoch": 2.9758874748827866, + "grad_norm": 2.4278393121574378, + "learning_rate": 9.339410453603517e-07, + "loss": 1.0483, + "step": 4443 + }, + { + "epoch": 2.9765572672471534, + "grad_norm": 2.8246531504789982, + "learning_rate": 9.328021161677067e-07, + "loss": 0.9485, + "step": 4444 + }, + { + "epoch": 2.9772270596115202, + "grad_norm": 2.931189279783314, + "learning_rate": 9.316637225598638e-07, + "loss": 1.0414, + "step": 4445 + }, + { + "epoch": 2.9778968519758875, + "grad_norm": 3.4540119059588763, + "learning_rate": 9.305258649258656e-07, + "loss": 0.9877, + "step": 4446 + }, + { + "epoch": 2.9785666443402548, + "grad_norm": 3.707854273611058, + "learning_rate": 9.293885436545711e-07, + "loss": 0.8129, + "step": 4447 + }, + { + "epoch": 2.9792364367046216, + "grad_norm": 2.448423671710483, + "learning_rate": 9.282517591346582e-07, + "loss": 0.7905, + "step": 4448 + }, + { + "epoch": 2.9799062290689884, + "grad_norm": 2.8428113869681444, + "learning_rate": 9.271155117546171e-07, + "loss": 1.1138, + "step": 4449 + }, + { + "epoch": 2.9805760214333556, + "grad_norm": 3.0967494379096223, + "learning_rate": 9.259798019027574e-07, + "loss": 0.9274, + "step": 4450 + }, + { + "epoch": 2.981245813797723, + "grad_norm": 4.789766159399207, + "learning_rate": 9.248446299672034e-07, + "loss": 0.89, + "step": 4451 + }, + { + "epoch": 2.9819156061620897, + "grad_norm": 2.8940808805772265, + "learning_rate": 9.23709996335899e-07, + "loss": 0.9755, + "step": 4452 + }, + { + "epoch": 2.9825853985264565, + "grad_norm": 2.8141024260345686, + "learning_rate": 9.22575901396599e-07, + "loss": 0.7563, + "step": 4453 + }, + { + "epoch": 2.983255190890824, + "grad_norm": 2.6036362129475124, + "learning_rate": 9.214423455368776e-07, + "loss": 0.8567, + "step": 4454 + }, + { + "epoch": 2.983924983255191, + "grad_norm": 3.287544473310596, + "learning_rate": 9.203093291441228e-07, + "loss": 0.9998, + "step": 4455 + }, + { + "epoch": 2.984594775619558, + "grad_norm": 3.6830052822227075, + "learning_rate": 9.191768526055423e-07, + "loss": 0.8636, + "step": 4456 + }, + { + "epoch": 2.985264567983925, + "grad_norm": 2.4724342498019833, + "learning_rate": 9.180449163081532e-07, + "loss": 0.7422, + "step": 4457 + }, + { + "epoch": 2.985934360348292, + "grad_norm": 2.7442185244309356, + "learning_rate": 9.169135206387919e-07, + "loss": 0.8531, + "step": 4458 + }, + { + "epoch": 2.986604152712659, + "grad_norm": 2.5494194044118346, + "learning_rate": 9.15782665984109e-07, + "loss": 0.9576, + "step": 4459 + }, + { + "epoch": 2.987273945077026, + "grad_norm": 2.615921172035635, + "learning_rate": 9.146523527305723e-07, + "loss": 0.9148, + "step": 4460 + }, + { + "epoch": 2.9879437374413933, + "grad_norm": 2.5797689227754734, + "learning_rate": 9.135225812644605e-07, + "loss": 1.065, + "step": 4461 + }, + { + "epoch": 2.98861352980576, + "grad_norm": 2.7281685535460842, + "learning_rate": 9.1239335197187e-07, + "loss": 0.8665, + "step": 4462 + }, + { + "epoch": 2.9892833221701274, + "grad_norm": 3.060331298104484, + "learning_rate": 9.112646652387108e-07, + "loss": 1.0095, + "step": 4463 + }, + { + "epoch": 2.989953114534494, 
+ "grad_norm": 3.2464779008201425, + "learning_rate": 9.101365214507099e-07, + "loss": 0.9538, + "step": 4464 + }, + { + "epoch": 2.9906229068988615, + "grad_norm": 2.710523233700759, + "learning_rate": 9.090089209934044e-07, + "loss": 0.9192, + "step": 4465 + }, + { + "epoch": 2.9912926992632283, + "grad_norm": 3.4496181582373358, + "learning_rate": 9.078818642521487e-07, + "loss": 1.005, + "step": 4466 + }, + { + "epoch": 2.9919624916275955, + "grad_norm": 2.33104370049697, + "learning_rate": 9.067553516121116e-07, + "loss": 0.9773, + "step": 4467 + }, + { + "epoch": 2.9926322839919623, + "grad_norm": 2.3828250953159946, + "learning_rate": 9.056293834582749e-07, + "loss": 0.91, + "step": 4468 + }, + { + "epoch": 2.9933020763563296, + "grad_norm": 2.610572341663122, + "learning_rate": 9.045039601754352e-07, + "loss": 0.8592, + "step": 4469 + }, + { + "epoch": 2.9939718687206964, + "grad_norm": 5.124021108432933, + "learning_rate": 9.033790821481997e-07, + "loss": 0.9002, + "step": 4470 + }, + { + "epoch": 2.9946416610850637, + "grad_norm": 2.5312221388338387, + "learning_rate": 9.022547497609943e-07, + "loss": 0.9732, + "step": 4471 + }, + { + "epoch": 2.995311453449431, + "grad_norm": 2.646085586309052, + "learning_rate": 9.011309633980555e-07, + "loss": 1.0915, + "step": 4472 + }, + { + "epoch": 2.9959812458137978, + "grad_norm": 2.2606557111794423, + "learning_rate": 9.00007723443434e-07, + "loss": 0.893, + "step": 4473 + }, + { + "epoch": 2.9966510381781646, + "grad_norm": 2.576842135139763, + "learning_rate": 8.988850302809912e-07, + "loss": 0.9517, + "step": 4474 + }, + { + "epoch": 2.997320830542532, + "grad_norm": 2.4493568864320645, + "learning_rate": 8.977628842944064e-07, + "loss": 0.8974, + "step": 4475 + }, + { + "epoch": 2.997990622906899, + "grad_norm": 2.7397660540505173, + "learning_rate": 8.966412858671683e-07, + "loss": 0.8767, + "step": 4476 + }, + { + "epoch": 2.998660415271266, + "grad_norm": 2.9155365474840327, + "learning_rate": 8.955202353825804e-07, + "loss": 0.8527, + "step": 4477 + }, + { + "epoch": 2.9993302076356327, + "grad_norm": 2.9367835703120915, + "learning_rate": 8.943997332237559e-07, + "loss": 1.0057, + "step": 4478 + }, + { + "epoch": 3.0, + "grad_norm": 2.2211222106288844, + "learning_rate": 8.932797797736247e-07, + "loss": 0.8237, + "step": 4479 + }, + { + "epoch": 3.0006697923643673, + "grad_norm": 2.308705902482387, + "learning_rate": 8.921603754149269e-07, + "loss": 0.6718, + "step": 4480 + }, + { + "epoch": 3.001339584728734, + "grad_norm": 3.0219878763952885, + "learning_rate": 8.910415205302161e-07, + "loss": 0.8901, + "step": 4481 + }, + { + "epoch": 3.0020093770931013, + "grad_norm": 2.3527271599768813, + "learning_rate": 8.899232155018544e-07, + "loss": 0.7234, + "step": 4482 + }, + { + "epoch": 3.002679169457468, + "grad_norm": 2.8358206149613583, + "learning_rate": 8.888054607120222e-07, + "loss": 0.8455, + "step": 4483 + }, + { + "epoch": 3.0033489618218354, + "grad_norm": 3.0934004400612056, + "learning_rate": 8.876882565427072e-07, + "loss": 0.8782, + "step": 4484 + }, + { + "epoch": 3.0040187541862022, + "grad_norm": 2.200577013093396, + "learning_rate": 8.86571603375711e-07, + "loss": 0.6258, + "step": 4485 + }, + { + "epoch": 3.0046885465505695, + "grad_norm": 2.811736182669925, + "learning_rate": 8.854555015926442e-07, + "loss": 0.8801, + "step": 4486 + }, + { + "epoch": 3.0053583389149363, + "grad_norm": 3.6096063399310068, + "learning_rate": 8.843399515749332e-07, + "loss": 0.8359, + "step": 4487 + }, + { + "epoch": 
3.0060281312793036, + "grad_norm": 2.313602440194986, + "learning_rate": 8.832249537038129e-07, + "loss": 0.6775, + "step": 4488 + }, + { + "epoch": 3.0066979236436704, + "grad_norm": 2.6919679162413552, + "learning_rate": 8.82110508360331e-07, + "loss": 0.966, + "step": 4489 + }, + { + "epoch": 3.0073677160080377, + "grad_norm": 2.3408757683809642, + "learning_rate": 8.809966159253427e-07, + "loss": 0.8003, + "step": 4490 + }, + { + "epoch": 3.0080375083724045, + "grad_norm": 2.639233433720983, + "learning_rate": 8.798832767795201e-07, + "loss": 0.8426, + "step": 4491 + }, + { + "epoch": 3.0087073007367717, + "grad_norm": 2.52191042245871, + "learning_rate": 8.787704913033424e-07, + "loss": 0.9519, + "step": 4492 + }, + { + "epoch": 3.0093770931011385, + "grad_norm": 2.7593540652267077, + "learning_rate": 8.776582598771011e-07, + "loss": 0.699, + "step": 4493 + }, + { + "epoch": 3.010046885465506, + "grad_norm": 2.4775618023005084, + "learning_rate": 8.765465828808953e-07, + "loss": 0.8113, + "step": 4494 + }, + { + "epoch": 3.0107166778298726, + "grad_norm": 2.492888484760764, + "learning_rate": 8.754354606946394e-07, + "loss": 0.8075, + "step": 4495 + }, + { + "epoch": 3.01138647019424, + "grad_norm": 2.0166646208641645, + "learning_rate": 8.74324893698055e-07, + "loss": 0.6753, + "step": 4496 + }, + { + "epoch": 3.0120562625586067, + "grad_norm": 2.0355678701974, + "learning_rate": 8.732148822706748e-07, + "loss": 0.6656, + "step": 4497 + }, + { + "epoch": 3.012726054922974, + "grad_norm": 2.7500675807497736, + "learning_rate": 8.721054267918416e-07, + "loss": 0.821, + "step": 4498 + }, + { + "epoch": 3.013395847287341, + "grad_norm": 2.9095385567708685, + "learning_rate": 8.709965276407084e-07, + "loss": 0.7873, + "step": 4499 + }, + { + "epoch": 3.014065639651708, + "grad_norm": 2.7347761530466244, + "learning_rate": 8.698881851962377e-07, + "loss": 0.7854, + "step": 4500 + }, + { + "epoch": 3.014735432016075, + "grad_norm": 2.746639126077221, + "learning_rate": 8.687803998372019e-07, + "loss": 0.686, + "step": 4501 + }, + { + "epoch": 3.015405224380442, + "grad_norm": 2.6140792958274206, + "learning_rate": 8.67673171942183e-07, + "loss": 0.6326, + "step": 4502 + }, + { + "epoch": 3.016075016744809, + "grad_norm": 2.7494704560951484, + "learning_rate": 8.665665018895728e-07, + "loss": 0.7014, + "step": 4503 + }, + { + "epoch": 3.016744809109176, + "grad_norm": 2.6160996980337847, + "learning_rate": 8.65460390057572e-07, + "loss": 0.8342, + "step": 4504 + }, + { + "epoch": 3.017414601473543, + "grad_norm": 2.729842129176557, + "learning_rate": 8.643548368241911e-07, + "loss": 0.8222, + "step": 4505 + }, + { + "epoch": 3.0180843938379103, + "grad_norm": 2.3788666139435373, + "learning_rate": 8.632498425672489e-07, + "loss": 0.8033, + "step": 4506 + }, + { + "epoch": 3.018754186202277, + "grad_norm": 2.8829273476836828, + "learning_rate": 8.621454076643737e-07, + "loss": 0.8393, + "step": 4507 + }, + { + "epoch": 3.0194239785666444, + "grad_norm": 2.4261555475243375, + "learning_rate": 8.610415324930027e-07, + "loss": 0.6761, + "step": 4508 + }, + { + "epoch": 3.020093770931011, + "grad_norm": 2.568160278003953, + "learning_rate": 8.599382174303816e-07, + "loss": 0.8148, + "step": 4509 + }, + { + "epoch": 3.0207635632953784, + "grad_norm": 2.823229124862339, + "learning_rate": 8.588354628535648e-07, + "loss": 0.7274, + "step": 4510 + }, + { + "epoch": 3.0214333556597457, + "grad_norm": 2.953154619138717, + "learning_rate": 8.57733269139415e-07, + "loss": 0.8453, + "step": 4511 + }, + { 
+ "epoch": 3.0221031480241125, + "grad_norm": 2.589298483496803, + "learning_rate": 8.566316366646033e-07, + "loss": 0.7828, + "step": 4512 + }, + { + "epoch": 3.0227729403884798, + "grad_norm": 4.28328478712298, + "learning_rate": 8.555305658056092e-07, + "loss": 0.9337, + "step": 4513 + }, + { + "epoch": 3.0234427327528466, + "grad_norm": 2.6189876559687706, + "learning_rate": 8.544300569387201e-07, + "loss": 0.7962, + "step": 4514 + }, + { + "epoch": 3.024112525117214, + "grad_norm": 2.583011396930158, + "learning_rate": 8.53330110440031e-07, + "loss": 0.6263, + "step": 4515 + }, + { + "epoch": 3.0247823174815807, + "grad_norm": 2.693462582148134, + "learning_rate": 8.522307266854457e-07, + "loss": 0.9337, + "step": 4516 + }, + { + "epoch": 3.025452109845948, + "grad_norm": 2.3691133049571445, + "learning_rate": 8.511319060506743e-07, + "loss": 0.7813, + "step": 4517 + }, + { + "epoch": 3.0261219022103147, + "grad_norm": 2.7280079214073485, + "learning_rate": 8.500336489112357e-07, + "loss": 0.8661, + "step": 4518 + }, + { + "epoch": 3.026791694574682, + "grad_norm": 2.690766620319964, + "learning_rate": 8.489359556424557e-07, + "loss": 0.8214, + "step": 4519 + }, + { + "epoch": 3.027461486939049, + "grad_norm": 2.928110797921107, + "learning_rate": 8.47838826619467e-07, + "loss": 0.8406, + "step": 4520 + }, + { + "epoch": 3.028131279303416, + "grad_norm": 2.7671129097794678, + "learning_rate": 8.467422622172102e-07, + "loss": 0.8143, + "step": 4521 + }, + { + "epoch": 3.028801071667783, + "grad_norm": 2.7151257091912107, + "learning_rate": 8.456462628104329e-07, + "loss": 0.8492, + "step": 4522 + }, + { + "epoch": 3.02947086403215, + "grad_norm": 2.4624586556962913, + "learning_rate": 8.445508287736886e-07, + "loss": 0.8244, + "step": 4523 + }, + { + "epoch": 3.030140656396517, + "grad_norm": 2.535462283670022, + "learning_rate": 8.434559604813391e-07, + "loss": 0.8203, + "step": 4524 + }, + { + "epoch": 3.0308104487608842, + "grad_norm": 2.6841584730857884, + "learning_rate": 8.423616583075511e-07, + "loss": 0.8684, + "step": 4525 + }, + { + "epoch": 3.031480241125251, + "grad_norm": 2.77866381278767, + "learning_rate": 8.412679226263009e-07, + "loss": 0.8968, + "step": 4526 + }, + { + "epoch": 3.0321500334896183, + "grad_norm": 2.868883180208398, + "learning_rate": 8.401747538113672e-07, + "loss": 0.9504, + "step": 4527 + }, + { + "epoch": 3.032819825853985, + "grad_norm": 2.666010827402208, + "learning_rate": 8.390821522363374e-07, + "loss": 0.8504, + "step": 4528 + }, + { + "epoch": 3.0334896182183524, + "grad_norm": 2.937454985769477, + "learning_rate": 8.379901182746036e-07, + "loss": 0.7832, + "step": 4529 + }, + { + "epoch": 3.034159410582719, + "grad_norm": 2.8216589057025225, + "learning_rate": 8.368986522993675e-07, + "loss": 0.8267, + "step": 4530 + }, + { + "epoch": 3.0348292029470865, + "grad_norm": 2.5470625723470413, + "learning_rate": 8.358077546836319e-07, + "loss": 0.8604, + "step": 4531 + }, + { + "epoch": 3.0354989953114533, + "grad_norm": 2.7039058896214754, + "learning_rate": 8.347174258002077e-07, + "loss": 0.8556, + "step": 4532 + }, + { + "epoch": 3.0361687876758205, + "grad_norm": 4.329960738574528, + "learning_rate": 8.33627666021711e-07, + "loss": 0.6626, + "step": 4533 + }, + { + "epoch": 3.0368385800401874, + "grad_norm": 2.531094070483746, + "learning_rate": 8.325384757205659e-07, + "loss": 0.668, + "step": 4534 + }, + { + "epoch": 3.0375083724045546, + "grad_norm": 3.3830981115319787, + "learning_rate": 8.314498552689973e-07, + "loss": 0.7866, + "step": 
4535 + }, + { + "epoch": 3.0381781647689214, + "grad_norm": 2.6283342930720535, + "learning_rate": 8.303618050390383e-07, + "loss": 0.8639, + "step": 4536 + }, + { + "epoch": 3.0388479571332887, + "grad_norm": 2.6248122420337983, + "learning_rate": 8.292743254025259e-07, + "loss": 0.7382, + "step": 4537 + }, + { + "epoch": 3.0395177494976555, + "grad_norm": 2.4032109006764344, + "learning_rate": 8.281874167311054e-07, + "loss": 0.7362, + "step": 4538 + }, + { + "epoch": 3.040187541862023, + "grad_norm": 2.773762639433884, + "learning_rate": 8.271010793962217e-07, + "loss": 0.8723, + "step": 4539 + }, + { + "epoch": 3.0408573342263896, + "grad_norm": 2.5766780065213957, + "learning_rate": 8.260153137691276e-07, + "loss": 0.8219, + "step": 4540 + }, + { + "epoch": 3.041527126590757, + "grad_norm": 2.5028415906017574, + "learning_rate": 8.249301202208799e-07, + "loss": 0.7378, + "step": 4541 + }, + { + "epoch": 3.042196918955124, + "grad_norm": 2.4975665623051446, + "learning_rate": 8.238454991223407e-07, + "loss": 0.8293, + "step": 4542 + }, + { + "epoch": 3.042866711319491, + "grad_norm": 2.7691716243959723, + "learning_rate": 8.227614508441761e-07, + "loss": 0.8582, + "step": 4543 + }, + { + "epoch": 3.043536503683858, + "grad_norm": 2.746714444606856, + "learning_rate": 8.216779757568549e-07, + "loss": 0.7828, + "step": 4544 + }, + { + "epoch": 3.044206296048225, + "grad_norm": 2.8118696738066933, + "learning_rate": 8.205950742306506e-07, + "loss": 0.8096, + "step": 4545 + }, + { + "epoch": 3.0448760884125923, + "grad_norm": 3.068311407813263, + "learning_rate": 8.195127466356434e-07, + "loss": 0.8927, + "step": 4546 + }, + { + "epoch": 3.045545880776959, + "grad_norm": 3.1674520489507647, + "learning_rate": 8.184309933417147e-07, + "loss": 0.7257, + "step": 4547 + }, + { + "epoch": 3.0462156731413264, + "grad_norm": 2.657121725792249, + "learning_rate": 8.173498147185491e-07, + "loss": 0.7622, + "step": 4548 + }, + { + "epoch": 3.046885465505693, + "grad_norm": 2.5548443345404666, + "learning_rate": 8.162692111356357e-07, + "loss": 0.8875, + "step": 4549 + }, + { + "epoch": 3.0475552578700604, + "grad_norm": 3.0222798309973484, + "learning_rate": 8.151891829622688e-07, + "loss": 0.826, + "step": 4550 + }, + { + "epoch": 3.0482250502344272, + "grad_norm": 2.654253640638199, + "learning_rate": 8.141097305675447e-07, + "loss": 0.8246, + "step": 4551 + }, + { + "epoch": 3.0488948425987945, + "grad_norm": 2.860604872292076, + "learning_rate": 8.130308543203608e-07, + "loss": 0.8793, + "step": 4552 + }, + { + "epoch": 3.0495646349631613, + "grad_norm": 2.8743117624769376, + "learning_rate": 8.119525545894199e-07, + "loss": 0.8471, + "step": 4553 + }, + { + "epoch": 3.0502344273275286, + "grad_norm": 2.4720717817402154, + "learning_rate": 8.108748317432291e-07, + "loss": 0.919, + "step": 4554 + }, + { + "epoch": 3.0509042196918954, + "grad_norm": 2.543881187475063, + "learning_rate": 8.097976861500964e-07, + "loss": 0.7672, + "step": 4555 + }, + { + "epoch": 3.0515740120562627, + "grad_norm": 2.523481062513832, + "learning_rate": 8.087211181781307e-07, + "loss": 0.848, + "step": 4556 + }, + { + "epoch": 3.0522438044206295, + "grad_norm": 3.0115283548710656, + "learning_rate": 8.076451281952477e-07, + "loss": 0.9081, + "step": 4557 + }, + { + "epoch": 3.0529135967849967, + "grad_norm": 2.714690842659541, + "learning_rate": 8.06569716569163e-07, + "loss": 0.8612, + "step": 4558 + }, + { + "epoch": 3.0535833891493636, + "grad_norm": 2.639194034309296, + "learning_rate": 8.054948836673959e-07, + 
"loss": 0.7576, + "step": 4559 + }, + { + "epoch": 3.054253181513731, + "grad_norm": 2.729173431187717, + "learning_rate": 8.044206298572643e-07, + "loss": 0.918, + "step": 4560 + }, + { + "epoch": 3.0549229738780976, + "grad_norm": 2.581234965471339, + "learning_rate": 8.033469555058935e-07, + "loss": 0.782, + "step": 4561 + }, + { + "epoch": 3.055592766242465, + "grad_norm": 2.391436062744207, + "learning_rate": 8.022738609802075e-07, + "loss": 0.6755, + "step": 4562 + }, + { + "epoch": 3.0562625586068317, + "grad_norm": 2.6646651651208755, + "learning_rate": 8.012013466469338e-07, + "loss": 0.7255, + "step": 4563 + }, + { + "epoch": 3.056932350971199, + "grad_norm": 2.786543074365306, + "learning_rate": 8.001294128725981e-07, + "loss": 0.9141, + "step": 4564 + }, + { + "epoch": 3.057602143335566, + "grad_norm": 3.290063158025738, + "learning_rate": 7.990580600235329e-07, + "loss": 0.7985, + "step": 4565 + }, + { + "epoch": 3.058271935699933, + "grad_norm": 2.6554674141458103, + "learning_rate": 7.979872884658685e-07, + "loss": 0.8087, + "step": 4566 + }, + { + "epoch": 3.0589417280643, + "grad_norm": 2.6779545549018007, + "learning_rate": 7.969170985655383e-07, + "loss": 0.7249, + "step": 4567 + }, + { + "epoch": 3.059611520428667, + "grad_norm": 2.8402028608233, + "learning_rate": 7.958474906882744e-07, + "loss": 0.7788, + "step": 4568 + }, + { + "epoch": 3.060281312793034, + "grad_norm": 2.5285739192550833, + "learning_rate": 7.947784651996138e-07, + "loss": 0.854, + "step": 4569 + }, + { + "epoch": 3.060951105157401, + "grad_norm": 2.654251007516384, + "learning_rate": 7.937100224648917e-07, + "loss": 0.9415, + "step": 4570 + }, + { + "epoch": 3.0616208975217685, + "grad_norm": 2.727529507860568, + "learning_rate": 7.926421628492461e-07, + "loss": 0.8178, + "step": 4571 + }, + { + "epoch": 3.0622906898861353, + "grad_norm": 2.3362358621449877, + "learning_rate": 7.915748867176117e-07, + "loss": 0.7949, + "step": 4572 + }, + { + "epoch": 3.0629604822505025, + "grad_norm": 2.2402595097600373, + "learning_rate": 7.905081944347292e-07, + "loss": 0.7051, + "step": 4573 + }, + { + "epoch": 3.0636302746148694, + "grad_norm": 2.178105919588685, + "learning_rate": 7.894420863651367e-07, + "loss": 0.6539, + "step": 4574 + }, + { + "epoch": 3.0643000669792366, + "grad_norm": 2.87882856499604, + "learning_rate": 7.883765628731732e-07, + "loss": 0.7507, + "step": 4575 + }, + { + "epoch": 3.0649698593436034, + "grad_norm": 2.8645384355579138, + "learning_rate": 7.873116243229773e-07, + "loss": 0.769, + "step": 4576 + }, + { + "epoch": 3.0656396517079707, + "grad_norm": 2.5706014970504727, + "learning_rate": 7.862472710784888e-07, + "loss": 0.8689, + "step": 4577 + }, + { + "epoch": 3.0663094440723375, + "grad_norm": 2.7486984088566038, + "learning_rate": 7.851835035034469e-07, + "loss": 0.9181, + "step": 4578 + }, + { + "epoch": 3.066979236436705, + "grad_norm": 3.4574234693691253, + "learning_rate": 7.841203219613907e-07, + "loss": 0.7172, + "step": 4579 + }, + { + "epoch": 3.0676490288010716, + "grad_norm": 2.727474931928276, + "learning_rate": 7.830577268156592e-07, + "loss": 0.9423, + "step": 4580 + }, + { + "epoch": 3.068318821165439, + "grad_norm": 2.346639869154611, + "learning_rate": 7.819957184293906e-07, + "loss": 0.7365, + "step": 4581 + }, + { + "epoch": 3.0689886135298057, + "grad_norm": 2.7781289710880435, + "learning_rate": 7.809342971655229e-07, + "loss": 0.8431, + "step": 4582 + }, + { + "epoch": 3.069658405894173, + "grad_norm": 3.221997963341945, + "learning_rate": 
7.798734633867933e-07, + "loss": 0.8131, + "step": 4583 + }, + { + "epoch": 3.0703281982585398, + "grad_norm": 2.4797172891582937, + "learning_rate": 7.788132174557386e-07, + "loss": 0.7008, + "step": 4584 + }, + { + "epoch": 3.070997990622907, + "grad_norm": 2.67459152687761, + "learning_rate": 7.777535597346941e-07, + "loss": 0.79, + "step": 4585 + }, + { + "epoch": 3.071667782987274, + "grad_norm": 2.5742320056990873, + "learning_rate": 7.766944905857946e-07, + "loss": 0.7698, + "step": 4586 + }, + { + "epoch": 3.072337575351641, + "grad_norm": 2.530937895029093, + "learning_rate": 7.756360103709734e-07, + "loss": 0.7453, + "step": 4587 + }, + { + "epoch": 3.073007367716008, + "grad_norm": 2.5682877338077086, + "learning_rate": 7.74578119451963e-07, + "loss": 0.7538, + "step": 4588 + }, + { + "epoch": 3.073677160080375, + "grad_norm": 2.6301044272562075, + "learning_rate": 7.735208181902937e-07, + "loss": 0.7678, + "step": 4589 + }, + { + "epoch": 3.074346952444742, + "grad_norm": 2.557747767314214, + "learning_rate": 7.724641069472952e-07, + "loss": 0.6681, + "step": 4590 + }, + { + "epoch": 3.0750167448091092, + "grad_norm": 2.8901479932825462, + "learning_rate": 7.714079860840948e-07, + "loss": 0.6426, + "step": 4591 + }, + { + "epoch": 3.075686537173476, + "grad_norm": 2.393713496753272, + "learning_rate": 7.703524559616188e-07, + "loss": 0.775, + "step": 4592 + }, + { + "epoch": 3.0763563295378433, + "grad_norm": 2.788765809606521, + "learning_rate": 7.692975169405909e-07, + "loss": 0.8346, + "step": 4593 + }, + { + "epoch": 3.07702612190221, + "grad_norm": 2.5791693897510304, + "learning_rate": 7.682431693815332e-07, + "loss": 0.837, + "step": 4594 + }, + { + "epoch": 3.0776959142665774, + "grad_norm": 2.349251971114205, + "learning_rate": 7.671894136447655e-07, + "loss": 0.7009, + "step": 4595 + }, + { + "epoch": 3.078365706630944, + "grad_norm": 2.2406218805784093, + "learning_rate": 7.661362500904055e-07, + "loss": 0.7793, + "step": 4596 + }, + { + "epoch": 3.0790354989953115, + "grad_norm": 2.880870374586317, + "learning_rate": 7.650836790783681e-07, + "loss": 0.8016, + "step": 4597 + }, + { + "epoch": 3.0797052913596783, + "grad_norm": 2.7163024679153955, + "learning_rate": 7.640317009683665e-07, + "loss": 0.739, + "step": 4598 + }, + { + "epoch": 3.0803750837240456, + "grad_norm": 2.8691705272285297, + "learning_rate": 7.62980316119911e-07, + "loss": 0.8076, + "step": 4599 + }, + { + "epoch": 3.081044876088413, + "grad_norm": 2.797639151841758, + "learning_rate": 7.619295248923081e-07, + "loss": 0.8011, + "step": 4600 + }, + { + "epoch": 3.0817146684527796, + "grad_norm": 3.886214119376957, + "learning_rate": 7.60879327644663e-07, + "loss": 0.9107, + "step": 4601 + }, + { + "epoch": 3.082384460817147, + "grad_norm": 2.8113367976987513, + "learning_rate": 7.598297247358769e-07, + "loss": 0.9315, + "step": 4602 + }, + { + "epoch": 3.0830542531815137, + "grad_norm": 2.66235011194035, + "learning_rate": 7.587807165246483e-07, + "loss": 0.806, + "step": 4603 + }, + { + "epoch": 3.083724045545881, + "grad_norm": 2.6969944153430254, + "learning_rate": 7.577323033694725e-07, + "loss": 0.7877, + "step": 4604 + }, + { + "epoch": 3.084393837910248, + "grad_norm": 2.7176773445653146, + "learning_rate": 7.566844856286412e-07, + "loss": 0.8496, + "step": 4605 + }, + { + "epoch": 3.085063630274615, + "grad_norm": 2.505815964533713, + "learning_rate": 7.556372636602427e-07, + "loss": 0.7789, + "step": 4606 + }, + { + "epoch": 3.085733422638982, + "grad_norm": 2.8575716858313798, + 
"learning_rate": 7.545906378221615e-07, + "loss": 0.8215, + "step": 4607 + }, + { + "epoch": 3.086403215003349, + "grad_norm": 2.8877209018992986, + "learning_rate": 7.535446084720791e-07, + "loss": 0.8992, + "step": 4608 + }, + { + "epoch": 3.087073007367716, + "grad_norm": 2.666314208566318, + "learning_rate": 7.524991759674721e-07, + "loss": 0.8084, + "step": 4609 + }, + { + "epoch": 3.087742799732083, + "grad_norm": 3.425482131694566, + "learning_rate": 7.514543406656138e-07, + "loss": 0.7498, + "step": 4610 + }, + { + "epoch": 3.08841259209645, + "grad_norm": 3.0368781853796696, + "learning_rate": 7.504101029235736e-07, + "loss": 0.7791, + "step": 4611 + }, + { + "epoch": 3.0890823844608173, + "grad_norm": 2.5066654101588313, + "learning_rate": 7.493664630982153e-07, + "loss": 0.7521, + "step": 4612 + }, + { + "epoch": 3.089752176825184, + "grad_norm": 3.3487803043407522, + "learning_rate": 7.48323421546202e-07, + "loss": 0.9699, + "step": 4613 + }, + { + "epoch": 3.0904219691895514, + "grad_norm": 2.66366596607494, + "learning_rate": 7.472809786239871e-07, + "loss": 0.8072, + "step": 4614 + }, + { + "epoch": 3.091091761553918, + "grad_norm": 2.5775924423841303, + "learning_rate": 7.462391346878222e-07, + "loss": 0.8935, + "step": 4615 + }, + { + "epoch": 3.0917615539182854, + "grad_norm": 2.531528155672908, + "learning_rate": 7.451978900937562e-07, + "loss": 0.8382, + "step": 4616 + }, + { + "epoch": 3.0924313462826523, + "grad_norm": 2.8394274402460837, + "learning_rate": 7.441572451976303e-07, + "loss": 0.8349, + "step": 4617 + }, + { + "epoch": 3.0931011386470195, + "grad_norm": 2.6567274770014047, + "learning_rate": 7.431172003550802e-07, + "loss": 0.7679, + "step": 4618 + }, + { + "epoch": 3.0937709310113863, + "grad_norm": 2.4887684030374793, + "learning_rate": 7.420777559215381e-07, + "loss": 0.8637, + "step": 4619 + }, + { + "epoch": 3.0944407233757536, + "grad_norm": 2.4243874801880207, + "learning_rate": 7.410389122522318e-07, + "loss": 0.6833, + "step": 4620 + }, + { + "epoch": 3.0951105157401204, + "grad_norm": 2.5795565020645985, + "learning_rate": 7.400006697021836e-07, + "loss": 0.8251, + "step": 4621 + }, + { + "epoch": 3.0957803081044877, + "grad_norm": 2.9368567223785615, + "learning_rate": 7.389630286262073e-07, + "loss": 0.942, + "step": 4622 + }, + { + "epoch": 3.0964501004688545, + "grad_norm": 2.59766894385429, + "learning_rate": 7.379259893789134e-07, + "loss": 0.763, + "step": 4623 + }, + { + "epoch": 3.0971198928332218, + "grad_norm": 2.5307693432852396, + "learning_rate": 7.368895523147085e-07, + "loss": 0.7527, + "step": 4624 + }, + { + "epoch": 3.0977896851975886, + "grad_norm": 2.8877240048772306, + "learning_rate": 7.358537177877917e-07, + "loss": 0.8425, + "step": 4625 + }, + { + "epoch": 3.098459477561956, + "grad_norm": 2.6957467142689873, + "learning_rate": 7.34818486152154e-07, + "loss": 0.7866, + "step": 4626 + }, + { + "epoch": 3.0991292699263226, + "grad_norm": 2.7457645734219076, + "learning_rate": 7.337838577615833e-07, + "loss": 0.7564, + "step": 4627 + }, + { + "epoch": 3.09979906229069, + "grad_norm": 2.705382332735434, + "learning_rate": 7.327498329696614e-07, + "loss": 0.8638, + "step": 4628 + }, + { + "epoch": 3.1004688546550567, + "grad_norm": 2.6307838279683446, + "learning_rate": 7.31716412129763e-07, + "loss": 0.8603, + "step": 4629 + }, + { + "epoch": 3.101138647019424, + "grad_norm": 2.5689106402774895, + "learning_rate": 7.30683595595055e-07, + "loss": 0.7673, + "step": 4630 + }, + { + "epoch": 3.101808439383791, + "grad_norm": 
2.6309310983211978, + "learning_rate": 7.29651383718499e-07, + "loss": 0.83, + "step": 4631 + }, + { + "epoch": 3.102478231748158, + "grad_norm": 2.6945926877252138, + "learning_rate": 7.286197768528516e-07, + "loss": 0.855, + "step": 4632 + }, + { + "epoch": 3.1031480241125253, + "grad_norm": 3.1359258991299876, + "learning_rate": 7.275887753506611e-07, + "loss": 0.7392, + "step": 4633 + }, + { + "epoch": 3.103817816476892, + "grad_norm": 3.0815997641778994, + "learning_rate": 7.265583795642675e-07, + "loss": 0.91, + "step": 4634 + }, + { + "epoch": 3.1044876088412594, + "grad_norm": 2.948185668253564, + "learning_rate": 7.25528589845805e-07, + "loss": 0.9469, + "step": 4635 + }, + { + "epoch": 3.105157401205626, + "grad_norm": 2.8668233038709023, + "learning_rate": 7.244994065472024e-07, + "loss": 0.7223, + "step": 4636 + }, + { + "epoch": 3.1058271935699935, + "grad_norm": 3.254423295916462, + "learning_rate": 7.234708300201803e-07, + "loss": 0.8092, + "step": 4637 + }, + { + "epoch": 3.1064969859343603, + "grad_norm": 2.6098329064129886, + "learning_rate": 7.224428606162493e-07, + "loss": 0.894, + "step": 4638 + }, + { + "epoch": 3.1071667782987276, + "grad_norm": 2.6252128392291842, + "learning_rate": 7.21415498686715e-07, + "loss": 0.7406, + "step": 4639 + }, + { + "epoch": 3.1078365706630944, + "grad_norm": 2.484534535223185, + "learning_rate": 7.20388744582676e-07, + "loss": 0.8417, + "step": 4640 + }, + { + "epoch": 3.1085063630274616, + "grad_norm": 2.596649956131794, + "learning_rate": 7.193625986550229e-07, + "loss": 0.6922, + "step": 4641 + }, + { + "epoch": 3.1091761553918285, + "grad_norm": 3.1929327179376146, + "learning_rate": 7.183370612544357e-07, + "loss": 0.6638, + "step": 4642 + }, + { + "epoch": 3.1098459477561957, + "grad_norm": 2.7169551660413305, + "learning_rate": 7.173121327313887e-07, + "loss": 0.7391, + "step": 4643 + }, + { + "epoch": 3.1105157401205625, + "grad_norm": 2.8362376789755057, + "learning_rate": 7.162878134361492e-07, + "loss": 0.8207, + "step": 4644 + }, + { + "epoch": 3.11118553248493, + "grad_norm": 3.586771140735701, + "learning_rate": 7.152641037187755e-07, + "loss": 0.8285, + "step": 4645 + }, + { + "epoch": 3.1118553248492966, + "grad_norm": 3.178687356548629, + "learning_rate": 7.142410039291142e-07, + "loss": 0.7768, + "step": 4646 + }, + { + "epoch": 3.112525117213664, + "grad_norm": 2.500724033621939, + "learning_rate": 7.132185144168089e-07, + "loss": 0.7943, + "step": 4647 + }, + { + "epoch": 3.1131949095780307, + "grad_norm": 2.733923634803592, + "learning_rate": 7.121966355312915e-07, + "loss": 0.7248, + "step": 4648 + }, + { + "epoch": 3.113864701942398, + "grad_norm": 2.9954937814940337, + "learning_rate": 7.111753676217856e-07, + "loss": 0.8355, + "step": 4649 + }, + { + "epoch": 3.1145344943067648, + "grad_norm": 2.9685423771340633, + "learning_rate": 7.101547110373061e-07, + "loss": 0.9197, + "step": 4650 + }, + { + "epoch": 3.115204286671132, + "grad_norm": 2.4225857646043623, + "learning_rate": 7.091346661266594e-07, + "loss": 0.6479, + "step": 4651 + }, + { + "epoch": 3.115874079035499, + "grad_norm": 2.670552609296137, + "learning_rate": 7.081152332384419e-07, + "loss": 0.7197, + "step": 4652 + }, + { + "epoch": 3.116543871399866, + "grad_norm": 3.205117083722009, + "learning_rate": 7.070964127210423e-07, + "loss": 0.8336, + "step": 4653 + }, + { + "epoch": 3.117213663764233, + "grad_norm": 2.868167223744094, + "learning_rate": 7.060782049226389e-07, + "loss": 0.7568, + "step": 4654 + }, + { + "epoch": 3.1178834561286, + 
"grad_norm": 2.8013869362052124, + "learning_rate": 7.050606101912008e-07, + "loss": 0.8253, + "step": 4655 + }, + { + "epoch": 3.118553248492967, + "grad_norm": 2.5520506790786825, + "learning_rate": 7.040436288744879e-07, + "loss": 0.7719, + "step": 4656 + }, + { + "epoch": 3.1192230408573343, + "grad_norm": 2.872996635478029, + "learning_rate": 7.030272613200498e-07, + "loss": 0.8229, + "step": 4657 + }, + { + "epoch": 3.119892833221701, + "grad_norm": 2.631520959389636, + "learning_rate": 7.020115078752273e-07, + "loss": 0.7615, + "step": 4658 + }, + { + "epoch": 3.1205626255860683, + "grad_norm": 3.056448169300065, + "learning_rate": 7.00996368887151e-07, + "loss": 0.9886, + "step": 4659 + }, + { + "epoch": 3.121232417950435, + "grad_norm": 3.060283887215828, + "learning_rate": 6.99981844702741e-07, + "loss": 0.8845, + "step": 4660 + }, + { + "epoch": 3.1219022103148024, + "grad_norm": 3.4108018922652796, + "learning_rate": 6.989679356687074e-07, + "loss": 0.7969, + "step": 4661 + }, + { + "epoch": 3.1225720026791697, + "grad_norm": 3.2033092990740295, + "learning_rate": 6.979546421315511e-07, + "loss": 0.7311, + "step": 4662 + }, + { + "epoch": 3.1232417950435365, + "grad_norm": 2.941137178446677, + "learning_rate": 6.96941964437561e-07, + "loss": 0.8911, + "step": 4663 + }, + { + "epoch": 3.1239115874079038, + "grad_norm": 2.9441963092688264, + "learning_rate": 6.959299029328173e-07, + "loss": 0.897, + "step": 4664 + }, + { + "epoch": 3.1245813797722706, + "grad_norm": 2.3831857537309658, + "learning_rate": 6.949184579631882e-07, + "loss": 0.7266, + "step": 4665 + }, + { + "epoch": 3.125251172136638, + "grad_norm": 2.6188437435615564, + "learning_rate": 6.939076298743319e-07, + "loss": 0.7448, + "step": 4666 + }, + { + "epoch": 3.1259209645010047, + "grad_norm": 2.892775327624039, + "learning_rate": 6.928974190116955e-07, + "loss": 0.8355, + "step": 4667 + }, + { + "epoch": 3.126590756865372, + "grad_norm": 2.5553199433956073, + "learning_rate": 6.918878257205153e-07, + "loss": 0.8159, + "step": 4668 + }, + { + "epoch": 3.1272605492297387, + "grad_norm": 2.5342423689481084, + "learning_rate": 6.908788503458166e-07, + "loss": 0.6686, + "step": 4669 + }, + { + "epoch": 3.127930341594106, + "grad_norm": 3.3907450626414164, + "learning_rate": 6.898704932324138e-07, + "loss": 0.5947, + "step": 4670 + }, + { + "epoch": 3.128600133958473, + "grad_norm": 2.442153063017744, + "learning_rate": 6.888627547249091e-07, + "loss": 0.9077, + "step": 4671 + }, + { + "epoch": 3.12926992632284, + "grad_norm": 4.295755308032197, + "learning_rate": 6.878556351676943e-07, + "loss": 0.7745, + "step": 4672 + }, + { + "epoch": 3.129939718687207, + "grad_norm": 2.7996401834325835, + "learning_rate": 6.868491349049489e-07, + "loss": 0.8645, + "step": 4673 + }, + { + "epoch": 3.130609511051574, + "grad_norm": 2.437831429606666, + "learning_rate": 6.858432542806414e-07, + "loss": 0.8207, + "step": 4674 + }, + { + "epoch": 3.131279303415941, + "grad_norm": 5.477923544470202, + "learning_rate": 6.848379936385282e-07, + "loss": 0.8461, + "step": 4675 + }, + { + "epoch": 3.1319490957803082, + "grad_norm": 3.7662837670110556, + "learning_rate": 6.838333533221539e-07, + "loss": 0.8688, + "step": 4676 + }, + { + "epoch": 3.132618888144675, + "grad_norm": 2.915829390309901, + "learning_rate": 6.828293336748507e-07, + "loss": 0.8499, + "step": 4677 + }, + { + "epoch": 3.1332886805090423, + "grad_norm": 3.1718591638547515, + "learning_rate": 6.818259350397396e-07, + "loss": 0.8433, + "step": 4678 + }, + { + "epoch": 
3.133958472873409, + "grad_norm": 3.1391256226469855, + "learning_rate": 6.808231577597285e-07, + "loss": 0.861, + "step": 4679 + }, + { + "epoch": 3.1346282652377764, + "grad_norm": 3.090668655265418, + "learning_rate": 6.798210021775134e-07, + "loss": 0.9082, + "step": 4680 + }, + { + "epoch": 3.135298057602143, + "grad_norm": 2.6871812471911016, + "learning_rate": 6.788194686355776e-07, + "loss": 0.8025, + "step": 4681 + }, + { + "epoch": 3.1359678499665105, + "grad_norm": 2.561391464352071, + "learning_rate": 6.778185574761919e-07, + "loss": 0.689, + "step": 4682 + }, + { + "epoch": 3.1366376423308773, + "grad_norm": 2.7039183319285938, + "learning_rate": 6.76818269041415e-07, + "loss": 0.7998, + "step": 4683 + }, + { + "epoch": 3.1373074346952445, + "grad_norm": 2.7551911679926118, + "learning_rate": 6.758186036730915e-07, + "loss": 0.6842, + "step": 4684 + }, + { + "epoch": 3.1379772270596114, + "grad_norm": 2.590792258272469, + "learning_rate": 6.748195617128545e-07, + "loss": 0.778, + "step": 4685 + }, + { + "epoch": 3.1386470194239786, + "grad_norm": 2.8284461176888476, + "learning_rate": 6.738211435021219e-07, + "loss": 0.6471, + "step": 4686 + }, + { + "epoch": 3.1393168117883454, + "grad_norm": 2.6981629613937668, + "learning_rate": 6.728233493821023e-07, + "loss": 0.7525, + "step": 4687 + }, + { + "epoch": 3.1399866041527127, + "grad_norm": 4.478377222303441, + "learning_rate": 6.71826179693787e-07, + "loss": 0.7915, + "step": 4688 + }, + { + "epoch": 3.1406563965170795, + "grad_norm": 2.7899593977784254, + "learning_rate": 6.708296347779555e-07, + "loss": 0.9235, + "step": 4689 + }, + { + "epoch": 3.1413261888814468, + "grad_norm": 2.813885833897849, + "learning_rate": 6.698337149751735e-07, + "loss": 0.721, + "step": 4690 + }, + { + "epoch": 3.141995981245814, + "grad_norm": 2.875640790518473, + "learning_rate": 6.688384206257955e-07, + "loss": 0.7056, + "step": 4691 + }, + { + "epoch": 3.142665773610181, + "grad_norm": 2.9959463284347736, + "learning_rate": 6.678437520699577e-07, + "loss": 0.8418, + "step": 4692 + }, + { + "epoch": 3.1433355659745477, + "grad_norm": 2.582585436944563, + "learning_rate": 6.668497096475859e-07, + "loss": 0.7973, + "step": 4693 + }, + { + "epoch": 3.144005358338915, + "grad_norm": 2.4932982732860642, + "learning_rate": 6.658562936983898e-07, + "loss": 0.8028, + "step": 4694 + }, + { + "epoch": 3.144675150703282, + "grad_norm": 3.1466062052711847, + "learning_rate": 6.648635045618684e-07, + "loss": 0.8372, + "step": 4695 + }, + { + "epoch": 3.145344943067649, + "grad_norm": 2.6503509176409614, + "learning_rate": 6.638713425773024e-07, + "loss": 0.6606, + "step": 4696 + }, + { + "epoch": 3.1460147354320163, + "grad_norm": 3.011342986415276, + "learning_rate": 6.628798080837601e-07, + "loss": 0.7324, + "step": 4697 + }, + { + "epoch": 3.146684527796383, + "grad_norm": 2.408814029906915, + "learning_rate": 6.618889014200946e-07, + "loss": 0.7069, + "step": 4698 + }, + { + "epoch": 3.1473543201607503, + "grad_norm": 2.449055245292723, + "learning_rate": 6.608986229249476e-07, + "loss": 0.7657, + "step": 4699 + }, + { + "epoch": 3.148024112525117, + "grad_norm": 2.7289088019268215, + "learning_rate": 6.599089729367411e-07, + "loss": 0.799, + "step": 4700 + }, + { + "epoch": 3.1486939048894844, + "grad_norm": 2.375586191246392, + "learning_rate": 6.589199517936853e-07, + "loss": 0.7324, + "step": 4701 + }, + { + "epoch": 3.1493636972538512, + "grad_norm": 2.8035175609952736, + "learning_rate": 6.579315598337746e-07, + "loss": 0.6449, + "step": 4702 + 
}, + { + "epoch": 3.1500334896182185, + "grad_norm": 2.831508930786501, + "learning_rate": 6.569437973947909e-07, + "loss": 0.8172, + "step": 4703 + }, + { + "epoch": 3.1507032819825853, + "grad_norm": 2.742790557347886, + "learning_rate": 6.559566648142965e-07, + "loss": 0.8101, + "step": 4704 + }, + { + "epoch": 3.1513730743469526, + "grad_norm": 2.8659670206158525, + "learning_rate": 6.549701624296406e-07, + "loss": 0.6984, + "step": 4705 + }, + { + "epoch": 3.1520428667113194, + "grad_norm": 2.472753446567299, + "learning_rate": 6.539842905779589e-07, + "loss": 0.6965, + "step": 4706 + }, + { + "epoch": 3.1527126590756867, + "grad_norm": 2.5976718449480445, + "learning_rate": 6.529990495961702e-07, + "loss": 0.8914, + "step": 4707 + }, + { + "epoch": 3.1533824514400535, + "grad_norm": 2.31155321617562, + "learning_rate": 6.520144398209752e-07, + "loss": 0.6984, + "step": 4708 + }, + { + "epoch": 3.1540522438044207, + "grad_norm": 2.742213121642182, + "learning_rate": 6.510304615888618e-07, + "loss": 0.6293, + "step": 4709 + }, + { + "epoch": 3.1547220361687875, + "grad_norm": 2.6362106717898235, + "learning_rate": 6.500471152361023e-07, + "loss": 0.853, + "step": 4710 + }, + { + "epoch": 3.155391828533155, + "grad_norm": 2.8928312577243607, + "learning_rate": 6.490644010987526e-07, + "loss": 0.8897, + "step": 4711 + }, + { + "epoch": 3.1560616208975216, + "grad_norm": 2.9000321661429003, + "learning_rate": 6.480823195126504e-07, + "loss": 0.9356, + "step": 4712 + }, + { + "epoch": 3.156731413261889, + "grad_norm": 2.32041145164591, + "learning_rate": 6.471008708134191e-07, + "loss": 0.7437, + "step": 4713 + }, + { + "epoch": 3.1574012056262557, + "grad_norm": 2.5827887499571154, + "learning_rate": 6.461200553364666e-07, + "loss": 0.8193, + "step": 4714 + }, + { + "epoch": 3.158070997990623, + "grad_norm": 3.0263171067501866, + "learning_rate": 6.451398734169836e-07, + "loss": 0.7962, + "step": 4715 + }, + { + "epoch": 3.15874079035499, + "grad_norm": 2.2026376454794496, + "learning_rate": 6.441603253899431e-07, + "loss": 0.4965, + "step": 4716 + }, + { + "epoch": 3.159410582719357, + "grad_norm": 2.7302677520774163, + "learning_rate": 6.43181411590102e-07, + "loss": 0.7832, + "step": 4717 + }, + { + "epoch": 3.160080375083724, + "grad_norm": 4.1254461245526635, + "learning_rate": 6.422031323520022e-07, + "loss": 0.7697, + "step": 4718 + }, + { + "epoch": 3.160750167448091, + "grad_norm": 3.1533628632833315, + "learning_rate": 6.412254880099672e-07, + "loss": 0.8247, + "step": 4719 + }, + { + "epoch": 3.161419959812458, + "grad_norm": 3.3726982851480733, + "learning_rate": 6.402484788981045e-07, + "loss": 0.8039, + "step": 4720 + }, + { + "epoch": 3.162089752176825, + "grad_norm": 3.131216366424982, + "learning_rate": 6.392721053503012e-07, + "loss": 0.8198, + "step": 4721 + }, + { + "epoch": 3.162759544541192, + "grad_norm": 2.8097721291416216, + "learning_rate": 6.382963677002324e-07, + "loss": 0.6951, + "step": 4722 + }, + { + "epoch": 3.1634293369055593, + "grad_norm": 3.3903132167463985, + "learning_rate": 6.373212662813525e-07, + "loss": 0.8314, + "step": 4723 + }, + { + "epoch": 3.1640991292699265, + "grad_norm": 2.8782461838889204, + "learning_rate": 6.363468014268997e-07, + "loss": 0.7904, + "step": 4724 + }, + { + "epoch": 3.1647689216342934, + "grad_norm": 2.6026885861399287, + "learning_rate": 6.353729734698922e-07, + "loss": 0.8232, + "step": 4725 + }, + { + "epoch": 3.1654387139986606, + "grad_norm": 2.559478991570689, + "learning_rate": 6.343997827431347e-07, + "loss": 
0.7687, + "step": 4726 + }, + { + "epoch": 3.1661085063630274, + "grad_norm": 2.8682424249193303, + "learning_rate": 6.334272295792112e-07, + "loss": 0.8081, + "step": 4727 + }, + { + "epoch": 3.1667782987273947, + "grad_norm": 2.8499586537798796, + "learning_rate": 6.324553143104898e-07, + "loss": 0.8744, + "step": 4728 + }, + { + "epoch": 3.1674480910917615, + "grad_norm": 2.7403234061447614, + "learning_rate": 6.314840372691164e-07, + "loss": 0.9348, + "step": 4729 + }, + { + "epoch": 3.1681178834561288, + "grad_norm": 2.3552388110121334, + "learning_rate": 6.305133987870246e-07, + "loss": 0.908, + "step": 4730 + }, + { + "epoch": 3.1687876758204956, + "grad_norm": 2.7233593835049823, + "learning_rate": 6.295433991959258e-07, + "loss": 0.7359, + "step": 4731 + }, + { + "epoch": 3.169457468184863, + "grad_norm": 2.400355325012571, + "learning_rate": 6.285740388273154e-07, + "loss": 0.749, + "step": 4732 + }, + { + "epoch": 3.1701272605492297, + "grad_norm": 2.290280142978726, + "learning_rate": 6.276053180124667e-07, + "loss": 0.5835, + "step": 4733 + }, + { + "epoch": 3.170797052913597, + "grad_norm": 2.954516624464961, + "learning_rate": 6.266372370824389e-07, + "loss": 0.8458, + "step": 4734 + }, + { + "epoch": 3.1714668452779637, + "grad_norm": 2.8946559426863, + "learning_rate": 6.256697963680702e-07, + "loss": 0.812, + "step": 4735 + }, + { + "epoch": 3.172136637642331, + "grad_norm": 3.3397696985247904, + "learning_rate": 6.247029961999803e-07, + "loss": 0.7977, + "step": 4736 + }, + { + "epoch": 3.172806430006698, + "grad_norm": 2.7481769452013425, + "learning_rate": 6.237368369085697e-07, + "loss": 0.5461, + "step": 4737 + }, + { + "epoch": 3.173476222371065, + "grad_norm": 2.3233865067375015, + "learning_rate": 6.227713188240206e-07, + "loss": 0.4846, + "step": 4738 + }, + { + "epoch": 3.174146014735432, + "grad_norm": 2.520634319684336, + "learning_rate": 6.218064422762954e-07, + "loss": 0.6928, + "step": 4739 + }, + { + "epoch": 3.174815807099799, + "grad_norm": 2.4337567989282283, + "learning_rate": 6.208422075951376e-07, + "loss": 0.8185, + "step": 4740 + }, + { + "epoch": 3.175485599464166, + "grad_norm": 2.95612002412158, + "learning_rate": 6.198786151100713e-07, + "loss": 0.8302, + "step": 4741 + }, + { + "epoch": 3.1761553918285332, + "grad_norm": 3.296245498052128, + "learning_rate": 6.189156651504011e-07, + "loss": 0.7709, + "step": 4742 + }, + { + "epoch": 3.1768251841929, + "grad_norm": 3.487319004656779, + "learning_rate": 6.179533580452118e-07, + "loss": 0.883, + "step": 4743 + }, + { + "epoch": 3.1774949765572673, + "grad_norm": 2.7019142256328603, + "learning_rate": 6.169916941233694e-07, + "loss": 0.7452, + "step": 4744 + }, + { + "epoch": 3.178164768921634, + "grad_norm": 2.7857722807221093, + "learning_rate": 6.160306737135188e-07, + "loss": 0.7543, + "step": 4745 + }, + { + "epoch": 3.1788345612860014, + "grad_norm": 2.682978738202796, + "learning_rate": 6.150702971440858e-07, + "loss": 0.6561, + "step": 4746 + }, + { + "epoch": 3.179504353650368, + "grad_norm": 3.1354677431219535, + "learning_rate": 6.14110564743276e-07, + "loss": 0.91, + "step": 4747 + }, + { + "epoch": 3.1801741460147355, + "grad_norm": 2.6002488361021254, + "learning_rate": 6.131514768390748e-07, + "loss": 0.7835, + "step": 4748 + }, + { + "epoch": 3.1808439383791023, + "grad_norm": 2.822237166909353, + "learning_rate": 6.121930337592475e-07, + "loss": 0.8172, + "step": 4749 + }, + { + "epoch": 3.1815137307434695, + "grad_norm": 4.356815835771393, + "learning_rate": 
6.112352358313389e-07, + "loss": 0.8154, + "step": 4750 + }, + { + "epoch": 3.1821835231078364, + "grad_norm": 2.8751232852412683, + "learning_rate": 6.102780833826732e-07, + "loss": 0.8418, + "step": 4751 + }, + { + "epoch": 3.1828533154722036, + "grad_norm": 2.5971493080185617, + "learning_rate": 6.093215767403543e-07, + "loss": 0.8124, + "step": 4752 + }, + { + "epoch": 3.183523107836571, + "grad_norm": 2.3823998407818836, + "learning_rate": 6.083657162312653e-07, + "loss": 0.7718, + "step": 4753 + }, + { + "epoch": 3.1841929002009377, + "grad_norm": 2.72216774002768, + "learning_rate": 6.074105021820681e-07, + "loss": 0.7959, + "step": 4754 + }, + { + "epoch": 3.1848626925653045, + "grad_norm": 2.6299841936931925, + "learning_rate": 6.064559349192046e-07, + "loss": 0.7851, + "step": 4755 + }, + { + "epoch": 3.185532484929672, + "grad_norm": 2.643090167645218, + "learning_rate": 6.055020147688942e-07, + "loss": 0.7537, + "step": 4756 + }, + { + "epoch": 3.186202277294039, + "grad_norm": 2.8492535284063156, + "learning_rate": 6.045487420571369e-07, + "loss": 0.6967, + "step": 4757 + }, + { + "epoch": 3.186872069658406, + "grad_norm": 3.1900187645227147, + "learning_rate": 6.035961171097099e-07, + "loss": 0.8136, + "step": 4758 + }, + { + "epoch": 3.187541862022773, + "grad_norm": 3.434480625733463, + "learning_rate": 6.026441402521702e-07, + "loss": 0.9165, + "step": 4759 + }, + { + "epoch": 3.18821165438714, + "grad_norm": 2.45917268874636, + "learning_rate": 6.016928118098525e-07, + "loss": 0.8394, + "step": 4760 + }, + { + "epoch": 3.188881446751507, + "grad_norm": 4.473856856879308, + "learning_rate": 6.007421321078705e-07, + "loss": 0.7054, + "step": 4761 + }, + { + "epoch": 3.189551239115874, + "grad_norm": 2.742759875053477, + "learning_rate": 5.997921014711156e-07, + "loss": 0.7776, + "step": 4762 + }, + { + "epoch": 3.1902210314802413, + "grad_norm": 2.840935803701259, + "learning_rate": 5.988427202242575e-07, + "loss": 0.8417, + "step": 4763 + }, + { + "epoch": 3.190890823844608, + "grad_norm": 2.439092734307995, + "learning_rate": 5.978939886917442e-07, + "loss": 0.7919, + "step": 4764 + }, + { + "epoch": 3.1915606162089754, + "grad_norm": 2.7952772911356933, + "learning_rate": 5.969459071978032e-07, + "loss": 0.752, + "step": 4765 + }, + { + "epoch": 3.192230408573342, + "grad_norm": 2.3786612775754796, + "learning_rate": 5.959984760664361e-07, + "loss": 0.5495, + "step": 4766 + }, + { + "epoch": 3.1929002009377094, + "grad_norm": 2.4523010347621494, + "learning_rate": 5.950516956214255e-07, + "loss": 0.6951, + "step": 4767 + }, + { + "epoch": 3.1935699933020762, + "grad_norm": 2.938985316872771, + "learning_rate": 5.941055661863296e-07, + "loss": 0.7371, + "step": 4768 + }, + { + "epoch": 3.1942397856664435, + "grad_norm": 2.7574339812661357, + "learning_rate": 5.931600880844874e-07, + "loss": 0.7443, + "step": 4769 + }, + { + "epoch": 3.1949095780308103, + "grad_norm": 2.975650575366438, + "learning_rate": 5.922152616390103e-07, + "loss": 0.7136, + "step": 4770 + }, + { + "epoch": 3.1955793703951776, + "grad_norm": 2.585305842898274, + "learning_rate": 5.912710871727911e-07, + "loss": 0.7984, + "step": 4771 + }, + { + "epoch": 3.1962491627595444, + "grad_norm": 2.392423284968659, + "learning_rate": 5.903275650084975e-07, + "loss": 0.8219, + "step": 4772 + }, + { + "epoch": 3.1969189551239117, + "grad_norm": 3.6294413593283763, + "learning_rate": 5.893846954685767e-07, + "loss": 0.8911, + "step": 4773 + }, + { + "epoch": 3.1975887474882785, + "grad_norm": 2.8751232348070976, 
+ "learning_rate": 5.884424788752499e-07, + "loss": 0.6901, + "step": 4774 + }, + { + "epoch": 3.1982585398526457, + "grad_norm": 3.0686441995789733, + "learning_rate": 5.875009155505168e-07, + "loss": 0.8117, + "step": 4775 + }, + { + "epoch": 3.1989283322170126, + "grad_norm": 2.624353932372788, + "learning_rate": 5.865600058161533e-07, + "loss": 0.751, + "step": 4776 + }, + { + "epoch": 3.19959812458138, + "grad_norm": 2.5927232474321418, + "learning_rate": 5.856197499937141e-07, + "loss": 0.6797, + "step": 4777 + }, + { + "epoch": 3.2002679169457466, + "grad_norm": 2.539965322196584, + "learning_rate": 5.846801484045267e-07, + "loss": 0.7783, + "step": 4778 + }, + { + "epoch": 3.200937709310114, + "grad_norm": 2.865979895640912, + "learning_rate": 5.837412013696972e-07, + "loss": 0.8891, + "step": 4779 + }, + { + "epoch": 3.2016075016744807, + "grad_norm": 2.8988384487323233, + "learning_rate": 5.828029092101073e-07, + "loss": 0.8796, + "step": 4780 + }, + { + "epoch": 3.202277294038848, + "grad_norm": 2.8194043272880216, + "learning_rate": 5.818652722464175e-07, + "loss": 0.7242, + "step": 4781 + }, + { + "epoch": 3.2029470864032152, + "grad_norm": 2.885340782252545, + "learning_rate": 5.809282907990596e-07, + "loss": 0.6677, + "step": 4782 + }, + { + "epoch": 3.203616878767582, + "grad_norm": 2.924471929031055, + "learning_rate": 5.799919651882455e-07, + "loss": 0.8078, + "step": 4783 + }, + { + "epoch": 3.204286671131949, + "grad_norm": 2.639703954356499, + "learning_rate": 5.790562957339599e-07, + "loss": 0.7375, + "step": 4784 + }, + { + "epoch": 3.204956463496316, + "grad_norm": 2.5777754106551223, + "learning_rate": 5.781212827559673e-07, + "loss": 0.753, + "step": 4785 + }, + { + "epoch": 3.2056262558606834, + "grad_norm": 2.8151753289702315, + "learning_rate": 5.771869265738034e-07, + "loss": 0.7187, + "step": 4786 + }, + { + "epoch": 3.20629604822505, + "grad_norm": 2.648036213402655, + "learning_rate": 5.76253227506782e-07, + "loss": 0.8333, + "step": 4787 + }, + { + "epoch": 3.2069658405894175, + "grad_norm": 2.9912581753302585, + "learning_rate": 5.75320185873991e-07, + "loss": 0.6533, + "step": 4788 + }, + { + "epoch": 3.2076356329537843, + "grad_norm": 2.618411990331516, + "learning_rate": 5.743878019942964e-07, + "loss": 0.7981, + "step": 4789 + }, + { + "epoch": 3.2083054253181515, + "grad_norm": 3.02534161750478, + "learning_rate": 5.734560761863351e-07, + "loss": 0.9369, + "step": 4790 + }, + { + "epoch": 3.2089752176825184, + "grad_norm": 2.352655647909705, + "learning_rate": 5.725250087685224e-07, + "loss": 0.7589, + "step": 4791 + }, + { + "epoch": 3.2096450100468856, + "grad_norm": 3.2245365678009725, + "learning_rate": 5.715946000590469e-07, + "loss": 0.7417, + "step": 4792 + }, + { + "epoch": 3.2103148024112524, + "grad_norm": 2.847728001428527, + "learning_rate": 5.706648503758738e-07, + "loss": 0.8924, + "step": 4793 + }, + { + "epoch": 3.2109845947756197, + "grad_norm": 3.3244381238737617, + "learning_rate": 5.697357600367423e-07, + "loss": 0.9506, + "step": 4794 + }, + { + "epoch": 3.2116543871399865, + "grad_norm": 2.9563268777806764, + "learning_rate": 5.68807329359164e-07, + "loss": 0.685, + "step": 4795 + }, + { + "epoch": 3.212324179504354, + "grad_norm": 2.5714527163317484, + "learning_rate": 5.678795586604291e-07, + "loss": 0.9271, + "step": 4796 + }, + { + "epoch": 3.2129939718687206, + "grad_norm": 2.37501417617076, + "learning_rate": 5.669524482575995e-07, + "loss": 0.6713, + "step": 4797 + }, + { + "epoch": 3.213663764233088, + "grad_norm": 
3.0989947090845322, + "learning_rate": 5.660259984675126e-07, + "loss": 0.8534, + "step": 4798 + }, + { + "epoch": 3.2143335565974547, + "grad_norm": 2.70672686635647, + "learning_rate": 5.651002096067781e-07, + "loss": 0.9294, + "step": 4799 + }, + { + "epoch": 3.215003348961822, + "grad_norm": 2.837454442756623, + "learning_rate": 5.641750819917829e-07, + "loss": 0.8071, + "step": 4800 + }, + { + "epoch": 3.2156731413261888, + "grad_norm": 2.902240438341263, + "learning_rate": 5.632506159386861e-07, + "loss": 0.7926, + "step": 4801 + }, + { + "epoch": 3.216342933690556, + "grad_norm": 3.6828215382925755, + "learning_rate": 5.623268117634214e-07, + "loss": 0.7334, + "step": 4802 + }, + { + "epoch": 3.217012726054923, + "grad_norm": 2.8052273781482624, + "learning_rate": 5.614036697816938e-07, + "loss": 0.7735, + "step": 4803 + }, + { + "epoch": 3.21768251841929, + "grad_norm": 2.433277667114568, + "learning_rate": 5.604811903089863e-07, + "loss": 0.5618, + "step": 4804 + }, + { + "epoch": 3.218352310783657, + "grad_norm": 2.7330564345960573, + "learning_rate": 5.595593736605526e-07, + "loss": 0.7955, + "step": 4805 + }, + { + "epoch": 3.219022103148024, + "grad_norm": 2.7943371270336024, + "learning_rate": 5.586382201514212e-07, + "loss": 0.7737, + "step": 4806 + }, + { + "epoch": 3.219691895512391, + "grad_norm": 2.6116500848042086, + "learning_rate": 5.577177300963912e-07, + "loss": 0.8398, + "step": 4807 + }, + { + "epoch": 3.2203616878767582, + "grad_norm": 2.4346409113392986, + "learning_rate": 5.56797903810039e-07, + "loss": 0.7746, + "step": 4808 + }, + { + "epoch": 3.221031480241125, + "grad_norm": 2.735840706630352, + "learning_rate": 5.558787416067118e-07, + "loss": 0.8377, + "step": 4809 + }, + { + "epoch": 3.2217012726054923, + "grad_norm": 2.5093647033538837, + "learning_rate": 5.549602438005311e-07, + "loss": 0.5976, + "step": 4810 + }, + { + "epoch": 3.222371064969859, + "grad_norm": 2.879299740073321, + "learning_rate": 5.540424107053885e-07, + "loss": 0.7882, + "step": 4811 + }, + { + "epoch": 3.2230408573342264, + "grad_norm": 2.694625605403166, + "learning_rate": 5.53125242634952e-07, + "loss": 0.7463, + "step": 4812 + }, + { + "epoch": 3.2237106496985932, + "grad_norm": 2.8103856200532578, + "learning_rate": 5.522087399026607e-07, + "loss": 0.915, + "step": 4813 + }, + { + "epoch": 3.2243804420629605, + "grad_norm": 2.894388173881319, + "learning_rate": 5.51292902821727e-07, + "loss": 0.9517, + "step": 4814 + }, + { + "epoch": 3.2250502344273277, + "grad_norm": 2.700120752827592, + "learning_rate": 5.503777317051332e-07, + "loss": 0.602, + "step": 4815 + }, + { + "epoch": 3.2257200267916946, + "grad_norm": 3.0508800769403437, + "learning_rate": 5.49463226865638e-07, + "loss": 0.7906, + "step": 4816 + }, + { + "epoch": 3.226389819156062, + "grad_norm": 2.5600665232833495, + "learning_rate": 5.4854938861577e-07, + "loss": 0.8675, + "step": 4817 + }, + { + "epoch": 3.2270596115204286, + "grad_norm": 3.433120952384493, + "learning_rate": 5.476362172678309e-07, + "loss": 0.8792, + "step": 4818 + }, + { + "epoch": 3.227729403884796, + "grad_norm": 2.5921575770363012, + "learning_rate": 5.46723713133892e-07, + "loss": 0.846, + "step": 4819 + }, + { + "epoch": 3.2283991962491627, + "grad_norm": 3.4567825945753823, + "learning_rate": 5.458118765258008e-07, + "loss": 0.7859, + "step": 4820 + }, + { + "epoch": 3.22906898861353, + "grad_norm": 2.6865643513085034, + "learning_rate": 5.449007077551741e-07, + "loss": 0.7758, + "step": 4821 + }, + { + "epoch": 3.229738780977897, + 
"grad_norm": 5.479555483382819, + "learning_rate": 5.439902071334002e-07, + "loss": 0.5393, + "step": 4822 + }, + { + "epoch": 3.230408573342264, + "grad_norm": 2.6275090681517863, + "learning_rate": 5.430803749716404e-07, + "loss": 0.8684, + "step": 4823 + }, + { + "epoch": 3.231078365706631, + "grad_norm": 3.1160949022340882, + "learning_rate": 5.421712115808264e-07, + "loss": 0.8194, + "step": 4824 + }, + { + "epoch": 3.231748158070998, + "grad_norm": 2.5801699676686485, + "learning_rate": 5.412627172716623e-07, + "loss": 0.6271, + "step": 4825 + }, + { + "epoch": 3.232417950435365, + "grad_norm": 2.339053922423891, + "learning_rate": 5.403548923546228e-07, + "loss": 0.6735, + "step": 4826 + }, + { + "epoch": 3.233087742799732, + "grad_norm": 3.0111043984988672, + "learning_rate": 5.394477371399542e-07, + "loss": 0.6692, + "step": 4827 + }, + { + "epoch": 3.233757535164099, + "grad_norm": 2.6457094681690974, + "learning_rate": 5.385412519376737e-07, + "loss": 0.716, + "step": 4828 + }, + { + "epoch": 3.2344273275284663, + "grad_norm": 2.9317578117614285, + "learning_rate": 5.376354370575701e-07, + "loss": 0.7003, + "step": 4829 + }, + { + "epoch": 3.235097119892833, + "grad_norm": 3.752117633373258, + "learning_rate": 5.367302928092022e-07, + "loss": 0.8737, + "step": 4830 + }, + { + "epoch": 3.2357669122572004, + "grad_norm": 2.7209223779115987, + "learning_rate": 5.358258195019003e-07, + "loss": 0.5639, + "step": 4831 + }, + { + "epoch": 3.236436704621567, + "grad_norm": 2.5705714543103344, + "learning_rate": 5.349220174447656e-07, + "loss": 0.6631, + "step": 4832 + }, + { + "epoch": 3.2371064969859344, + "grad_norm": 2.902386176733501, + "learning_rate": 5.340188869466689e-07, + "loss": 0.8799, + "step": 4833 + }, + { + "epoch": 3.2377762893503013, + "grad_norm": 3.3798696742645906, + "learning_rate": 5.331164283162527e-07, + "loss": 0.8132, + "step": 4834 + }, + { + "epoch": 3.2384460817146685, + "grad_norm": 2.5514342674281285, + "learning_rate": 5.322146418619287e-07, + "loss": 0.609, + "step": 4835 + }, + { + "epoch": 3.2391158740790353, + "grad_norm": 2.4697870362053385, + "learning_rate": 5.313135278918802e-07, + "loss": 0.7311, + "step": 4836 + }, + { + "epoch": 3.2397856664434026, + "grad_norm": 2.7877219970266776, + "learning_rate": 5.304130867140591e-07, + "loss": 0.9191, + "step": 4837 + }, + { + "epoch": 3.2404554588077694, + "grad_norm": 2.8700983812775345, + "learning_rate": 5.295133186361892e-07, + "loss": 0.7029, + "step": 4838 + }, + { + "epoch": 3.2411252511721367, + "grad_norm": 2.3603193428160805, + "learning_rate": 5.28614223965763e-07, + "loss": 0.6496, + "step": 4839 + }, + { + "epoch": 3.2417950435365035, + "grad_norm": 2.8152886946724425, + "learning_rate": 5.277158030100429e-07, + "loss": 0.8559, + "step": 4840 + }, + { + "epoch": 3.2424648359008708, + "grad_norm": 2.7965903056809207, + "learning_rate": 5.268180560760614e-07, + "loss": 0.7456, + "step": 4841 + }, + { + "epoch": 3.2431346282652376, + "grad_norm": 2.676067033787458, + "learning_rate": 5.259209834706211e-07, + "loss": 0.8737, + "step": 4842 + }, + { + "epoch": 3.243804420629605, + "grad_norm": 2.575213481405073, + "learning_rate": 5.250245855002931e-07, + "loss": 0.7837, + "step": 4843 + }, + { + "epoch": 3.244474212993972, + "grad_norm": 3.1489081272629096, + "learning_rate": 5.241288624714186e-07, + "loss": 0.7576, + "step": 4844 + }, + { + "epoch": 3.245144005358339, + "grad_norm": 3.293863413693666, + "learning_rate": 5.232338146901083e-07, + "loss": 0.8493, + "step": 4845 + }, + { + 
"epoch": 3.2458137977227057, + "grad_norm": 3.176145658290598, + "learning_rate": 5.223394424622419e-07, + "loss": 0.5996, + "step": 4846 + }, + { + "epoch": 3.246483590087073, + "grad_norm": 2.7653187618147124, + "learning_rate": 5.21445746093468e-07, + "loss": 0.9125, + "step": 4847 + }, + { + "epoch": 3.2471533824514403, + "grad_norm": 3.6178480466764733, + "learning_rate": 5.205527258892043e-07, + "loss": 0.8712, + "step": 4848 + }, + { + "epoch": 3.247823174815807, + "grad_norm": 2.6487543094885595, + "learning_rate": 5.196603821546381e-07, + "loss": 0.7124, + "step": 4849 + }, + { + "epoch": 3.2484929671801743, + "grad_norm": 3.250312233562758, + "learning_rate": 5.187687151947243e-07, + "loss": 0.714, + "step": 4850 + }, + { + "epoch": 3.249162759544541, + "grad_norm": 2.7864695634665226, + "learning_rate": 5.178777253141878e-07, + "loss": 0.7891, + "step": 4851 + }, + { + "epoch": 3.2498325519089084, + "grad_norm": 2.6093914814087475, + "learning_rate": 5.169874128175212e-07, + "loss": 0.6957, + "step": 4852 + }, + { + "epoch": 3.2505023442732752, + "grad_norm": 2.7516057517843264, + "learning_rate": 5.160977780089857e-07, + "loss": 0.9162, + "step": 4853 + }, + { + "epoch": 3.2511721366376425, + "grad_norm": 3.374480656584969, + "learning_rate": 5.152088211926109e-07, + "loss": 0.8268, + "step": 4854 + }, + { + "epoch": 3.2518419290020093, + "grad_norm": 2.845707819238481, + "learning_rate": 5.14320542672197e-07, + "loss": 0.8535, + "step": 4855 + }, + { + "epoch": 3.2525117213663766, + "grad_norm": 2.720573259524309, + "learning_rate": 5.134329427513076e-07, + "loss": 0.8494, + "step": 4856 + }, + { + "epoch": 3.2531815137307434, + "grad_norm": 2.721953350405363, + "learning_rate": 5.125460217332784e-07, + "loss": 0.6726, + "step": 4857 + }, + { + "epoch": 3.2538513060951106, + "grad_norm": 2.728901548929651, + "learning_rate": 5.116597799212108e-07, + "loss": 0.7679, + "step": 4858 + }, + { + "epoch": 3.2545210984594775, + "grad_norm": 2.667076705021638, + "learning_rate": 5.107742176179776e-07, + "loss": 0.8629, + "step": 4859 + }, + { + "epoch": 3.2551908908238447, + "grad_norm": 2.7544339941779055, + "learning_rate": 5.098893351262141e-07, + "loss": 0.8244, + "step": 4860 + }, + { + "epoch": 3.2558606831882115, + "grad_norm": 2.688738742646756, + "learning_rate": 5.090051327483269e-07, + "loss": 0.7665, + "step": 4861 + }, + { + "epoch": 3.256530475552579, + "grad_norm": 2.726193689850518, + "learning_rate": 5.081216107864892e-07, + "loss": 0.8006, + "step": 4862 + }, + { + "epoch": 3.2572002679169456, + "grad_norm": 2.651581781353739, + "learning_rate": 5.072387695426423e-07, + "loss": 0.6338, + "step": 4863 + }, + { + "epoch": 3.257870060281313, + "grad_norm": 3.2168699490094457, + "learning_rate": 5.063566093184949e-07, + "loss": 0.8698, + "step": 4864 + }, + { + "epoch": 3.2585398526456797, + "grad_norm": 2.3742739021131625, + "learning_rate": 5.054751304155209e-07, + "loss": 0.618, + "step": 4865 + }, + { + "epoch": 3.259209645010047, + "grad_norm": 2.9674102697905003, + "learning_rate": 5.045943331349626e-07, + "loss": 0.7759, + "step": 4866 + }, + { + "epoch": 3.2598794373744138, + "grad_norm": 2.6731072081337715, + "learning_rate": 5.037142177778315e-07, + "loss": 0.8277, + "step": 4867 + }, + { + "epoch": 3.260549229738781, + "grad_norm": 2.705117453221013, + "learning_rate": 5.028347846449036e-07, + "loss": 0.7468, + "step": 4868 + }, + { + "epoch": 3.261219022103148, + "grad_norm": 2.546282708814431, + "learning_rate": 5.019560340367213e-07, + "loss": 0.7916, + 
"step": 4869 + }, + { + "epoch": 3.261888814467515, + "grad_norm": 3.1567708279251465, + "learning_rate": 5.010779662535947e-07, + "loss": 0.7422, + "step": 4870 + }, + { + "epoch": 3.262558606831882, + "grad_norm": 2.570770186136929, + "learning_rate": 5.002005815956021e-07, + "loss": 0.8778, + "step": 4871 + }, + { + "epoch": 3.263228399196249, + "grad_norm": 2.6931583930922094, + "learning_rate": 4.993238803625869e-07, + "loss": 0.6236, + "step": 4872 + }, + { + "epoch": 3.2638981915606164, + "grad_norm": 2.648193675711626, + "learning_rate": 4.984478628541573e-07, + "loss": 0.7546, + "step": 4873 + }, + { + "epoch": 3.2645679839249833, + "grad_norm": 3.62830558178581, + "learning_rate": 4.975725293696892e-07, + "loss": 0.7555, + "step": 4874 + }, + { + "epoch": 3.26523777628935, + "grad_norm": 2.929957829227019, + "learning_rate": 4.966978802083272e-07, + "loss": 0.845, + "step": 4875 + }, + { + "epoch": 3.2659075686537173, + "grad_norm": 2.718700380992981, + "learning_rate": 4.95823915668979e-07, + "loss": 0.7585, + "step": 4876 + }, + { + "epoch": 3.2665773610180846, + "grad_norm": 3.1792410646752716, + "learning_rate": 4.949506360503184e-07, + "loss": 0.8123, + "step": 4877 + }, + { + "epoch": 3.2672471533824514, + "grad_norm": 2.6337236601188514, + "learning_rate": 4.940780416507854e-07, + "loss": 0.7535, + "step": 4878 + }, + { + "epoch": 3.2679169457468187, + "grad_norm": 2.4833288851097772, + "learning_rate": 4.932061327685881e-07, + "loss": 0.6902, + "step": 4879 + }, + { + "epoch": 3.2685867381111855, + "grad_norm": 2.9007464415812407, + "learning_rate": 4.92334909701698e-07, + "loss": 0.8317, + "step": 4880 + }, + { + "epoch": 3.2692565304755528, + "grad_norm": 2.7144365958516237, + "learning_rate": 4.914643727478519e-07, + "loss": 0.8036, + "step": 4881 + }, + { + "epoch": 3.2699263228399196, + "grad_norm": 3.0980425792654844, + "learning_rate": 4.905945222045527e-07, + "loss": 0.7757, + "step": 4882 + }, + { + "epoch": 3.270596115204287, + "grad_norm": 3.380722382472791, + "learning_rate": 4.897253583690703e-07, + "loss": 0.7431, + "step": 4883 + }, + { + "epoch": 3.2712659075686537, + "grad_norm": 3.437244398000318, + "learning_rate": 4.88856881538439e-07, + "loss": 0.7692, + "step": 4884 + }, + { + "epoch": 3.271935699933021, + "grad_norm": 3.103810879980463, + "learning_rate": 4.879890920094557e-07, + "loss": 0.9247, + "step": 4885 + }, + { + "epoch": 3.2726054922973877, + "grad_norm": 2.8703551316032803, + "learning_rate": 4.871219900786866e-07, + "loss": 0.7308, + "step": 4886 + }, + { + "epoch": 3.273275284661755, + "grad_norm": 3.8161693710616924, + "learning_rate": 4.862555760424606e-07, + "loss": 0.8303, + "step": 4887 + }, + { + "epoch": 3.273945077026122, + "grad_norm": 2.8121404182447174, + "learning_rate": 4.85389850196872e-07, + "loss": 0.7589, + "step": 4888 + }, + { + "epoch": 3.274614869390489, + "grad_norm": 2.5789130653879972, + "learning_rate": 4.845248128377786e-07, + "loss": 0.7224, + "step": 4889 + }, + { + "epoch": 3.275284661754856, + "grad_norm": 2.743016112163826, + "learning_rate": 4.836604642608053e-07, + "loss": 0.7902, + "step": 4890 + }, + { + "epoch": 3.275954454119223, + "grad_norm": 2.3818500361510444, + "learning_rate": 4.827968047613404e-07, + "loss": 0.5745, + "step": 4891 + }, + { + "epoch": 3.27662424648359, + "grad_norm": 3.4219987669011376, + "learning_rate": 4.819338346345371e-07, + "loss": 0.9109, + "step": 4892 + }, + { + "epoch": 3.2772940388479572, + "grad_norm": 2.7331021950734127, + "learning_rate": 4.810715541753105e-07, + 
"loss": 0.859, + "step": 4893 + }, + { + "epoch": 3.277963831212324, + "grad_norm": 2.824477248917966, + "learning_rate": 4.802099636783444e-07, + "loss": 0.7294, + "step": 4894 + }, + { + "epoch": 3.2786336235766913, + "grad_norm": 2.9702810467592484, + "learning_rate": 4.793490634380837e-07, + "loss": 0.8104, + "step": 4895 + }, + { + "epoch": 3.279303415941058, + "grad_norm": 2.5456356758855527, + "learning_rate": 4.784888537487395e-07, + "loss": 0.7488, + "step": 4896 + }, + { + "epoch": 3.2799732083054254, + "grad_norm": 2.922039478522317, + "learning_rate": 4.776293349042829e-07, + "loss": 0.7408, + "step": 4897 + }, + { + "epoch": 3.280643000669792, + "grad_norm": 4.731011232047965, + "learning_rate": 4.767705071984541e-07, + "loss": 0.7906, + "step": 4898 + }, + { + "epoch": 3.2813127930341595, + "grad_norm": 3.225127760721164, + "learning_rate": 4.75912370924754e-07, + "loss": 0.8337, + "step": 4899 + }, + { + "epoch": 3.2819825853985263, + "grad_norm": 2.71695172103566, + "learning_rate": 4.7505492637644784e-07, + "loss": 0.8405, + "step": 4900 + }, + { + "epoch": 3.2826523777628935, + "grad_norm": 2.512763788028351, + "learning_rate": 4.741981738465645e-07, + "loss": 0.7054, + "step": 4901 + }, + { + "epoch": 3.283322170127261, + "grad_norm": 2.590869728697413, + "learning_rate": 4.733421136278962e-07, + "loss": 0.6647, + "step": 4902 + }, + { + "epoch": 3.2839919624916276, + "grad_norm": 2.4777022797386197, + "learning_rate": 4.724867460129992e-07, + "loss": 0.8576, + "step": 4903 + }, + { + "epoch": 3.2846617548559944, + "grad_norm": 3.10045239308407, + "learning_rate": 4.7163207129419243e-07, + "loss": 0.8622, + "step": 4904 + }, + { + "epoch": 3.2853315472203617, + "grad_norm": 2.897481506973721, + "learning_rate": 4.7077808976355805e-07, + "loss": 0.873, + "step": 4905 + }, + { + "epoch": 3.286001339584729, + "grad_norm": 2.455233445335044, + "learning_rate": 4.699248017129415e-07, + "loss": 0.626, + "step": 4906 + }, + { + "epoch": 3.2866711319490958, + "grad_norm": 2.894207476547868, + "learning_rate": 4.6907220743395157e-07, + "loss": 0.8438, + "step": 4907 + }, + { + "epoch": 3.2873409243134626, + "grad_norm": 2.718104706289506, + "learning_rate": 4.6822030721795894e-07, + "loss": 0.8063, + "step": 4908 + }, + { + "epoch": 3.28801071667783, + "grad_norm": 2.857190161139987, + "learning_rate": 4.6736910135609827e-07, + "loss": 0.8755, + "step": 4909 + }, + { + "epoch": 3.288680509042197, + "grad_norm": 3.4072601905447857, + "learning_rate": 4.665185901392663e-07, + "loss": 0.7567, + "step": 4910 + }, + { + "epoch": 3.289350301406564, + "grad_norm": 2.5847140698309485, + "learning_rate": 4.656687738581225e-07, + "loss": 0.7165, + "step": 4911 + }, + { + "epoch": 3.290020093770931, + "grad_norm": 2.8108398173690916, + "learning_rate": 4.648196528030885e-07, + "loss": 0.7513, + "step": 4912 + }, + { + "epoch": 3.290689886135298, + "grad_norm": 2.6666163734002457, + "learning_rate": 4.639712272643493e-07, + "loss": 0.8137, + "step": 4913 + }, + { + "epoch": 3.2913596784996653, + "grad_norm": 2.8664551679790304, + "learning_rate": 4.6312349753185105e-07, + "loss": 0.8258, + "step": 4914 + }, + { + "epoch": 3.292029470864032, + "grad_norm": 2.7052109061552865, + "learning_rate": 4.6227646389530283e-07, + "loss": 0.9677, + "step": 4915 + }, + { + "epoch": 3.2926992632283993, + "grad_norm": 2.712784680918064, + "learning_rate": 4.6143012664417565e-07, + "loss": 0.8509, + "step": 4916 + }, + { + "epoch": 3.293369055592766, + "grad_norm": 3.1904893978588973, + "learning_rate": 
4.6058448606770253e-07, + "loss": 0.894, + "step": 4917 + }, + { + "epoch": 3.2940388479571334, + "grad_norm": 2.6908813762689587, + "learning_rate": 4.597395424548784e-07, + "loss": 0.5774, + "step": 4918 + }, + { + "epoch": 3.2947086403215002, + "grad_norm": 2.622217573557638, + "learning_rate": 4.5889529609445984e-07, + "loss": 0.8037, + "step": 4919 + }, + { + "epoch": 3.2953784326858675, + "grad_norm": 2.145670496382193, + "learning_rate": 4.5805174727496573e-07, + "loss": 0.6243, + "step": 4920 + }, + { + "epoch": 3.2960482250502343, + "grad_norm": 3.0810567309331796, + "learning_rate": 4.572088962846757e-07, + "loss": 0.9635, + "step": 4921 + }, + { + "epoch": 3.2967180174146016, + "grad_norm": 2.64341787277102, + "learning_rate": 4.5636674341163143e-07, + "loss": 0.7204, + "step": 4922 + }, + { + "epoch": 3.2973878097789684, + "grad_norm": 2.925129109051457, + "learning_rate": 4.555252889436362e-07, + "loss": 0.806, + "step": 4923 + }, + { + "epoch": 3.2980576021433357, + "grad_norm": 2.7819727250011264, + "learning_rate": 4.5468453316825396e-07, + "loss": 0.7895, + "step": 4924 + }, + { + "epoch": 3.2987273945077025, + "grad_norm": 3.6669991828000104, + "learning_rate": 4.5384447637281057e-07, + "loss": 0.8669, + "step": 4925 + }, + { + "epoch": 3.2993971868720697, + "grad_norm": 3.197830009687548, + "learning_rate": 4.5300511884439293e-07, + "loss": 0.7738, + "step": 4926 + }, + { + "epoch": 3.3000669792364365, + "grad_norm": 3.066796683179765, + "learning_rate": 4.521664608698481e-07, + "loss": 0.7132, + "step": 4927 + }, + { + "epoch": 3.300736771600804, + "grad_norm": 3.3282578211596654, + "learning_rate": 4.5132850273578525e-07, + "loss": 0.8767, + "step": 4928 + }, + { + "epoch": 3.3014065639651706, + "grad_norm": 2.610965418068377, + "learning_rate": 4.5049124472857366e-07, + "loss": 0.6865, + "step": 4929 + }, + { + "epoch": 3.302076356329538, + "grad_norm": 3.2375697360299878, + "learning_rate": 4.496546871343435e-07, + "loss": 0.9043, + "step": 4930 + }, + { + "epoch": 3.3027461486939047, + "grad_norm": 2.8779816604606143, + "learning_rate": 4.488188302389859e-07, + "loss": 0.8941, + "step": 4931 + }, + { + "epoch": 3.303415941058272, + "grad_norm": 2.7621988731665894, + "learning_rate": 4.4798367432815184e-07, + "loss": 0.6207, + "step": 4932 + }, + { + "epoch": 3.304085733422639, + "grad_norm": 2.98555000276379, + "learning_rate": 4.4714921968725374e-07, + "loss": 0.6794, + "step": 4933 + }, + { + "epoch": 3.304755525787006, + "grad_norm": 4.654377079732971, + "learning_rate": 4.4631546660146296e-07, + "loss": 0.6974, + "step": 4934 + }, + { + "epoch": 3.3054253181513733, + "grad_norm": 2.857379471395188, + "learning_rate": 4.4548241535571245e-07, + "loss": 0.9486, + "step": 4935 + }, + { + "epoch": 3.30609511051574, + "grad_norm": 2.532620795654812, + "learning_rate": 4.44650066234695e-07, + "loss": 0.6797, + "step": 4936 + }, + { + "epoch": 3.306764902880107, + "grad_norm": 2.63246471456226, + "learning_rate": 4.4381841952286205e-07, + "loss": 0.8133, + "step": 4937 + }, + { + "epoch": 3.307434695244474, + "grad_norm": 3.227123835331213, + "learning_rate": 4.4298747550442805e-07, + "loss": 0.7416, + "step": 4938 + }, + { + "epoch": 3.3081044876088415, + "grad_norm": 2.6868894632365565, + "learning_rate": 4.421572344633643e-07, + "loss": 0.7624, + "step": 4939 + }, + { + "epoch": 3.3087742799732083, + "grad_norm": 2.908465309579733, + "learning_rate": 4.4132769668340265e-07, + "loss": 0.84, + "step": 4940 + }, + { + "epoch": 3.3094440723375755, + "grad_norm": 
2.86519090099284, + "learning_rate": 4.4049886244803496e-07, + "loss": 0.8284, + "step": 4941 + }, + { + "epoch": 3.3101138647019424, + "grad_norm": 2.9003160193974837, + "learning_rate": 4.3967073204051414e-07, + "loss": 0.6589, + "step": 4942 + }, + { + "epoch": 3.3107836570663096, + "grad_norm": 2.2849497925868816, + "learning_rate": 4.3884330574384957e-07, + "loss": 0.6218, + "step": 4943 + }, + { + "epoch": 3.3114534494306764, + "grad_norm": 3.2198934772802477, + "learning_rate": 4.3801658384081154e-07, + "loss": 0.8719, + "step": 4944 + }, + { + "epoch": 3.3121232417950437, + "grad_norm": 2.6537731684880628, + "learning_rate": 4.3719056661393033e-07, + "loss": 0.6554, + "step": 4945 + }, + { + "epoch": 3.3127930341594105, + "grad_norm": 3.4390508874878303, + "learning_rate": 4.363652543454949e-07, + "loss": 0.8867, + "step": 4946 + }, + { + "epoch": 3.3134628265237778, + "grad_norm": 2.4111024922086806, + "learning_rate": 4.355406473175519e-07, + "loss": 0.7604, + "step": 4947 + }, + { + "epoch": 3.3141326188881446, + "grad_norm": 2.635216903260582, + "learning_rate": 4.347167458119081e-07, + "loss": 0.7269, + "step": 4948 + }, + { + "epoch": 3.314802411252512, + "grad_norm": 2.8083413974358975, + "learning_rate": 4.3389355011013047e-07, + "loss": 0.8341, + "step": 4949 + }, + { + "epoch": 3.3154722036168787, + "grad_norm": 3.7097861129223926, + "learning_rate": 4.3307106049354316e-07, + "loss": 0.8768, + "step": 4950 + }, + { + "epoch": 3.316141995981246, + "grad_norm": 2.788339560727403, + "learning_rate": 4.322492772432285e-07, + "loss": 0.8123, + "step": 4951 + }, + { + "epoch": 3.3168117883456127, + "grad_norm": 2.796884153977941, + "learning_rate": 4.314282006400281e-07, + "loss": 0.9045, + "step": 4952 + }, + { + "epoch": 3.31748158070998, + "grad_norm": 2.4674414958336244, + "learning_rate": 4.3060783096454313e-07, + "loss": 0.538, + "step": 4953 + }, + { + "epoch": 3.318151373074347, + "grad_norm": 2.684954936984025, + "learning_rate": 4.297881684971328e-07, + "loss": 0.7766, + "step": 4954 + }, + { + "epoch": 3.318821165438714, + "grad_norm": 2.791887636769825, + "learning_rate": 4.289692135179127e-07, + "loss": 0.8823, + "step": 4955 + }, + { + "epoch": 3.319490957803081, + "grad_norm": 2.198354081820339, + "learning_rate": 4.281509663067579e-07, + "loss": 0.5593, + "step": 4956 + }, + { + "epoch": 3.320160750167448, + "grad_norm": 3.0644129964502063, + "learning_rate": 4.27333427143303e-07, + "loss": 0.7996, + "step": 4957 + }, + { + "epoch": 3.320830542531815, + "grad_norm": 2.384095213345387, + "learning_rate": 4.265165963069395e-07, + "loss": 0.8145, + "step": 4958 + }, + { + "epoch": 3.3215003348961822, + "grad_norm": 3.1366787376666005, + "learning_rate": 4.2570047407681533e-07, + "loss": 0.7503, + "step": 4959 + }, + { + "epoch": 3.322170127260549, + "grad_norm": 2.52145024408424, + "learning_rate": 4.2488506073183775e-07, + "loss": 0.7673, + "step": 4960 + }, + { + "epoch": 3.3228399196249163, + "grad_norm": 2.5956321461194225, + "learning_rate": 4.240703565506726e-07, + "loss": 0.7173, + "step": 4961 + }, + { + "epoch": 3.323509711989283, + "grad_norm": 3.547892934440586, + "learning_rate": 4.232563618117427e-07, + "loss": 0.7839, + "step": 4962 + }, + { + "epoch": 3.3241795043536504, + "grad_norm": 2.688445314027561, + "learning_rate": 4.2244307679322693e-07, + "loss": 0.6917, + "step": 4963 + }, + { + "epoch": 3.3248492967180177, + "grad_norm": 2.9591641173730143, + "learning_rate": 4.2163050177306237e-07, + "loss": 0.5909, + "step": 4964 + }, + { + "epoch": 
3.3255190890823845, + "grad_norm": 2.8199462603012133, + "learning_rate": 4.208186370289455e-07, + "loss": 0.8856, + "step": 4965 + }, + { + "epoch": 3.3261888814467513, + "grad_norm": 2.698368457666367, + "learning_rate": 4.2000748283832856e-07, + "loss": 0.8516, + "step": 4966 + }, + { + "epoch": 3.3268586738111185, + "grad_norm": 2.872400914147228, + "learning_rate": 4.1919703947841924e-07, + "loss": 0.7176, + "step": 4967 + }, + { + "epoch": 3.327528466175486, + "grad_norm": 3.0117598868501045, + "learning_rate": 4.1838730722618406e-07, + "loss": 0.7729, + "step": 4968 + }, + { + "epoch": 3.3281982585398526, + "grad_norm": 2.7935672865068475, + "learning_rate": 4.17578286358348e-07, + "loss": 0.7507, + "step": 4969 + }, + { + "epoch": 3.3288680509042194, + "grad_norm": 2.4637930872287024, + "learning_rate": 4.1676997715139027e-07, + "loss": 0.7324, + "step": 4970 + }, + { + "epoch": 3.3295378432685867, + "grad_norm": 3.277728056363495, + "learning_rate": 4.1596237988154846e-07, + "loss": 0.8731, + "step": 4971 + }, + { + "epoch": 3.330207635632954, + "grad_norm": 2.6351499683576485, + "learning_rate": 4.1515549482481515e-07, + "loss": 0.7094, + "step": 4972 + }, + { + "epoch": 3.330877427997321, + "grad_norm": 10.310138873692757, + "learning_rate": 4.143493222569417e-07, + "loss": 0.8354, + "step": 4973 + }, + { + "epoch": 3.331547220361688, + "grad_norm": 2.569108837490541, + "learning_rate": 4.135438624534349e-07, + "loss": 0.7758, + "step": 4974 + }, + { + "epoch": 3.332217012726055, + "grad_norm": 2.774923542402823, + "learning_rate": 4.12739115689558e-07, + "loss": 0.7041, + "step": 4975 + }, + { + "epoch": 3.332886805090422, + "grad_norm": 2.6281078952492676, + "learning_rate": 4.119350822403304e-07, + "loss": 0.6556, + "step": 4976 + }, + { + "epoch": 3.333556597454789, + "grad_norm": 2.7733797906156625, + "learning_rate": 4.1113176238052806e-07, + "loss": 0.783, + "step": 4977 + }, + { + "epoch": 3.334226389819156, + "grad_norm": 3.20271654163719, + "learning_rate": 4.103291563846828e-07, + "loss": 0.7143, + "step": 4978 + }, + { + "epoch": 3.334896182183523, + "grad_norm": 2.913278060523605, + "learning_rate": 4.0952726452708284e-07, + "loss": 0.821, + "step": 4979 + }, + { + "epoch": 3.3355659745478903, + "grad_norm": 2.895032440354962, + "learning_rate": 4.087260870817719e-07, + "loss": 0.794, + "step": 4980 + }, + { + "epoch": 3.336235766912257, + "grad_norm": 2.938735346776285, + "learning_rate": 4.079256243225502e-07, + "loss": 0.7913, + "step": 4981 + }, + { + "epoch": 3.3369055592766244, + "grad_norm": 2.6052038331101754, + "learning_rate": 4.071258765229727e-07, + "loss": 0.7292, + "step": 4982 + }, + { + "epoch": 3.337575351640991, + "grad_norm": 2.8077125714516034, + "learning_rate": 4.0632684395635086e-07, + "loss": 0.8285, + "step": 4983 + }, + { + "epoch": 3.3382451440053584, + "grad_norm": 2.6387482795668094, + "learning_rate": 4.055285268957515e-07, + "loss": 0.7287, + "step": 4984 + }, + { + "epoch": 3.3389149363697253, + "grad_norm": 3.216159506758138, + "learning_rate": 4.0473092561399694e-07, + "loss": 0.7811, + "step": 4985 + }, + { + "epoch": 3.3395847287340925, + "grad_norm": 2.628849612499717, + "learning_rate": 4.039340403836647e-07, + "loss": 0.7183, + "step": 4986 + }, + { + "epoch": 3.3402545210984593, + "grad_norm": 2.636613582087822, + "learning_rate": 4.031378714770881e-07, + "loss": 0.8036, + "step": 4987 + }, + { + "epoch": 3.3409243134628266, + "grad_norm": 2.8189473583492672, + "learning_rate": 4.0234241916635497e-07, + "loss": 0.6267, + 
"step": 4988 + }, + { + "epoch": 3.3415941058271934, + "grad_norm": 2.7019967054067338, + "learning_rate": 4.0154768372330844e-07, + "loss": 0.8134, + "step": 4989 + }, + { + "epoch": 3.3422638981915607, + "grad_norm": 3.0309397295930665, + "learning_rate": 4.0075366541954706e-07, + "loss": 0.7855, + "step": 4990 + }, + { + "epoch": 3.3429336905559275, + "grad_norm": 2.8898062345314726, + "learning_rate": 3.999603645264238e-07, + "loss": 0.8075, + "step": 4991 + }, + { + "epoch": 3.3436034829202947, + "grad_norm": 3.0291133708184277, + "learning_rate": 3.9916778131504685e-07, + "loss": 0.7695, + "step": 4992 + }, + { + "epoch": 3.3442732752846616, + "grad_norm": 2.7168665497864186, + "learning_rate": 3.983759160562786e-07, + "loss": 0.8162, + "step": 4993 + }, + { + "epoch": 3.344943067649029, + "grad_norm": 2.6761639190339284, + "learning_rate": 3.975847690207371e-07, + "loss": 0.6573, + "step": 4994 + }, + { + "epoch": 3.3456128600133956, + "grad_norm": 2.8900121047478415, + "learning_rate": 3.9679434047879354e-07, + "loss": 0.7513, + "step": 4995 + }, + { + "epoch": 3.346282652377763, + "grad_norm": 3.3319418500929703, + "learning_rate": 3.9600463070057493e-07, + "loss": 0.6243, + "step": 4996 + }, + { + "epoch": 3.34695244474213, + "grad_norm": 3.0322085110614516, + "learning_rate": 3.952156399559617e-07, + "loss": 0.8772, + "step": 4997 + }, + { + "epoch": 3.347622237106497, + "grad_norm": 3.689215448526312, + "learning_rate": 3.9442736851458914e-07, + "loss": 0.7898, + "step": 4998 + }, + { + "epoch": 3.348292029470864, + "grad_norm": 3.4461972866541144, + "learning_rate": 3.9363981664584604e-07, + "loss": 0.8007, + "step": 4999 + }, + { + "epoch": 3.348961821835231, + "grad_norm": 2.7744399816780194, + "learning_rate": 3.928529846188761e-07, + "loss": 0.9319, + "step": 5000 + }, + { + "epoch": 3.3496316141995983, + "grad_norm": 2.733499297075922, + "learning_rate": 3.9206687270257616e-07, + "loss": 0.7845, + "step": 5001 + }, + { + "epoch": 3.350301406563965, + "grad_norm": 2.9861118490357295, + "learning_rate": 3.912814811655979e-07, + "loss": 0.7956, + "step": 5002 + }, + { + "epoch": 3.3509711989283324, + "grad_norm": 3.2708529497708816, + "learning_rate": 3.904968102763462e-07, + "loss": 0.8162, + "step": 5003 + }, + { + "epoch": 3.351640991292699, + "grad_norm": 3.7162068312077676, + "learning_rate": 3.8971286030297983e-07, + "loss": 0.8696, + "step": 5004 + }, + { + "epoch": 3.3523107836570665, + "grad_norm": 2.857799606891035, + "learning_rate": 3.8892963151341114e-07, + "loss": 0.7862, + "step": 5005 + }, + { + "epoch": 3.3529805760214333, + "grad_norm": 2.8827397008478677, + "learning_rate": 3.881471241753057e-07, + "loss": 0.6851, + "step": 5006 + }, + { + "epoch": 3.3536503683858006, + "grad_norm": 2.994075928678233, + "learning_rate": 3.873653385560827e-07, + "loss": 0.7842, + "step": 5007 + }, + { + "epoch": 3.3543201607501674, + "grad_norm": 2.1911423852258682, + "learning_rate": 3.8658427492291625e-07, + "loss": 0.5629, + "step": 5008 + }, + { + "epoch": 3.3549899531145346, + "grad_norm": 2.7613290014536376, + "learning_rate": 3.8580393354273107e-07, + "loss": 0.8713, + "step": 5009 + }, + { + "epoch": 3.3556597454789014, + "grad_norm": 3.1574671693650296, + "learning_rate": 3.8502431468220634e-07, + "loss": 0.7121, + "step": 5010 + }, + { + "epoch": 3.3563295378432687, + "grad_norm": 2.8678005740599013, + "learning_rate": 3.8424541860777406e-07, + "loss": 0.7687, + "step": 5011 + }, + { + "epoch": 3.3569993302076355, + "grad_norm": 2.91176135636121, + 
"learning_rate": 3.834672455856209e-07, + "loss": 0.8379, + "step": 5012 + }, + { + "epoch": 3.357669122572003, + "grad_norm": 2.967433320009398, + "learning_rate": 3.826897958816833e-07, + "loss": 0.7433, + "step": 5013 + }, + { + "epoch": 3.3583389149363696, + "grad_norm": 3.0171244303217373, + "learning_rate": 3.8191306976165327e-07, + "loss": 0.7406, + "step": 5014 + }, + { + "epoch": 3.359008707300737, + "grad_norm": 3.4397238823136833, + "learning_rate": 3.811370674909731e-07, + "loss": 0.7655, + "step": 5015 + }, + { + "epoch": 3.3596784996651037, + "grad_norm": 3.2193364273151532, + "learning_rate": 3.803617893348413e-07, + "loss": 0.693, + "step": 5016 + }, + { + "epoch": 3.360348292029471, + "grad_norm": 2.419868357355869, + "learning_rate": 3.7958723555820445e-07, + "loss": 0.6576, + "step": 5017 + }, + { + "epoch": 3.3610180843938378, + "grad_norm": 3.39004382143499, + "learning_rate": 3.788134064257651e-07, + "loss": 0.893, + "step": 5018 + }, + { + "epoch": 3.361687876758205, + "grad_norm": 2.7378111605314173, + "learning_rate": 3.7804030220197575e-07, + "loss": 0.7648, + "step": 5019 + }, + { + "epoch": 3.362357669122572, + "grad_norm": 3.04506175418011, + "learning_rate": 3.77267923151044e-07, + "loss": 0.7009, + "step": 5020 + }, + { + "epoch": 3.363027461486939, + "grad_norm": 3.076645726785812, + "learning_rate": 3.7649626953692617e-07, + "loss": 0.8044, + "step": 5021 + }, + { + "epoch": 3.363697253851306, + "grad_norm": 2.882101633023875, + "learning_rate": 3.757253416233334e-07, + "loss": 0.6877, + "step": 5022 + }, + { + "epoch": 3.364367046215673, + "grad_norm": 2.662455732286603, + "learning_rate": 3.7495513967372663e-07, + "loss": 0.8413, + "step": 5023 + }, + { + "epoch": 3.36503683858004, + "grad_norm": 3.0397657351432166, + "learning_rate": 3.7418566395132195e-07, + "loss": 0.7739, + "step": 5024 + }, + { + "epoch": 3.3657066309444073, + "grad_norm": 2.803213581901629, + "learning_rate": 3.734169147190833e-07, + "loss": 0.9021, + "step": 5025 + }, + { + "epoch": 3.3663764233087745, + "grad_norm": 3.1380975578437447, + "learning_rate": 3.7264889223972895e-07, + "loss": 0.8692, + "step": 5026 + }, + { + "epoch": 3.3670462156731413, + "grad_norm": 2.6356178404993296, + "learning_rate": 3.718815967757275e-07, + "loss": 0.6327, + "step": 5027 + }, + { + "epoch": 3.367716008037508, + "grad_norm": 2.907035440233595, + "learning_rate": 3.7111502858930144e-07, + "loss": 0.7658, + "step": 5028 + }, + { + "epoch": 3.3683858004018754, + "grad_norm": 2.7920464971424748, + "learning_rate": 3.703491879424212e-07, + "loss": 0.7135, + "step": 5029 + }, + { + "epoch": 3.3690555927662427, + "grad_norm": 2.635899255832973, + "learning_rate": 3.6958407509681075e-07, + "loss": 0.6704, + "step": 5030 + }, + { + "epoch": 3.3697253851306095, + "grad_norm": 2.7428673263328593, + "learning_rate": 3.688196903139449e-07, + "loss": 0.7765, + "step": 5031 + }, + { + "epoch": 3.3703951774949767, + "grad_norm": 2.5988084963834637, + "learning_rate": 3.680560338550507e-07, + "loss": 0.6502, + "step": 5032 + }, + { + "epoch": 3.3710649698593436, + "grad_norm": 2.46436403500541, + "learning_rate": 3.6729310598110385e-07, + "loss": 0.8297, + "step": 5033 + }, + { + "epoch": 3.371734762223711, + "grad_norm": 2.742790253281237, + "learning_rate": 3.665309069528325e-07, + "loss": 0.839, + "step": 5034 + }, + { + "epoch": 3.3724045545880776, + "grad_norm": 2.7312075942644665, + "learning_rate": 3.657694370307169e-07, + "loss": 0.6875, + "step": 5035 + }, + { + "epoch": 3.373074346952445, + 
"grad_norm": 2.8661509762461854, + "learning_rate": 3.650086964749869e-07, + "loss": 0.8188, + "step": 5036 + }, + { + "epoch": 3.3737441393168117, + "grad_norm": 3.060558168940683, + "learning_rate": 3.6424868554562203e-07, + "loss": 0.7602, + "step": 5037 + }, + { + "epoch": 3.374413931681179, + "grad_norm": 2.7572727268476456, + "learning_rate": 3.6348940450235347e-07, + "loss": 0.7101, + "step": 5038 + }, + { + "epoch": 3.375083724045546, + "grad_norm": 2.463058220022583, + "learning_rate": 3.6273085360466394e-07, + "loss": 0.6957, + "step": 5039 + }, + { + "epoch": 3.375753516409913, + "grad_norm": 2.6225458074605, + "learning_rate": 3.6197303311178627e-07, + "loss": 0.7767, + "step": 5040 + }, + { + "epoch": 3.37642330877428, + "grad_norm": 2.6892780091276216, + "learning_rate": 3.612159432827017e-07, + "loss": 0.8274, + "step": 5041 + }, + { + "epoch": 3.377093101138647, + "grad_norm": 5.60681300797202, + "learning_rate": 3.6045958437614326e-07, + "loss": 0.7903, + "step": 5042 + }, + { + "epoch": 3.377762893503014, + "grad_norm": 2.875384036515439, + "learning_rate": 3.5970395665059555e-07, + "loss": 0.8001, + "step": 5043 + }, + { + "epoch": 3.378432685867381, + "grad_norm": 2.9732242259319968, + "learning_rate": 3.5894906036429087e-07, + "loss": 0.7169, + "step": 5044 + }, + { + "epoch": 3.379102478231748, + "grad_norm": 2.7024357395570755, + "learning_rate": 3.581948957752135e-07, + "loss": 0.7225, + "step": 5045 + }, + { + "epoch": 3.3797722705961153, + "grad_norm": 2.5300184469168427, + "learning_rate": 3.5744146314109495e-07, + "loss": 0.7121, + "step": 5046 + }, + { + "epoch": 3.380442062960482, + "grad_norm": 2.8810640527820057, + "learning_rate": 3.5668876271942014e-07, + "loss": 0.8249, + "step": 5047 + }, + { + "epoch": 3.3811118553248494, + "grad_norm": 3.0209264042024184, + "learning_rate": 3.559367947674214e-07, + "loss": 0.9041, + "step": 5048 + }, + { + "epoch": 3.381781647689216, + "grad_norm": 2.523995725102098, + "learning_rate": 3.5518555954208195e-07, + "loss": 0.7995, + "step": 5049 + }, + { + "epoch": 3.3824514400535834, + "grad_norm": 3.0901543865500525, + "learning_rate": 3.5443505730013205e-07, + "loss": 0.8499, + "step": 5050 + }, + { + "epoch": 3.3831212324179503, + "grad_norm": 2.9898658593495107, + "learning_rate": 3.5368528829805516e-07, + "loss": 0.8183, + "step": 5051 + }, + { + "epoch": 3.3837910247823175, + "grad_norm": 2.719055571861372, + "learning_rate": 3.529362527920821e-07, + "loss": 0.8617, + "step": 5052 + }, + { + "epoch": 3.3844608171466843, + "grad_norm": 2.8108531867484112, + "learning_rate": 3.52187951038194e-07, + "loss": 0.9757, + "step": 5053 + }, + { + "epoch": 3.3851306095110516, + "grad_norm": 2.564540730330649, + "learning_rate": 3.514403832921184e-07, + "loss": 0.7712, + "step": 5054 + }, + { + "epoch": 3.385800401875419, + "grad_norm": 2.9493419854680427, + "learning_rate": 3.506935498093361e-07, + "loss": 0.7698, + "step": 5055 + }, + { + "epoch": 3.3864701942397857, + "grad_norm": 2.5996505877796032, + "learning_rate": 3.4994745084507455e-07, + "loss": 0.7162, + "step": 5056 + }, + { + "epoch": 3.3871399866041525, + "grad_norm": 2.62566889183629, + "learning_rate": 3.492020866543111e-07, + "loss": 0.7829, + "step": 5057 + }, + { + "epoch": 3.3878097789685198, + "grad_norm": 2.844449897905926, + "learning_rate": 3.4845745749176983e-07, + "loss": 0.6949, + "step": 5058 + }, + { + "epoch": 3.388479571332887, + "grad_norm": 3.213766537087316, + "learning_rate": 3.477135636119269e-07, + "loss": 0.92, + "step": 5059 + }, + { + 
"epoch": 3.389149363697254, + "grad_norm": 3.314503407700576, + "learning_rate": 3.4697040526900536e-07, + "loss": 0.7988, + "step": 5060 + }, + { + "epoch": 3.3898191560616207, + "grad_norm": 3.142685320926113, + "learning_rate": 3.462279827169773e-07, + "loss": 0.8728, + "step": 5061 + }, + { + "epoch": 3.390488948425988, + "grad_norm": 2.770813927156509, + "learning_rate": 3.4548629620956285e-07, + "loss": 0.7209, + "step": 5062 + }, + { + "epoch": 3.391158740790355, + "grad_norm": 3.913040516659897, + "learning_rate": 3.447453460002309e-07, + "loss": 0.6976, + "step": 5063 + }, + { + "epoch": 3.391828533154722, + "grad_norm": 3.0192269444726594, + "learning_rate": 3.440051323421992e-07, + "loss": 0.7394, + "step": 5064 + }, + { + "epoch": 3.3924983255190893, + "grad_norm": 2.938842328975349, + "learning_rate": 3.432656554884331e-07, + "loss": 0.7271, + "step": 5065 + }, + { + "epoch": 3.393168117883456, + "grad_norm": 2.844670859081228, + "learning_rate": 3.4252691569164636e-07, + "loss": 0.7134, + "step": 5066 + }, + { + "epoch": 3.3938379102478233, + "grad_norm": 2.386624468434775, + "learning_rate": 3.417889132043012e-07, + "loss": 0.7803, + "step": 5067 + }, + { + "epoch": 3.39450770261219, + "grad_norm": 2.9621352607212645, + "learning_rate": 3.410516482786075e-07, + "loss": 0.9167, + "step": 5068 + }, + { + "epoch": 3.3951774949765574, + "grad_norm": 2.550582378383411, + "learning_rate": 3.4031512116652287e-07, + "loss": 0.7441, + "step": 5069 + }, + { + "epoch": 3.3958472873409242, + "grad_norm": 2.4895463710110746, + "learning_rate": 3.395793321197535e-07, + "loss": 0.783, + "step": 5070 + }, + { + "epoch": 3.3965170797052915, + "grad_norm": 2.7355277277449956, + "learning_rate": 3.3884428138975247e-07, + "loss": 0.7315, + "step": 5071 + }, + { + "epoch": 3.3971868720696583, + "grad_norm": 2.74012281594932, + "learning_rate": 3.381099692277212e-07, + "loss": 0.7603, + "step": 5072 + }, + { + "epoch": 3.3978566644340256, + "grad_norm": 2.667634505810667, + "learning_rate": 3.373763958846088e-07, + "loss": 0.7952, + "step": 5073 + }, + { + "epoch": 3.3985264567983924, + "grad_norm": 3.4462043425571953, + "learning_rate": 3.3664356161111106e-07, + "loss": 0.879, + "step": 5074 + }, + { + "epoch": 3.3991962491627596, + "grad_norm": 2.9046753099835905, + "learning_rate": 3.359114666576718e-07, + "loss": 0.8906, + "step": 5075 + }, + { + "epoch": 3.3998660415271265, + "grad_norm": 2.5453497755026624, + "learning_rate": 3.3518011127448234e-07, + "loss": 0.7726, + "step": 5076 + }, + { + "epoch": 3.4005358338914937, + "grad_norm": 2.6996740538010626, + "learning_rate": 3.3444949571148105e-07, + "loss": 0.7198, + "step": 5077 + }, + { + "epoch": 3.4012056262558605, + "grad_norm": 2.860967230505949, + "learning_rate": 3.337196202183532e-07, + "loss": 0.7737, + "step": 5078 + }, + { + "epoch": 3.401875418620228, + "grad_norm": 2.6633474498776724, + "learning_rate": 3.3299048504453147e-07, + "loss": 0.9361, + "step": 5079 + }, + { + "epoch": 3.4025452109845946, + "grad_norm": 2.878398973101886, + "learning_rate": 3.3226209043919574e-07, + "loss": 0.7864, + "step": 5080 + }, + { + "epoch": 3.403215003348962, + "grad_norm": 3.8362158133200728, + "learning_rate": 3.3153443665127183e-07, + "loss": 0.6562, + "step": 5081 + }, + { + "epoch": 3.4038847957133287, + "grad_norm": 3.0322262524079986, + "learning_rate": 3.308075239294337e-07, + "loss": 0.9338, + "step": 5082 + }, + { + "epoch": 3.404554588077696, + "grad_norm": 3.2244017586879807, + "learning_rate": 3.300813525221011e-07, + "loss": 
0.7584, + "step": 5083 + }, + { + "epoch": 3.4052243804420628, + "grad_norm": 3.288634433154943, + "learning_rate": 3.2935592267744066e-07, + "loss": 0.7625, + "step": 5084 + }, + { + "epoch": 3.40589417280643, + "grad_norm": 2.91691143240573, + "learning_rate": 3.286312346433659e-07, + "loss": 0.8445, + "step": 5085 + }, + { + "epoch": 3.406563965170797, + "grad_norm": 3.1855659715078026, + "learning_rate": 3.279072886675369e-07, + "loss": 0.7314, + "step": 5086 + }, + { + "epoch": 3.407233757535164, + "grad_norm": 2.7293197446721242, + "learning_rate": 3.2718408499735926e-07, + "loss": 0.6052, + "step": 5087 + }, + { + "epoch": 3.4079035498995314, + "grad_norm": 2.8390917828910616, + "learning_rate": 3.264616238799856e-07, + "loss": 0.7901, + "step": 5088 + }, + { + "epoch": 3.408573342263898, + "grad_norm": 2.7300832451840495, + "learning_rate": 3.2573990556231503e-07, + "loss": 0.7334, + "step": 5089 + }, + { + "epoch": 3.409243134628265, + "grad_norm": 3.3089704287801744, + "learning_rate": 3.250189302909923e-07, + "loss": 0.9055, + "step": 5090 + }, + { + "epoch": 3.4099129269926323, + "grad_norm": 3.007679750390223, + "learning_rate": 3.242986983124083e-07, + "loss": 0.6645, + "step": 5091 + }, + { + "epoch": 3.4105827193569995, + "grad_norm": 3.19502940152926, + "learning_rate": 3.2357920987269977e-07, + "loss": 0.7604, + "step": 5092 + }, + { + "epoch": 3.4112525117213663, + "grad_norm": 3.416524985336817, + "learning_rate": 3.228604652177494e-07, + "loss": 0.6858, + "step": 5093 + }, + { + "epoch": 3.4119223040857336, + "grad_norm": 2.852473904999195, + "learning_rate": 3.221424645931873e-07, + "loss": 0.7969, + "step": 5094 + }, + { + "epoch": 3.4125920964501004, + "grad_norm": 2.558839219554653, + "learning_rate": 3.214252082443861e-07, + "loss": 0.7558, + "step": 5095 + }, + { + "epoch": 3.4132618888144677, + "grad_norm": 2.590632937987955, + "learning_rate": 3.2070869641646656e-07, + "loss": 0.7237, + "step": 5096 + }, + { + "epoch": 3.4139316811788345, + "grad_norm": 2.428577329531861, + "learning_rate": 3.199929293542939e-07, + "loss": 0.7077, + "step": 5097 + }, + { + "epoch": 3.4146014735432018, + "grad_norm": 3.3981978294332613, + "learning_rate": 3.192779073024804e-07, + "loss": 0.9162, + "step": 5098 + }, + { + "epoch": 3.4152712659075686, + "grad_norm": 2.5389523708340587, + "learning_rate": 3.185636305053813e-07, + "loss": 0.8408, + "step": 5099 + }, + { + "epoch": 3.415941058271936, + "grad_norm": 2.4649490693056375, + "learning_rate": 3.178500992070985e-07, + "loss": 0.69, + "step": 5100 + }, + { + "epoch": 3.4166108506363027, + "grad_norm": 2.876004612209915, + "learning_rate": 3.171373136514788e-07, + "loss": 0.8727, + "step": 5101 + }, + { + "epoch": 3.41728064300067, + "grad_norm": 2.8095971832956543, + "learning_rate": 3.164252740821161e-07, + "loss": 0.7716, + "step": 5102 + }, + { + "epoch": 3.4179504353650367, + "grad_norm": 2.7883532589018687, + "learning_rate": 3.1571398074234565e-07, + "loss": 0.774, + "step": 5103 + }, + { + "epoch": 3.418620227729404, + "grad_norm": 2.3446526912974273, + "learning_rate": 3.150034338752503e-07, + "loss": 0.7909, + "step": 5104 + }, + { + "epoch": 3.419290020093771, + "grad_norm": 2.7360228256023866, + "learning_rate": 3.1429363372365655e-07, + "loss": 0.7051, + "step": 5105 + }, + { + "epoch": 3.419959812458138, + "grad_norm": 3.260554611978407, + "learning_rate": 3.13584580530138e-07, + "loss": 0.8457, + "step": 5106 + }, + { + "epoch": 3.420629604822505, + "grad_norm": 2.7063145174737775, + "learning_rate": 
3.128762745370098e-07, + "loss": 0.7779, + "step": 5107 + }, + { + "epoch": 3.421299397186872, + "grad_norm": 2.64432539427834, + "learning_rate": 3.1216871598633346e-07, + "loss": 0.6021, + "step": 5108 + }, + { + "epoch": 3.421969189551239, + "grad_norm": 2.784189888835372, + "learning_rate": 3.114619051199147e-07, + "loss": 0.7796, + "step": 5109 + }, + { + "epoch": 3.4226389819156062, + "grad_norm": 3.3735030463430604, + "learning_rate": 3.10755842179305e-07, + "loss": 0.9965, + "step": 5110 + }, + { + "epoch": 3.423308774279973, + "grad_norm": 2.4026006964707887, + "learning_rate": 3.100505274057977e-07, + "loss": 0.7271, + "step": 5111 + }, + { + "epoch": 3.4239785666443403, + "grad_norm": 2.4663330053847785, + "learning_rate": 3.0934596104043255e-07, + "loss": 0.7759, + "step": 5112 + }, + { + "epoch": 3.424648359008707, + "grad_norm": 2.8770816796574423, + "learning_rate": 3.0864214332399194e-07, + "loss": 0.8775, + "step": 5113 + }, + { + "epoch": 3.4253181513730744, + "grad_norm": 2.6558781847861512, + "learning_rate": 3.0793907449700454e-07, + "loss": 0.7816, + "step": 5114 + }, + { + "epoch": 3.425987943737441, + "grad_norm": 2.4523118197294833, + "learning_rate": 3.072367547997418e-07, + "loss": 0.7475, + "step": 5115 + }, + { + "epoch": 3.4266577361018085, + "grad_norm": 2.5582224069902466, + "learning_rate": 3.0653518447221837e-07, + "loss": 0.7815, + "step": 5116 + }, + { + "epoch": 3.4273275284661757, + "grad_norm": 2.4080999688906863, + "learning_rate": 3.058343637541933e-07, + "loss": 0.7129, + "step": 5117 + }, + { + "epoch": 3.4279973208305425, + "grad_norm": 2.4893407922498074, + "learning_rate": 3.051342928851711e-07, + "loss": 0.7302, + "step": 5118 + }, + { + "epoch": 3.4286671131949094, + "grad_norm": 3.283743696731113, + "learning_rate": 3.0443497210439847e-07, + "loss": 0.7093, + "step": 5119 + }, + { + "epoch": 3.4293369055592766, + "grad_norm": 2.9282463209479066, + "learning_rate": 3.037364016508651e-07, + "loss": 0.7566, + "step": 5120 + }, + { + "epoch": 3.430006697923644, + "grad_norm": 2.71484460606066, + "learning_rate": 3.0303858176330526e-07, + "loss": 0.6935, + "step": 5121 + }, + { + "epoch": 3.4306764902880107, + "grad_norm": 2.846819717015729, + "learning_rate": 3.023415126801976e-07, + "loss": 0.7257, + "step": 5122 + }, + { + "epoch": 3.431346282652378, + "grad_norm": 2.7373130234600875, + "learning_rate": 3.0164519463976334e-07, + "loss": 0.7418, + "step": 5123 + }, + { + "epoch": 3.4320160750167448, + "grad_norm": 2.742171008909363, + "learning_rate": 3.0094962787996535e-07, + "loss": 0.5829, + "step": 5124 + }, + { + "epoch": 3.432685867381112, + "grad_norm": 2.6139621003704843, + "learning_rate": 3.002548126385127e-07, + "loss": 0.6781, + "step": 5125 + }, + { + "epoch": 3.433355659745479, + "grad_norm": 3.128501769947722, + "learning_rate": 2.995607491528557e-07, + "loss": 0.7265, + "step": 5126 + }, + { + "epoch": 3.434025452109846, + "grad_norm": 2.9635185546833855, + "learning_rate": 2.988674376601891e-07, + "loss": 0.9281, + "step": 5127 + }, + { + "epoch": 3.434695244474213, + "grad_norm": 2.453780321659283, + "learning_rate": 2.9817487839744807e-07, + "loss": 0.7623, + "step": 5128 + }, + { + "epoch": 3.43536503683858, + "grad_norm": 3.0974914326929475, + "learning_rate": 2.974830716013141e-07, + "loss": 0.7566, + "step": 5129 + }, + { + "epoch": 3.436034829202947, + "grad_norm": 3.542110256603639, + "learning_rate": 2.967920175082095e-07, + "loss": 0.8186, + "step": 5130 + }, + { + "epoch": 3.4367046215673143, + "grad_norm": 
3.4813354474188114, + "learning_rate": 2.9610171635430005e-07, + "loss": 0.7202, + "step": 5131 + }, + { + "epoch": 3.437374413931681, + "grad_norm": 2.5135103599088793, + "learning_rate": 2.954121683754926e-07, + "loss": 0.7002, + "step": 5132 + }, + { + "epoch": 3.4380442062960483, + "grad_norm": 2.5112652396790063, + "learning_rate": 2.9472337380743954e-07, + "loss": 0.7616, + "step": 5133 + }, + { + "epoch": 3.438713998660415, + "grad_norm": 2.4468656640021678, + "learning_rate": 2.9403533288553357e-07, + "loss": 0.6918, + "step": 5134 + }, + { + "epoch": 3.4393837910247824, + "grad_norm": 2.669499679090266, + "learning_rate": 2.933480458449109e-07, + "loss": 0.712, + "step": 5135 + }, + { + "epoch": 3.4400535833891492, + "grad_norm": 2.661760806053253, + "learning_rate": 2.9266151292044826e-07, + "loss": 0.5886, + "step": 5136 + }, + { + "epoch": 3.4407233757535165, + "grad_norm": 2.5521274495181143, + "learning_rate": 2.9197573434676744e-07, + "loss": 0.7257, + "step": 5137 + }, + { + "epoch": 3.4413931681178833, + "grad_norm": 2.6159989729061874, + "learning_rate": 2.912907103582305e-07, + "loss": 0.6545, + "step": 5138 + }, + { + "epoch": 3.4420629604822506, + "grad_norm": 2.735068310519882, + "learning_rate": 2.906064411889428e-07, + "loss": 0.6652, + "step": 5139 + }, + { + "epoch": 3.4427327528466174, + "grad_norm": 3.275610732993882, + "learning_rate": 2.8992292707274967e-07, + "loss": 0.7472, + "step": 5140 + }, + { + "epoch": 3.4434025452109847, + "grad_norm": 2.178695124743703, + "learning_rate": 2.8924016824324106e-07, + "loss": 0.6078, + "step": 5141 + }, + { + "epoch": 3.4440723375753515, + "grad_norm": 3.1287635339546487, + "learning_rate": 2.885581649337474e-07, + "loss": 0.6344, + "step": 5142 + }, + { + "epoch": 3.4447421299397187, + "grad_norm": 2.716447111802941, + "learning_rate": 2.878769173773416e-07, + "loss": 0.7423, + "step": 5143 + }, + { + "epoch": 3.4454119223040856, + "grad_norm": 2.8268625231471733, + "learning_rate": 2.87196425806836e-07, + "loss": 0.8332, + "step": 5144 + }, + { + "epoch": 3.446081714668453, + "grad_norm": 3.294902797712028, + "learning_rate": 2.8651669045478836e-07, + "loss": 0.8775, + "step": 5145 + }, + { + "epoch": 3.44675150703282, + "grad_norm": 2.665424144498105, + "learning_rate": 2.8583771155349493e-07, + "loss": 0.7891, + "step": 5146 + }, + { + "epoch": 3.447421299397187, + "grad_norm": 2.872138511138963, + "learning_rate": 2.851594893349957e-07, + "loss": 0.8507, + "step": 5147 + }, + { + "epoch": 3.4480910917615537, + "grad_norm": 3.4663776800937858, + "learning_rate": 2.844820240310689e-07, + "loss": 0.7755, + "step": 5148 + }, + { + "epoch": 3.448760884125921, + "grad_norm": 2.5390470725053342, + "learning_rate": 2.838053158732379e-07, + "loss": 0.6394, + "step": 5149 + }, + { + "epoch": 3.4494306764902882, + "grad_norm": 2.522981410656555, + "learning_rate": 2.83129365092765e-07, + "loss": 0.7633, + "step": 5150 + }, + { + "epoch": 3.450100468854655, + "grad_norm": 2.955723827758891, + "learning_rate": 2.824541719206539e-07, + "loss": 0.8165, + "step": 5151 + }, + { + "epoch": 3.450770261219022, + "grad_norm": 2.7578840196297842, + "learning_rate": 2.8177973658765015e-07, + "loss": 0.8396, + "step": 5152 + }, + { + "epoch": 3.451440053583389, + "grad_norm": 2.27727936508888, + "learning_rate": 2.8110605932423956e-07, + "loss": 0.6844, + "step": 5153 + }, + { + "epoch": 3.4521098459477564, + "grad_norm": 2.9367947755657595, + "learning_rate": 2.8043314036064914e-07, + "loss": 0.7104, + "step": 5154 + }, + { + "epoch": 
3.452779638312123, + "grad_norm": 2.7927429710629346, + "learning_rate": 2.7976097992684665e-07, + "loss": 0.7402, + "step": 5155 + }, + { + "epoch": 3.4534494306764905, + "grad_norm": 2.8417036708392995, + "learning_rate": 2.79089578252541e-07, + "loss": 0.7968, + "step": 5156 + }, + { + "epoch": 3.4541192230408573, + "grad_norm": 2.883982576527015, + "learning_rate": 2.7841893556718155e-07, + "loss": 0.7177, + "step": 5157 + }, + { + "epoch": 3.4547890154052245, + "grad_norm": 2.816432160449193, + "learning_rate": 2.7774905209995816e-07, + "loss": 0.7256, + "step": 5158 + }, + { + "epoch": 3.4554588077695914, + "grad_norm": 2.8416029883664744, + "learning_rate": 2.7707992807980146e-07, + "loss": 0.7259, + "step": 5159 + }, + { + "epoch": 3.4561286001339586, + "grad_norm": 2.5092157563967548, + "learning_rate": 2.76411563735382e-07, + "loss": 0.6984, + "step": 5160 + }, + { + "epoch": 3.4567983924983254, + "grad_norm": 2.935729701626579, + "learning_rate": 2.7574395929511165e-07, + "loss": 0.8928, + "step": 5161 + }, + { + "epoch": 3.4574681848626927, + "grad_norm": 3.0503598992382863, + "learning_rate": 2.7507711498714174e-07, + "loss": 0.5673, + "step": 5162 + }, + { + "epoch": 3.4581379772270595, + "grad_norm": 3.3860849307604663, + "learning_rate": 2.744110310393641e-07, + "loss": 0.8003, + "step": 5163 + }, + { + "epoch": 3.4588077695914268, + "grad_norm": 3.0793647377918867, + "learning_rate": 2.737457076794112e-07, + "loss": 0.8093, + "step": 5164 + }, + { + "epoch": 3.4594775619557936, + "grad_norm": 2.815488704852465, + "learning_rate": 2.730811451346546e-07, + "loss": 0.7277, + "step": 5165 + }, + { + "epoch": 3.460147354320161, + "grad_norm": 2.533752610041226, + "learning_rate": 2.7241734363220697e-07, + "loss": 0.7796, + "step": 5166 + }, + { + "epoch": 3.4608171466845277, + "grad_norm": 2.9033883751467635, + "learning_rate": 2.717543033989198e-07, + "loss": 0.7659, + "step": 5167 + }, + { + "epoch": 3.461486939048895, + "grad_norm": 2.8657526111945293, + "learning_rate": 2.710920246613852e-07, + "loss": 0.8037, + "step": 5168 + }, + { + "epoch": 3.4621567314132617, + "grad_norm": 2.872801836145061, + "learning_rate": 2.704305076459349e-07, + "loss": 0.805, + "step": 5169 + }, + { + "epoch": 3.462826523777629, + "grad_norm": 2.673255844756843, + "learning_rate": 2.6976975257864e-07, + "loss": 0.8452, + "step": 5170 + }, + { + "epoch": 3.463496316141996, + "grad_norm": 2.873592368632093, + "learning_rate": 2.691097596853112e-07, + "loss": 0.7027, + "step": 5171 + }, + { + "epoch": 3.464166108506363, + "grad_norm": 2.466899387990417, + "learning_rate": 2.6845052919149934e-07, + "loss": 0.6954, + "step": 5172 + }, + { + "epoch": 3.46483590087073, + "grad_norm": 2.842070617690217, + "learning_rate": 2.677920613224941e-07, + "loss": 0.7045, + "step": 5173 + }, + { + "epoch": 3.465505693235097, + "grad_norm": 2.7937319880034055, + "learning_rate": 2.671343563033249e-07, + "loss": 0.7813, + "step": 5174 + }, + { + "epoch": 3.466175485599464, + "grad_norm": 2.6692601653634567, + "learning_rate": 2.6647741435876004e-07, + "loss": 0.7004, + "step": 5175 + }, + { + "epoch": 3.4668452779638312, + "grad_norm": 2.676938193038636, + "learning_rate": 2.6582123571330774e-07, + "loss": 0.7777, + "step": 5176 + }, + { + "epoch": 3.467515070328198, + "grad_norm": 2.262749019873836, + "learning_rate": 2.6516582059121445e-07, + "loss": 0.5406, + "step": 5177 + }, + { + "epoch": 3.4681848626925653, + "grad_norm": 3.2996972456916702, + "learning_rate": 2.645111692164662e-07, + "loss": 0.8259, + 
"step": 5178 + }, + { + "epoch": 3.4688546550569326, + "grad_norm": 2.44620612292026, + "learning_rate": 2.6385728181278827e-07, + "loss": 0.6081, + "step": 5179 + }, + { + "epoch": 3.4695244474212994, + "grad_norm": 3.5004790539306705, + "learning_rate": 2.632041586036446e-07, + "loss": 0.7084, + "step": 5180 + }, + { + "epoch": 3.470194239785666, + "grad_norm": 2.9141009606196686, + "learning_rate": 2.625517998122376e-07, + "loss": 0.795, + "step": 5181 + }, + { + "epoch": 3.4708640321500335, + "grad_norm": 3.110762518729119, + "learning_rate": 2.6190020566150884e-07, + "loss": 0.7816, + "step": 5182 + }, + { + "epoch": 3.4715338245144007, + "grad_norm": 2.447760833999001, + "learning_rate": 2.612493763741383e-07, + "loss": 0.6722, + "step": 5183 + }, + { + "epoch": 3.4722036168787676, + "grad_norm": 4.650793172343653, + "learning_rate": 2.6059931217254557e-07, + "loss": 0.8003, + "step": 5184 + }, + { + "epoch": 3.472873409243135, + "grad_norm": 3.2335289686970294, + "learning_rate": 2.599500132788879e-07, + "loss": 0.8715, + "step": 5185 + }, + { + "epoch": 3.4735432016075016, + "grad_norm": 2.856666858605018, + "learning_rate": 2.5930147991506003e-07, + "loss": 0.8669, + "step": 5186 + }, + { + "epoch": 3.474212993971869, + "grad_norm": 2.968131143730743, + "learning_rate": 2.5865371230269667e-07, + "loss": 0.79, + "step": 5187 + }, + { + "epoch": 3.4748827863362357, + "grad_norm": 3.4035078141127455, + "learning_rate": 2.580067106631706e-07, + "loss": 0.904, + "step": 5188 + }, + { + "epoch": 3.475552578700603, + "grad_norm": 3.026375906105706, + "learning_rate": 2.573604752175932e-07, + "loss": 0.7704, + "step": 5189 + }, + { + "epoch": 3.47622237106497, + "grad_norm": 2.4999901839733614, + "learning_rate": 2.5671500618681177e-07, + "loss": 0.7298, + "step": 5190 + }, + { + "epoch": 3.476892163429337, + "grad_norm": 14.06864048898204, + "learning_rate": 2.560703037914136e-07, + "loss": 0.9031, + "step": 5191 + }, + { + "epoch": 3.477561955793704, + "grad_norm": 2.591869501520293, + "learning_rate": 2.554263682517244e-07, + "loss": 0.8136, + "step": 5192 + }, + { + "epoch": 3.478231748158071, + "grad_norm": 2.6482992915758348, + "learning_rate": 2.5478319978780726e-07, + "loss": 0.773, + "step": 5193 + }, + { + "epoch": 3.478901540522438, + "grad_norm": 2.5030013371611384, + "learning_rate": 2.541407986194619e-07, + "loss": 0.7053, + "step": 5194 + }, + { + "epoch": 3.479571332886805, + "grad_norm": 3.0414285936497687, + "learning_rate": 2.534991649662269e-07, + "loss": 0.8911, + "step": 5195 + }, + { + "epoch": 3.480241125251172, + "grad_norm": 3.0201244600101447, + "learning_rate": 2.5285829904737934e-07, + "loss": 0.8572, + "step": 5196 + }, + { + "epoch": 3.4809109176155393, + "grad_norm": 9.72643172986337, + "learning_rate": 2.5221820108193297e-07, + "loss": 0.8039, + "step": 5197 + }, + { + "epoch": 3.481580709979906, + "grad_norm": 2.8506554089758827, + "learning_rate": 2.5157887128863856e-07, + "loss": 0.7602, + "step": 5198 + }, + { + "epoch": 3.4822505023442734, + "grad_norm": 2.6222077297692903, + "learning_rate": 2.509403098859847e-07, + "loss": 0.8165, + "step": 5199 + }, + { + "epoch": 3.48292029470864, + "grad_norm": 2.6834384108368012, + "learning_rate": 2.5030251709219883e-07, + "loss": 0.7744, + "step": 5200 + }, + { + "epoch": 3.4835900870730074, + "grad_norm": 2.493024712851863, + "learning_rate": 2.496654931252443e-07, + "loss": 0.6455, + "step": 5201 + }, + { + "epoch": 3.4842598794373743, + "grad_norm": 2.4602636266345117, + "learning_rate": 
2.490292382028212e-07, + "loss": 0.7366, + "step": 5202 + }, + { + "epoch": 3.4849296718017415, + "grad_norm": 2.635000840922741, + "learning_rate": 2.483937525423677e-07, + "loss": 0.6515, + "step": 5203 + }, + { + "epoch": 3.4855994641661083, + "grad_norm": 2.7173942064639878, + "learning_rate": 2.477590363610599e-07, + "loss": 0.8764, + "step": 5204 + }, + { + "epoch": 3.4862692565304756, + "grad_norm": 2.595012260826424, + "learning_rate": 2.471250898758099e-07, + "loss": 0.7627, + "step": 5205 + }, + { + "epoch": 3.4869390488948424, + "grad_norm": 2.8811843949457434, + "learning_rate": 2.464919133032659e-07, + "loss": 0.891, + "step": 5206 + }, + { + "epoch": 3.4876088412592097, + "grad_norm": 2.3118993303569666, + "learning_rate": 2.4585950685981405e-07, + "loss": 0.7419, + "step": 5207 + }, + { + "epoch": 3.488278633623577, + "grad_norm": 3.0613676644356738, + "learning_rate": 2.4522787076157795e-07, + "loss": 0.8188, + "step": 5208 + }, + { + "epoch": 3.4889484259879437, + "grad_norm": 2.6683568838446887, + "learning_rate": 2.445970052244173e-07, + "loss": 0.7875, + "step": 5209 + }, + { + "epoch": 3.4896182183523106, + "grad_norm": 2.70533957187416, + "learning_rate": 2.439669104639275e-07, + "loss": 0.8501, + "step": 5210 + }, + { + "epoch": 3.490288010716678, + "grad_norm": 2.7337459421718435, + "learning_rate": 2.4333758669544156e-07, + "loss": 0.8443, + "step": 5211 + }, + { + "epoch": 3.490957803081045, + "grad_norm": 2.5688391106529145, + "learning_rate": 2.4270903413402933e-07, + "loss": 0.7318, + "step": 5212 + }, + { + "epoch": 3.491627595445412, + "grad_norm": 2.9440619654865454, + "learning_rate": 2.420812529944969e-07, + "loss": 0.7786, + "step": 5213 + }, + { + "epoch": 3.4922973878097787, + "grad_norm": 2.8138068836171124, + "learning_rate": 2.414542434913847e-07, + "loss": 0.8182, + "step": 5214 + }, + { + "epoch": 3.492967180174146, + "grad_norm": 2.7581828865434983, + "learning_rate": 2.408280058389731e-07, + "loss": 0.7216, + "step": 5215 + }, + { + "epoch": 3.4936369725385132, + "grad_norm": 2.8028204746127785, + "learning_rate": 2.4020254025127613e-07, + "loss": 0.6495, + "step": 5216 + }, + { + "epoch": 3.49430676490288, + "grad_norm": 2.9295613253340003, + "learning_rate": 2.395778469420451e-07, + "loss": 0.752, + "step": 5217 + }, + { + "epoch": 3.4949765572672473, + "grad_norm": 2.9039706966939804, + "learning_rate": 2.389539261247653e-07, + "loss": 0.8456, + "step": 5218 + }, + { + "epoch": 3.495646349631614, + "grad_norm": 2.9484855039137323, + "learning_rate": 2.383307780126612e-07, + "loss": 0.7764, + "step": 5219 + }, + { + "epoch": 3.4963161419959814, + "grad_norm": 3.0015210605894347, + "learning_rate": 2.3770840281869128e-07, + "loss": 0.8143, + "step": 5220 + }, + { + "epoch": 3.496985934360348, + "grad_norm": 2.904623692571769, + "learning_rate": 2.3708680075555045e-07, + "loss": 0.8649, + "step": 5221 + }, + { + "epoch": 3.4976557267247155, + "grad_norm": 2.695238180024862, + "learning_rate": 2.364659720356685e-07, + "loss": 0.6603, + "step": 5222 + }, + { + "epoch": 3.4983255190890823, + "grad_norm": 2.485083531264805, + "learning_rate": 2.358459168712124e-07, + "loss": 0.6445, + "step": 5223 + }, + { + "epoch": 3.4989953114534496, + "grad_norm": 2.303968688084619, + "learning_rate": 2.352266354740837e-07, + "loss": 0.6786, + "step": 5224 + }, + { + "epoch": 3.4996651038178164, + "grad_norm": 3.0262217249963794, + "learning_rate": 2.3460812805591955e-07, + "loss": 0.8526, + "step": 5225 + }, + { + "epoch": 3.5003348961821836, + "grad_norm": 
2.642801523191761, + "learning_rate": 2.3399039482809305e-07, + "loss": 0.7705, + "step": 5226 + }, + { + "epoch": 3.5010046885465504, + "grad_norm": 2.7112149392849183, + "learning_rate": 2.3337343600171286e-07, + "loss": 0.7956, + "step": 5227 + }, + { + "epoch": 3.5016744809109177, + "grad_norm": 2.9273052324499567, + "learning_rate": 2.3275725178762225e-07, + "loss": 0.8739, + "step": 5228 + }, + { + "epoch": 3.5023442732752845, + "grad_norm": 2.8371433693443207, + "learning_rate": 2.3214184239640004e-07, + "loss": 0.887, + "step": 5229 + }, + { + "epoch": 3.503014065639652, + "grad_norm": 2.9216914727187318, + "learning_rate": 2.3152720803836098e-07, + "loss": 0.7877, + "step": 5230 + }, + { + "epoch": 3.5036838580040186, + "grad_norm": 2.5810352253285243, + "learning_rate": 2.3091334892355405e-07, + "loss": 0.8186, + "step": 5231 + }, + { + "epoch": 3.504353650368386, + "grad_norm": 3.144406267940947, + "learning_rate": 2.3030026526176336e-07, + "loss": 0.8479, + "step": 5232 + }, + { + "epoch": 3.5050234427327527, + "grad_norm": 3.032231219005518, + "learning_rate": 2.2968795726250852e-07, + "loss": 0.9316, + "step": 5233 + }, + { + "epoch": 3.50569323509712, + "grad_norm": 3.0858919330178463, + "learning_rate": 2.2907642513504414e-07, + "loss": 0.701, + "step": 5234 + }, + { + "epoch": 3.5063630274614868, + "grad_norm": 3.3394086302993564, + "learning_rate": 2.2846566908835882e-07, + "loss": 0.8689, + "step": 5235 + }, + { + "epoch": 3.507032819825854, + "grad_norm": 2.4441700790783636, + "learning_rate": 2.2785568933117647e-07, + "loss": 0.6414, + "step": 5236 + }, + { + "epoch": 3.5077026121902213, + "grad_norm": 2.43303598290568, + "learning_rate": 2.2724648607195615e-07, + "loss": 0.8538, + "step": 5237 + }, + { + "epoch": 3.508372404554588, + "grad_norm": 2.771956124530348, + "learning_rate": 2.266380595188908e-07, + "loss": 0.715, + "step": 5238 + }, + { + "epoch": 3.509042196918955, + "grad_norm": 2.7682307584694126, + "learning_rate": 2.260304098799082e-07, + "loss": 0.7309, + "step": 5239 + }, + { + "epoch": 3.509711989283322, + "grad_norm": 2.8103501317599595, + "learning_rate": 2.2542353736267064e-07, + "loss": 0.6036, + "step": 5240 + }, + { + "epoch": 3.5103817816476894, + "grad_norm": 2.8526650487101306, + "learning_rate": 2.2481744217457496e-07, + "loss": 0.7926, + "step": 5241 + }, + { + "epoch": 3.5110515740120563, + "grad_norm": 2.6719868663020514, + "learning_rate": 2.2421212452275215e-07, + "loss": 0.7113, + "step": 5242 + }, + { + "epoch": 3.511721366376423, + "grad_norm": 2.6725547127437106, + "learning_rate": 2.2360758461406757e-07, + "loss": 0.8233, + "step": 5243 + }, + { + "epoch": 3.5123911587407903, + "grad_norm": 2.846094676701013, + "learning_rate": 2.2300382265512066e-07, + "loss": 0.7531, + "step": 5244 + }, + { + "epoch": 3.5130609511051576, + "grad_norm": 2.507725144822489, + "learning_rate": 2.2240083885224556e-07, + "loss": 0.7301, + "step": 5245 + }, + { + "epoch": 3.5137307434695244, + "grad_norm": 2.6114051788953834, + "learning_rate": 2.2179863341150936e-07, + "loss": 1.0234, + "step": 5246 + }, + { + "epoch": 3.5144005358338912, + "grad_norm": 2.8638606889160556, + "learning_rate": 2.211972065387144e-07, + "loss": 0.8063, + "step": 5247 + }, + { + "epoch": 3.5150703281982585, + "grad_norm": 3.503801553961111, + "learning_rate": 2.2059655843939597e-07, + "loss": 0.814, + "step": 5248 + }, + { + "epoch": 3.5157401205626257, + "grad_norm": 4.412848086451492, + "learning_rate": 2.1999668931882407e-07, + "loss": 0.7271, + "step": 5249 + }, + { + 
"epoch": 3.5164099129269926, + "grad_norm": 2.6718624791626606, + "learning_rate": 2.1939759938200134e-07, + "loss": 0.5695, + "step": 5250 + }, + { + "epoch": 3.51707970529136, + "grad_norm": 2.69099283873437, + "learning_rate": 2.187992888336654e-07, + "loss": 0.739, + "step": 5251 + }, + { + "epoch": 3.5177494976557266, + "grad_norm": 2.5928388253270276, + "learning_rate": 2.182017578782869e-07, + "loss": 0.6917, + "step": 5252 + }, + { + "epoch": 3.518419290020094, + "grad_norm": 3.781196919868201, + "learning_rate": 2.1760500672006995e-07, + "loss": 0.7916, + "step": 5253 + }, + { + "epoch": 3.5190890823844607, + "grad_norm": 3.2503025491657835, + "learning_rate": 2.170090355629523e-07, + "loss": 0.8618, + "step": 5254 + }, + { + "epoch": 3.519758874748828, + "grad_norm": 3.5214973030299017, + "learning_rate": 2.164138446106051e-07, + "loss": 0.8519, + "step": 5255 + }, + { + "epoch": 3.520428667113195, + "grad_norm": 2.7199262847600156, + "learning_rate": 2.1581943406643297e-07, + "loss": 0.739, + "step": 5256 + }, + { + "epoch": 3.521098459477562, + "grad_norm": 3.3678992200429048, + "learning_rate": 2.1522580413357419e-07, + "loss": 0.7346, + "step": 5257 + }, + { + "epoch": 3.521768251841929, + "grad_norm": 2.818185178319606, + "learning_rate": 2.14632955014899e-07, + "loss": 0.7985, + "step": 5258 + }, + { + "epoch": 3.522438044206296, + "grad_norm": 2.7729756912798873, + "learning_rate": 2.1404088691301283e-07, + "loss": 0.815, + "step": 5259 + }, + { + "epoch": 3.523107836570663, + "grad_norm": 3.1922436485602543, + "learning_rate": 2.1344960003025218e-07, + "loss": 0.8205, + "step": 5260 + }, + { + "epoch": 3.52377762893503, + "grad_norm": 2.203539325324253, + "learning_rate": 2.1285909456868787e-07, + "loss": 0.5862, + "step": 5261 + }, + { + "epoch": 3.524447421299397, + "grad_norm": 2.9984181100053098, + "learning_rate": 2.122693707301224e-07, + "loss": 0.7922, + "step": 5262 + }, + { + "epoch": 3.5251172136637643, + "grad_norm": 2.849845007533477, + "learning_rate": 2.116804287160934e-07, + "loss": 0.772, + "step": 5263 + }, + { + "epoch": 3.525787006028131, + "grad_norm": 2.4492345121858534, + "learning_rate": 2.1109226872786853e-07, + "loss": 0.7252, + "step": 5264 + }, + { + "epoch": 3.5264567983924984, + "grad_norm": 2.897303141439389, + "learning_rate": 2.105048909664506e-07, + "loss": 0.609, + "step": 5265 + }, + { + "epoch": 3.5271265907568656, + "grad_norm": 2.058092462800647, + "learning_rate": 2.0991829563257265e-07, + "loss": 0.5751, + "step": 5266 + }, + { + "epoch": 3.5277963831212324, + "grad_norm": 2.7435101623235876, + "learning_rate": 2.0933248292670406e-07, + "loss": 0.7051, + "step": 5267 + }, + { + "epoch": 3.5284661754855993, + "grad_norm": 2.6009658044738004, + "learning_rate": 2.0874745304904216e-07, + "loss": 0.7921, + "step": 5268 + }, + { + "epoch": 3.5291359678499665, + "grad_norm": 2.569769366861188, + "learning_rate": 2.0816320619952008e-07, + "loss": 0.8102, + "step": 5269 + }, + { + "epoch": 3.529805760214334, + "grad_norm": 3.070165632134926, + "learning_rate": 2.0757974257780173e-07, + "loss": 0.7161, + "step": 5270 + }, + { + "epoch": 3.5304755525787006, + "grad_norm": 3.0376559007305, + "learning_rate": 2.069970623832851e-07, + "loss": 0.8473, + "step": 5271 + }, + { + "epoch": 3.5311453449430674, + "grad_norm": 2.935610371954402, + "learning_rate": 2.0641516581509814e-07, + "loss": 0.7892, + "step": 5272 + }, + { + "epoch": 3.5318151373074347, + "grad_norm": 3.1788391831751683, + "learning_rate": 2.0583405307210174e-07, + "loss": 0.7608, 
+ "step": 5273 + }, + { + "epoch": 3.532484929671802, + "grad_norm": 2.8435912920004864, + "learning_rate": 2.052537243528907e-07, + "loss": 0.8192, + "step": 5274 + }, + { + "epoch": 3.5331547220361688, + "grad_norm": 4.233672599387468, + "learning_rate": 2.0467417985578996e-07, + "loss": 0.8312, + "step": 5275 + }, + { + "epoch": 3.5338245144005356, + "grad_norm": 4.4801040579368125, + "learning_rate": 2.0409541977885666e-07, + "loss": 0.8162, + "step": 5276 + }, + { + "epoch": 3.534494306764903, + "grad_norm": 2.44345662305189, + "learning_rate": 2.035174443198795e-07, + "loss": 0.7328, + "step": 5277 + }, + { + "epoch": 3.53516409912927, + "grad_norm": 2.450143849182462, + "learning_rate": 2.0294025367638136e-07, + "loss": 0.683, + "step": 5278 + }, + { + "epoch": 3.535833891493637, + "grad_norm": 2.535953348098567, + "learning_rate": 2.023638480456147e-07, + "loss": 0.7878, + "step": 5279 + }, + { + "epoch": 3.536503683858004, + "grad_norm": 3.237142004479083, + "learning_rate": 2.0178822762456362e-07, + "loss": 0.9299, + "step": 5280 + }, + { + "epoch": 3.537173476222371, + "grad_norm": 3.147192149129202, + "learning_rate": 2.0121339260994438e-07, + "loss": 0.8724, + "step": 5281 + }, + { + "epoch": 3.5378432685867383, + "grad_norm": 2.7170780399198295, + "learning_rate": 2.0063934319820595e-07, + "loss": 0.7727, + "step": 5282 + }, + { + "epoch": 3.538513060951105, + "grad_norm": 2.758141126652643, + "learning_rate": 2.0006607958552805e-07, + "loss": 0.8937, + "step": 5283 + }, + { + "epoch": 3.5391828533154723, + "grad_norm": 2.771332501044663, + "learning_rate": 1.9949360196782065e-07, + "loss": 0.8588, + "step": 5284 + }, + { + "epoch": 3.539852645679839, + "grad_norm": 2.510589702416251, + "learning_rate": 1.9892191054072612e-07, + "loss": 0.7731, + "step": 5285 + }, + { + "epoch": 3.5405224380442064, + "grad_norm": 3.27562797678869, + "learning_rate": 1.9835100549961878e-07, + "loss": 0.8337, + "step": 5286 + }, + { + "epoch": 3.5411922304085732, + "grad_norm": 3.3926791775628873, + "learning_rate": 1.977808870396039e-07, + "loss": 0.8361, + "step": 5287 + }, + { + "epoch": 3.5418620227729405, + "grad_norm": 2.591273722063561, + "learning_rate": 1.9721155535551678e-07, + "loss": 0.6234, + "step": 5288 + }, + { + "epoch": 3.5425318151373073, + "grad_norm": 2.367540378203085, + "learning_rate": 1.9664301064192453e-07, + "loss": 0.6641, + "step": 5289 + }, + { + "epoch": 3.5432016075016746, + "grad_norm": 2.690081508831837, + "learning_rate": 1.960752530931262e-07, + "loss": 0.7871, + "step": 5290 + }, + { + "epoch": 3.5438713998660414, + "grad_norm": 2.6273071737253844, + "learning_rate": 1.9550828290315155e-07, + "loss": 0.6411, + "step": 5291 + }, + { + "epoch": 3.5445411922304086, + "grad_norm": 3.1756622142778848, + "learning_rate": 1.949421002657595e-07, + "loss": 0.7337, + "step": 5292 + }, + { + "epoch": 3.5452109845947755, + "grad_norm": 2.940300284036039, + "learning_rate": 1.943767053744411e-07, + "loss": 0.6536, + "step": 5293 + }, + { + "epoch": 3.5458807769591427, + "grad_norm": 2.7549221997667073, + "learning_rate": 1.9381209842241927e-07, + "loss": 0.7143, + "step": 5294 + }, + { + "epoch": 3.54655056932351, + "grad_norm": 2.4262838926105235, + "learning_rate": 1.9324827960264636e-07, + "loss": 0.7288, + "step": 5295 + }, + { + "epoch": 3.547220361687877, + "grad_norm": 3.0318329876692296, + "learning_rate": 1.92685249107806e-07, + "loss": 0.8517, + "step": 5296 + }, + { + "epoch": 3.5478901540522436, + "grad_norm": 2.7330494314114704, + "learning_rate": 
1.921230071303104e-07, + "loss": 0.7156, + "step": 5297 + }, + { + "epoch": 3.548559946416611, + "grad_norm": 3.087713360278909, + "learning_rate": 1.915615538623053e-07, + "loss": 0.8395, + "step": 5298 + }, + { + "epoch": 3.549229738780978, + "grad_norm": 2.975994137518015, + "learning_rate": 1.9100088949566552e-07, + "loss": 0.739, + "step": 5299 + }, + { + "epoch": 3.549899531145345, + "grad_norm": 2.310274050746528, + "learning_rate": 1.904410142219959e-07, + "loss": 0.6087, + "step": 5300 + }, + { + "epoch": 3.5505693235097118, + "grad_norm": 3.176230506115533, + "learning_rate": 1.8988192823263197e-07, + "loss": 0.6664, + "step": 5301 + }, + { + "epoch": 3.551239115874079, + "grad_norm": 3.473267469759522, + "learning_rate": 1.8932363171864006e-07, + "loss": 0.9673, + "step": 5302 + }, + { + "epoch": 3.5519089082384463, + "grad_norm": 2.630645648391538, + "learning_rate": 1.8876612487081558e-07, + "loss": 0.6889, + "step": 5303 + }, + { + "epoch": 3.552578700602813, + "grad_norm": 2.881185632773045, + "learning_rate": 1.8820940787968507e-07, + "loss": 0.8699, + "step": 5304 + }, + { + "epoch": 3.55324849296718, + "grad_norm": 2.732575497135747, + "learning_rate": 1.8765348093550463e-07, + "loss": 0.7965, + "step": 5305 + }, + { + "epoch": 3.553918285331547, + "grad_norm": 2.8944193100553632, + "learning_rate": 1.8709834422826035e-07, + "loss": 0.7861, + "step": 5306 + }, + { + "epoch": 3.5545880776959144, + "grad_norm": 2.6461729120615, + "learning_rate": 1.8654399794766882e-07, + "loss": 0.7059, + "step": 5307 + }, + { + "epoch": 3.5552578700602813, + "grad_norm": 2.809021476375477, + "learning_rate": 1.859904422831757e-07, + "loss": 0.6767, + "step": 5308 + }, + { + "epoch": 3.555927662424648, + "grad_norm": 2.758530694887619, + "learning_rate": 1.8543767742395692e-07, + "loss": 0.8023, + "step": 5309 + }, + { + "epoch": 3.5565974547890153, + "grad_norm": 2.4861580708422664, + "learning_rate": 1.8488570355891828e-07, + "loss": 0.7029, + "step": 5310 + }, + { + "epoch": 3.5572672471533826, + "grad_norm": 3.888909340620315, + "learning_rate": 1.8433452087669474e-07, + "loss": 0.7685, + "step": 5311 + }, + { + "epoch": 3.5579370395177494, + "grad_norm": 2.790723812056266, + "learning_rate": 1.8378412956565145e-07, + "loss": 0.8341, + "step": 5312 + }, + { + "epoch": 3.5586068318821167, + "grad_norm": 2.659004392639056, + "learning_rate": 1.8323452981388267e-07, + "loss": 0.6861, + "step": 5313 + }, + { + "epoch": 3.5592766242464835, + "grad_norm": 2.4937825294891955, + "learning_rate": 1.8268572180921263e-07, + "loss": 0.6684, + "step": 5314 + }, + { + "epoch": 3.5599464166108508, + "grad_norm": 2.465070451976805, + "learning_rate": 1.8213770573919433e-07, + "loss": 0.77, + "step": 5315 + }, + { + "epoch": 3.5606162089752176, + "grad_norm": 2.7214039569251676, + "learning_rate": 1.8159048179111072e-07, + "loss": 0.8092, + "step": 5316 + }, + { + "epoch": 3.561286001339585, + "grad_norm": 2.674671761054923, + "learning_rate": 1.810440501519739e-07, + "loss": 0.764, + "step": 5317 + }, + { + "epoch": 3.5619557937039517, + "grad_norm": 2.955504005851599, + "learning_rate": 1.8049841100852477e-07, + "loss": 0.8972, + "step": 5318 + }, + { + "epoch": 3.562625586068319, + "grad_norm": 2.893641404169939, + "learning_rate": 1.7995356454723416e-07, + "loss": 0.8197, + "step": 5319 + }, + { + "epoch": 3.5632953784326857, + "grad_norm": 2.853496475010259, + "learning_rate": 1.794095109543012e-07, + "loss": 0.7898, + "step": 5320 + }, + { + "epoch": 3.563965170797053, + "grad_norm": 
3.1525466460724196, + "learning_rate": 1.7886625041565465e-07, + "loss": 0.8135, + "step": 5321 + }, + { + "epoch": 3.56463496316142, + "grad_norm": 3.045903166197537, + "learning_rate": 1.783237831169521e-07, + "loss": 0.6994, + "step": 5322 + }, + { + "epoch": 3.565304755525787, + "grad_norm": 4.711163785522011, + "learning_rate": 1.777821092435797e-07, + "loss": 0.8308, + "step": 5323 + }, + { + "epoch": 3.565974547890154, + "grad_norm": 3.345811849218893, + "learning_rate": 1.77241228980653e-07, + "loss": 0.8047, + "step": 5324 + }, + { + "epoch": 3.566644340254521, + "grad_norm": 2.7494344254432885, + "learning_rate": 1.7670114251301607e-07, + "loss": 0.77, + "step": 5325 + }, + { + "epoch": 3.567314132618888, + "grad_norm": 2.930432940677215, + "learning_rate": 1.7616185002524155e-07, + "loss": 0.8547, + "step": 5326 + }, + { + "epoch": 3.5679839249832552, + "grad_norm": 2.694834951540221, + "learning_rate": 1.7562335170163091e-07, + "loss": 0.7893, + "step": 5327 + }, + { + "epoch": 3.5686537173476225, + "grad_norm": 2.9655140034197895, + "learning_rate": 1.7508564772621444e-07, + "loss": 0.7396, + "step": 5328 + }, + { + "epoch": 3.5693235097119893, + "grad_norm": 2.750329732852391, + "learning_rate": 1.745487382827507e-07, + "loss": 0.9374, + "step": 5329 + }, + { + "epoch": 3.569993302076356, + "grad_norm": 2.2801849945607433, + "learning_rate": 1.7401262355472653e-07, + "loss": 0.5806, + "step": 5330 + }, + { + "epoch": 3.5706630944407234, + "grad_norm": 2.2410097420450956, + "learning_rate": 1.734773037253576e-07, + "loss": 0.6602, + "step": 5331 + }, + { + "epoch": 3.5713328868050906, + "grad_norm": 3.114095545593711, + "learning_rate": 1.7294277897758728e-07, + "loss": 0.8417, + "step": 5332 + }, + { + "epoch": 3.5720026791694575, + "grad_norm": 2.9944761787897787, + "learning_rate": 1.724090494940886e-07, + "loss": 0.693, + "step": 5333 + }, + { + "epoch": 3.5726724715338243, + "grad_norm": 2.727561683180268, + "learning_rate": 1.7187611545726125e-07, + "loss": 0.7158, + "step": 5334 + }, + { + "epoch": 3.5733422638981915, + "grad_norm": 3.042201945229738, + "learning_rate": 1.7134397704923366e-07, + "loss": 0.7918, + "step": 5335 + }, + { + "epoch": 3.574012056262559, + "grad_norm": 2.5959353885845062, + "learning_rate": 1.708126344518621e-07, + "loss": 0.5729, + "step": 5336 + }, + { + "epoch": 3.5746818486269256, + "grad_norm": 2.7566762704757144, + "learning_rate": 1.7028208784673234e-07, + "loss": 0.6415, + "step": 5337 + }, + { + "epoch": 3.5753516409912924, + "grad_norm": 2.7422238062651756, + "learning_rate": 1.6975233741515607e-07, + "loss": 0.8522, + "step": 5338 + }, + { + "epoch": 3.5760214333556597, + "grad_norm": 2.5309130473460337, + "learning_rate": 1.6922338333817374e-07, + "loss": 0.8606, + "step": 5339 + }, + { + "epoch": 3.576691225720027, + "grad_norm": 2.904284985597876, + "learning_rate": 1.686952257965535e-07, + "loss": 0.7906, + "step": 5340 + }, + { + "epoch": 3.5773610180843938, + "grad_norm": 3.0533538048588955, + "learning_rate": 1.6816786497079268e-07, + "loss": 0.8942, + "step": 5341 + }, + { + "epoch": 3.578030810448761, + "grad_norm": 3.431573189764406, + "learning_rate": 1.6764130104111404e-07, + "loss": 0.8229, + "step": 5342 + }, + { + "epoch": 3.578700602813128, + "grad_norm": 3.0106125780490007, + "learning_rate": 1.6711553418746946e-07, + "loss": 0.825, + "step": 5343 + }, + { + "epoch": 3.579370395177495, + "grad_norm": 2.628350551280417, + "learning_rate": 1.665905645895377e-07, + "loss": 0.6883, + "step": 5344 + }, + { + "epoch": 
3.580040187541862, + "grad_norm": 2.847032499385887, + "learning_rate": 1.660663924267264e-07, + "loss": 0.6888, + "step": 5345 + }, + { + "epoch": 3.580709979906229, + "grad_norm": 3.6953390549476244, + "learning_rate": 1.6554301787816863e-07, + "loss": 0.8117, + "step": 5346 + }, + { + "epoch": 3.581379772270596, + "grad_norm": 2.997510332392729, + "learning_rate": 1.650204411227263e-07, + "loss": 0.6858, + "step": 5347 + }, + { + "epoch": 3.5820495646349633, + "grad_norm": 2.915127201695986, + "learning_rate": 1.6449866233898797e-07, + "loss": 0.9667, + "step": 5348 + }, + { + "epoch": 3.58271935699933, + "grad_norm": 2.7076978601917725, + "learning_rate": 1.6397768170527102e-07, + "loss": 0.8193, + "step": 5349 + }, + { + "epoch": 3.5833891493636973, + "grad_norm": 3.168875487272739, + "learning_rate": 1.634574993996177e-07, + "loss": 0.873, + "step": 5350 + }, + { + "epoch": 3.584058941728064, + "grad_norm": 2.9158342584420303, + "learning_rate": 1.6293811559979916e-07, + "loss": 0.8542, + "step": 5351 + }, + { + "epoch": 3.5847287340924314, + "grad_norm": 2.66223703804887, + "learning_rate": 1.6241953048331232e-07, + "loss": 0.7134, + "step": 5352 + }, + { + "epoch": 3.5853985264567982, + "grad_norm": 2.3667501493554695, + "learning_rate": 1.6190174422738376e-07, + "loss": 0.661, + "step": 5353 + }, + { + "epoch": 3.5860683188211655, + "grad_norm": 2.9213638692693986, + "learning_rate": 1.6138475700896384e-07, + "loss": 0.493, + "step": 5354 + }, + { + "epoch": 3.5867381111855323, + "grad_norm": 2.9514483170405934, + "learning_rate": 1.6086856900473153e-07, + "loss": 0.7489, + "step": 5355 + }, + { + "epoch": 3.5874079035498996, + "grad_norm": 2.6082478297645966, + "learning_rate": 1.603531803910918e-07, + "loss": 0.7071, + "step": 5356 + }, + { + "epoch": 3.588077695914267, + "grad_norm": 4.559860963450634, + "learning_rate": 1.5983859134417885e-07, + "loss": 0.7557, + "step": 5357 + }, + { + "epoch": 3.5887474882786337, + "grad_norm": 3.445559828886725, + "learning_rate": 1.5932480203984996e-07, + "loss": 0.7148, + "step": 5358 + }, + { + "epoch": 3.5894172806430005, + "grad_norm": 2.690873294359022, + "learning_rate": 1.5881181265369195e-07, + "loss": 0.7674, + "step": 5359 + }, + { + "epoch": 3.5900870730073677, + "grad_norm": 2.6453102792536316, + "learning_rate": 1.582996233610165e-07, + "loss": 0.8526, + "step": 5360 + }, + { + "epoch": 3.590756865371735, + "grad_norm": 2.557280595636372, + "learning_rate": 1.577882343368639e-07, + "loss": 0.7707, + "step": 5361 + }, + { + "epoch": 3.591426657736102, + "grad_norm": 2.5909172584395272, + "learning_rate": 1.572776457559988e-07, + "loss": 0.7253, + "step": 5362 + }, + { + "epoch": 3.5920964501004686, + "grad_norm": 2.4267558653513746, + "learning_rate": 1.567678577929127e-07, + "loss": 0.6608, + "step": 5363 + }, + { + "epoch": 3.592766242464836, + "grad_norm": 4.550467778648024, + "learning_rate": 1.5625887062182515e-07, + "loss": 0.7845, + "step": 5364 + }, + { + "epoch": 3.593436034829203, + "grad_norm": 2.4804711806271103, + "learning_rate": 1.5575068441668033e-07, + "loss": 0.7755, + "step": 5365 + }, + { + "epoch": 3.59410582719357, + "grad_norm": 2.4778314212044843, + "learning_rate": 1.5524329935114961e-07, + "loss": 0.6756, + "step": 5366 + }, + { + "epoch": 3.594775619557937, + "grad_norm": 2.8681720986039534, + "learning_rate": 1.54736715598629e-07, + "loss": 0.8157, + "step": 5367 + }, + { + "epoch": 3.595445411922304, + "grad_norm": 2.9460891605326474, + "learning_rate": 1.5423093333224308e-07, + "loss": 0.7473, + 
"step": 5368 + }, + { + "epoch": 3.5961152042866713, + "grad_norm": 2.9097771328395146, + "learning_rate": 1.5372595272484082e-07, + "loss": 0.8428, + "step": 5369 + }, + { + "epoch": 3.596784996651038, + "grad_norm": 2.724257687460456, + "learning_rate": 1.532217739489983e-07, + "loss": 0.7001, + "step": 5370 + }, + { + "epoch": 3.5974547890154054, + "grad_norm": 2.8532178761017337, + "learning_rate": 1.5271839717701577e-07, + "loss": 0.7361, + "step": 5371 + }, + { + "epoch": 3.598124581379772, + "grad_norm": 2.69193651301504, + "learning_rate": 1.5221582258092173e-07, + "loss": 0.6247, + "step": 5372 + }, + { + "epoch": 3.5987943737441395, + "grad_norm": 3.055367359960816, + "learning_rate": 1.5171405033246906e-07, + "loss": 0.668, + "step": 5373 + }, + { + "epoch": 3.5994641661085063, + "grad_norm": 2.8574174279941507, + "learning_rate": 1.51213080603137e-07, + "loss": 0.7064, + "step": 5374 + }, + { + "epoch": 3.6001339584728735, + "grad_norm": 2.403265332863789, + "learning_rate": 1.5071291356412914e-07, + "loss": 0.6405, + "step": 5375 + }, + { + "epoch": 3.6008037508372404, + "grad_norm": 2.63559128796058, + "learning_rate": 1.5021354938637766e-07, + "loss": 0.7236, + "step": 5376 + }, + { + "epoch": 3.6014735432016076, + "grad_norm": 2.8673922066707793, + "learning_rate": 1.4971498824053771e-07, + "loss": 0.8646, + "step": 5377 + }, + { + "epoch": 3.6021433355659744, + "grad_norm": 2.4714245785768156, + "learning_rate": 1.4921723029699132e-07, + "loss": 0.7361, + "step": 5378 + }, + { + "epoch": 3.6028131279303417, + "grad_norm": 2.628090776409199, + "learning_rate": 1.4872027572584492e-07, + "loss": 0.7758, + "step": 5379 + }, + { + "epoch": 3.6034829202947085, + "grad_norm": 3.1111428991715906, + "learning_rate": 1.4822412469693208e-07, + "loss": 0.8051, + "step": 5380 + }, + { + "epoch": 3.6041527126590758, + "grad_norm": 2.811157135180601, + "learning_rate": 1.4772877737981029e-07, + "loss": 0.6939, + "step": 5381 + }, + { + "epoch": 3.6048225050234426, + "grad_norm": 3.180775153072367, + "learning_rate": 1.472342339437635e-07, + "loss": 0.8472, + "step": 5382 + }, + { + "epoch": 3.60549229738781, + "grad_norm": 2.8592090602936633, + "learning_rate": 1.467404945577991e-07, + "loss": 0.7265, + "step": 5383 + }, + { + "epoch": 3.6061620897521767, + "grad_norm": 2.7368953363903863, + "learning_rate": 1.4624755939065177e-07, + "loss": 0.7209, + "step": 5384 + }, + { + "epoch": 3.606831882116544, + "grad_norm": 2.7043605681111775, + "learning_rate": 1.457554286107804e-07, + "loss": 0.7512, + "step": 5385 + }, + { + "epoch": 3.607501674480911, + "grad_norm": 2.6024298807200617, + "learning_rate": 1.4526410238636963e-07, + "loss": 0.7319, + "step": 5386 + }, + { + "epoch": 3.608171466845278, + "grad_norm": 2.8789228202508186, + "learning_rate": 1.4477358088532734e-07, + "loss": 0.802, + "step": 5387 + }, + { + "epoch": 3.608841259209645, + "grad_norm": 3.149195923038586, + "learning_rate": 1.4428386427528863e-07, + "loss": 0.7676, + "step": 5388 + }, + { + "epoch": 3.609511051574012, + "grad_norm": 2.562443659383114, + "learning_rate": 1.4379495272361237e-07, + "loss": 0.7089, + "step": 5389 + }, + { + "epoch": 3.6101808439383793, + "grad_norm": 2.8167224348112234, + "learning_rate": 1.4330684639738246e-07, + "loss": 0.8657, + "step": 5390 + }, + { + "epoch": 3.610850636302746, + "grad_norm": 2.667076326214576, + "learning_rate": 1.4281954546340764e-07, + "loss": 0.7357, + "step": 5391 + }, + { + "epoch": 3.611520428667113, + "grad_norm": 2.5095022655000805, + "learning_rate": 
1.423330500882214e-07, + "loss": 0.6903, + "step": 5392 + }, + { + "epoch": 3.6121902210314802, + "grad_norm": 2.6092588011854927, + "learning_rate": 1.4184736043808184e-07, + "loss": 0.6671, + "step": 5393 + }, + { + "epoch": 3.6128600133958475, + "grad_norm": 2.5889814841360685, + "learning_rate": 1.4136247667897203e-07, + "loss": 0.763, + "step": 5394 + }, + { + "epoch": 3.6135298057602143, + "grad_norm": 2.7270220707476533, + "learning_rate": 1.4087839897659967e-07, + "loss": 0.7689, + "step": 5395 + }, + { + "epoch": 3.614199598124581, + "grad_norm": 2.3722275786408438, + "learning_rate": 1.4039512749639605e-07, + "loss": 0.8183, + "step": 5396 + }, + { + "epoch": 3.6148693904889484, + "grad_norm": 2.7421973372183603, + "learning_rate": 1.3991266240351825e-07, + "loss": 0.6964, + "step": 5397 + }, + { + "epoch": 3.6155391828533157, + "grad_norm": 3.1024009401095594, + "learning_rate": 1.3943100386284685e-07, + "loss": 0.7459, + "step": 5398 + }, + { + "epoch": 3.6162089752176825, + "grad_norm": 2.588702867774586, + "learning_rate": 1.3895015203898716e-07, + "loss": 0.6095, + "step": 5399 + }, + { + "epoch": 3.6168787675820493, + "grad_norm": 2.88456277828215, + "learning_rate": 1.3847010709626907e-07, + "loss": 0.8735, + "step": 5400 + }, + { + "epoch": 3.6175485599464166, + "grad_norm": 2.903820348010055, + "learning_rate": 1.3799086919874554e-07, + "loss": 0.5827, + "step": 5401 + }, + { + "epoch": 3.618218352310784, + "grad_norm": 3.047625991535014, + "learning_rate": 1.3751243851019557e-07, + "loss": 0.7348, + "step": 5402 + }, + { + "epoch": 3.6188881446751506, + "grad_norm": 2.6236023270743227, + "learning_rate": 1.370348151941206e-07, + "loss": 0.7569, + "step": 5403 + }, + { + "epoch": 3.619557937039518, + "grad_norm": 2.847063069436185, + "learning_rate": 1.3655799941374697e-07, + "loss": 0.6596, + "step": 5404 + }, + { + "epoch": 3.6202277294038847, + "grad_norm": 2.6866266645047387, + "learning_rate": 1.360819913320252e-07, + "loss": 0.7723, + "step": 5405 + }, + { + "epoch": 3.620897521768252, + "grad_norm": 2.3914266029021247, + "learning_rate": 1.3560679111162932e-07, + "loss": 0.7115, + "step": 5406 + }, + { + "epoch": 3.621567314132619, + "grad_norm": 2.7648499333659013, + "learning_rate": 1.351323989149575e-07, + "loss": 0.869, + "step": 5407 + }, + { + "epoch": 3.622237106496986, + "grad_norm": 3.30983595212658, + "learning_rate": 1.3465881490413168e-07, + "loss": 0.8226, + "step": 5408 + }, + { + "epoch": 3.622906898861353, + "grad_norm": 2.6846377123349723, + "learning_rate": 1.341860392409977e-07, + "loss": 0.8209, + "step": 5409 + }, + { + "epoch": 3.62357669122572, + "grad_norm": 3.2827683053418237, + "learning_rate": 1.3371407208712523e-07, + "loss": 0.8778, + "step": 5410 + }, + { + "epoch": 3.624246483590087, + "grad_norm": 2.6385387309964043, + "learning_rate": 1.332429136038074e-07, + "loss": 0.5534, + "step": 5411 + }, + { + "epoch": 3.624916275954454, + "grad_norm": 2.986281466936915, + "learning_rate": 1.3277256395206106e-07, + "loss": 0.6918, + "step": 5412 + }, + { + "epoch": 3.625586068318821, + "grad_norm": 3.0761434578524343, + "learning_rate": 1.3230302329262674e-07, + "loss": 0.6824, + "step": 5413 + }, + { + "epoch": 3.6262558606831883, + "grad_norm": 3.0457192986572545, + "learning_rate": 1.3183429178596858e-07, + "loss": 0.648, + "step": 5414 + }, + { + "epoch": 3.626925653047555, + "grad_norm": 2.4771216458336505, + "learning_rate": 1.3136636959227373e-07, + "loss": 0.7304, + "step": 5415 + }, + { + "epoch": 3.6275954454119224, + "grad_norm": 
2.360813201822281, + "learning_rate": 1.3089925687145345e-07, + "loss": 0.7602, + "step": 5416 + }, + { + "epoch": 3.628265237776289, + "grad_norm": 2.637356383702868, + "learning_rate": 1.304329537831414e-07, + "loss": 0.782, + "step": 5417 + }, + { + "epoch": 3.6289350301406564, + "grad_norm": 2.6420272073659024, + "learning_rate": 1.2996746048669602e-07, + "loss": 0.8112, + "step": 5418 + }, + { + "epoch": 3.6296048225050237, + "grad_norm": 2.7323294186366134, + "learning_rate": 1.2950277714119746e-07, + "loss": 0.7704, + "step": 5419 + }, + { + "epoch": 3.6302746148693905, + "grad_norm": 2.990278873588977, + "learning_rate": 1.2903890390544989e-07, + "loss": 0.85, + "step": 5420 + }, + { + "epoch": 3.6309444072337573, + "grad_norm": 3.3903282878719554, + "learning_rate": 1.285758409379806e-07, + "loss": 0.8735, + "step": 5421 + }, + { + "epoch": 3.6316141995981246, + "grad_norm": 2.51526127869343, + "learning_rate": 1.281135883970397e-07, + "loss": 0.5604, + "step": 5422 + }, + { + "epoch": 3.632283991962492, + "grad_norm": 2.841978683333726, + "learning_rate": 1.2765214644060109e-07, + "loss": 0.6878, + "step": 5423 + }, + { + "epoch": 3.6329537843268587, + "grad_norm": 2.9085165151112866, + "learning_rate": 1.271915152263603e-07, + "loss": 0.837, + "step": 5424 + }, + { + "epoch": 3.6336235766912255, + "grad_norm": 2.582399584626108, + "learning_rate": 1.2673169491173697e-07, + "loss": 0.7672, + "step": 5425 + }, + { + "epoch": 3.6342933690555927, + "grad_norm": 2.483909005524625, + "learning_rate": 1.2627268565387262e-07, + "loss": 0.7238, + "step": 5426 + }, + { + "epoch": 3.63496316141996, + "grad_norm": 2.5803657962137345, + "learning_rate": 1.2581448760963315e-07, + "loss": 0.7581, + "step": 5427 + }, + { + "epoch": 3.635632953784327, + "grad_norm": 2.550466070505371, + "learning_rate": 1.2535710093560583e-07, + "loss": 0.6414, + "step": 5428 + }, + { + "epoch": 3.6363027461486936, + "grad_norm": 2.7331601234842253, + "learning_rate": 1.249005257881006e-07, + "loss": 0.746, + "step": 5429 + }, + { + "epoch": 3.636972538513061, + "grad_norm": 2.4892739342646193, + "learning_rate": 1.2444476232315094e-07, + "loss": 0.5264, + "step": 5430 + }, + { + "epoch": 3.637642330877428, + "grad_norm": 2.955619912193088, + "learning_rate": 1.239898106965129e-07, + "loss": 0.8541, + "step": 5431 + }, + { + "epoch": 3.638312123241795, + "grad_norm": 2.7600327680635033, + "learning_rate": 1.2353567106366427e-07, + "loss": 0.6993, + "step": 5432 + }, + { + "epoch": 3.6389819156061622, + "grad_norm": 3.2360557043306057, + "learning_rate": 1.2308234357980564e-07, + "loss": 0.8344, + "step": 5433 + }, + { + "epoch": 3.639651707970529, + "grad_norm": 2.54192285288786, + "learning_rate": 1.2262982839986032e-07, + "loss": 0.7297, + "step": 5434 + }, + { + "epoch": 3.6403215003348963, + "grad_norm": 2.6813026399072495, + "learning_rate": 1.2217812567847432e-07, + "loss": 0.6707, + "step": 5435 + }, + { + "epoch": 3.640991292699263, + "grad_norm": 2.5037394177828878, + "learning_rate": 1.2172723557001582e-07, + "loss": 0.7763, + "step": 5436 + }, + { + "epoch": 3.6416610850636304, + "grad_norm": 3.0736012849423617, + "learning_rate": 1.2127715822857406e-07, + "loss": 0.7129, + "step": 5437 + }, + { + "epoch": 3.642330877427997, + "grad_norm": 2.9739357583992465, + "learning_rate": 1.2082789380796155e-07, + "loss": 0.822, + "step": 5438 + }, + { + "epoch": 3.6430006697923645, + "grad_norm": 2.9057961630343203, + "learning_rate": 1.2037944246171386e-07, + "loss": 0.6455, + "step": 5439 + }, + { + "epoch": 
3.6436704621567313, + "grad_norm": 3.062611880748525, + "learning_rate": 1.1993180434308775e-07, + "loss": 0.8228, + "step": 5440 + }, + { + "epoch": 3.6443402545210986, + "grad_norm": 2.7461136391578296, + "learning_rate": 1.1948497960506122e-07, + "loss": 0.7463, + "step": 5441 + }, + { + "epoch": 3.6450100468854654, + "grad_norm": 3.1175228163526976, + "learning_rate": 1.1903896840033513e-07, + "loss": 0.7917, + "step": 5442 + }, + { + "epoch": 3.6456798392498326, + "grad_norm": 3.2650072187481043, + "learning_rate": 1.185937708813334e-07, + "loss": 0.7508, + "step": 5443 + }, + { + "epoch": 3.6463496316141994, + "grad_norm": 2.755617105363316, + "learning_rate": 1.1814938720020069e-07, + "loss": 0.8443, + "step": 5444 + }, + { + "epoch": 3.6470194239785667, + "grad_norm": 2.906571910342573, + "learning_rate": 1.1770581750880305e-07, + "loss": 0.7094, + "step": 5445 + }, + { + "epoch": 3.6476892163429335, + "grad_norm": 2.5257144139262278, + "learning_rate": 1.1726306195872866e-07, + "loss": 0.847, + "step": 5446 + }, + { + "epoch": 3.648359008707301, + "grad_norm": 2.5778022171263566, + "learning_rate": 1.1682112070128871e-07, + "loss": 0.749, + "step": 5447 + }, + { + "epoch": 3.649028801071668, + "grad_norm": 2.4602694528281304, + "learning_rate": 1.1637999388751542e-07, + "loss": 0.8102, + "step": 5448 + }, + { + "epoch": 3.649698593436035, + "grad_norm": 2.3942731558550014, + "learning_rate": 1.1593968166816128e-07, + "loss": 0.6255, + "step": 5449 + }, + { + "epoch": 3.6503683858004017, + "grad_norm": 2.680122838508592, + "learning_rate": 1.1550018419370201e-07, + "loss": 0.8563, + "step": 5450 + }, + { + "epoch": 3.651038178164769, + "grad_norm": 2.4476543697814557, + "learning_rate": 1.1506150161433499e-07, + "loss": 0.7629, + "step": 5451 + }, + { + "epoch": 3.651707970529136, + "grad_norm": 2.5853813763357127, + "learning_rate": 1.1462363407997834e-07, + "loss": 0.6982, + "step": 5452 + }, + { + "epoch": 3.652377762893503, + "grad_norm": 2.8021251427280984, + "learning_rate": 1.1418658174027125e-07, + "loss": 0.7648, + "step": 5453 + }, + { + "epoch": 3.65304755525787, + "grad_norm": 2.889448917218554, + "learning_rate": 1.1375034474457563e-07, + "loss": 0.7911, + "step": 5454 + }, + { + "epoch": 3.653717347622237, + "grad_norm": 2.6797664115624293, + "learning_rate": 1.133149232419739e-07, + "loss": 0.763, + "step": 5455 + }, + { + "epoch": 3.6543871399866044, + "grad_norm": 2.6933875326094827, + "learning_rate": 1.1288031738127008e-07, + "loss": 0.8559, + "step": 5456 + }, + { + "epoch": 3.655056932350971, + "grad_norm": 2.664088117200915, + "learning_rate": 1.1244652731098871e-07, + "loss": 0.7568, + "step": 5457 + }, + { + "epoch": 3.655726724715338, + "grad_norm": 2.792839512817141, + "learning_rate": 1.1201355317937673e-07, + "loss": 0.7521, + "step": 5458 + }, + { + "epoch": 3.6563965170797053, + "grad_norm": 3.3823271727262907, + "learning_rate": 1.1158139513440163e-07, + "loss": 0.8837, + "step": 5459 + }, + { + "epoch": 3.6570663094440725, + "grad_norm": 2.297910871272363, + "learning_rate": 1.1115005332375195e-07, + "loss": 0.6676, + "step": 5460 + }, + { + "epoch": 3.6577361018084393, + "grad_norm": 2.760983153628738, + "learning_rate": 1.107195278948367e-07, + "loss": 0.7877, + "step": 5461 + }, + { + "epoch": 3.6584058941728066, + "grad_norm": 3.036784227422532, + "learning_rate": 1.1028981899478763e-07, + "loss": 0.8297, + "step": 5462 + }, + { + "epoch": 3.6590756865371734, + "grad_norm": 2.373570345519356, + "learning_rate": 1.098609267704559e-07, + "loss": 
0.7128, + "step": 5463 + }, + { + "epoch": 3.6597454789015407, + "grad_norm": 2.657852274415926, + "learning_rate": 1.0943285136841398e-07, + "loss": 0.7285, + "step": 5464 + }, + { + "epoch": 3.6604152712659075, + "grad_norm": 2.8484672740232244, + "learning_rate": 1.090055929349551e-07, + "loss": 0.7016, + "step": 5465 + }, + { + "epoch": 3.6610850636302747, + "grad_norm": 2.93566349091176, + "learning_rate": 1.0857915161609361e-07, + "loss": 0.836, + "step": 5466 + }, + { + "epoch": 3.6617548559946416, + "grad_norm": 3.185927426481852, + "learning_rate": 1.0815352755756458e-07, + "loss": 0.8425, + "step": 5467 + }, + { + "epoch": 3.662424648359009, + "grad_norm": 2.7572423421270287, + "learning_rate": 1.0772872090482389e-07, + "loss": 0.6427, + "step": 5468 + }, + { + "epoch": 3.6630944407233756, + "grad_norm": 3.0194437872805007, + "learning_rate": 1.0730473180304679e-07, + "loss": 0.9125, + "step": 5469 + }, + { + "epoch": 3.663764233087743, + "grad_norm": 2.856851863085186, + "learning_rate": 1.0688156039713127e-07, + "loss": 0.8576, + "step": 5470 + }, + { + "epoch": 3.6644340254521097, + "grad_norm": 2.80650937053764, + "learning_rate": 1.0645920683169442e-07, + "loss": 0.7853, + "step": 5471 + }, + { + "epoch": 3.665103817816477, + "grad_norm": 2.8844809855471905, + "learning_rate": 1.0603767125107412e-07, + "loss": 0.7031, + "step": 5472 + }, + { + "epoch": 3.665773610180844, + "grad_norm": 2.9350486245168486, + "learning_rate": 1.0561695379932874e-07, + "loss": 0.7612, + "step": 5473 + }, + { + "epoch": 3.666443402545211, + "grad_norm": 3.7030708319228705, + "learning_rate": 1.0519705462023744e-07, + "loss": 0.7073, + "step": 5474 + }, + { + "epoch": 3.667113194909578, + "grad_norm": 2.7583029794070963, + "learning_rate": 1.0477797385729905e-07, + "loss": 0.7097, + "step": 5475 + }, + { + "epoch": 3.667782987273945, + "grad_norm": 2.6729270924619715, + "learning_rate": 1.0435971165373316e-07, + "loss": 0.8016, + "step": 5476 + }, + { + "epoch": 3.6684527796383124, + "grad_norm": 2.6795594439305757, + "learning_rate": 1.039422681524796e-07, + "loss": 0.7241, + "step": 5477 + }, + { + "epoch": 3.669122572002679, + "grad_norm": 2.8094930127646993, + "learning_rate": 1.0352564349619842e-07, + "loss": 0.7857, + "step": 5478 + }, + { + "epoch": 3.669792364367046, + "grad_norm": 2.6427542658110736, + "learning_rate": 1.0310983782726936e-07, + "loss": 0.7171, + "step": 5479 + }, + { + "epoch": 3.6704621567314133, + "grad_norm": 3.2432380901045037, + "learning_rate": 1.0269485128779316e-07, + "loss": 0.8196, + "step": 5480 + }, + { + "epoch": 3.6711319490957806, + "grad_norm": 2.6878987633968405, + "learning_rate": 1.0228068401958973e-07, + "loss": 0.7857, + "step": 5481 + }, + { + "epoch": 3.6718017414601474, + "grad_norm": 2.5016494484536005, + "learning_rate": 1.0186733616419974e-07, + "loss": 0.7569, + "step": 5482 + }, + { + "epoch": 3.672471533824514, + "grad_norm": 3.079808771587136, + "learning_rate": 1.0145480786288326e-07, + "loss": 0.6401, + "step": 5483 + }, + { + "epoch": 3.6731413261888815, + "grad_norm": 2.7803795689705826, + "learning_rate": 1.0104309925662053e-07, + "loss": 0.783, + "step": 5484 + }, + { + "epoch": 3.6738111185532487, + "grad_norm": 2.655381342861268, + "learning_rate": 1.0063221048611155e-07, + "loss": 0.6789, + "step": 5485 + }, + { + "epoch": 3.6744809109176155, + "grad_norm": 2.7994811163661155, + "learning_rate": 1.0022214169177647e-07, + "loss": 0.8132, + "step": 5486 + }, + { + "epoch": 3.6751507032819823, + "grad_norm": 2.5440591380451383, + 
"learning_rate": 9.981289301375458e-08, + "loss": 0.687, + "step": 5487 + }, + { + "epoch": 3.6758204956463496, + "grad_norm": 2.8707926926753284, + "learning_rate": 9.940446459190567e-08, + "loss": 0.7843, + "step": 5488 + }, + { + "epoch": 3.676490288010717, + "grad_norm": 2.906260867982351, + "learning_rate": 9.899685656580866e-08, + "loss": 0.8301, + "step": 5489 + }, + { + "epoch": 3.6771600803750837, + "grad_norm": 2.5936627512266903, + "learning_rate": 9.859006907476237e-08, + "loss": 0.6598, + "step": 5490 + }, + { + "epoch": 3.6778298727394505, + "grad_norm": 2.695963643135141, + "learning_rate": 9.818410225778507e-08, + "loss": 0.8248, + "step": 5491 + }, + { + "epoch": 3.6784996651038178, + "grad_norm": 2.553778577244167, + "learning_rate": 9.777895625361439e-08, + "loss": 0.7513, + "step": 5492 + }, + { + "epoch": 3.679169457468185, + "grad_norm": 2.477790392635121, + "learning_rate": 9.737463120070817e-08, + "loss": 0.6756, + "step": 5493 + }, + { + "epoch": 3.679839249832552, + "grad_norm": 2.4640663579064817, + "learning_rate": 9.697112723724256e-08, + "loss": 0.7065, + "step": 5494 + }, + { + "epoch": 3.680509042196919, + "grad_norm": 2.965804951990758, + "learning_rate": 9.656844450111419e-08, + "loss": 0.8429, + "step": 5495 + }, + { + "epoch": 3.681178834561286, + "grad_norm": 3.0039005425405754, + "learning_rate": 9.616658312993855e-08, + "loss": 0.8463, + "step": 5496 + }, + { + "epoch": 3.681848626925653, + "grad_norm": 3.491312368288686, + "learning_rate": 9.57655432610502e-08, + "loss": 0.8256, + "step": 5497 + }, + { + "epoch": 3.68251841929002, + "grad_norm": 2.468252196041051, + "learning_rate": 9.536532503150341e-08, + "loss": 0.6079, + "step": 5498 + }, + { + "epoch": 3.6831882116543873, + "grad_norm": 2.5345735291060723, + "learning_rate": 9.49659285780713e-08, + "loss": 0.8128, + "step": 5499 + }, + { + "epoch": 3.683858004018754, + "grad_norm": 3.1998805809647863, + "learning_rate": 9.45673540372466e-08, + "loss": 0.8536, + "step": 5500 + }, + { + "epoch": 3.6845277963831213, + "grad_norm": 3.297545353127516, + "learning_rate": 9.416960154524062e-08, + "loss": 0.7248, + "step": 5501 + }, + { + "epoch": 3.685197588747488, + "grad_norm": 2.726961050498722, + "learning_rate": 9.377267123798411e-08, + "loss": 0.7026, + "step": 5502 + }, + { + "epoch": 3.6858673811118554, + "grad_norm": 2.3314459826064775, + "learning_rate": 9.33765632511266e-08, + "loss": 0.6976, + "step": 5503 + }, + { + "epoch": 3.6865371734762222, + "grad_norm": 2.961906015622091, + "learning_rate": 9.298127772003701e-08, + "loss": 0.842, + "step": 5504 + }, + { + "epoch": 3.6872069658405895, + "grad_norm": 3.126676373479862, + "learning_rate": 9.258681477980231e-08, + "loss": 0.8766, + "step": 5505 + }, + { + "epoch": 3.6878767582049563, + "grad_norm": 2.555196898161316, + "learning_rate": 9.219317456522963e-08, + "loss": 0.6283, + "step": 5506 + }, + { + "epoch": 3.6885465505693236, + "grad_norm": 2.903988417033912, + "learning_rate": 9.180035721084363e-08, + "loss": 0.9197, + "step": 5507 + }, + { + "epoch": 3.6892163429336904, + "grad_norm": 2.448636036512766, + "learning_rate": 9.140836285088884e-08, + "loss": 0.7152, + "step": 5508 + }, + { + "epoch": 3.6898861352980576, + "grad_norm": 2.5579122557351726, + "learning_rate": 9.101719161932753e-08, + "loss": 0.7257, + "step": 5509 + }, + { + "epoch": 3.690555927662425, + "grad_norm": 2.3029237872203794, + "learning_rate": 9.062684364984226e-08, + "loss": 0.6815, + "step": 5510 + }, + { + "epoch": 3.6912257200267917, + "grad_norm": 
3.186284227457906, + "learning_rate": 9.023731907583182e-08, + "loss": 0.8845, + "step": 5511 + }, + { + "epoch": 3.6918955123911585, + "grad_norm": 2.7289107279314075, + "learning_rate": 8.98486180304156e-08, + "loss": 0.6827, + "step": 5512 + }, + { + "epoch": 3.692565304755526, + "grad_norm": 2.7225529207707835, + "learning_rate": 8.946074064643124e-08, + "loss": 0.7078, + "step": 5513 + }, + { + "epoch": 3.693235097119893, + "grad_norm": 2.588922301439071, + "learning_rate": 8.907368705643432e-08, + "loss": 0.7861, + "step": 5514 + }, + { + "epoch": 3.69390488948426, + "grad_norm": 2.4546708657958956, + "learning_rate": 8.868745739269907e-08, + "loss": 0.7374, + "step": 5515 + }, + { + "epoch": 3.6945746818486267, + "grad_norm": 2.48511412619448, + "learning_rate": 8.830205178721768e-08, + "loss": 0.6585, + "step": 5516 + }, + { + "epoch": 3.695244474212994, + "grad_norm": 3.6143011853903944, + "learning_rate": 8.791747037170228e-08, + "loss": 0.6759, + "step": 5517 + }, + { + "epoch": 3.695914266577361, + "grad_norm": 2.523829260471016, + "learning_rate": 8.753371327758248e-08, + "loss": 0.7244, + "step": 5518 + }, + { + "epoch": 3.696584058941728, + "grad_norm": 3.2164270998919293, + "learning_rate": 8.715078063600474e-08, + "loss": 0.7602, + "step": 5519 + }, + { + "epoch": 3.697253851306095, + "grad_norm": 2.973891412328284, + "learning_rate": 8.676867257783578e-08, + "loss": 0.855, + "step": 5520 + }, + { + "epoch": 3.697923643670462, + "grad_norm": 3.0418397427459634, + "learning_rate": 8.638738923365974e-08, + "loss": 0.7866, + "step": 5521 + }, + { + "epoch": 3.6985934360348294, + "grad_norm": 2.803285021810978, + "learning_rate": 8.600693073377963e-08, + "loss": 0.7342, + "step": 5522 + }, + { + "epoch": 3.699263228399196, + "grad_norm": 2.541216874186934, + "learning_rate": 8.562729720821478e-08, + "loss": 0.6674, + "step": 5523 + }, + { + "epoch": 3.6999330207635635, + "grad_norm": 2.4918904352607805, + "learning_rate": 8.524848878670389e-08, + "loss": 0.7135, + "step": 5524 + }, + { + "epoch": 3.7006028131279303, + "grad_norm": 2.811267546746281, + "learning_rate": 8.487050559870425e-08, + "loss": 0.6828, + "step": 5525 + }, + { + "epoch": 3.7012726054922975, + "grad_norm": 2.6961404565089286, + "learning_rate": 8.449334777339003e-08, + "loss": 0.7875, + "step": 5526 + }, + { + "epoch": 3.7019423978566643, + "grad_norm": 3.119928461026308, + "learning_rate": 8.411701543965367e-08, + "loss": 0.8151, + "step": 5527 + }, + { + "epoch": 3.7026121902210316, + "grad_norm": 2.597852785457201, + "learning_rate": 8.374150872610482e-08, + "loss": 0.5435, + "step": 5528 + }, + { + "epoch": 3.7032819825853984, + "grad_norm": 3.4647790571874992, + "learning_rate": 8.336682776107302e-08, + "loss": 0.8022, + "step": 5529 + }, + { + "epoch": 3.7039517749497657, + "grad_norm": 2.8043498246815517, + "learning_rate": 8.299297267260365e-08, + "loss": 0.947, + "step": 5530 + }, + { + "epoch": 3.7046215673141325, + "grad_norm": 2.4697337557289987, + "learning_rate": 8.261994358846031e-08, + "loss": 0.7713, + "step": 5531 + }, + { + "epoch": 3.7052913596784998, + "grad_norm": 2.5837146094433927, + "learning_rate": 8.22477406361244e-08, + "loss": 0.7902, + "step": 5532 + }, + { + "epoch": 3.7059611520428666, + "grad_norm": 2.3804759997214058, + "learning_rate": 8.187636394279524e-08, + "loss": 0.712, + "step": 5533 + }, + { + "epoch": 3.706630944407234, + "grad_norm": 2.892313499009909, + "learning_rate": 8.150581363539022e-08, + "loss": 0.706, + "step": 5534 + }, + { + "epoch": 
3.7073007367716007, + "grad_norm": 2.4365682134658453, + "learning_rate": 8.113608984054278e-08, + "loss": 0.7409, + "step": 5535 + }, + { + "epoch": 3.707970529135968, + "grad_norm": 2.5041767159126103, + "learning_rate": 8.076719268460487e-08, + "loss": 0.7253, + "step": 5536 + }, + { + "epoch": 3.7086403215003347, + "grad_norm": 2.736389699069072, + "learning_rate": 8.039912229364677e-08, + "loss": 0.7476, + "step": 5537 + }, + { + "epoch": 3.709310113864702, + "grad_norm": 2.89494979238859, + "learning_rate": 8.003187879345481e-08, + "loss": 0.7571, + "step": 5538 + }, + { + "epoch": 3.7099799062290693, + "grad_norm": 3.142758659311641, + "learning_rate": 7.966546230953276e-08, + "loss": 0.7955, + "step": 5539 + }, + { + "epoch": 3.710649698593436, + "grad_norm": 2.903140918886843, + "learning_rate": 7.929987296710295e-08, + "loss": 0.8921, + "step": 5540 + }, + { + "epoch": 3.711319490957803, + "grad_norm": 3.05032954701525, + "learning_rate": 7.893511089110434e-08, + "loss": 0.8768, + "step": 5541 + }, + { + "epoch": 3.71198928332217, + "grad_norm": 2.5008814577071092, + "learning_rate": 7.857117620619331e-08, + "loss": 0.6758, + "step": 5542 + }, + { + "epoch": 3.7126590756865374, + "grad_norm": 2.663495080287163, + "learning_rate": 7.820806903674232e-08, + "loss": 0.7746, + "step": 5543 + }, + { + "epoch": 3.7133288680509042, + "grad_norm": 2.5306218692874194, + "learning_rate": 7.784578950684324e-08, + "loss": 0.716, + "step": 5544 + }, + { + "epoch": 3.713998660415271, + "grad_norm": 2.6808269183068005, + "learning_rate": 7.74843377403034e-08, + "loss": 0.8359, + "step": 5545 + }, + { + "epoch": 3.7146684527796383, + "grad_norm": 2.5611453845330017, + "learning_rate": 7.712371386064788e-08, + "loss": 0.9439, + "step": 5546 + }, + { + "epoch": 3.7153382451440056, + "grad_norm": 2.7234640207636858, + "learning_rate": 7.676391799111893e-08, + "loss": 0.8585, + "step": 5547 + }, + { + "epoch": 3.7160080375083724, + "grad_norm": 2.8205271779113685, + "learning_rate": 7.640495025467515e-08, + "loss": 0.7578, + "step": 5548 + }, + { + "epoch": 3.716677829872739, + "grad_norm": 2.9093466878882355, + "learning_rate": 7.604681077399312e-08, + "loss": 0.8181, + "step": 5549 + }, + { + "epoch": 3.7173476222371065, + "grad_norm": 2.9008124197636342, + "learning_rate": 7.56894996714655e-08, + "loss": 0.863, + "step": 5550 + }, + { + "epoch": 3.7180174146014737, + "grad_norm": 3.6030235414383056, + "learning_rate": 7.53330170692021e-08, + "loss": 0.8456, + "step": 5551 + }, + { + "epoch": 3.7186872069658405, + "grad_norm": 2.3404755448744954, + "learning_rate": 7.49773630890302e-08, + "loss": 0.719, + "step": 5552 + }, + { + "epoch": 3.7193569993302074, + "grad_norm": 3.401562503308308, + "learning_rate": 7.462253785249285e-08, + "loss": 0.7983, + "step": 5553 + }, + { + "epoch": 3.7200267916945746, + "grad_norm": 2.8288099384290395, + "learning_rate": 7.426854148085055e-08, + "loss": 0.7485, + "step": 5554 + }, + { + "epoch": 3.720696584058942, + "grad_norm": 3.1094508446306066, + "learning_rate": 7.391537409508071e-08, + "loss": 0.7207, + "step": 5555 + }, + { + "epoch": 3.7213663764233087, + "grad_norm": 3.4306114021108063, + "learning_rate": 7.356303581587704e-08, + "loss": 0.7126, + "step": 5556 + }, + { + "epoch": 3.722036168787676, + "grad_norm": 2.9988156980167027, + "learning_rate": 7.32115267636499e-08, + "loss": 0.7607, + "step": 5557 + }, + { + "epoch": 3.7227059611520428, + "grad_norm": 2.899504193682371, + "learning_rate": 7.286084705852625e-08, + "loss": 0.8352, + "step": 5558 + 
}, + { + "epoch": 3.72337575351641, + "grad_norm": 2.6001020203931273, + "learning_rate": 7.251099682034996e-08, + "loss": 0.7556, + "step": 5559 + }, + { + "epoch": 3.724045545880777, + "grad_norm": 2.909470326815072, + "learning_rate": 7.21619761686812e-08, + "loss": 0.7017, + "step": 5560 + }, + { + "epoch": 3.724715338245144, + "grad_norm": 2.479694006442323, + "learning_rate": 7.181378522279653e-08, + "loss": 0.8619, + "step": 5561 + }, + { + "epoch": 3.725385130609511, + "grad_norm": 2.6349637512195025, + "learning_rate": 7.146642410168907e-08, + "loss": 0.6473, + "step": 5562 + }, + { + "epoch": 3.726054922973878, + "grad_norm": 3.6155381252802528, + "learning_rate": 7.111989292406862e-08, + "loss": 0.6913, + "step": 5563 + }, + { + "epoch": 3.726724715338245, + "grad_norm": 2.2541428282715676, + "learning_rate": 7.07741918083607e-08, + "loss": 0.7372, + "step": 5564 + }, + { + "epoch": 3.7273945077026123, + "grad_norm": 3.0361922764276486, + "learning_rate": 7.042932087270776e-08, + "loss": 0.8016, + "step": 5565 + }, + { + "epoch": 3.728064300066979, + "grad_norm": 2.8242991396619623, + "learning_rate": 7.00852802349683e-08, + "loss": 0.6565, + "step": 5566 + }, + { + "epoch": 3.7287340924313463, + "grad_norm": 2.4623078827187244, + "learning_rate": 6.974207001271716e-08, + "loss": 0.6524, + "step": 5567 + }, + { + "epoch": 3.729403884795713, + "grad_norm": 2.524529367991435, + "learning_rate": 6.939969032324523e-08, + "loss": 0.7389, + "step": 5568 + }, + { + "epoch": 3.7300736771600804, + "grad_norm": 2.5753244116761187, + "learning_rate": 6.905814128355975e-08, + "loss": 0.8596, + "step": 5569 + }, + { + "epoch": 3.7307434695244472, + "grad_norm": 3.2484443857449063, + "learning_rate": 6.871742301038403e-08, + "loss": 0.8598, + "step": 5570 + }, + { + "epoch": 3.7314132618888145, + "grad_norm": 2.5818464837085795, + "learning_rate": 6.837753562015714e-08, + "loss": 0.6666, + "step": 5571 + }, + { + "epoch": 3.7320830542531818, + "grad_norm": 2.748781717304662, + "learning_rate": 6.803847922903478e-08, + "loss": 0.6539, + "step": 5572 + }, + { + "epoch": 3.7327528466175486, + "grad_norm": 2.816572303046056, + "learning_rate": 6.770025395288842e-08, + "loss": 0.7074, + "step": 5573 + }, + { + "epoch": 3.7334226389819154, + "grad_norm": 2.7672069890376103, + "learning_rate": 6.736285990730507e-08, + "loss": 0.7606, + "step": 5574 + }, + { + "epoch": 3.7340924313462827, + "grad_norm": 2.5836204090117945, + "learning_rate": 6.702629720758857e-08, + "loss": 0.7127, + "step": 5575 + }, + { + "epoch": 3.73476222371065, + "grad_norm": 2.9740265654574167, + "learning_rate": 6.669056596875778e-08, + "loss": 0.8188, + "step": 5576 + }, + { + "epoch": 3.7354320160750167, + "grad_norm": 2.7697657525131674, + "learning_rate": 6.635566630554785e-08, + "loss": 0.866, + "step": 5577 + }, + { + "epoch": 3.7361018084393836, + "grad_norm": 3.0570152882527473, + "learning_rate": 6.602159833240973e-08, + "loss": 0.7687, + "step": 5578 + }, + { + "epoch": 3.736771600803751, + "grad_norm": 2.6166028173917386, + "learning_rate": 6.56883621635096e-08, + "loss": 0.7724, + "step": 5579 + }, + { + "epoch": 3.737441393168118, + "grad_norm": 2.7227043785851244, + "learning_rate": 6.535595791273108e-08, + "loss": 0.8089, + "step": 5580 + }, + { + "epoch": 3.738111185532485, + "grad_norm": 2.592861330487835, + "learning_rate": 6.502438569367081e-08, + "loss": 0.7263, + "step": 5581 + }, + { + "epoch": 3.7387809778968517, + "grad_norm": 2.44554244327109, + "learning_rate": 6.469364561964315e-08, + "loss": 
0.7153, + "step": 5582 + }, + { + "epoch": 3.739450770261219, + "grad_norm": 2.754473096739496, + "learning_rate": 6.436373780367739e-08, + "loss": 0.7912, + "step": 5583 + }, + { + "epoch": 3.7401205626255862, + "grad_norm": 3.1840681428208755, + "learning_rate": 6.40346623585189e-08, + "loss": 0.8212, + "step": 5584 + }, + { + "epoch": 3.740790354989953, + "grad_norm": 2.890551049790987, + "learning_rate": 6.370641939662748e-08, + "loss": 0.6026, + "step": 5585 + }, + { + "epoch": 3.7414601473543203, + "grad_norm": 2.838801833988054, + "learning_rate": 6.337900903017946e-08, + "loss": 0.8552, + "step": 5586 + }, + { + "epoch": 3.742129939718687, + "grad_norm": 2.21596130516217, + "learning_rate": 6.305243137106564e-08, + "loss": 0.5362, + "step": 5587 + }, + { + "epoch": 3.7427997320830544, + "grad_norm": 2.4583338132207784, + "learning_rate": 6.272668653089426e-08, + "loss": 0.7499, + "step": 5588 + }, + { + "epoch": 3.743469524447421, + "grad_norm": 2.5804666626658572, + "learning_rate": 6.240177462098624e-08, + "loss": 0.6741, + "step": 5589 + }, + { + "epoch": 3.7441393168117885, + "grad_norm": 2.49020531578345, + "learning_rate": 6.20776957523797e-08, + "loss": 0.711, + "step": 5590 + }, + { + "epoch": 3.7448091091761553, + "grad_norm": 3.060052542828894, + "learning_rate": 6.17544500358272e-08, + "loss": 0.6598, + "step": 5591 + }, + { + "epoch": 3.7454789015405225, + "grad_norm": 2.404893797963795, + "learning_rate": 6.143203758179783e-08, + "loss": 0.7738, + "step": 5592 + }, + { + "epoch": 3.7461486939048894, + "grad_norm": 3.3865968672141893, + "learning_rate": 6.111045850047404e-08, + "loss": 0.8204, + "step": 5593 + }, + { + "epoch": 3.7468184862692566, + "grad_norm": 3.3848951104762923, + "learning_rate": 6.078971290175484e-08, + "loss": 0.7902, + "step": 5594 + }, + { + "epoch": 3.7474882786336234, + "grad_norm": 2.547333934205356, + "learning_rate": 6.046980089525367e-08, + "loss": 0.8784, + "step": 5595 + }, + { + "epoch": 3.7481580709979907, + "grad_norm": 2.532968274588994, + "learning_rate": 6.015072259030003e-08, + "loss": 0.7592, + "step": 5596 + }, + { + "epoch": 3.7488278633623575, + "grad_norm": 3.9382459372899565, + "learning_rate": 5.983247809593751e-08, + "loss": 0.7571, + "step": 5597 + }, + { + "epoch": 3.7494976557267248, + "grad_norm": 3.1889400944031636, + "learning_rate": 5.951506752092523e-08, + "loss": 0.6981, + "step": 5598 + }, + { + "epoch": 3.7501674480910916, + "grad_norm": 2.546366632669778, + "learning_rate": 5.91984909737367e-08, + "loss": 0.7454, + "step": 5599 + }, + { + "epoch": 3.750837240455459, + "grad_norm": 2.75074777646698, + "learning_rate": 5.8882748562562045e-08, + "loss": 0.796, + "step": 5600 + }, + { + "epoch": 3.751507032819826, + "grad_norm": 2.4749150396278843, + "learning_rate": 5.856784039530439e-08, + "loss": 0.6893, + "step": 5601 + }, + { + "epoch": 3.752176825184193, + "grad_norm": 2.6942707326877042, + "learning_rate": 5.825376657958237e-08, + "loss": 0.813, + "step": 5602 + }, + { + "epoch": 3.7528466175485597, + "grad_norm": 2.8739018166973227, + "learning_rate": 5.794052722273041e-08, + "loss": 0.7966, + "step": 5603 + }, + { + "epoch": 3.753516409912927, + "grad_norm": 2.8847186775654348, + "learning_rate": 5.762812243179705e-08, + "loss": 0.8458, + "step": 5604 + }, + { + "epoch": 3.7541862022772943, + "grad_norm": 2.6416740478429546, + "learning_rate": 5.731655231354521e-08, + "loss": 0.7525, + "step": 5605 + }, + { + "epoch": 3.754855994641661, + "grad_norm": 2.757399027154436, + "learning_rate": 
5.700581697445251e-08, + "loss": 0.8051, + "step": 5606 + }, + { + "epoch": 3.755525787006028, + "grad_norm": 2.4977016435451778, + "learning_rate": 5.6695916520712604e-08, + "loss": 0.8006, + "step": 5607 + }, + { + "epoch": 3.756195579370395, + "grad_norm": 2.677863100450459, + "learning_rate": 5.6386851058232995e-08, + "loss": 0.7363, + "step": 5608 + }, + { + "epoch": 3.7568653717347624, + "grad_norm": 2.5228394524744098, + "learning_rate": 5.607862069263531e-08, + "loss": 0.7733, + "step": 5609 + }, + { + "epoch": 3.7575351640991292, + "grad_norm": 2.7066708018255135, + "learning_rate": 5.5771225529255844e-08, + "loss": 0.807, + "step": 5610 + }, + { + "epoch": 3.758204956463496, + "grad_norm": 2.6359818635788996, + "learning_rate": 5.546466567314695e-08, + "loss": 0.8311, + "step": 5611 + }, + { + "epoch": 3.7588747488278633, + "grad_norm": 2.5565696218931397, + "learning_rate": 5.515894122907428e-08, + "loss": 0.6663, + "step": 5612 + }, + { + "epoch": 3.7595445411922306, + "grad_norm": 3.0792140361055775, + "learning_rate": 5.4854052301517576e-08, + "loss": 0.7276, + "step": 5613 + }, + { + "epoch": 3.7602143335565974, + "grad_norm": 2.415871991048877, + "learning_rate": 5.454999899467184e-08, + "loss": 0.5953, + "step": 5614 + }, + { + "epoch": 3.7608841259209647, + "grad_norm": 2.3241259902092963, + "learning_rate": 5.4246781412446715e-08, + "loss": 0.8702, + "step": 5615 + }, + { + "epoch": 3.7615539182853315, + "grad_norm": 2.752419459786335, + "learning_rate": 5.394439965846543e-08, + "loss": 0.727, + "step": 5616 + }, + { + "epoch": 3.7622237106496987, + "grad_norm": 2.3842437613323315, + "learning_rate": 5.3642853836066434e-08, + "loss": 0.5885, + "step": 5617 + }, + { + "epoch": 3.7628935030140656, + "grad_norm": 3.0801119716823266, + "learning_rate": 5.3342144048300904e-08, + "loss": 0.6639, + "step": 5618 + }, + { + "epoch": 3.763563295378433, + "grad_norm": 2.580848537052402, + "learning_rate": 5.304227039793691e-08, + "loss": 0.7454, + "step": 5619 + }, + { + "epoch": 3.7642330877427996, + "grad_norm": 2.655895628075263, + "learning_rate": 5.2743232987454154e-08, + "loss": 0.8378, + "step": 5620 + }, + { + "epoch": 3.764902880107167, + "grad_norm": 2.6797090637788203, + "learning_rate": 5.244503191904865e-08, + "loss": 0.8544, + "step": 5621 + }, + { + "epoch": 3.7655726724715337, + "grad_norm": 2.5692639165262072, + "learning_rate": 5.214766729462861e-08, + "loss": 0.7518, + "step": 5622 + }, + { + "epoch": 3.766242464835901, + "grad_norm": 2.6830125778728684, + "learning_rate": 5.1851139215818304e-08, + "loss": 0.7714, + "step": 5623 + }, + { + "epoch": 3.766912257200268, + "grad_norm": 2.525903423088709, + "learning_rate": 5.1555447783954994e-08, + "loss": 0.7531, + "step": 5624 + }, + { + "epoch": 3.767582049564635, + "grad_norm": 2.92095714755828, + "learning_rate": 5.126059310009007e-08, + "loss": 0.857, + "step": 5625 + }, + { + "epoch": 3.768251841929002, + "grad_norm": 2.644200360156803, + "learning_rate": 5.0966575264989046e-08, + "loss": 0.808, + "step": 5626 + }, + { + "epoch": 3.768921634293369, + "grad_norm": 2.992352173185143, + "learning_rate": 5.06733943791321e-08, + "loss": 0.7687, + "step": 5627 + }, + { + "epoch": 3.769591426657736, + "grad_norm": 2.554665178160293, + "learning_rate": 5.0381050542712415e-08, + "loss": 0.6699, + "step": 5628 + }, + { + "epoch": 3.770261219022103, + "grad_norm": 2.7982904501765464, + "learning_rate": 5.008954385563786e-08, + "loss": 0.9341, + "step": 5629 + }, + { + "epoch": 3.7709310113864705, + "grad_norm": 
2.686877574321224, + "learning_rate": 4.9798874417529844e-08, + "loss": 0.7727, + "step": 5630 + }, + { + "epoch": 3.7716008037508373, + "grad_norm": 2.6060768193814416, + "learning_rate": 4.950904232772336e-08, + "loss": 0.7612, + "step": 5631 + }, + { + "epoch": 3.772270596115204, + "grad_norm": 2.6270744320797936, + "learning_rate": 4.922004768526778e-08, + "loss": 0.7682, + "step": 5632 + }, + { + "epoch": 3.7729403884795714, + "grad_norm": 4.45614656898411, + "learning_rate": 4.8931890588926334e-08, + "loss": 0.765, + "step": 5633 + }, + { + "epoch": 3.7736101808439386, + "grad_norm": 3.2799443029981856, + "learning_rate": 4.864457113717525e-08, + "loss": 0.7957, + "step": 5634 + }, + { + "epoch": 3.7742799732083054, + "grad_norm": 3.1249897808620872, + "learning_rate": 4.835808942820569e-08, + "loss": 0.8276, + "step": 5635 + }, + { + "epoch": 3.7749497655726723, + "grad_norm": 3.3554039036668706, + "learning_rate": 4.80724455599213e-08, + "loss": 0.8197, + "step": 5636 + }, + { + "epoch": 3.7756195579370395, + "grad_norm": 2.54689604989742, + "learning_rate": 4.7787639629940095e-08, + "loss": 0.7689, + "step": 5637 + }, + { + "epoch": 3.7762893503014068, + "grad_norm": 2.9316340235189564, + "learning_rate": 4.750367173559395e-08, + "loss": 0.7108, + "step": 5638 + }, + { + "epoch": 3.7769591426657736, + "grad_norm": 2.6309227216134694, + "learning_rate": 4.722054197392745e-08, + "loss": 0.6199, + "step": 5639 + }, + { + "epoch": 3.7776289350301404, + "grad_norm": 2.333099428471905, + "learning_rate": 4.6938250441699584e-08, + "loss": 0.7577, + "step": 5640 + }, + { + "epoch": 3.7782987273945077, + "grad_norm": 2.532961514840087, + "learning_rate": 4.665679723538235e-08, + "loss": 0.8399, + "step": 5641 + }, + { + "epoch": 3.778968519758875, + "grad_norm": 2.547265044695469, + "learning_rate": 4.63761824511616e-08, + "loss": 0.6266, + "step": 5642 + }, + { + "epoch": 3.7796383121232418, + "grad_norm": 2.5825938008770937, + "learning_rate": 4.609640618493644e-08, + "loss": 0.7293, + "step": 5643 + }, + { + "epoch": 3.7803081044876086, + "grad_norm": 2.692783183956541, + "learning_rate": 4.581746853231983e-08, + "loss": 0.7968, + "step": 5644 + }, + { + "epoch": 3.780977896851976, + "grad_norm": 2.7397946395775663, + "learning_rate": 4.553936958863747e-08, + "loss": 0.7506, + "step": 5645 + }, + { + "epoch": 3.781647689216343, + "grad_norm": 2.565443756025757, + "learning_rate": 4.526210944892862e-08, + "loss": 0.6949, + "step": 5646 + }, + { + "epoch": 3.78231748158071, + "grad_norm": 2.0999041113760653, + "learning_rate": 4.498568820794663e-08, + "loss": 0.6608, + "step": 5647 + }, + { + "epoch": 3.782987273945077, + "grad_norm": 2.6003365344808316, + "learning_rate": 4.471010596015707e-08, + "loss": 0.8544, + "step": 5648 + }, + { + "epoch": 3.783657066309444, + "grad_norm": 2.8684480229367946, + "learning_rate": 4.443536279973931e-08, + "loss": 0.7634, + "step": 5649 + }, + { + "epoch": 3.7843268586738112, + "grad_norm": 2.7383534728895844, + "learning_rate": 4.416145882058603e-08, + "loss": 0.8771, + "step": 5650 + }, + { + "epoch": 3.784996651038178, + "grad_norm": 2.8284151809246203, + "learning_rate": 4.388839411630319e-08, + "loss": 0.7773, + "step": 5651 + }, + { + "epoch": 3.7856664434025453, + "grad_norm": 2.5575009617386453, + "learning_rate": 4.361616878020974e-08, + "loss": 0.6468, + "step": 5652 + }, + { + "epoch": 3.786336235766912, + "grad_norm": 2.61028416464375, + "learning_rate": 4.3344782905337393e-08, + "loss": 0.7093, + "step": 5653 + }, + { + "epoch": 
3.7870060281312794, + "grad_norm": 2.6594097574242745, + "learning_rate": 4.3074236584431937e-08, + "loss": 0.7859, + "step": 5654 + }, + { + "epoch": 3.787675820495646, + "grad_norm": 3.1669181071595958, + "learning_rate": 4.280452990995137e-08, + "loss": 0.7137, + "step": 5655 + }, + { + "epoch": 3.7883456128600135, + "grad_norm": 2.6855015363923984, + "learning_rate": 4.253566297406725e-08, + "loss": 0.693, + "step": 5656 + }, + { + "epoch": 3.7890154052243803, + "grad_norm": 2.475379531863455, + "learning_rate": 4.226763586866384e-08, + "loss": 0.8334, + "step": 5657 + }, + { + "epoch": 3.7896851975887476, + "grad_norm": 3.0316561443537937, + "learning_rate": 4.2000448685338423e-08, + "loss": 0.7555, + "step": 5658 + }, + { + "epoch": 3.7903549899531144, + "grad_norm": 2.630928869299846, + "learning_rate": 4.173410151540186e-08, + "loss": 0.8835, + "step": 5659 + }, + { + "epoch": 3.7910247823174816, + "grad_norm": 3.140075843742966, + "learning_rate": 4.146859444987689e-08, + "loss": 0.7259, + "step": 5660 + }, + { + "epoch": 3.7916945746818485, + "grad_norm": 2.9543582144180114, + "learning_rate": 4.120392757949954e-08, + "loss": 0.8617, + "step": 5661 + }, + { + "epoch": 3.7923643670462157, + "grad_norm": 2.6913775210628295, + "learning_rate": 4.094010099471968e-08, + "loss": 0.8102, + "step": 5662 + }, + { + "epoch": 3.793034159410583, + "grad_norm": 2.5418857148983256, + "learning_rate": 4.06771147856988e-08, + "loss": 0.6475, + "step": 5663 + }, + { + "epoch": 3.79370395177495, + "grad_norm": 2.9860500539551373, + "learning_rate": 4.04149690423114e-08, + "loss": 0.7873, + "step": 5664 + }, + { + "epoch": 3.7943737441393166, + "grad_norm": 3.08422894747359, + "learning_rate": 4.015366385414471e-08, + "loss": 0.8063, + "step": 5665 + }, + { + "epoch": 3.795043536503684, + "grad_norm": 2.757186851768845, + "learning_rate": 3.9893199310499785e-08, + "loss": 0.8136, + "step": 5666 + }, + { + "epoch": 3.795713328868051, + "grad_norm": 3.462899026194322, + "learning_rate": 3.9633575500388766e-08, + "loss": 0.8379, + "step": 5667 + }, + { + "epoch": 3.796383121232418, + "grad_norm": 2.569109426273239, + "learning_rate": 3.937479251253734e-08, + "loss": 0.8343, + "step": 5668 + }, + { + "epoch": 3.7970529135967848, + "grad_norm": 2.9686306158253344, + "learning_rate": 3.911685043538393e-08, + "loss": 0.794, + "step": 5669 + }, + { + "epoch": 3.797722705961152, + "grad_norm": 3.1338567956667966, + "learning_rate": 3.885974935707998e-08, + "loss": 0.7221, + "step": 5670 + }, + { + "epoch": 3.7983924983255193, + "grad_norm": 2.6979532971020648, + "learning_rate": 3.8603489365487966e-08, + "loss": 0.6821, + "step": 5671 + }, + { + "epoch": 3.799062290689886, + "grad_norm": 2.748373345853412, + "learning_rate": 3.8348070548184235e-08, + "loss": 0.7348, + "step": 5672 + }, + { + "epoch": 3.799732083054253, + "grad_norm": 2.6854612329960155, + "learning_rate": 3.8093492992457295e-08, + "loss": 0.7057, + "step": 5673 + }, + { + "epoch": 3.80040187541862, + "grad_norm": 3.522538448791418, + "learning_rate": 3.783975678530866e-08, + "loss": 0.6702, + "step": 5674 + }, + { + "epoch": 3.8010716677829874, + "grad_norm": 3.0936187978647958, + "learning_rate": 3.7586862013451466e-08, + "loss": 0.7362, + "step": 5675 + }, + { + "epoch": 3.8017414601473543, + "grad_norm": 2.485082926761087, + "learning_rate": 3.733480876331186e-08, + "loss": 0.754, + "step": 5676 + }, + { + "epoch": 3.8024112525117215, + "grad_norm": 2.807534432538874, + "learning_rate": 3.7083597121027605e-08, + "loss": 0.7323, + "step": 
5677 + }, + { + "epoch": 3.8030810448760883, + "grad_norm": 2.681408028217067, + "learning_rate": 3.683322717245058e-08, + "loss": 0.7501, + "step": 5678 + }, + { + "epoch": 3.8037508372404556, + "grad_norm": 2.9223407493246625, + "learning_rate": 3.65836990031429e-08, + "loss": 0.7247, + "step": 5679 + }, + { + "epoch": 3.8044206296048224, + "grad_norm": 3.1842840361587705, + "learning_rate": 3.633501269838052e-08, + "loss": 0.7041, + "step": 5680 + }, + { + "epoch": 3.8050904219691897, + "grad_norm": 3.057785415624386, + "learning_rate": 3.608716834315101e-08, + "loss": 0.8177, + "step": 5681 + }, + { + "epoch": 3.8057602143335565, + "grad_norm": 2.6014897493274063, + "learning_rate": 3.584016602215468e-08, + "loss": 0.8223, + "step": 5682 + }, + { + "epoch": 3.8064300066979238, + "grad_norm": 3.0432060395724565, + "learning_rate": 3.559400581980316e-08, + "loss": 0.9159, + "step": 5683 + }, + { + "epoch": 3.8070997990622906, + "grad_norm": 2.1981703298702246, + "learning_rate": 3.534868782022138e-08, + "loss": 0.6226, + "step": 5684 + }, + { + "epoch": 3.807769591426658, + "grad_norm": 2.5697830121743404, + "learning_rate": 3.510421210724563e-08, + "loss": 0.8082, + "step": 5685 + }, + { + "epoch": 3.8084393837910246, + "grad_norm": 2.663103222094587, + "learning_rate": 3.4860578764425445e-08, + "loss": 0.7517, + "step": 5686 + }, + { + "epoch": 3.809109176155392, + "grad_norm": 3.580410262153032, + "learning_rate": 3.46177878750209e-08, + "loss": 0.7851, + "step": 5687 + }, + { + "epoch": 3.8097789685197587, + "grad_norm": 4.138328296649182, + "learning_rate": 3.437583952200535e-08, + "loss": 0.7708, + "step": 5688 + }, + { + "epoch": 3.810448760884126, + "grad_norm": 3.305893615608775, + "learning_rate": 3.4134733788063756e-08, + "loss": 0.8873, + "step": 5689 + }, + { + "epoch": 3.811118553248493, + "grad_norm": 2.5079956736490177, + "learning_rate": 3.389447075559327e-08, + "loss": 0.7622, + "step": 5690 + }, + { + "epoch": 3.81178834561286, + "grad_norm": 2.3694051828786185, + "learning_rate": 3.3655050506703204e-08, + "loss": 0.5753, + "step": 5691 + }, + { + "epoch": 3.8124581379772273, + "grad_norm": 2.4005993108228405, + "learning_rate": 3.341647312321422e-08, + "loss": 0.7282, + "step": 5692 + }, + { + "epoch": 3.813127930341594, + "grad_norm": 2.3842547866069785, + "learning_rate": 3.317873868665972e-08, + "loss": 0.7791, + "step": 5693 + }, + { + "epoch": 3.813797722705961, + "grad_norm": 2.965895797775636, + "learning_rate": 3.294184727828442e-08, + "loss": 0.6979, + "step": 5694 + }, + { + "epoch": 3.814467515070328, + "grad_norm": 2.628154786629414, + "learning_rate": 3.2705798979045524e-08, + "loss": 0.7088, + "step": 5695 + }, + { + "epoch": 3.8151373074346955, + "grad_norm": 2.530346049327581, + "learning_rate": 3.247059386961154e-08, + "loss": 0.7157, + "step": 5696 + }, + { + "epoch": 3.8158070997990623, + "grad_norm": 3.2310969157687945, + "learning_rate": 3.2236232030362915e-08, + "loss": 0.8409, + "step": 5697 + }, + { + "epoch": 3.816476892163429, + "grad_norm": 3.2293824094279278, + "learning_rate": 3.200271354139251e-08, + "loss": 0.5934, + "step": 5698 + }, + { + "epoch": 3.8171466845277964, + "grad_norm": 2.8228680094565846, + "learning_rate": 3.177003848250426e-08, + "loss": 0.9161, + "step": 5699 + }, + { + "epoch": 3.8178164768921636, + "grad_norm": 2.993091793362896, + "learning_rate": 3.1538206933213746e-08, + "loss": 0.7558, + "step": 5700 + }, + { + "epoch": 3.8184862692565305, + "grad_norm": 2.8268701894815966, + "learning_rate": 
3.130721897274924e-08, + "loss": 0.7249, + "step": 5701 + }, + { + "epoch": 3.8191560616208973, + "grad_norm": 2.78057332641224, + "learning_rate": 3.1077074680049836e-08, + "loss": 0.8057, + "step": 5702 + }, + { + "epoch": 3.8198258539852645, + "grad_norm": 2.620854101307887, + "learning_rate": 3.084777413376677e-08, + "loss": 0.6668, + "step": 5703 + }, + { + "epoch": 3.820495646349632, + "grad_norm": 2.5757353794593114, + "learning_rate": 3.061931741226237e-08, + "loss": 0.6742, + "step": 5704 + }, + { + "epoch": 3.8211654387139986, + "grad_norm": 2.626603934896424, + "learning_rate": 3.0391704593611406e-08, + "loss": 0.9147, + "step": 5705 + }, + { + "epoch": 3.821835231078366, + "grad_norm": 2.704178710592281, + "learning_rate": 3.01649357555997e-08, + "loss": 0.7462, + "step": 5706 + }, + { + "epoch": 3.8225050234427327, + "grad_norm": 3.0365491092056587, + "learning_rate": 2.993901097572499e-08, + "loss": 0.8388, + "step": 5707 + }, + { + "epoch": 3.8231748158071, + "grad_norm": 3.5505060413000913, + "learning_rate": 2.971393033119552e-08, + "loss": 0.9456, + "step": 5708 + }, + { + "epoch": 3.8238446081714668, + "grad_norm": 2.9366805830892435, + "learning_rate": 2.9489693898932803e-08, + "loss": 0.6297, + "step": 5709 + }, + { + "epoch": 3.824514400535834, + "grad_norm": 2.4873769836266337, + "learning_rate": 2.9266301755568327e-08, + "loss": 0.7917, + "step": 5710 + }, + { + "epoch": 3.825184192900201, + "grad_norm": 2.556092624650561, + "learning_rate": 2.9043753977446298e-08, + "loss": 0.7112, + "step": 5711 + }, + { + "epoch": 3.825853985264568, + "grad_norm": 2.362383238826447, + "learning_rate": 2.88220506406206e-08, + "loss": 0.6855, + "step": 5712 + }, + { + "epoch": 3.826523777628935, + "grad_norm": 2.7457286756078885, + "learning_rate": 2.8601191820858687e-08, + "loss": 0.739, + "step": 5713 + }, + { + "epoch": 3.827193569993302, + "grad_norm": 2.6111466826395513, + "learning_rate": 2.8381177593637676e-08, + "loss": 0.7358, + "step": 5714 + }, + { + "epoch": 3.827863362357669, + "grad_norm": 2.5799151348532146, + "learning_rate": 2.8162008034147425e-08, + "loss": 0.8623, + "step": 5715 + }, + { + "epoch": 3.8285331547220363, + "grad_norm": 2.874583691778059, + "learning_rate": 2.7943683217287466e-08, + "loss": 0.8311, + "step": 5716 + }, + { + "epoch": 3.829202947086403, + "grad_norm": 2.739192146050806, + "learning_rate": 2.7726203217670333e-08, + "loss": 0.7757, + "step": 5717 + }, + { + "epoch": 3.8298727394507703, + "grad_norm": 3.1377321807002003, + "learning_rate": 2.7509568109619077e-08, + "loss": 0.7628, + "step": 5718 + }, + { + "epoch": 3.830542531815137, + "grad_norm": 2.472077317839045, + "learning_rate": 2.7293777967167535e-08, + "loss": 0.6238, + "step": 5719 + }, + { + "epoch": 3.8312123241795044, + "grad_norm": 5.150044330937919, + "learning_rate": 2.707883286406199e-08, + "loss": 0.6285, + "step": 5720 + }, + { + "epoch": 3.8318821165438717, + "grad_norm": 3.9513019700222802, + "learning_rate": 2.6864732873758405e-08, + "loss": 0.7889, + "step": 5721 + }, + { + "epoch": 3.8325519089082385, + "grad_norm": 2.7174560597957274, + "learning_rate": 2.6651478069425472e-08, + "loss": 0.7251, + "step": 5722 + }, + { + "epoch": 3.8332217012726053, + "grad_norm": 2.687673648501459, + "learning_rate": 2.6439068523942124e-08, + "loss": 0.7349, + "step": 5723 + }, + { + "epoch": 3.8338914936369726, + "grad_norm": 2.769818428011101, + "learning_rate": 2.6227504309898345e-08, + "loss": 0.6672, + "step": 5724 + }, + { + "epoch": 3.83456128600134, + "grad_norm": 
2.510745126772365, + "learning_rate": 2.601678549959602e-08, + "loss": 0.7439, + "step": 5725 + }, + { + "epoch": 3.8352310783657066, + "grad_norm": 2.7500761742732833, + "learning_rate": 2.580691216504727e-08, + "loss": 0.7133, + "step": 5726 + }, + { + "epoch": 3.8359008707300735, + "grad_norm": 2.983485220452788, + "learning_rate": 2.5597884377975557e-08, + "loss": 0.8194, + "step": 5727 + }, + { + "epoch": 3.8365706630944407, + "grad_norm": 2.4943362096533024, + "learning_rate": 2.538970220981568e-08, + "loss": 0.66, + "step": 5728 + }, + { + "epoch": 3.837240455458808, + "grad_norm": 2.4151287276137037, + "learning_rate": 2.518236573171323e-08, + "loss": 0.6278, + "step": 5729 + }, + { + "epoch": 3.837910247823175, + "grad_norm": 2.801701925005799, + "learning_rate": 2.4975875014524588e-08, + "loss": 0.8642, + "step": 5730 + }, + { + "epoch": 3.8385800401875416, + "grad_norm": 2.946917033980991, + "learning_rate": 2.4770230128817462e-08, + "loss": 0.6988, + "step": 5731 + }, + { + "epoch": 3.839249832551909, + "grad_norm": 2.759229064151201, + "learning_rate": 2.4565431144870368e-08, + "loss": 0.7597, + "step": 5732 + }, + { + "epoch": 3.839919624916276, + "grad_norm": 2.794408427812537, + "learning_rate": 2.436147813267231e-08, + "loss": 0.7686, + "step": 5733 + }, + { + "epoch": 3.840589417280643, + "grad_norm": 2.4389291723286375, + "learning_rate": 2.415837116192421e-08, + "loss": 0.7325, + "step": 5734 + }, + { + "epoch": 3.8412592096450098, + "grad_norm": 2.5261018030599347, + "learning_rate": 2.395611030203665e-08, + "loss": 0.7199, + "step": 5735 + }, + { + "epoch": 3.841929002009377, + "grad_norm": 2.7366220007034916, + "learning_rate": 2.3754695622131564e-08, + "loss": 0.7499, + "step": 5736 + }, + { + "epoch": 3.8425987943737443, + "grad_norm": 2.7417898882635803, + "learning_rate": 2.35541271910425e-08, + "loss": 0.8139, + "step": 5737 + }, + { + "epoch": 3.843268586738111, + "grad_norm": 2.598223732404786, + "learning_rate": 2.3354405077312405e-08, + "loss": 0.7321, + "step": 5738 + }, + { + "epoch": 3.8439383791024784, + "grad_norm": 2.5047513778517314, + "learning_rate": 2.315552934919585e-08, + "loss": 0.7447, + "step": 5739 + }, + { + "epoch": 3.844608171466845, + "grad_norm": 2.711088929937148, + "learning_rate": 2.2957500074657913e-08, + "loss": 0.8546, + "step": 5740 + }, + { + "epoch": 3.8452779638312125, + "grad_norm": 2.5381066442797593, + "learning_rate": 2.276031732137446e-08, + "loss": 0.5961, + "step": 5741 + }, + { + "epoch": 3.8459477561955793, + "grad_norm": 2.853505505488105, + "learning_rate": 2.256398115673214e-08, + "loss": 0.7159, + "step": 5742 + }, + { + "epoch": 3.8466175485599465, + "grad_norm": 2.6335217505699573, + "learning_rate": 2.2368491647828394e-08, + "loss": 0.8508, + "step": 5743 + }, + { + "epoch": 3.8472873409243133, + "grad_norm": 4.532523876069212, + "learning_rate": 2.2173848861470894e-08, + "loss": 0.7086, + "step": 5744 + }, + { + "epoch": 3.8479571332886806, + "grad_norm": 2.4625784291232566, + "learning_rate": 2.198005286417809e-08, + "loss": 0.6724, + "step": 5745 + }, + { + "epoch": 3.8486269256530474, + "grad_norm": 2.7615702469719485, + "learning_rate": 2.1787103722179237e-08, + "loss": 0.9076, + "step": 5746 + }, + { + "epoch": 3.8492967180174147, + "grad_norm": 2.686764134956576, + "learning_rate": 2.159500150141408e-08, + "loss": 0.7347, + "step": 5747 + }, + { + "epoch": 3.8499665103817815, + "grad_norm": 2.4238487787360117, + "learning_rate": 2.1403746267533165e-08, + "loss": 0.6706, + "step": 5748 + }, + { + "epoch": 
3.8506363027461488, + "grad_norm": 2.6554944811101358, + "learning_rate": 2.1213338085896984e-08, + "loss": 0.7119, + "step": 5749 + }, + { + "epoch": 3.8513060951105156, + "grad_norm": 2.6393561931779748, + "learning_rate": 2.102377702157682e-08, + "loss": 0.6288, + "step": 5750 + }, + { + "epoch": 3.851975887474883, + "grad_norm": 2.7643382558029423, + "learning_rate": 2.0835063139355016e-08, + "loss": 0.801, + "step": 5751 + }, + { + "epoch": 3.8526456798392497, + "grad_norm": 2.7153341997777516, + "learning_rate": 2.0647196503723875e-08, + "loss": 0.6066, + "step": 5752 + }, + { + "epoch": 3.853315472203617, + "grad_norm": 2.349190870759771, + "learning_rate": 2.0460177178885933e-08, + "loss": 0.6404, + "step": 5753 + }, + { + "epoch": 3.853985264567984, + "grad_norm": 2.5512163824866034, + "learning_rate": 2.02740052287545e-08, + "loss": 0.76, + "step": 5754 + }, + { + "epoch": 3.854655056932351, + "grad_norm": 2.7528063677225822, + "learning_rate": 2.0088680716953134e-08, + "loss": 0.6716, + "step": 5755 + }, + { + "epoch": 3.855324849296718, + "grad_norm": 3.0778222424789137, + "learning_rate": 1.990420370681645e-08, + "loss": 0.7253, + "step": 5756 + }, + { + "epoch": 3.855994641661085, + "grad_norm": 2.9831493052435683, + "learning_rate": 1.9720574261388182e-08, + "loss": 0.7634, + "step": 5757 + }, + { + "epoch": 3.8566644340254523, + "grad_norm": 2.487301923863738, + "learning_rate": 1.9537792443423408e-08, + "loss": 0.6286, + "step": 5758 + }, + { + "epoch": 3.857334226389819, + "grad_norm": 2.650461104796345, + "learning_rate": 1.9355858315386887e-08, + "loss": 0.727, + "step": 5759 + }, + { + "epoch": 3.858004018754186, + "grad_norm": 2.7458102072941557, + "learning_rate": 1.9174771939454718e-08, + "loss": 0.6293, + "step": 5760 + }, + { + "epoch": 3.8586738111185532, + "grad_norm": 2.60687873931559, + "learning_rate": 1.8994533377512393e-08, + "loss": 0.5524, + "step": 5761 + }, + { + "epoch": 3.8593436034829205, + "grad_norm": 3.0203185851414727, + "learning_rate": 1.8815142691155362e-08, + "loss": 0.6951, + "step": 5762 + }, + { + "epoch": 3.8600133958472873, + "grad_norm": 2.893121843235047, + "learning_rate": 1.863659994168987e-08, + "loss": 0.8099, + "step": 5763 + }, + { + "epoch": 3.860683188211654, + "grad_norm": 2.4399749056667392, + "learning_rate": 1.8458905190132936e-08, + "loss": 0.6919, + "step": 5764 + }, + { + "epoch": 3.8613529805760214, + "grad_norm": 2.654884708095556, + "learning_rate": 1.828205849721071e-08, + "loss": 0.9508, + "step": 5765 + }, + { + "epoch": 3.8620227729403886, + "grad_norm": 3.1236335862878857, + "learning_rate": 1.810605992336012e-08, + "loss": 0.7975, + "step": 5766 + }, + { + "epoch": 3.8626925653047555, + "grad_norm": 2.9406789667297857, + "learning_rate": 1.793090952872778e-08, + "loss": 0.7637, + "step": 5767 + }, + { + "epoch": 3.8633623576691227, + "grad_norm": 2.7925203936007934, + "learning_rate": 1.7756607373171087e-08, + "loss": 0.7905, + "step": 5768 + }, + { + "epoch": 3.8640321500334895, + "grad_norm": 2.5357797255965493, + "learning_rate": 1.7583153516257388e-08, + "loss": 0.7662, + "step": 5769 + }, + { + "epoch": 3.864701942397857, + "grad_norm": 2.8179228807366408, + "learning_rate": 1.7410548017263717e-08, + "loss": 0.8525, + "step": 5770 + }, + { + "epoch": 3.8653717347622236, + "grad_norm": 2.6906088499598844, + "learning_rate": 1.723879093517733e-08, + "loss": 0.8816, + "step": 5771 + }, + { + "epoch": 3.866041527126591, + "grad_norm": 2.623832239796545, + "learning_rate": 1.7067882328695996e-08, + "loss": 0.69, 
+ "step": 5772 + }, + { + "epoch": 3.8667113194909577, + "grad_norm": 3.103049969915227, + "learning_rate": 1.689782225622688e-08, + "loss": 0.7023, + "step": 5773 + }, + { + "epoch": 3.867381111855325, + "grad_norm": 2.3568243953170307, + "learning_rate": 1.6728610775887667e-08, + "loss": 0.6757, + "step": 5774 + }, + { + "epoch": 3.8680509042196918, + "grad_norm": 2.4430497373436113, + "learning_rate": 1.6560247945505702e-08, + "loss": 0.6996, + "step": 5775 + }, + { + "epoch": 3.868720696584059, + "grad_norm": 2.5679271972941327, + "learning_rate": 1.639273382261858e-08, + "loss": 0.6995, + "step": 5776 + }, + { + "epoch": 3.869390488948426, + "grad_norm": 3.007090692781731, + "learning_rate": 1.6226068464473555e-08, + "loss": 0.7326, + "step": 5777 + }, + { + "epoch": 3.870060281312793, + "grad_norm": 2.6143820080493176, + "learning_rate": 1.6060251928028127e-08, + "loss": 0.7387, + "step": 5778 + }, + { + "epoch": 3.87073007367716, + "grad_norm": 2.6229271523106346, + "learning_rate": 1.5895284269949184e-08, + "loss": 0.728, + "step": 5779 + }, + { + "epoch": 3.871399866041527, + "grad_norm": 2.6592954695665596, + "learning_rate": 1.5731165546614412e-08, + "loss": 0.8715, + "step": 5780 + }, + { + "epoch": 3.872069658405894, + "grad_norm": 2.739362970325317, + "learning_rate": 1.5567895814110888e-08, + "loss": 0.9223, + "step": 5781 + }, + { + "epoch": 3.8727394507702613, + "grad_norm": 2.760959726310429, + "learning_rate": 1.5405475128234813e-08, + "loss": 0.7036, + "step": 5782 + }, + { + "epoch": 3.8734092431346285, + "grad_norm": 2.4922471350787236, + "learning_rate": 1.5243903544493454e-08, + "loss": 0.7725, + "step": 5783 + }, + { + "epoch": 3.8740790354989953, + "grad_norm": 2.6508807824705123, + "learning_rate": 1.5083181118103195e-08, + "loss": 0.8432, + "step": 5784 + }, + { + "epoch": 3.874748827863362, + "grad_norm": 2.639579106786613, + "learning_rate": 1.4923307903990936e-08, + "loss": 0.7253, + "step": 5785 + }, + { + "epoch": 3.8754186202277294, + "grad_norm": 2.644541119122903, + "learning_rate": 1.4764283956791859e-08, + "loss": 0.7524, + "step": 5786 + }, + { + "epoch": 3.8760884125920967, + "grad_norm": 2.94253042572009, + "learning_rate": 1.4606109330852492e-08, + "loss": 0.7716, + "step": 5787 + }, + { + "epoch": 3.8767582049564635, + "grad_norm": 2.5474086118996904, + "learning_rate": 1.4448784080228762e-08, + "loss": 0.731, + "step": 5788 + }, + { + "epoch": 3.8774279973208303, + "grad_norm": 2.306191638101246, + "learning_rate": 1.4292308258685439e-08, + "loss": 0.7394, + "step": 5789 + }, + { + "epoch": 3.8780977896851976, + "grad_norm": 3.2100510541848677, + "learning_rate": 1.4136681919698081e-08, + "loss": 0.7668, + "step": 5790 + }, + { + "epoch": 3.878767582049565, + "grad_norm": 2.9629686195445166, + "learning_rate": 1.3981905116451089e-08, + "loss": 0.8388, + "step": 5791 + }, + { + "epoch": 3.8794373744139317, + "grad_norm": 2.3936035371360567, + "learning_rate": 1.3827977901839373e-08, + "loss": 0.7286, + "step": 5792 + }, + { + "epoch": 3.8801071667782985, + "grad_norm": 2.6187742105181337, + "learning_rate": 1.367490032846669e-08, + "loss": 0.6897, + "step": 5793 + }, + { + "epoch": 3.8807769591426657, + "grad_norm": 3.016886459313161, + "learning_rate": 1.3522672448647022e-08, + "loss": 0.8839, + "step": 5794 + }, + { + "epoch": 3.881446751507033, + "grad_norm": 2.6293164211164446, + "learning_rate": 1.337129431440376e-08, + "loss": 0.8611, + "step": 5795 + }, + { + "epoch": 3.8821165438714, + "grad_norm": 4.59849361957359, + "learning_rate": 
1.3220765977469686e-08, + "loss": 0.7093, + "step": 5796 + }, + { + "epoch": 3.882786336235767, + "grad_norm": 3.4649054293547827, + "learning_rate": 1.3071087489287814e-08, + "loss": 0.8488, + "step": 5797 + }, + { + "epoch": 3.883456128600134, + "grad_norm": 3.1268324250721804, + "learning_rate": 1.2922258901009732e-08, + "loss": 0.7359, + "step": 5798 + }, + { + "epoch": 3.884125920964501, + "grad_norm": 2.74248554368027, + "learning_rate": 1.2774280263497252e-08, + "loss": 0.7293, + "step": 5799 + }, + { + "epoch": 3.884795713328868, + "grad_norm": 2.5272526175682217, + "learning_rate": 1.2627151627322143e-08, + "loss": 0.8029, + "step": 5800 + }, + { + "epoch": 3.8854655056932352, + "grad_norm": 2.5532412427852584, + "learning_rate": 1.2480873042764464e-08, + "loss": 0.681, + "step": 5801 + }, + { + "epoch": 3.886135298057602, + "grad_norm": 2.7967363673920103, + "learning_rate": 1.2335444559814502e-08, + "loss": 0.8539, + "step": 5802 + }, + { + "epoch": 3.8868050904219693, + "grad_norm": 2.7223020602924617, + "learning_rate": 1.2190866228172505e-08, + "loss": 0.7984, + "step": 5803 + }, + { + "epoch": 3.887474882786336, + "grad_norm": 3.0186763021790046, + "learning_rate": 1.204713809724728e-08, + "loss": 0.8291, + "step": 5804 + }, + { + "epoch": 3.8881446751507034, + "grad_norm": 2.3496977026032484, + "learning_rate": 1.1904260216157314e-08, + "loss": 0.7776, + "step": 5805 + }, + { + "epoch": 3.88881446751507, + "grad_norm": 2.675431269229078, + "learning_rate": 1.1762232633730775e-08, + "loss": 0.7845, + "step": 5806 + }, + { + "epoch": 3.8894842598794375, + "grad_norm": 3.6631653389665537, + "learning_rate": 1.1621055398505498e-08, + "loss": 0.7935, + "step": 5807 + }, + { + "epoch": 3.8901540522438043, + "grad_norm": 3.418788603215627, + "learning_rate": 1.1480728558727894e-08, + "loss": 0.851, + "step": 5808 + }, + { + "epoch": 3.8908238446081715, + "grad_norm": 2.9184051280409253, + "learning_rate": 1.1341252162354322e-08, + "loss": 0.7231, + "step": 5809 + }, + { + "epoch": 3.8914936369725384, + "grad_norm": 3.184288399233424, + "learning_rate": 1.1202626257050264e-08, + "loss": 0.9419, + "step": 5810 + }, + { + "epoch": 3.8921634293369056, + "grad_norm": 2.717673271154476, + "learning_rate": 1.1064850890190881e-08, + "loss": 0.8146, + "step": 5811 + }, + { + "epoch": 3.8928332217012724, + "grad_norm": 2.7393882506504497, + "learning_rate": 1.0927926108860454e-08, + "loss": 0.701, + "step": 5812 + }, + { + "epoch": 3.8935030140656397, + "grad_norm": 3.32156612660741, + "learning_rate": 1.0791851959852662e-08, + "loss": 0.6979, + "step": 5813 + }, + { + "epoch": 3.8941728064300065, + "grad_norm": 2.5651749904965513, + "learning_rate": 1.0656628489670307e-08, + "loss": 0.8644, + "step": 5814 + }, + { + "epoch": 3.894842598794374, + "grad_norm": 2.30123274322329, + "learning_rate": 1.0522255744525311e-08, + "loss": 0.7239, + "step": 5815 + }, + { + "epoch": 3.895512391158741, + "grad_norm": 2.896928239300953, + "learning_rate": 1.0388733770339276e-08, + "loss": 0.7835, + "step": 5816 + }, + { + "epoch": 3.896182183523108, + "grad_norm": 3.5538708686680054, + "learning_rate": 1.02560626127432e-08, + "loss": 0.9277, + "step": 5817 + }, + { + "epoch": 3.8968519758874747, + "grad_norm": 2.7035430913906473, + "learning_rate": 1.0124242317076927e-08, + "loss": 0.8535, + "step": 5818 + }, + { + "epoch": 3.897521768251842, + "grad_norm": 3.0326896602346505, + "learning_rate": 9.993272928389141e-09, + "loss": 0.8175, + "step": 5819 + }, + { + "epoch": 3.898191560616209, + "grad_norm": 
2.416871653011157, + "learning_rate": 9.863154491438765e-09, + "loss": 0.7262, + "step": 5820 + }, + { + "epoch": 3.898861352980576, + "grad_norm": 2.6751621924296614, + "learning_rate": 9.733887050693003e-09, + "loss": 0.6955, + "step": 5821 + }, + { + "epoch": 3.899531145344943, + "grad_norm": 2.579713362058447, + "learning_rate": 9.605470650329019e-09, + "loss": 0.7519, + "step": 5822 + }, + { + "epoch": 3.90020093770931, + "grad_norm": 2.484395234689009, + "learning_rate": 9.477905334232263e-09, + "loss": 0.7966, + "step": 5823 + }, + { + "epoch": 3.9008707300736774, + "grad_norm": 2.442486609756535, + "learning_rate": 9.351191145997861e-09, + "loss": 0.7172, + "step": 5824 + }, + { + "epoch": 3.901540522438044, + "grad_norm": 3.0138638914803493, + "learning_rate": 9.225328128930066e-09, + "loss": 0.7891, + "step": 5825 + }, + { + "epoch": 3.902210314802411, + "grad_norm": 2.65648677428678, + "learning_rate": 9.100316326041969e-09, + "loss": 0.7675, + "step": 5826 + }, + { + "epoch": 3.9028801071667782, + "grad_norm": 2.5878168770366266, + "learning_rate": 8.97615578005634e-09, + "loss": 0.7454, + "step": 5827 + }, + { + "epoch": 3.9035498995311455, + "grad_norm": 2.383003175244561, + "learning_rate": 8.852846533404235e-09, + "loss": 0.651, + "step": 5828 + }, + { + "epoch": 3.9042196918955123, + "grad_norm": 2.698846433770343, + "learning_rate": 8.730388628226394e-09, + "loss": 0.7093, + "step": 5829 + }, + { + "epoch": 3.9048894842598796, + "grad_norm": 3.0360105612024832, + "learning_rate": 8.608782106372394e-09, + "loss": 0.7327, + "step": 5830 + }, + { + "epoch": 3.9055592766242464, + "grad_norm": 3.1873113244188302, + "learning_rate": 8.488027009400934e-09, + "loss": 0.8541, + "step": 5831 + }, + { + "epoch": 3.9062290689886137, + "grad_norm": 2.148862451535837, + "learning_rate": 8.368123378579285e-09, + "loss": 0.6172, + "step": 5832 + }, + { + "epoch": 3.9068988613529805, + "grad_norm": 3.1464139819328336, + "learning_rate": 8.249071254884666e-09, + "loss": 0.8667, + "step": 5833 + }, + { + "epoch": 3.9075686537173477, + "grad_norm": 2.8525109137618774, + "learning_rate": 8.13087067900259e-09, + "loss": 0.7018, + "step": 5834 + }, + { + "epoch": 3.9082384460817146, + "grad_norm": 2.508947876066996, + "learning_rate": 8.013521691327686e-09, + "loss": 0.7263, + "step": 5835 + }, + { + "epoch": 3.908908238446082, + "grad_norm": 2.563497098236904, + "learning_rate": 7.897024331963432e-09, + "loss": 0.6963, + "step": 5836 + }, + { + "epoch": 3.9095780308104486, + "grad_norm": 2.7069528704037586, + "learning_rate": 7.781378640722704e-09, + "loss": 0.8514, + "step": 5837 + }, + { + "epoch": 3.910247823174816, + "grad_norm": 2.840767688434162, + "learning_rate": 7.666584657126663e-09, + "loss": 0.7622, + "step": 5838 + }, + { + "epoch": 3.9109176155391827, + "grad_norm": 2.8331105015147124, + "learning_rate": 7.552642420406154e-09, + "loss": 0.8741, + "step": 5839 + }, + { + "epoch": 3.91158740790355, + "grad_norm": 3.1286931337819026, + "learning_rate": 7.4395519695003026e-09, + "loss": 0.7724, + "step": 5840 + }, + { + "epoch": 3.912257200267917, + "grad_norm": 2.6900250829139827, + "learning_rate": 7.32731334305764e-09, + "loss": 0.7837, + "step": 5841 + }, + { + "epoch": 3.912926992632284, + "grad_norm": 2.8429372286904306, + "learning_rate": 7.215926579434984e-09, + "loss": 0.9419, + "step": 5842 + }, + { + "epoch": 3.913596784996651, + "grad_norm": 3.066395232264507, + "learning_rate": 7.105391716699106e-09, + "loss": 0.6605, + "step": 5843 + }, + { + "epoch": 3.914266577361018, 
+ "grad_norm": 2.393270890883313, + "learning_rate": 6.995708792624234e-09, + "loss": 0.6869, + "step": 5844 + }, + { + "epoch": 3.9149363697253854, + "grad_norm": 3.532499081690206, + "learning_rate": 6.886877844694273e-09, + "loss": 0.8147, + "step": 5845 + }, + { + "epoch": 3.915606162089752, + "grad_norm": 3.3168729206461793, + "learning_rate": 6.778898910102249e-09, + "loss": 0.7356, + "step": 5846 + }, + { + "epoch": 3.916275954454119, + "grad_norm": 2.978936879331441, + "learning_rate": 6.671772025749201e-09, + "loss": 0.9608, + "step": 5847 + }, + { + "epoch": 3.9169457468184863, + "grad_norm": 2.5303344746514167, + "learning_rate": 6.565497228245565e-09, + "loss": 0.6779, + "step": 5848 + }, + { + "epoch": 3.9176155391828535, + "grad_norm": 2.717062359260206, + "learning_rate": 6.460074553910067e-09, + "loss": 0.7818, + "step": 5849 + }, + { + "epoch": 3.9182853315472204, + "grad_norm": 2.403021954648178, + "learning_rate": 6.355504038771387e-09, + "loss": 0.662, + "step": 5850 + }, + { + "epoch": 3.918955123911587, + "grad_norm": 2.4127239021548976, + "learning_rate": 6.251785718565384e-09, + "loss": 0.6496, + "step": 5851 + }, + { + "epoch": 3.9196249162759544, + "grad_norm": 2.6634888647093677, + "learning_rate": 6.14891962873787e-09, + "loss": 0.7239, + "step": 5852 + }, + { + "epoch": 3.9202947086403217, + "grad_norm": 2.514971330653854, + "learning_rate": 6.046905804442671e-09, + "loss": 0.748, + "step": 5853 + }, + { + "epoch": 3.9209645010046885, + "grad_norm": 2.46840942424675, + "learning_rate": 5.945744280542731e-09, + "loss": 0.7865, + "step": 5854 + }, + { + "epoch": 3.9216342933690553, + "grad_norm": 4.179731724600731, + "learning_rate": 5.845435091610119e-09, + "loss": 0.6919, + "step": 5855 + }, + { + "epoch": 3.9223040857334226, + "grad_norm": 2.7707537842548673, + "learning_rate": 5.745978271924912e-09, + "loss": 0.6706, + "step": 5856 + }, + { + "epoch": 3.92297387809779, + "grad_norm": 3.75356545440246, + "learning_rate": 5.647373855475757e-09, + "loss": 0.7576, + "step": 5857 + }, + { + "epoch": 3.9236436704621567, + "grad_norm": 2.3258906516820463, + "learning_rate": 5.549621875960975e-09, + "loss": 0.7263, + "step": 5858 + }, + { + "epoch": 3.924313462826524, + "grad_norm": 2.274777909336929, + "learning_rate": 5.452722366786622e-09, + "loss": 0.6733, + "step": 5859 + }, + { + "epoch": 3.9249832551908908, + "grad_norm": 2.496814159059392, + "learning_rate": 5.356675361068153e-09, + "loss": 0.6934, + "step": 5860 + }, + { + "epoch": 3.925653047555258, + "grad_norm": 2.587867815567695, + "learning_rate": 5.261480891628756e-09, + "loss": 0.7765, + "step": 5861 + }, + { + "epoch": 3.926322839919625, + "grad_norm": 2.6863487593319064, + "learning_rate": 5.167138991001297e-09, + "loss": 0.7632, + "step": 5862 + }, + { + "epoch": 3.926992632283992, + "grad_norm": 2.7035487595215297, + "learning_rate": 5.073649691426929e-09, + "loss": 0.7409, + "step": 5863 + }, + { + "epoch": 3.927662424648359, + "grad_norm": 2.7192425424013877, + "learning_rate": 4.9810130248548195e-09, + "loss": 0.7696, + "step": 5864 + }, + { + "epoch": 3.928332217012726, + "grad_norm": 2.7050387683641, + "learning_rate": 4.889229022943531e-09, + "loss": 0.8771, + "step": 5865 + }, + { + "epoch": 3.929002009377093, + "grad_norm": 2.6656902824701154, + "learning_rate": 4.7982977170599185e-09, + "loss": 0.6503, + "step": 5866 + }, + { + "epoch": 3.9296718017414602, + "grad_norm": 2.6936104243827748, + "learning_rate": 4.708219138279679e-09, + "loss": 0.7676, + "step": 5867 + }, + { + "epoch": 
3.930341594105827, + "grad_norm": 3.228945432596634, + "learning_rate": 4.618993317386522e-09, + "loss": 0.8979, + "step": 5868 + }, + { + "epoch": 3.9310113864701943, + "grad_norm": 2.931138754004935, + "learning_rate": 4.530620284873e-09, + "loss": 0.7893, + "step": 5869 + }, + { + "epoch": 3.931681178834561, + "grad_norm": 3.1956247382195317, + "learning_rate": 4.443100070940787e-09, + "loss": 0.7977, + "step": 5870 + }, + { + "epoch": 3.9323509711989284, + "grad_norm": 2.7422071490781494, + "learning_rate": 4.356432705499292e-09, + "loss": 0.8659, + "step": 5871 + }, + { + "epoch": 3.933020763563295, + "grad_norm": 2.5934140160182824, + "learning_rate": 4.270618218167044e-09, + "loss": 0.6849, + "step": 5872 + }, + { + "epoch": 3.9336905559276625, + "grad_norm": 2.595536714391781, + "learning_rate": 4.1856566382705855e-09, + "loss": 0.677, + "step": 5873 + }, + { + "epoch": 3.9343603482920297, + "grad_norm": 3.683664514489274, + "learning_rate": 4.101547994845579e-09, + "loss": 0.7448, + "step": 5874 + }, + { + "epoch": 3.9350301406563966, + "grad_norm": 2.314305020989154, + "learning_rate": 4.018292316635419e-09, + "loss": 0.6247, + "step": 5875 + }, + { + "epoch": 3.9356999330207634, + "grad_norm": 2.507692676220483, + "learning_rate": 3.935889632092904e-09, + "loss": 0.6618, + "step": 5876 + }, + { + "epoch": 3.9363697253851306, + "grad_norm": 2.379949058690527, + "learning_rate": 3.854339969378562e-09, + "loss": 0.661, + "step": 5877 + }, + { + "epoch": 3.937039517749498, + "grad_norm": 3.31059364859352, + "learning_rate": 3.773643356362045e-09, + "loss": 0.8139, + "step": 5878 + }, + { + "epoch": 3.9377093101138647, + "grad_norm": 2.4292421339651717, + "learning_rate": 3.6937998206210135e-09, + "loss": 0.7544, + "step": 5879 + }, + { + "epoch": 3.9383791024782315, + "grad_norm": 2.804598441876934, + "learning_rate": 3.614809389441698e-09, + "loss": 0.6013, + "step": 5880 + }, + { + "epoch": 3.939048894842599, + "grad_norm": 2.6853326678807052, + "learning_rate": 3.5366720898186158e-09, + "loss": 0.7393, + "step": 5881 + }, + { + "epoch": 3.939718687206966, + "grad_norm": 2.5838955670122035, + "learning_rate": 3.4593879484554062e-09, + "loss": 0.7651, + "step": 5882 + }, + { + "epoch": 3.940388479571333, + "grad_norm": 2.5241641963678294, + "learning_rate": 3.382956991763442e-09, + "loss": 0.7787, + "step": 5883 + }, + { + "epoch": 3.9410582719356997, + "grad_norm": 2.4840960264372978, + "learning_rate": 3.307379245862663e-09, + "loss": 0.7275, + "step": 5884 + }, + { + "epoch": 3.941728064300067, + "grad_norm": 2.5276322998587553, + "learning_rate": 3.2326547365815752e-09, + "loss": 0.7351, + "step": 5885 + }, + { + "epoch": 3.942397856664434, + "grad_norm": 2.6824618372595124, + "learning_rate": 3.158783489457251e-09, + "loss": 0.7569, + "step": 5886 + }, + { + "epoch": 3.943067649028801, + "grad_norm": 3.067049411323231, + "learning_rate": 3.085765529734497e-09, + "loss": 0.707, + "step": 5887 + }, + { + "epoch": 3.943737441393168, + "grad_norm": 2.6427996806782246, + "learning_rate": 3.0136008823675177e-09, + "loss": 0.7672, + "step": 5888 + }, + { + "epoch": 3.944407233757535, + "grad_norm": 2.2531961328097596, + "learning_rate": 2.9422895720179755e-09, + "loss": 0.6257, + "step": 5889 + }, + { + "epoch": 3.9450770261219024, + "grad_norm": 2.9063219090390207, + "learning_rate": 2.8718316230560984e-09, + "loss": 0.6788, + "step": 5890 + }, + { + "epoch": 3.945746818486269, + "grad_norm": 2.544140743931208, + "learning_rate": 2.8022270595612356e-09, + "loss": 0.806, + "step": 
5891 + }, + { + "epoch": 3.9464166108506364, + "grad_norm": 2.8974276104419836, + "learning_rate": 2.733475905320193e-09, + "loss": 0.7413, + "step": 5892 + }, + { + "epoch": 3.9470864032150033, + "grad_norm": 2.8750791077571787, + "learning_rate": 2.6655781838283436e-09, + "loss": 0.7383, + "step": 5893 + }, + { + "epoch": 3.9477561955793705, + "grad_norm": 2.870051392899464, + "learning_rate": 2.5985339182896252e-09, + "loss": 0.8495, + "step": 5894 + }, + { + "epoch": 3.9484259879437373, + "grad_norm": 2.653395341203953, + "learning_rate": 2.532343131616544e-09, + "loss": 0.7394, + "step": 5895 + }, + { + "epoch": 3.9490957803081046, + "grad_norm": 2.9607916724924133, + "learning_rate": 2.46700584642906e-09, + "loss": 0.7406, + "step": 5896 + }, + { + "epoch": 3.9497655726724714, + "grad_norm": 2.382170282766361, + "learning_rate": 2.4025220850559805e-09, + "loss": 0.6738, + "step": 5897 + }, + { + "epoch": 3.9504353650368387, + "grad_norm": 2.6277176006367453, + "learning_rate": 2.338891869534954e-09, + "loss": 0.7097, + "step": 5898 + }, + { + "epoch": 3.9511051574012055, + "grad_norm": 2.522190255386664, + "learning_rate": 2.2761152216108085e-09, + "loss": 0.7338, + "step": 5899 + }, + { + "epoch": 3.9517749497655728, + "grad_norm": 2.7470904052762455, + "learning_rate": 2.2141921627377717e-09, + "loss": 0.6913, + "step": 5900 + }, + { + "epoch": 3.9524447421299396, + "grad_norm": 2.783051982770849, + "learning_rate": 2.153122714077527e-09, + "loss": 0.9265, + "step": 5901 + }, + { + "epoch": 3.953114534494307, + "grad_norm": 2.511434525327316, + "learning_rate": 2.092906896500324e-09, + "loss": 0.7555, + "step": 5902 + }, + { + "epoch": 3.9537843268586736, + "grad_norm": 3.0190253139786036, + "learning_rate": 2.03354473058498e-09, + "loss": 0.8338, + "step": 5903 + }, + { + "epoch": 3.954454119223041, + "grad_norm": 2.870170538305558, + "learning_rate": 1.9750362366180444e-09, + "loss": 0.7732, + "step": 5904 + }, + { + "epoch": 3.9551239115874077, + "grad_norm": 3.0118103745242943, + "learning_rate": 1.9173814345949116e-09, + "loss": 0.7574, + "step": 5905 + }, + { + "epoch": 3.955793703951775, + "grad_norm": 2.3239213908933913, + "learning_rate": 1.8605803442184322e-09, + "loss": 0.7666, + "step": 5906 + }, + { + "epoch": 3.9564634963161422, + "grad_norm": 2.812642048667833, + "learning_rate": 1.804632984900856e-09, + "loss": 0.7236, + "step": 5907 + }, + { + "epoch": 3.957133288680509, + "grad_norm": 2.3648524893563123, + "learning_rate": 1.7495393757616108e-09, + "loss": 0.7198, + "step": 5908 + }, + { + "epoch": 3.957803081044876, + "grad_norm": 2.722831703021855, + "learning_rate": 1.6952995356286916e-09, + "loss": 0.8103, + "step": 5909 + }, + { + "epoch": 3.958472873409243, + "grad_norm": 3.186553762282104, + "learning_rate": 1.6419134830386595e-09, + "loss": 0.6198, + "step": 5910 + }, + { + "epoch": 3.9591426657736104, + "grad_norm": 2.750821868435693, + "learning_rate": 1.5893812362355321e-09, + "loss": 0.7133, + "step": 5911 + }, + { + "epoch": 3.959812458137977, + "grad_norm": 2.4129930033585856, + "learning_rate": 1.5377028131727256e-09, + "loss": 0.5975, + "step": 5912 + }, + { + "epoch": 3.960482250502344, + "grad_norm": 2.808282750461145, + "learning_rate": 1.4868782315108354e-09, + "loss": 0.7757, + "step": 5913 + }, + { + "epoch": 3.9611520428667113, + "grad_norm": 3.153400728989891, + "learning_rate": 1.4369075086187457e-09, + "loss": 0.7621, + "step": 5914 + }, + { + "epoch": 3.9618218352310786, + "grad_norm": 3.1430926913455695, + "learning_rate": 
1.3877906615741843e-09, + "loss": 0.8329, + "step": 5915 + }, + { + "epoch": 3.9624916275954454, + "grad_norm": 2.515811116280196, + "learning_rate": 1.3395277071626133e-09, + "loss": 0.7443, + "step": 5916 + }, + { + "epoch": 3.963161419959812, + "grad_norm": 3.2051902324986443, + "learning_rate": 1.2921186618777836e-09, + "loss": 0.8705, + "step": 5917 + }, + { + "epoch": 3.9638312123241795, + "grad_norm": 2.50801950765661, + "learning_rate": 1.2455635419211797e-09, + "loss": 0.7482, + "step": 5918 + }, + { + "epoch": 3.9645010046885467, + "grad_norm": 2.9288491827746332, + "learning_rate": 1.1998623632031304e-09, + "loss": 0.7751, + "step": 5919 + }, + { + "epoch": 3.9651707970529135, + "grad_norm": 2.3899665165311634, + "learning_rate": 1.1550151413419753e-09, + "loss": 0.6232, + "step": 5920 + }, + { + "epoch": 3.965840589417281, + "grad_norm": 2.596597602674953, + "learning_rate": 1.1110218916640658e-09, + "loss": 0.7017, + "step": 5921 + }, + { + "epoch": 3.9665103817816476, + "grad_norm": 2.295199253560701, + "learning_rate": 1.0678826292037648e-09, + "loss": 0.8071, + "step": 5922 + }, + { + "epoch": 3.967180174146015, + "grad_norm": 2.579563246859174, + "learning_rate": 1.025597368704001e-09, + "loss": 0.6002, + "step": 5923 + }, + { + "epoch": 3.9678499665103817, + "grad_norm": 2.907783640895918, + "learning_rate": 9.841661246154377e-10, + "loss": 0.7841, + "step": 5924 + }, + { + "epoch": 3.968519758874749, + "grad_norm": 2.5242324120554795, + "learning_rate": 9.435889110970265e-10, + "loss": 0.6601, + "step": 5925 + }, + { + "epoch": 3.9691895512391158, + "grad_norm": 2.3813619798015635, + "learning_rate": 9.038657420162855e-10, + "loss": 0.7951, + "step": 5926 + }, + { + "epoch": 3.969859343603483, + "grad_norm": 2.320324007161945, + "learning_rate": 8.649966309481894e-10, + "loss": 0.5971, + "step": 5927 + }, + { + "epoch": 3.97052913596785, + "grad_norm": 2.562577557113508, + "learning_rate": 8.269815911760016e-10, + "loss": 0.6976, + "step": 5928 + }, + { + "epoch": 3.971198928332217, + "grad_norm": 2.5002469704963732, + "learning_rate": 7.898206356915516e-10, + "loss": 0.6763, + "step": 5929 + }, + { + "epoch": 3.971868720696584, + "grad_norm": 2.5227272542040637, + "learning_rate": 7.535137771944034e-10, + "loss": 0.6512, + "step": 5930 + }, + { + "epoch": 3.972538513060951, + "grad_norm": 2.308726325921425, + "learning_rate": 7.180610280921318e-10, + "loss": 0.7471, + "step": 5931 + }, + { + "epoch": 3.973208305425318, + "grad_norm": 2.8619176158571893, + "learning_rate": 6.834624005006008e-10, + "loss": 0.736, + "step": 5932 + }, + { + "epoch": 3.9738780977896853, + "grad_norm": 2.489439419068575, + "learning_rate": 6.497179062439629e-10, + "loss": 0.8413, + "step": 5933 + }, + { + "epoch": 3.974547890154052, + "grad_norm": 2.7889889656808426, + "learning_rate": 6.168275568541049e-10, + "loss": 0.7444, + "step": 5934 + }, + { + "epoch": 3.9752176825184193, + "grad_norm": 2.4066912736367625, + "learning_rate": 5.847913635714797e-10, + "loss": 0.6175, + "step": 5935 + }, + { + "epoch": 3.9758874748827866, + "grad_norm": 4.7796825059423345, + "learning_rate": 5.536093373439965e-10, + "loss": 0.8377, + "step": 5936 + }, + { + "epoch": 3.9765572672471534, + "grad_norm": 2.703076115836484, + "learning_rate": 5.232814888284088e-10, + "loss": 0.7752, + "step": 5937 + }, + { + "epoch": 3.9772270596115202, + "grad_norm": 3.1286098656932584, + "learning_rate": 4.938078283889258e-10, + "loss": 0.7751, + "step": 5938 + }, + { + "epoch": 3.9778968519758875, + "grad_norm": 
3.01566717872497, + "learning_rate": 4.6518836609776853e-10, + "loss": 0.7426, + "step": 5939 + }, + { + "epoch": 3.9785666443402548, + "grad_norm": 2.46338880139197, + "learning_rate": 4.374231117360017e-10, + "loss": 0.6974, + "step": 5940 + }, + { + "epoch": 3.9792364367046216, + "grad_norm": 2.5213829608447487, + "learning_rate": 4.1051207479242404e-10, + "loss": 0.8158, + "step": 5941 + }, + { + "epoch": 3.9799062290689884, + "grad_norm": 2.666317876727597, + "learning_rate": 3.844552644632904e-10, + "loss": 0.717, + "step": 5942 + }, + { + "epoch": 3.9805760214333556, + "grad_norm": 2.7724018051843533, + "learning_rate": 3.5925268965369966e-10, + "loss": 0.6958, + "step": 5943 + }, + { + "epoch": 3.981245813797723, + "grad_norm": 2.5052345308889064, + "learning_rate": 3.3490435897648444e-10, + "loss": 0.6075, + "step": 5944 + }, + { + "epoch": 3.9819156061620897, + "grad_norm": 2.6718150067180986, + "learning_rate": 3.114102807527664e-10, + "loss": 0.847, + "step": 5945 + }, + { + "epoch": 3.9825853985264565, + "grad_norm": 3.293340329482871, + "learning_rate": 2.8877046301167833e-10, + "loss": 0.7633, + "step": 5946 + }, + { + "epoch": 3.983255190890824, + "grad_norm": 2.7589093431716614, + "learning_rate": 2.669849134898095e-10, + "loss": 0.8009, + "step": 5947 + }, + { + "epoch": 3.983924983255191, + "grad_norm": 2.715759139635818, + "learning_rate": 2.4605363963259297e-10, + "loss": 0.8437, + "step": 5948 + }, + { + "epoch": 3.984594775619558, + "grad_norm": 2.42105904430447, + "learning_rate": 2.2597664859319578e-10, + "loss": 0.7224, + "step": 5949 + }, + { + "epoch": 3.985264567983925, + "grad_norm": 3.0841328486256288, + "learning_rate": 2.0675394723307374e-10, + "loss": 0.8035, + "step": 5950 + }, + { + "epoch": 3.985934360348292, + "grad_norm": 2.601429442804751, + "learning_rate": 1.883855421211389e-10, + "loss": 0.8348, + "step": 5951 + }, + { + "epoch": 3.986604152712659, + "grad_norm": 2.6941838831688636, + "learning_rate": 1.7087143953486983e-10, + "loss": 0.6664, + "step": 5952 + }, + { + "epoch": 3.987273945077026, + "grad_norm": 2.6477828009282787, + "learning_rate": 1.542116454597564e-10, + "loss": 0.7459, + "step": 5953 + }, + { + "epoch": 3.9879437374413933, + "grad_norm": 2.308399642455026, + "learning_rate": 1.3840616558929988e-10, + "loss": 0.6201, + "step": 5954 + }, + { + "epoch": 3.98861352980576, + "grad_norm": 2.6470104427814243, + "learning_rate": 1.234550053247352e-10, + "loss": 0.7209, + "step": 5955 + }, + { + "epoch": 3.9892833221701274, + "grad_norm": 2.2346140398366483, + "learning_rate": 1.0935816977586389e-10, + "loss": 0.6682, + "step": 5956 + }, + { + "epoch": 3.989953114534494, + "grad_norm": 3.067349188107142, + "learning_rate": 9.611566375994363e-11, + "loss": 0.7206, + "step": 5957 + }, + { + "epoch": 3.9906229068988615, + "grad_norm": 2.663652438772068, + "learning_rate": 8.37274918027986e-11, + "loss": 0.8288, + "step": 5958 + }, + { + "epoch": 3.9912926992632283, + "grad_norm": 2.522134173841248, + "learning_rate": 7.219365813770917e-11, + "loss": 0.7917, + "step": 5959 + }, + { + "epoch": 3.9919624916275955, + "grad_norm": 2.969183690262666, + "learning_rate": 6.151416670679977e-11, + "loss": 0.8176, + "step": 5960 + }, + { + "epoch": 3.9926322839919623, + "grad_norm": 3.0128246590565273, + "learning_rate": 5.168902115909591e-11, + "loss": 0.8408, + "step": 5961 + }, + { + "epoch": 3.9933020763563296, + "grad_norm": 2.5992331012334415, + "learning_rate": 4.271822485302224e-11, + "loss": 0.8115, + "step": 5962 + }, + { + "epoch": 
3.9939718687206964, + "grad_norm": 2.726242784148829, + "learning_rate": 3.460178085390453e-11, + "loss": 0.7228, + "step": 5963 + }, + { + "epoch": 3.9946416610850637, + "grad_norm": 3.051968924020032, + "learning_rate": 2.7339691935635014e-11, + "loss": 0.8766, + "step": 5964 + }, + { + "epoch": 3.995311453449431, + "grad_norm": 2.7379772392474493, + "learning_rate": 2.093196058011726e-11, + "loss": 0.7415, + "step": 5965 + }, + { + "epoch": 3.9959812458137978, + "grad_norm": 2.9536660261588668, + "learning_rate": 1.537858897698863e-11, + "loss": 0.7662, + "step": 5966 + }, + { + "epoch": 3.9966510381781646, + "grad_norm": 2.6174254764621048, + "learning_rate": 1.0679579023897823e-11, + "loss": 0.6869, + "step": 5967 + }, + { + "epoch": 3.997320830542532, + "grad_norm": 2.6107902366838944, + "learning_rate": 6.8349323270600064e-12, + "loss": 0.6962, + "step": 5968 + }, + { + "epoch": 3.997990622906899, + "grad_norm": 2.565948278117615, + "learning_rate": 3.844650200424127e-12, + "loss": 0.842, + "step": 5969 + }, + { + "epoch": 3.998660415271266, + "grad_norm": 2.894504720380556, + "learning_rate": 1.7087336656729237e-12, + "loss": 0.7598, + "step": 5970 + }, + { + "epoch": 3.9993302076356327, + "grad_norm": 2.809801956254604, + "learning_rate": 4.271834530555907e-13, + "loss": 0.816, + "step": 5971 + }, + { + "epoch": 4.0, + "grad_norm": 2.1879935173483442, + "learning_rate": 0.0, + "loss": 0.623, + "step": 5972 + } + ], + "logging_steps": 1.0, + "max_steps": 5972, + "num_input_tokens_seen": 0, + "num_train_epochs": 4, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 1422904204771328.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
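
Not part of the checkpoint diff above, but a minimal sketch of how the `log_history` recorded in this trainer_state.json could be inspected offline: it loads the JSON, pulls out the per-step `loss` and `learning_rate` values, and plots both against `global_step`. The input path and output filename are assumptions for illustration; only the standard-library `json` module and matplotlib are used.

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption; point it at the trainer_state.json added in this diff.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log entry here carries epoch, grad_norm, learning_rate, loss, and step;
# filtering on "loss" guards against any summary entry without per-step metrics.
history = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

# Two stacked panels: training loss on top, learning-rate schedule below.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses, linewidth=0.5)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs, linewidth=0.5)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
fig.savefig("training_curves.png", dpi=150)  # output name is arbitrary
```

With the state shown in this file (5972 steps over 4 epochs, logging every step), the second panel would simply trace the recorded schedule decaying to 0 at the final step.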