{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.995008319467554,
  "eval_steps": 500,
  "global_step": 675,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004437049362174155,
      "grad_norm": 3.4363257049624876,
      "learning_rate": 0.0,
      "loss": 0.6212,
      "step": 1
    },
    {
      "epoch": 0.00887409872434831,
      "grad_norm": 3.4363257049624876,
      "learning_rate": 0.0,
      "loss": 1.447,
      "step": 2
    },
    {
      "epoch": 0.013311148086522463,
      "grad_norm": 3.4363257049624876,
      "learning_rate": 0.0,
      "loss": 1.4008,
      "step": 3
    },
    {
      "epoch": 0.01774819744869662,
      "grad_norm": 16.868642103790727,
      "learning_rate": 7.352941176470589e-07,
      "loss": 1.4622,
      "step": 4
    },
    {
      "epoch": 0.022185246810870772,
      "grad_norm": 16.643551679254042,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 1.449,
      "step": 5
    },
    {
      "epoch": 0.026622296173044926,
      "grad_norm": 10.319968368718369,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 1.2176,
      "step": 6
    },
    {
      "epoch": 0.03105934553521908,
      "grad_norm": 6.838063285541478,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.1671,
      "step": 7
    },
    {
      "epoch": 0.03549639489739324,
      "grad_norm": 3.524165786619175,
      "learning_rate": 3.6764705882352942e-06,
      "loss": 0.8897,
      "step": 8
    },
    {
      "epoch": 0.03993344425956739,
      "grad_norm": 2.692583820309877,
      "learning_rate": 4.411764705882353e-06,
      "loss": 0.7701,
      "step": 9
    },
    {
      "epoch": 0.044370493621741544,
      "grad_norm": 2.1523694488597362,
      "learning_rate": 5.147058823529412e-06,
      "loss": 0.6826,
      "step": 10
    },
    {
      "epoch": 0.048807542983915694,
      "grad_norm": 2.065704399579686,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.6885,
      "step": 11
    },
    {
      "epoch": 0.05324459234608985,
      "grad_norm": 1.8642136901781687,
      "learning_rate": 6.61764705882353e-06,
      "loss": 0.6381,
      "step": 12
    },
    {
      "epoch": 0.057681641708264,
      "grad_norm": 1.9903385333507209,
      "learning_rate": 7.3529411764705884e-06,
      "loss": 0.6594,
      "step": 13
    },
    {
      "epoch": 0.06211869107043816,
      "grad_norm": 1.5186915884833811,
      "learning_rate": 8.088235294117648e-06,
      "loss": 0.6485,
      "step": 14
    },
    {
      "epoch": 0.06655574043261231,
      "grad_norm": 1.7115993526016227,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.6417,
      "step": 15
    },
    {
      "epoch": 0.07099278979478647,
      "grad_norm": 1.7099641443698512,
      "learning_rate": 9.558823529411764e-06,
      "loss": 0.6591,
      "step": 16
    },
    {
      "epoch": 0.07542983915696062,
      "grad_norm": 1.850613121296057,
      "learning_rate": 1.0294117647058824e-05,
      "loss": 0.639,
      "step": 17
    },
    {
      "epoch": 0.07986688851913477,
      "grad_norm": 1.7329331324202384,
      "learning_rate": 1.1029411764705883e-05,
      "loss": 0.5688,
      "step": 18
    },
    {
      "epoch": 0.08430393788130892,
      "grad_norm": 1.7006038939523738,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.7069,
      "step": 19
    },
    {
      "epoch": 0.08874098724348309,
      "grad_norm": 1.7294014767466337,
      "learning_rate": 1.25e-05,
      "loss": 0.6022,
      "step": 20
    },
    {
      "epoch": 0.09317803660565724,
      "grad_norm": 1.6255794372391963,
      "learning_rate": 1.323529411764706e-05,
      "loss": 0.5901,
      "step": 21
    },
    {
      "epoch": 0.09761508596783139,
      "grad_norm": 1.6321097143730798,
      "learning_rate": 1.3970588235294118e-05,
      "loss": 0.6678,
      "step": 22
    },
    {
      "epoch": 0.10205213533000555,
      "grad_norm": 2.412341050362017,
      "learning_rate": 1.4705882352941177e-05,
      "loss": 0.6245,
      "step": 23
    },
    {
      "epoch": 0.1064891846921797,
      "grad_norm": 1.6139308554481517,
      "learning_rate": 1.5441176470588237e-05,
      "loss": 0.6382,
      "step": 24
    },
    {
      "epoch": 0.11092623405435385,
      "grad_norm": 1.6049208251343587,
      "learning_rate": 1.6176470588235296e-05,
      "loss": 0.7061,
      "step": 25
    },
    {
      "epoch": 0.115363283416528,
      "grad_norm": 1.5780465890270454,
      "learning_rate": 1.6911764705882355e-05,
      "loss": 0.7123,
      "step": 26
    },
    {
      "epoch": 0.11980033277870217,
      "grad_norm": 1.696241273794309,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 0.7704,
      "step": 27
    },
    {
      "epoch": 0.12423738214087632,
      "grad_norm": 1.5906252428215757,
      "learning_rate": 1.8382352941176472e-05,
      "loss": 0.5403,
      "step": 28
    },
    {
      "epoch": 0.12867443150305047,
      "grad_norm": 1.5414524788746833,
      "learning_rate": 1.9117647058823528e-05,
      "loss": 0.5979,
      "step": 29
    },
    {
      "epoch": 0.13311148086522462,
      "grad_norm": 1.596561047173864,
      "learning_rate": 1.9852941176470586e-05,
      "loss": 0.5242,
      "step": 30
    },
    {
      "epoch": 0.13754853022739877,
      "grad_norm": 1.7145694174342805,
      "learning_rate": 2.058823529411765e-05,
      "loss": 0.6417,
      "step": 31
    },
    {
      "epoch": 0.14198557958957295,
      "grad_norm": 1.433522900901715,
      "learning_rate": 2.1323529411764707e-05,
      "loss": 0.6031,
      "step": 32
    },
    {
      "epoch": 0.1464226289517471,
      "grad_norm": 1.4685539764106288,
      "learning_rate": 2.2058823529411766e-05,
      "loss": 0.6575,
      "step": 33
    },
    {
      "epoch": 0.15085967831392125,
      "grad_norm": 1.4483896850463052,
      "learning_rate": 2.2794117647058825e-05,
      "loss": 0.5516,
      "step": 34
    },
    {
      "epoch": 0.1552967276760954,
      "grad_norm": 1.7283584437052888,
      "learning_rate": 2.3529411764705884e-05,
      "loss": 0.644,
      "step": 35
    },
    {
      "epoch": 0.15973377703826955,
      "grad_norm": 1.5105552434381186,
      "learning_rate": 2.4264705882352942e-05,
      "loss": 0.5804,
      "step": 36
    },
    {
      "epoch": 0.1641708264004437,
      "grad_norm": 1.5464090743766123,
      "learning_rate": 2.5e-05,
      "loss": 0.5736,
      "step": 37
    },
    {
      "epoch": 0.16860787576261785,
      "grad_norm": 1.5477842490785196,
      "learning_rate": 2.5735294117647057e-05,
      "loss": 0.7317,
      "step": 38
    },
    {
      "epoch": 0.17304492512479203,
      "grad_norm": 1.8091612351926254,
      "learning_rate": 2.647058823529412e-05,
      "loss": 0.6621,
      "step": 39
    },
    {
      "epoch": 0.17748197448696618,
      "grad_norm": 1.785226632698615,
      "learning_rate": 2.7205882352941174e-05,
      "loss": 0.6575,
      "step": 40
    },
    {
      "epoch": 0.18191902384914033,
      "grad_norm": 1.4735924454223048,
      "learning_rate": 2.7941176470588236e-05,
      "loss": 0.6695,
      "step": 41
    },
    {
      "epoch": 0.18635607321131448,
      "grad_norm": 2.236068403995869,
      "learning_rate": 2.8676470588235295e-05,
      "loss": 0.6331,
      "step": 42
    },
    {
      "epoch": 0.19079312257348863,
      "grad_norm": 2.2307337187664844,
      "learning_rate": 2.9411764705882354e-05,
      "loss": 0.6197,
      "step": 43
    },
    {
      "epoch": 0.19523017193566278,
      "grad_norm": 1.298781361132413,
      "learning_rate": 3.0147058823529413e-05,
      "loss": 0.652,
      "step": 44
    },
    {
      "epoch": 0.19966722129783693,
      "grad_norm": 1.707441058167839,
      "learning_rate": 3.0882352941176475e-05,
      "loss": 0.664,
      "step": 45
    },
    {
      "epoch": 0.2041042706600111,
      "grad_norm": 1.7003456325349169,
      "learning_rate": 3.161764705882353e-05,
      "loss": 0.7874,
      "step": 46
    },
    {
      "epoch": 0.20854132002218526,
      "grad_norm": 1.3961934673643377,
      "learning_rate": 3.235294117647059e-05,
      "loss": 0.7497,
      "step": 47
    },
    {
      "epoch": 0.2129783693843594,
      "grad_norm": 1.946072288342776,
      "learning_rate": 3.308823529411765e-05,
      "loss": 0.7855,
      "step": 48
    },
    {
      "epoch": 0.21741541874653356,
      "grad_norm": 1.5449525851779393,
      "learning_rate": 3.382352941176471e-05,
      "loss": 0.6254,
      "step": 49
    },
    {
      "epoch": 0.2218524681087077,
      "grad_norm": 1.6317975115632137,
      "learning_rate": 3.455882352941177e-05,
      "loss": 0.7916,
      "step": 50
    },
    {
      "epoch": 0.22628951747088186,
      "grad_norm": 1.4264993609758905,
      "learning_rate": 3.529411764705883e-05,
      "loss": 0.6275,
      "step": 51
    },
    {
      "epoch": 0.230726566833056,
      "grad_norm": 1.872068656876876,
      "learning_rate": 3.6029411764705886e-05,
      "loss": 0.6932,
      "step": 52
    },
    {
      "epoch": 0.23516361619523019,
      "grad_norm": 1.5201820999787976,
      "learning_rate": 3.6764705882352945e-05,
      "loss": 0.6139,
      "step": 53
    },
    {
      "epoch": 0.23960066555740434,
      "grad_norm": 1.3257820106913285,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.6663,
      "step": 54
    },
    {
      "epoch": 0.24403771491957849,
      "grad_norm": 1.4712901773857452,
      "learning_rate": 3.8235294117647055e-05,
      "loss": 0.7818,
      "step": 55
    },
    {
      "epoch": 0.24847476428175264,
      "grad_norm": 1.6491941189067307,
      "learning_rate": 3.897058823529412e-05,
      "loss": 0.5963,
      "step": 56
    },
    {
      "epoch": 0.2529118136439268,
      "grad_norm": 1.6066731114607802,
      "learning_rate": 3.970588235294117e-05,
      "loss": 0.7589,
      "step": 57
    },
    {
      "epoch": 0.25734886300610094,
      "grad_norm": 1.5803395963838538,
      "learning_rate": 4.044117647058824e-05,
      "loss": 0.6909,
      "step": 58
    },
    {
      "epoch": 0.2617859123682751,
      "grad_norm": 1.8348490633721526,
      "learning_rate": 4.11764705882353e-05,
      "loss": 0.6927,
      "step": 59
    },
    {
      "epoch": 0.26622296173044924,
      "grad_norm": 1.7772939821758158,
      "learning_rate": 4.1911764705882356e-05,
      "loss": 0.8725,
      "step": 60
    },
    {
      "epoch": 0.2706600110926234,
      "grad_norm": 1.570535065170522,
      "learning_rate": 4.2647058823529415e-05,
      "loss": 0.752,
      "step": 61
    },
    {
      "epoch": 0.27509706045479754,
      "grad_norm": 1.4213107320379403,
      "learning_rate": 4.3382352941176474e-05,
      "loss": 0.664,
      "step": 62
    },
    {
      "epoch": 0.2795341098169717,
      "grad_norm": 1.4010270932182938,
      "learning_rate": 4.411764705882353e-05,
      "loss": 0.6503,
      "step": 63
    },
    {
      "epoch": 0.2839711591791459,
      "grad_norm": 1.559712020751588,
      "learning_rate": 4.485294117647059e-05,
      "loss": 0.6395,
      "step": 64
    },
    {
      "epoch": 0.28840820854132004,
      "grad_norm": 1.398449689929195,
      "learning_rate": 4.558823529411765e-05,
      "loss": 0.7274,
      "step": 65
    },
    {
      "epoch": 0.2928452579034942,
      "grad_norm": 1.5012715830907073,
      "learning_rate": 4.632352941176471e-05,
      "loss": 0.6339,
      "step": 66
    },
    {
      "epoch": 0.29728230726566834,
      "grad_norm": 1.3953862067188367,
      "learning_rate": 4.705882352941177e-05,
      "loss": 0.7273,
      "step": 67
    },
    {
      "epoch": 0.3017193566278425,
      "grad_norm": 1.7813022003555992,
      "learning_rate": 4.7794117647058826e-05,
      "loss": 0.6098,
      "step": 68
    },
    {
      "epoch": 0.30615640599001664,
      "grad_norm": 1.6629697408532516,
      "learning_rate": 4.8529411764705885e-05,
      "loss": 0.7984,
      "step": 69
    },
    {
      "epoch": 0.3105934553521908,
      "grad_norm": 1.5104460965501205,
      "learning_rate": 4.9264705882352944e-05,
      "loss": 0.7231,
      "step": 70
    },
    {
      "epoch": 0.31503050471436495,
      "grad_norm": 1.601897860515358,
      "learning_rate": 5e-05,
      "loss": 0.6644,
      "step": 71
    },
    {
      "epoch": 0.3194675540765391,
      "grad_norm": 1.4486607713844175,
      "learning_rate": 4.99176276771005e-05,
      "loss": 0.7003,
      "step": 72
    },
    {
      "epoch": 0.32390460343871325,
      "grad_norm": 1.493309438051621,
      "learning_rate": 4.9835255354200987e-05,
      "loss": 0.6949,
      "step": 73
    },
    {
      "epoch": 0.3283416528008874,
      "grad_norm": 1.361584063296398,
      "learning_rate": 4.975288303130148e-05,
      "loss": 0.6169,
      "step": 74
    },
    {
      "epoch": 0.33277870216306155,
      "grad_norm": 1.458929630258479,
      "learning_rate": 4.967051070840198e-05,
      "loss": 0.732,
      "step": 75
    },
    {
      "epoch": 0.3372157515252357,
      "grad_norm": 1.8222299717464396,
      "learning_rate": 4.958813838550247e-05,
      "loss": 0.7367,
      "step": 76
    },
    {
      "epoch": 0.34165280088740985,
      "grad_norm": 1.5631573629403719,
      "learning_rate": 4.950576606260297e-05,
      "loss": 0.7863,
      "step": 77
    },
    {
      "epoch": 0.34608985024958405,
      "grad_norm": 1.5057181404666258,
      "learning_rate": 4.9423393739703464e-05,
      "loss": 0.784,
      "step": 78
    },
    {
      "epoch": 0.3505268996117582,
      "grad_norm": 1.5000204243858912,
      "learning_rate": 4.934102141680396e-05,
      "loss": 0.7517,
      "step": 79
    },
    {
      "epoch": 0.35496394897393235,
      "grad_norm": 1.3679094424808862,
      "learning_rate": 4.9258649093904455e-05,
      "loss": 0.5966,
      "step": 80
    },
    {
      "epoch": 0.3594009983361065,
      "grad_norm": 1.509099539505429,
      "learning_rate": 4.9176276771004943e-05,
      "loss": 0.7955,
      "step": 81
    },
    {
      "epoch": 0.36383804769828065,
      "grad_norm": 1.2782656174885776,
      "learning_rate": 4.909390444810544e-05,
      "loss": 0.6774,
      "step": 82
    },
    {
      "epoch": 0.3682750970604548,
      "grad_norm": 1.4750850491330671,
      "learning_rate": 4.9011532125205934e-05,
      "loss": 0.6925,
      "step": 83
    },
    {
      "epoch": 0.37271214642262895,
      "grad_norm": 1.4230772531949174,
      "learning_rate": 4.892915980230643e-05,
      "loss": 0.7124,
      "step": 84
    },
    {
      "epoch": 0.3771491957848031,
      "grad_norm": 1.3933968511428394,
      "learning_rate": 4.884678747940692e-05,
      "loss": 0.6607,
      "step": 85
    },
    {
      "epoch": 0.38158624514697725,
      "grad_norm": 1.4883118473622319,
      "learning_rate": 4.8764415156507414e-05,
      "loss": 0.7622,
      "step": 86
    },
    {
      "epoch": 0.3860232945091514,
      "grad_norm": 1.4552755541921092,
      "learning_rate": 4.868204283360791e-05,
      "loss": 0.8021,
      "step": 87
    },
    {
      "epoch": 0.39046034387132555,
      "grad_norm": 1.5206313674109841,
      "learning_rate": 4.8599670510708405e-05,
      "loss": 0.7699,
      "step": 88
    },
    {
      "epoch": 0.3948973932334997,
      "grad_norm": 1.3870986632973126,
      "learning_rate": 4.8517298187808894e-05,
      "loss": 0.7337,
      "step": 89
    },
    {
      "epoch": 0.39933444259567386,
      "grad_norm": 1.2888477493348762,
      "learning_rate": 4.843492586490939e-05,
      "loss": 0.7701,
      "step": 90
    },
    {
      "epoch": 0.403771491957848,
      "grad_norm": 1.3593434735731116,
      "learning_rate": 4.8352553542009884e-05,
      "loss": 0.7312,
      "step": 91
    },
    {
      "epoch": 0.4082085413200222,
      "grad_norm": 1.4612922441757445,
      "learning_rate": 4.827018121911038e-05,
      "loss": 0.723,
      "step": 92
    },
    {
      "epoch": 0.41264559068219636,
      "grad_norm": 1.2774829380366661,
      "learning_rate": 4.8187808896210875e-05,
      "loss": 0.6934,
      "step": 93
    },
    {
      "epoch": 0.4170826400443705,
      "grad_norm": 1.6465782781320284,
      "learning_rate": 4.810543657331137e-05,
      "loss": 0.7358,
      "step": 94
    },
    {
      "epoch": 0.42151968940654466,
      "grad_norm": 1.6370311793933272,
      "learning_rate": 4.8023064250411866e-05,
      "loss": 0.8459,
      "step": 95
    },
    {
      "epoch": 0.4259567387687188,
      "grad_norm": 1.371967960762082,
      "learning_rate": 4.794069192751236e-05,
      "loss": 0.6993,
      "step": 96
    },
    {
      "epoch": 0.43039378813089296,
      "grad_norm": 1.2628676908270764,
      "learning_rate": 4.785831960461285e-05,
      "loss": 0.6787,
      "step": 97
    },
    {
      "epoch": 0.4348308374930671,
      "grad_norm": 1.4005074977653627,
      "learning_rate": 4.7775947281713346e-05,
      "loss": 0.8011,
      "step": 98
    },
    {
      "epoch": 0.43926788685524126,
      "grad_norm": 1.2543277210397603,
      "learning_rate": 4.769357495881384e-05,
      "loss": 0.7595,
      "step": 99
    },
    {
      "epoch": 0.4437049362174154,
      "grad_norm": 1.1805964120015806,
      "learning_rate": 4.761120263591434e-05,
      "loss": 0.7692,
      "step": 100
    },
    {
      "epoch": 0.44814198557958956,
      "grad_norm": 1.2276416089327171,
      "learning_rate": 4.7528830313014825e-05,
      "loss": 0.6908,
      "step": 101
    },
    {
      "epoch": 0.4525790349417637,
      "grad_norm": 1.3507566309682646,
      "learning_rate": 4.744645799011532e-05,
      "loss": 0.8863,
      "step": 102
    },
    {
      "epoch": 0.45701608430393786,
      "grad_norm": 1.4097027247458103,
      "learning_rate": 4.7364085667215816e-05,
      "loss": 0.789,
      "step": 103
    },
    {
      "epoch": 0.461453133666112,
      "grad_norm": 1.1723423343870047,
      "learning_rate": 4.728171334431631e-05,
      "loss": 0.7641,
      "step": 104
    },
    {
      "epoch": 0.46589018302828616,
      "grad_norm": 1.1380218692079394,
      "learning_rate": 4.719934102141681e-05,
      "loss": 0.6747,
      "step": 105
    },
    {
      "epoch": 0.47032723239046037,
      "grad_norm": 1.1778272399222929,
      "learning_rate": 4.71169686985173e-05,
      "loss": 0.727,
      "step": 106
    },
    {
      "epoch": 0.4747642817526345,
      "grad_norm": 1.4296109538614465,
      "learning_rate": 4.70345963756178e-05,
      "loss": 0.766,
      "step": 107
    },
    {
      "epoch": 0.47920133111480867,
      "grad_norm": 1.5045893715366057,
      "learning_rate": 4.6952224052718294e-05,
      "loss": 0.7547,
      "step": 108
    },
    {
      "epoch": 0.4836383804769828,
      "grad_norm": 1.2423295713548035,
      "learning_rate": 4.686985172981878e-05,
      "loss": 0.7361,
      "step": 109
    },
    {
      "epoch": 0.48807542983915697,
      "grad_norm": 1.2453380433721188,
      "learning_rate": 4.678747940691928e-05,
      "loss": 0.693,
      "step": 110
    },
    {
      "epoch": 0.4925124792013311,
      "grad_norm": 1.1167979061286064,
      "learning_rate": 4.670510708401977e-05,
      "loss": 0.7904,
      "step": 111
    },
    {
      "epoch": 0.49694952856350527,
      "grad_norm": 1.1003429073432243,
      "learning_rate": 4.662273476112027e-05,
      "loss": 0.8332,
      "step": 112
    },
    {
      "epoch": 0.5013865779256794,
      "grad_norm": 1.479955257048743,
      "learning_rate": 4.654036243822076e-05,
      "loss": 0.7635,
      "step": 113
    },
    {
      "epoch": 0.5058236272878536,
      "grad_norm": 1.1210095310809869,
      "learning_rate": 4.645799011532125e-05,
      "loss": 0.8422,
      "step": 114
    },
    {
      "epoch": 0.5102606766500277,
      "grad_norm": 1.1554116999100634,
      "learning_rate": 4.637561779242175e-05,
      "loss": 0.777,
      "step": 115
    },
    {
      "epoch": 0.5146977260122019,
      "grad_norm": 1.3394475857232722,
      "learning_rate": 4.6293245469522244e-05,
      "loss": 0.7022,
      "step": 116
    },
    {
      "epoch": 0.519134775374376,
      "grad_norm": 1.4296884171946767,
      "learning_rate": 4.621087314662273e-05,
      "loss": 0.7253,
      "step": 117
    },
    {
      "epoch": 0.5235718247365502,
      "grad_norm": 1.6262478072366437,
      "learning_rate": 4.612850082372323e-05,
      "loss": 0.688,
      "step": 118
    },
    {
      "epoch": 0.5280088740987243,
      "grad_norm": 1.3776166899700177,
      "learning_rate": 4.6046128500823723e-05,
      "loss": 0.7821,
      "step": 119
    },
    {
      "epoch": 0.5324459234608985,
      "grad_norm": 1.1789329591510056,
      "learning_rate": 4.596375617792422e-05,
      "loss": 0.6633,
      "step": 120
    },
    {
      "epoch": 0.5368829728230726,
      "grad_norm": 1.3628007976726708,
      "learning_rate": 4.5881383855024714e-05,
      "loss": 0.7478,
      "step": 121
    },
    {
      "epoch": 0.5413200221852468,
      "grad_norm": 1.2320996813979046,
      "learning_rate": 4.579901153212521e-05,
      "loss": 0.6536,
      "step": 122
    },
    {
      "epoch": 0.5457570715474209,
      "grad_norm": 1.3977794270959722,
      "learning_rate": 4.5716639209225705e-05,
      "loss": 0.7028,
      "step": 123
    },
    {
      "epoch": 0.5501941209095951,
      "grad_norm": 1.0468264326530035,
      "learning_rate": 4.56342668863262e-05,
      "loss": 0.6335,
      "step": 124
    },
    {
      "epoch": 0.5546311702717692,
      "grad_norm": 1.113360378732564,
      "learning_rate": 4.555189456342669e-05,
      "loss": 0.6417,
      "step": 125
    },
    {
      "epoch": 0.5590682196339434,
      "grad_norm": 1.3100109567984795,
      "learning_rate": 4.5469522240527185e-05,
      "loss": 0.7237,
      "step": 126
    },
    {
      "epoch": 0.5635052689961176,
      "grad_norm": 1.2716899184171513,
      "learning_rate": 4.538714991762768e-05,
      "loss": 0.8029,
      "step": 127
    },
    {
      "epoch": 0.5679423183582918,
      "grad_norm": 1.1535260875316555,
      "learning_rate": 4.5304777594728176e-05,
      "loss": 0.6549,
      "step": 128
    },
    {
      "epoch": 0.5723793677204659,
      "grad_norm": 1.3185370883549747,
      "learning_rate": 4.5222405271828664e-05,
      "loss": 0.7478,
      "step": 129
    },
    {
      "epoch": 0.5768164170826401,
      "grad_norm": 1.0101101138265811,
      "learning_rate": 4.514003294892916e-05,
      "loss": 0.7266,
      "step": 130
    },
    {
      "epoch": 0.5812534664448142,
      "grad_norm": 1.2160341863689144,
      "learning_rate": 4.5057660626029655e-05,
      "loss": 0.7094,
      "step": 131
    },
    {
      "epoch": 0.5856905158069884,
      "grad_norm": 1.28669160074244,
      "learning_rate": 4.497528830313015e-05,
      "loss": 0.6842,
      "step": 132
    },
    {
      "epoch": 0.5901275651691625,
      "grad_norm": 1.2305138473723627,
      "learning_rate": 4.4892915980230646e-05,
      "loss": 0.7785,
      "step": 133
    },
    {
      "epoch": 0.5945646145313367,
      "grad_norm": 1.1839778264392269,
      "learning_rate": 4.481054365733114e-05,
      "loss": 0.7821,
      "step": 134
    },
    {
      "epoch": 0.5990016638935108,
      "grad_norm": 1.13330078125,
      "learning_rate": 4.472817133443164e-05,
      "loss": 0.8602,
      "step": 135
    },
    {
      "epoch": 0.603438713255685,
      "grad_norm": 1.3982152922135496,
      "learning_rate": 4.464579901153213e-05,
      "loss": 0.7126,
      "step": 136
    },
    {
      "epoch": 0.6078757626178591,
      "grad_norm": 1.2006974935017933,
      "learning_rate": 4.456342668863262e-05,
      "loss": 0.8415,
      "step": 137
    },
    {
      "epoch": 0.6123128119800333,
      "grad_norm": 0.9033962898263505,
      "learning_rate": 4.448105436573312e-05,
      "loss": 0.6684,
      "step": 138
    },
    {
      "epoch": 0.6167498613422074,
      "grad_norm": 1.104182597111398,
      "learning_rate": 4.439868204283361e-05,
      "loss": 0.728,
      "step": 139
    },
    {
      "epoch": 0.6211869107043816,
      "grad_norm": 0.8660290859416853,
      "learning_rate": 4.43163097199341e-05,
      "loss": 0.6536,
      "step": 140
    },
    {
      "epoch": 0.6256239600665557,
      "grad_norm": 1.2196199295675134,
      "learning_rate": 4.4233937397034596e-05,
      "loss": 0.6733,
      "step": 141
    },
    {
      "epoch": 0.6300610094287299,
      "grad_norm": 0.9899389063162056,
      "learning_rate": 4.415156507413509e-05,
      "loss": 0.5878,
      "step": 142
    },
    {
      "epoch": 0.634498058790904,
      "grad_norm": 1.0461409543179492,
      "learning_rate": 4.406919275123559e-05,
      "loss": 0.6893,
      "step": 143
    },
    {
      "epoch": 0.6389351081530782,
      "grad_norm": 1.6991287382993012,
      "learning_rate": 4.3986820428336076e-05,
      "loss": 0.7363,
      "step": 144
    },
    {
      "epoch": 0.6433721575152523,
      "grad_norm": 1.8572584014098694,
      "learning_rate": 4.390444810543657e-05,
      "loss": 0.7767,
      "step": 145
    },
    {
      "epoch": 0.6478092068774265,
      "grad_norm": 1.1577735348630036,
      "learning_rate": 4.382207578253707e-05,
      "loss": 0.8126,
      "step": 146
    },
    {
      "epoch": 0.6522462562396006,
      "grad_norm": 1.1352947006279315,
      "learning_rate": 4.373970345963756e-05,
      "loss": 0.7144,
      "step": 147
    },
    {
      "epoch": 0.6566833056017748,
      "grad_norm": 1.3015549566776177,
      "learning_rate": 4.365733113673806e-05,
      "loss": 0.7324,
      "step": 148
    },
    {
      "epoch": 0.6611203549639489,
      "grad_norm": 1.094701516565074,
      "learning_rate": 4.357495881383855e-05,
      "loss": 0.6737,
      "step": 149
    },
    {
      "epoch": 0.6655574043261231,
      "grad_norm": 1.2602135150382683,
      "learning_rate": 4.349258649093905e-05,
      "loss": 0.8867,
      "step": 150
    },
    {
      "epoch": 0.6699944536882972,
      "grad_norm": 1.2826388205488253,
      "learning_rate": 4.3410214168039544e-05,
      "loss": 0.7138,
      "step": 151
    },
    {
      "epoch": 0.6744315030504714,
      "grad_norm": 0.914123989547532,
      "learning_rate": 4.332784184514003e-05,
      "loss": 0.6167,
      "step": 152
    },
    {
      "epoch": 0.6788685524126455,
      "grad_norm": 1.0897851196706918,
      "learning_rate": 4.324546952224053e-05,
      "loss": 0.6871,
      "step": 153
    },
    {
      "epoch": 0.6833056017748197,
      "grad_norm": 1.4113414148232353,
      "learning_rate": 4.3163097199341024e-05,
      "loss": 0.8047,
      "step": 154
    },
    {
      "epoch": 0.687742651136994,
      "grad_norm": 1.3141605227239495,
      "learning_rate": 4.308072487644152e-05,
      "loss": 0.6614,
      "step": 155
    },
    {
      "epoch": 0.6921797004991681,
      "grad_norm": 1.0685581949797351,
      "learning_rate": 4.299835255354201e-05,
      "loss": 0.7868,
      "step": 156
    },
    {
      "epoch": 0.6966167498613423,
      "grad_norm": 1.0776774818305639,
      "learning_rate": 4.29159802306425e-05,
      "loss": 0.7193,
      "step": 157
    },
    {
      "epoch": 0.7010537992235164,
      "grad_norm": 0.9757258073435284,
      "learning_rate": 4.2833607907743e-05,
      "loss": 0.6609,
      "step": 158
    },
    {
      "epoch": 0.7054908485856906,
      "grad_norm": 1.091813252826064,
      "learning_rate": 4.2751235584843494e-05,
      "loss": 0.5548,
      "step": 159
    },
    {
      "epoch": 0.7099278979478647,
      "grad_norm": 1.2464545992047698,
      "learning_rate": 4.266886326194399e-05,
      "loss": 0.8032,
      "step": 160
    },
    {
      "epoch": 0.7143649473100389,
      "grad_norm": 1.148898492789776,
      "learning_rate": 4.2586490939044485e-05,
      "loss": 0.7182,
      "step": 161
    },
    {
      "epoch": 0.718801996672213,
      "grad_norm": 1.3851632003480712,
      "learning_rate": 4.250411861614498e-05,
      "loss": 0.7686,
      "step": 162
    },
    {
      "epoch": 0.7232390460343872,
      "grad_norm": 1.072810467152235,
      "learning_rate": 4.2421746293245476e-05,
      "loss": 0.6567,
      "step": 163
    },
    {
      "epoch": 0.7276760953965613,
      "grad_norm": 1.0450828356970694,
      "learning_rate": 4.2339373970345965e-05,
      "loss": 0.7796,
      "step": 164
    },
    {
      "epoch": 0.7321131447587355,
      "grad_norm": 1.5079605741380997,
      "learning_rate": 4.225700164744646e-05,
      "loss": 0.6934,
      "step": 165
    },
    {
      "epoch": 0.7365501941209096,
      "grad_norm": 1.104207535953022,
      "learning_rate": 4.2174629324546956e-05,
      "loss": 0.7709,
      "step": 166
    },
    {
      "epoch": 0.7409872434830838,
      "grad_norm": 1.2299912752446822,
      "learning_rate": 4.209225700164745e-05,
      "loss": 0.6813,
      "step": 167
    },
    {
      "epoch": 0.7454242928452579,
      "grad_norm": 1.2594723855505108,
      "learning_rate": 4.200988467874794e-05,
      "loss": 0.7787,
      "step": 168
    },
    {
      "epoch": 0.7498613422074321,
      "grad_norm": 1.0866567850443722,
      "learning_rate": 4.1927512355848435e-05,
      "loss": 0.7779,
      "step": 169
    },
    {
      "epoch": 0.7542983915696062,
      "grad_norm": 1.2387755457364094,
      "learning_rate": 4.184514003294893e-05,
      "loss": 0.8369,
      "step": 170
    },
    {
      "epoch": 0.7587354409317804,
      "grad_norm": 1.2877205076548874,
      "learning_rate": 4.1762767710049426e-05,
      "loss": 0.6673,
      "step": 171
    },
    {
      "epoch": 0.7631724902939545,
      "grad_norm": 1.141038323333023,
      "learning_rate": 4.1680395387149915e-05,
      "loss": 0.6762,
      "step": 172
    },
    {
      "epoch": 0.7676095396561287,
      "grad_norm": 1.1145196819253242,
      "learning_rate": 4.159802306425041e-05,
      "loss": 0.7632,
      "step": 173
    },
    {
      "epoch": 0.7720465890183028,
      "grad_norm": 1.0668930786960935,
      "learning_rate": 4.1515650741350906e-05,
      "loss": 0.6362,
      "step": 174
    },
    {
      "epoch": 0.776483638380477,
      "grad_norm": 1.2744678939031877,
      "learning_rate": 4.14332784184514e-05,
      "loss": 0.7398,
      "step": 175
    },
    {
      "epoch": 0.7809206877426511,
      "grad_norm": 1.149177416713596,
      "learning_rate": 4.13509060955519e-05,
      "loss": 0.6689,
      "step": 176
    },
    {
      "epoch": 0.7853577371048253,
      "grad_norm": 1.1000077659159404,
      "learning_rate": 4.126853377265239e-05,
      "loss": 0.7335,
      "step": 177
    },
    {
      "epoch": 0.7897947864669994,
      "grad_norm": 1.2669367644465137,
      "learning_rate": 4.118616144975289e-05,
      "loss": 0.8656,
      "step": 178
    },
    {
      "epoch": 0.7942318358291736,
      "grad_norm": 1.1232511915026113,
      "learning_rate": 4.110378912685338e-05,
      "loss": 0.7366,
      "step": 179
    },
    {
      "epoch": 0.7986688851913477,
      "grad_norm": 1.1200223581602873,
      "learning_rate": 4.102141680395387e-05,
      "loss": 0.7661,
      "step": 180
    },
    {
      "epoch": 0.8031059345535219,
      "grad_norm": 1.0756192043044124,
      "learning_rate": 4.093904448105437e-05,
      "loss": 0.6768,
      "step": 181
    },
    {
      "epoch": 0.807542983915696,
      "grad_norm": 1.2291643282765705,
      "learning_rate": 4.085667215815486e-05,
      "loss": 0.827,
      "step": 182
    },
    {
      "epoch": 0.8119800332778702,
      "grad_norm": 1.2924700463843568,
      "learning_rate": 4.077429983525536e-05,
      "loss": 0.751,
      "step": 183
    },
    {
      "epoch": 0.8164170826400444,
      "grad_norm": 1.1406485150147252,
      "learning_rate": 4.069192751235585e-05,
      "loss": 0.8197,
      "step": 184
    },
    {
      "epoch": 0.8208541320022186,
      "grad_norm": 1.6746923648103702,
      "learning_rate": 4.060955518945634e-05,
      "loss": 0.8636,
      "step": 185
    },
    {
      "epoch": 0.8252911813643927,
      "grad_norm": 1.0226322529704257,
      "learning_rate": 4.052718286655684e-05,
      "loss": 0.7296,
      "step": 186
    },
    {
      "epoch": 0.8297282307265669,
      "grad_norm": 1.2317174479211401,
      "learning_rate": 4.044481054365733e-05,
      "loss": 0.7331,
      "step": 187
    },
    {
      "epoch": 0.834165280088741,
      "grad_norm": 1.1765805692665705,
      "learning_rate": 4.036243822075783e-05,
      "loss": 0.7811,
      "step": 188
    },
    {
      "epoch": 0.8386023294509152,
      "grad_norm": 1.1237666999331317,
      "learning_rate": 4.0280065897858324e-05,
      "loss": 0.8865,
      "step": 189
    },
    {
      "epoch": 0.8430393788130893,
      "grad_norm": 1.0807313237302771,
      "learning_rate": 4.019769357495882e-05,
      "loss": 0.7731,
      "step": 190
    },
    {
      "epoch": 0.8474764281752635,
      "grad_norm": 1.149091469506126,
      "learning_rate": 4.0115321252059315e-05,
      "loss": 0.6796,
      "step": 191
    },
    {
      "epoch": 0.8519134775374376,
      "grad_norm": 1.5166262436115496,
      "learning_rate": 4.0032948929159804e-05,
      "loss": 0.6694,
      "step": 192
    },
    {
      "epoch": 0.8563505268996118,
      "grad_norm": 1.3121738482730616,
      "learning_rate": 3.99505766062603e-05,
      "loss": 0.6903,
      "step": 193
    },
    {
      "epoch": 0.8607875762617859,
      "grad_norm": 1.0641393356477866,
      "learning_rate": 3.9868204283360795e-05,
      "loss": 0.8431,
      "step": 194
    },
    {
      "epoch": 0.8652246256239601,
      "grad_norm": 1.2395748277046463,
      "learning_rate": 3.978583196046129e-05,
      "loss": 0.7475,
      "step": 195
    },
    {
      "epoch": 0.8696616749861342,
      "grad_norm": 1.1301999187009746,
      "learning_rate": 3.970345963756178e-05,
      "loss": 0.7522,
      "step": 196
    },
    {
      "epoch": 0.8740987243483084,
      "grad_norm": 0.9439853147479779,
      "learning_rate": 3.9621087314662274e-05,
      "loss": 0.7204,
      "step": 197
    },
    {
      "epoch": 0.8785357737104825,
      "grad_norm": 1.0228506837425515,
      "learning_rate": 3.953871499176277e-05,
      "loss": 0.6889,
      "step": 198
    },
    {
      "epoch": 0.8829728230726567,
      "grad_norm": 1.1469914681234548,
      "learning_rate": 3.9456342668863265e-05,
      "loss": 0.791,
      "step": 199
    },
    {
      "epoch": 0.8874098724348308,
      "grad_norm": 1.0061181781739899,
      "learning_rate": 3.9373970345963754e-05,
      "loss": 0.8283,
      "step": 200
    },
    {
      "epoch": 0.891846921797005,
      "grad_norm": 1.0773855797362921,
      "learning_rate": 3.929159802306425e-05,
      "loss": 0.6699,
      "step": 201
    },
    {
      "epoch": 0.8962839711591791,
      "grad_norm": 1.0711644255778545,
      "learning_rate": 3.9209225700164745e-05,
      "loss": 0.7143,
      "step": 202
    },
    {
      "epoch": 0.9007210205213533,
      "grad_norm": 0.9061474248779531,
      "learning_rate": 3.912685337726524e-05,
      "loss": 0.5905,
      "step": 203
    },
    {
      "epoch": 0.9051580698835274,
      "grad_norm": 1.1783259536605601,
      "learning_rate": 3.9044481054365736e-05,
      "loss": 0.8541,
      "step": 204
    },
    {
      "epoch": 0.9095951192457016,
      "grad_norm": 0.9644733598331529,
      "learning_rate": 3.896210873146623e-05,
      "loss": 0.8005,
      "step": 205
    },
    {
      "epoch": 0.9140321686078757,
      "grad_norm": 1.1284868457857407,
      "learning_rate": 3.8879736408566727e-05,
      "loss": 0.7008,
      "step": 206
    },
    {
      "epoch": 0.9184692179700499,
      "grad_norm": 1.1875985757168446,
      "learning_rate": 3.8797364085667215e-05,
      "loss": 0.6928,
      "step": 207
    },
    {
      "epoch": 0.922906267332224,
      "grad_norm": 1.0014509165672858,
      "learning_rate": 3.871499176276771e-05,
      "loss": 0.6631,
      "step": 208
    },
    {
      "epoch": 0.9273433166943982,
      "grad_norm": 1.1995148929913273,
      "learning_rate": 3.8632619439868206e-05,
      "loss": 0.75,
      "step": 209
    },
    {
      "epoch": 0.9317803660565723,
      "grad_norm": 1.0930321653891049,
      "learning_rate": 3.85502471169687e-05,
      "loss": 0.7021,
      "step": 210
    },
    {
      "epoch": 0.9362174154187465,
      "grad_norm": 1.035845142234787,
      "learning_rate": 3.846787479406919e-05,
      "loss": 0.7832,
      "step": 211
    },
    {
      "epoch": 0.9406544647809207,
      "grad_norm": 1.1155766965522822,
      "learning_rate": 3.8385502471169686e-05,
      "loss": 0.8157,
      "step": 212
    },
    {
      "epoch": 0.9450915141430949,
      "grad_norm": 1.161790307104464,
      "learning_rate": 3.830313014827018e-05,
      "loss": 0.8157,
      "step": 213
    },
    {
      "epoch": 0.949528563505269,
      "grad_norm": 1.1603177661401252,
      "learning_rate": 3.822075782537068e-05,
      "loss": 0.7666,
      "step": 214
    },
    {
      "epoch": 0.9539656128674432,
      "grad_norm": 1.0978324450063657,
      "learning_rate": 3.813838550247117e-05,
      "loss": 0.6614,
      "step": 215
    },
    {
      "epoch": 0.9584026622296173,
      "grad_norm": 1.0604673464692589,
      "learning_rate": 3.805601317957167e-05,
      "loss": 0.779,
      "step": 216
    },
    {
      "epoch": 0.9628397115917915,
      "grad_norm": 1.1550506351155287,
      "learning_rate": 3.797364085667216e-05,
      "loss": 0.8661,
      "step": 217
    },
    {
      "epoch": 0.9672767609539656,
      "grad_norm": 1.0968998919757251,
      "learning_rate": 3.789126853377266e-05,
      "loss": 0.7193,
      "step": 218
    },
    {
      "epoch": 0.9717138103161398,
      "grad_norm": 1.0406620649088993,
      "learning_rate": 3.780889621087315e-05,
      "loss": 0.648,
      "step": 219
    },
    {
      "epoch": 0.9761508596783139,
      "grad_norm": 1.0409014497880078,
      "learning_rate": 3.772652388797364e-05,
      "loss": 0.8307,
      "step": 220
    },
    {
      "epoch": 0.9805879090404881,
      "grad_norm": 1.0170270778648376,
      "learning_rate": 3.764415156507414e-05,
      "loss": 0.683,
      "step": 221
    },
    {
      "epoch": 0.9850249584026622,
      "grad_norm": 1.0712048228829765,
      "learning_rate": 3.7561779242174634e-05,
      "loss": 0.7853,
      "step": 222
    },
    {
      "epoch": 0.9894620077648364,
      "grad_norm": 1.0226822024346145,
      "learning_rate": 3.747940691927512e-05,
      "loss": 0.7272,
      "step": 223
    },
    {
      "epoch": 0.9938990571270105,
      "grad_norm": 1.04540416959913,
      "learning_rate": 3.739703459637562e-05,
      "loss": 0.6587,
      "step": 224
    },
    {
      "epoch": 0.9983361064891847,
      "grad_norm": 1.4481215938022614,
      "learning_rate": 3.731466227347611e-05,
      "loss": 0.8142,
      "step": 225
    },
    {
      "epoch": 1.0027731558513588,
      "grad_norm": 0.9916722919935,
      "learning_rate": 3.723228995057661e-05,
      "loss": 0.6207,
      "step": 226
    },
    {
      "epoch": 1.007210205213533,
      "grad_norm": 0.7317840240122286,
      "learning_rate": 3.71499176276771e-05,
      "loss": 0.3225,
      "step": 227
    },
    {
      "epoch": 1.0116472545757071,
      "grad_norm": 0.7507249983753904,
      "learning_rate": 3.706754530477759e-05,
      "loss": 0.3261,
      "step": 228
    },
    {
      "epoch": 1.0160843039378813,
      "grad_norm": 0.8379545387487033,
      "learning_rate": 3.698517298187809e-05,
      "loss": 0.2845,
      "step": 229
    },
    {
      "epoch": 1.0205213533000554,
      "grad_norm": 0.8473111342241626,
      "learning_rate": 3.6902800658978584e-05,
      "loss": 0.2527,
      "step": 230
    },
    {
      "epoch": 1.0249584026622296,
      "grad_norm": 1.6604069778497936,
      "learning_rate": 3.682042833607908e-05,
      "loss": 0.3043,
      "step": 231
    },
    {
      "epoch": 1.0293954520244037,
      "grad_norm": 0.9140945820191196,
      "learning_rate": 3.6738056013179575e-05,
      "loss": 0.2774,
      "step": 232
    },
    {
      "epoch": 1.033832501386578,
      "grad_norm": 1.0852895216811833,
      "learning_rate": 3.665568369028007e-05,
      "loss": 0.3232,
      "step": 233
    },
    {
      "epoch": 1.038269550748752,
      "grad_norm": 1.0861523093312244,
      "learning_rate": 3.6573311367380566e-05,
      "loss": 0.3223,
      "step": 234
    },
    {
      "epoch": 1.0427066001109262,
      "grad_norm": 0.9068528505087753,
      "learning_rate": 3.6490939044481054e-05,
      "loss": 0.2583,
      "step": 235
    },
    {
      "epoch": 1.0471436494731003,
      "grad_norm": 1.1612832089901859,
      "learning_rate": 3.640856672158155e-05,
      "loss": 0.3444,
      "step": 236
    },
    {
      "epoch": 1.0515806988352745,
      "grad_norm": 0.9997441441335119,
      "learning_rate": 3.6326194398682045e-05,
      "loss": 0.3421,
      "step": 237
    },
    {
      "epoch": 1.0560177481974486,
      "grad_norm": 0.9958660089779533,
      "learning_rate": 3.624382207578254e-05,
      "loss": 0.3039,
      "step": 238
    },
    {
      "epoch": 1.0604547975596228,
      "grad_norm": 0.8133162286658734,
      "learning_rate": 3.616144975288303e-05,
      "loss": 0.2468,
      "step": 239
    },
    {
      "epoch": 1.064891846921797,
      "grad_norm": 1.0163785046625053,
      "learning_rate": 3.6079077429983525e-05,
      "loss": 0.3126,
      "step": 240
    },
    {
      "epoch": 1.069328896283971,
      "grad_norm": 0.9544507171632961,
      "learning_rate": 3.599670510708402e-05,
      "loss": 0.3053,
      "step": 241
    },
    {
      "epoch": 1.0737659456461452,
      "grad_norm": 0.9019798436756871,
      "learning_rate": 3.5914332784184516e-05,
      "loss": 0.2968,
      "step": 242
    },
    {
      "epoch": 1.0782029950083194,
      "grad_norm": 0.7217732576614062,
      "learning_rate": 3.583196046128501e-05,
      "loss": 0.2845,
      "step": 243
    },
    {
      "epoch": 1.0826400443704935,
      "grad_norm": 1.061456392166015,
      "learning_rate": 3.5749588138385507e-05,
      "loss": 0.2893,
      "step": 244
    },
    {
      "epoch": 1.0870770937326677,
      "grad_norm": 0.9467418677779301,
      "learning_rate": 3.5667215815486e-05,
      "loss": 0.3549,
      "step": 245
    },
    {
      "epoch": 1.0915141430948418,
      "grad_norm": 0.8491210235543027,
      "learning_rate": 3.55848434925865e-05,
      "loss": 0.2585,
      "step": 246
    },
    {
      "epoch": 1.095951192457016,
      "grad_norm": 1.1423342713536853,
      "learning_rate": 3.5502471169686986e-05,
      "loss": 0.3952,
      "step": 247
    },
    {
      "epoch": 1.1003882418191901,
      "grad_norm": 0.9288000150333899,
      "learning_rate": 3.542009884678748e-05,
      "loss": 0.3183,
      "step": 248
    },
    {
      "epoch": 1.1048252911813643,
      "grad_norm": 0.9360465226654853,
      "learning_rate": 3.533772652388798e-05,
      "loss": 0.2987,
      "step": 249
    },
    {
      "epoch": 1.1092623405435384,
      "grad_norm": 0.8683516789900543,
      "learning_rate": 3.525535420098847e-05,
      "loss": 0.2772,
      "step": 250
    },
    {
      "epoch": 1.1136993899057126,
      "grad_norm": 1.0308466035630688,
      "learning_rate": 3.517298187808896e-05,
      "loss": 0.3035,
      "step": 251
    },
    {
      "epoch": 1.1181364392678868,
      "grad_norm": 0.8014850407999317,
      "learning_rate": 3.509060955518946e-05,
      "loss": 0.2814,
      "step": 252
    },
    {
      "epoch": 1.122573488630061,
      "grad_norm": 0.7585536046508811,
      "learning_rate": 3.500823723228995e-05,
      "loss": 0.2757,
      "step": 253
    },
    {
      "epoch": 1.127010537992235,
      "grad_norm": 0.8396995243167018,
      "learning_rate": 3.492586490939045e-05,
      "loss": 0.2495,
      "step": 254
    },
    {
      "epoch": 1.1314475873544092,
      "grad_norm": 0.7893894386934217,
      "learning_rate": 3.4843492586490936e-05,
      "loss": 0.2749,
      "step": 255
    },
    {
      "epoch": 1.1358846367165834,
      "grad_norm": 1.0932045802816392,
      "learning_rate": 3.476112026359143e-05,
      "loss": 0.3878,
      "step": 256
    },
    {
      "epoch": 1.1403216860787575,
      "grad_norm": 0.7575049808703544,
      "learning_rate": 3.467874794069193e-05,
      "loss": 0.2722,
      "step": 257
    },
    {
      "epoch": 1.1447587354409319,
      "grad_norm": 1.159473051053268,
      "learning_rate": 3.459637561779242e-05,
      "loss": 0.3658,
      "step": 258
    },
    {
      "epoch": 1.149195784803106,
      "grad_norm": 1.053227183651025,
      "learning_rate": 3.451400329489292e-05,
      "loss": 0.311,
      "step": 259
    },
    {
      "epoch": 1.1536328341652802,
      "grad_norm": 1.2917391182463467,
      "learning_rate": 3.4431630971993414e-05,
      "loss": 0.2715,
      "step": 260
    },
    {
      "epoch": 1.1580698835274543,
      "grad_norm": 0.801097416684275,
      "learning_rate": 3.434925864909391e-05,
      "loss": 0.3288,
      "step": 261
    },
    {
      "epoch": 1.1625069328896285,
      "grad_norm": 1.1338875766376697,
      "learning_rate": 3.4266886326194404e-05,
      "loss": 0.3412,
      "step": 262
    },
    {
      "epoch": 1.1669439822518026,
      "grad_norm": 0.9350602192279442,
      "learning_rate": 3.418451400329489e-05,
      "loss": 0.3275,
      "step": 263
    },
    {
      "epoch": 1.1713810316139768,
      "grad_norm": 0.7845508271984446,
      "learning_rate": 3.410214168039539e-05,
      "loss": 0.2728,
      "step": 264
    },
    {
      "epoch": 1.175818080976151,
      "grad_norm": 0.858609847246771,
      "learning_rate": 3.4019769357495884e-05,
      "loss": 0.2967,
      "step": 265
    },
    {
      "epoch": 1.180255130338325,
      "grad_norm": 0.9073453400448334,
      "learning_rate": 3.393739703459638e-05,
      "loss": 0.2576,
      "step": 266
    },
    {
      "epoch": 1.1846921797004992,
      "grad_norm": 0.8136112976233186,
      "learning_rate": 3.385502471169687e-05,
      "loss": 0.2419,
      "step": 267
    },
    {
      "epoch": 1.1891292290626734,
      "grad_norm": 0.8282445065621016,
      "learning_rate": 3.3772652388797364e-05,
      "loss": 0.2645,
      "step": 268
    },
    {
      "epoch": 1.1935662784248475,
      "grad_norm": 1.2341381763275305,
      "learning_rate": 3.369028006589786e-05,
      "loss": 0.3302,
      "step": 269
    },
    {
      "epoch": 1.1980033277870217,
      "grad_norm": 1.0081006606605467,
      "learning_rate": 3.3607907742998355e-05,
      "loss": 0.2697,
      "step": 270
    },
    {
      "epoch": 1.2024403771491958,
      "grad_norm": 0.8126944529320962,
      "learning_rate": 3.352553542009885e-05,
      "loss": 0.2788,
      "step": 271
    },
    {
      "epoch": 1.20687742651137,
      "grad_norm": 0.7731330349380529,
      "learning_rate": 3.3443163097199346e-05,
      "loss": 0.2507,
      "step": 272
    },
    {
      "epoch": 1.2113144758735441,
      "grad_norm": 0.8577813370399663,
      "learning_rate": 3.336079077429984e-05,
      "loss": 0.2703,
      "step": 273
    },
    {
      "epoch": 1.2157515252357183,
      "grad_norm": 1.070830505209032,
      "learning_rate": 3.327841845140033e-05,
      "loss": 0.3063,
      "step": 274
    },
    {
      "epoch": 1.2201885745978924,
      "grad_norm": 1.0377982342895966,
      "learning_rate": 3.3196046128500825e-05,
      "loss": 0.3407,
      "step": 275
    },
    {
      "epoch": 1.2246256239600666,
      "grad_norm": 1.210461824573832,
      "learning_rate": 3.311367380560132e-05,
      "loss": 0.2878,
      "step": 276
    },
    {
      "epoch": 1.2290626733222407,
      "grad_norm": 0.8504328102649177,
      "learning_rate": 3.3031301482701816e-05,
      "loss": 0.3108,
      "step": 277
    },
    {
      "epoch": 1.2334997226844149,
      "grad_norm": 0.9184666358814657,
      "learning_rate": 3.2948929159802305e-05,
      "loss": 0.3501,
      "step": 278
    },
    {
      "epoch": 1.237936772046589,
      "grad_norm": 1.0056092182673133,
      "learning_rate": 3.28665568369028e-05,
      "loss": 0.2707,
      "step": 279
    },
    {
      "epoch": 1.2423738214087632,
      "grad_norm": 0.994694343852553,
      "learning_rate": 3.2784184514003296e-05,
      "loss": 0.3204,
      "step": 280
    },
    {
      "epoch": 1.2468108707709373,
      "grad_norm": 0.8917410700644745,
      "learning_rate": 3.270181219110379e-05,
      "loss": 0.2532,
      "step": 281
    },
    {
      "epoch": 1.2512479201331115,
      "grad_norm": 0.9639140603708609,
      "learning_rate": 3.261943986820428e-05,
      "loss": 0.304,
      "step": 282
    },
    {
      "epoch": 1.2556849694952856,
      "grad_norm": 0.8265452521963786,
      "learning_rate": 3.2537067545304775e-05,
      "loss": 0.3003,
      "step": 283
    },
    {
      "epoch": 1.2601220188574598,
      "grad_norm": 0.9524436766825125,
      "learning_rate": 3.245469522240527e-05,
      "loss": 0.3842,
      "step": 284
    },
    {
      "epoch": 1.264559068219634,
      "grad_norm": 0.8633449410120922,
      "learning_rate": 3.2372322899505766e-05,
      "loss": 0.2975,
      "step": 285
    },
    {
      "epoch": 1.268996117581808,
      "grad_norm": 0.8701473620456235,
      "learning_rate": 3.228995057660626e-05,
      "loss": 0.278,
      "step": 286
    },
    {
      "epoch": 1.2734331669439822,
      "grad_norm": 0.8051498714278014,
      "learning_rate": 3.220757825370676e-05,
      "loss": 0.2895,
      "step": 287
    },
    {
      "epoch": 1.2778702163061564,
      "grad_norm": 0.9415134036547899,
      "learning_rate": 3.212520593080725e-05,
      "loss": 0.322,
      "step": 288
    },
    {
      "epoch": 1.2823072656683305,
      "grad_norm": 0.8176229071894334,
      "learning_rate": 3.204283360790775e-05,
      "loss": 0.3009,
      "step": 289
    },
    {
      "epoch": 1.2867443150305047,
      "grad_norm": 1.2048221854237993,
      "learning_rate": 3.196046128500824e-05,
      "loss": 0.3218,
      "step": 290
    },
    {
      "epoch": 1.2911813643926788,
      "grad_norm": 0.8829288912315847,
      "learning_rate": 3.187808896210873e-05,
      "loss": 0.2953,
      "step": 291
    },
    {
      "epoch": 1.295618413754853,
      "grad_norm": 0.8975883783589373,
      "learning_rate": 3.179571663920923e-05,
      "loss": 0.2623,
      "step": 292
    },
    {
      "epoch": 1.3000554631170271,
      "grad_norm": 0.8529012812904146,
      "learning_rate": 3.171334431630972e-05,
      "loss": 0.2307,
      "step": 293
    },
    {
      "epoch": 1.3044925124792013,
      "grad_norm": 0.9528461736356937,
      "learning_rate": 3.163097199341021e-05,
      "loss": 0.2976,
      "step": 294
    },
    {
      "epoch": 1.3089295618413754,
      "grad_norm": 0.7594351159934947,
      "learning_rate": 3.154859967051071e-05,
      "loss": 0.2556,
      "step": 295
    },
    {
      "epoch": 1.3133666112035496,
      "grad_norm": 0.7686742667754073,
      "learning_rate": 3.14662273476112e-05,
      "loss": 0.2054,
      "step": 296
    },
    {
      "epoch": 1.3178036605657237,
      "grad_norm": 0.8745361869963002,
      "learning_rate": 3.13838550247117e-05,
      "loss": 0.3749,
      "step": 297
    },
    {
      "epoch": 1.3222407099278979,
      "grad_norm": 0.8835353086591043,
      "learning_rate": 3.1301482701812194e-05,
      "loss": 0.3486,
      "step": 298
    },
    {
      "epoch": 1.326677759290072,
      "grad_norm": 0.9474340544727997,
      "learning_rate": 3.121911037891269e-05,
      "loss": 0.2235,
      "step": 299
    },
    {
      "epoch": 1.3311148086522462,
      "grad_norm": 0.9114246289515612,
      "learning_rate": 3.1136738056013184e-05,
      "loss": 0.2845,
      "step": 300
    },
    {
      "epoch": 1.3355518580144203,
      "grad_norm": 0.9417669467925294,
      "learning_rate": 3.105436573311368e-05,
      "loss": 0.3097,
      "step": 301
    },
    {
      "epoch": 1.3399889073765945,
      "grad_norm": 1.030762933091569,
      "learning_rate": 3.097199341021417e-05,
      "loss": 0.3131,
      "step": 302
    },
    {
      "epoch": 1.3444259567387689,
      "grad_norm": 0.9372533791726105,
      "learning_rate": 3.0889621087314664e-05,
      "loss": 0.376,
      "step": 303
    },
    {
      "epoch": 1.348863006100943,
      "grad_norm": 0.9500920552274774,
      "learning_rate": 3.080724876441516e-05,
      "loss": 0.3784,
      "step": 304
    },
    {
      "epoch": 1.3533000554631172,
      "grad_norm": 0.7514858230527693,
      "learning_rate": 3.0724876441515655e-05,
      "loss": 0.2833,
      "step": 305
    },
    {
      "epoch": 1.3577371048252913,
      "grad_norm": 0.7939288335875365,
      "learning_rate": 3.0642504118616144e-05,
      "loss": 0.2497,
      "step": 306
    },
    {
      "epoch": 1.3621741541874655,
      "grad_norm": 0.8358984964843912,
      "learning_rate": 3.056013179571664e-05,
      "loss": 0.2741,
      "step": 307
    },
    {
      "epoch": 1.3666112035496396,
      "grad_norm": 0.9062743019265684,
      "learning_rate": 3.0477759472817135e-05,
      "loss": 0.2993,
      "step": 308
    },
    {
      "epoch": 1.3710482529118138,
      "grad_norm": 0.9332367282212933,
      "learning_rate": 3.039538714991763e-05,
      "loss": 0.3015,
      "step": 309
    },
    {
      "epoch": 1.375485302273988,
      "grad_norm": 1.0069126577219902,
      "learning_rate": 3.0313014827018122e-05,
      "loss": 0.2868,
      "step": 310
    },
    {
      "epoch": 1.379922351636162,
      "grad_norm": 0.9033547884573856,
      "learning_rate": 3.0230642504118618e-05,
      "loss": 0.2841,
      "step": 311
    },
    {
      "epoch": 1.3843594009983362,
      "grad_norm": 0.8078728747356874,
      "learning_rate": 3.0148270181219113e-05,
      "loss": 0.2782,
      "step": 312
    },
    {
      "epoch": 1.3887964503605104,
      "grad_norm": 0.9084694748675116,
      "learning_rate": 3.006589785831961e-05,
      "loss": 0.2259,
      "step": 313
    },
    {
      "epoch": 1.3932334997226845,
      "grad_norm": 0.7881935547213544,
      "learning_rate": 2.9983525535420097e-05,
      "loss": 0.2487,
      "step": 314
    },
    {
      "epoch": 1.3976705490848587,
      "grad_norm": 0.8016907528003615,
      "learning_rate": 2.9901153212520593e-05,
      "loss": 0.2626,
      "step": 315
    },
    {
      "epoch": 1.4021075984470328,
      "grad_norm": 0.8279080376625739,
      "learning_rate": 2.9818780889621088e-05,
      "loss": 0.2477,
      "step": 316
    },
    {
      "epoch": 1.406544647809207,
      "grad_norm": 0.8147915290345366,
      "learning_rate": 2.9736408566721587e-05,
      "loss": 0.2526,
      "step": 317
    },
    {
      "epoch": 1.410981697171381,
      "grad_norm": 1.1702039884776705,
      "learning_rate": 2.9654036243822076e-05,
      "loss": 0.2434,
      "step": 318
    },
    {
      "epoch": 1.4154187465335553,
      "grad_norm": 0.7850168968010128,
      "learning_rate": 2.957166392092257e-05,
      "loss": 0.2621,
      "step": 319
    },
    {
      "epoch": 1.4198557958957294,
      "grad_norm": 1.0163017952307563,
      "learning_rate": 2.9489291598023067e-05,
      "loss": 0.2982,
      "step": 320
    },
    {
      "epoch": 1.4242928452579036,
      "grad_norm": 0.7909751599169546,
      "learning_rate": 2.9406919275123562e-05,
      "loss": 0.281,
      "step": 321
    },
    {
      "epoch": 1.4287298946200777,
      "grad_norm": 0.8777170910425888,
      "learning_rate": 2.9324546952224054e-05,
      "loss": 0.2668,
      "step": 322
    },
    {
      "epoch": 1.4331669439822519,
      "grad_norm": 1.0099323658912123,
      "learning_rate": 2.924217462932455e-05,
      "loss": 0.2783,
      "step": 323
    },
    {
      "epoch": 1.437603993344426,
      "grad_norm": 0.8585319545399854,
      "learning_rate": 2.9159802306425045e-05,
      "loss": 0.2639,
      "step": 324
    },
    {
      "epoch": 1.4420410427066002,
      "grad_norm": 1.2373091194055394,
      "learning_rate": 2.907742998352554e-05,
      "loss": 0.3356,
      "step": 325
    },
    {
      "epoch": 1.4464780920687743,
      "grad_norm": 0.8724563274001284,
      "learning_rate": 2.899505766062603e-05,
      "loss": 0.2682,
      "step": 326
    },
    {
      "epoch": 1.4509151414309485,
      "grad_norm": 0.8894584444512684,
      "learning_rate": 2.8912685337726525e-05,
      "loss": 0.2345,
      "step": 327
    },
    {
      "epoch": 1.4553521907931226,
      "grad_norm": 0.791290988794545,
      "learning_rate": 2.883031301482702e-05,
      "loss": 0.227,
      "step": 328
    },
    {
      "epoch": 1.4597892401552968,
      "grad_norm": 0.9471555980217089,
      "learning_rate": 2.8747940691927515e-05,
      "loss": 0.3117,
      "step": 329
    },
    {
      "epoch": 1.464226289517471,
      "grad_norm": 0.9461745437913269,
      "learning_rate": 2.8665568369028008e-05,
      "loss": 0.2994,
      "step": 330
    },
    {
      "epoch": 1.468663338879645,
      "grad_norm": 0.9585732629602921,
      "learning_rate": 2.8583196046128503e-05,
      "loss": 0.3601,
      "step": 331
    },
    {
      "epoch": 1.4731003882418192,
      "grad_norm": 0.9797419555715378,
      "learning_rate": 2.8500823723229e-05,
      "loss": 0.2985,
      "step": 332
    },
    {
      "epoch": 1.4775374376039934,
      "grad_norm": 0.9041880795511308,
      "learning_rate": 2.8418451400329494e-05,
      "loss": 0.2879,
      "step": 333
    },
    {
      "epoch": 1.4819744869661675,
      "grad_norm": 0.7579890566566851,
      "learning_rate": 2.8336079077429983e-05,
      "loss": 0.2363,
      "step": 334
    },
    {
      "epoch": 1.4864115363283417,
      "grad_norm": 1.0136081330061153,
      "learning_rate": 2.8253706754530478e-05,
      "loss": 0.3412,
      "step": 335
    },
    {
      "epoch": 1.4908485856905158,
      "grad_norm": 0.8575878634355566,
      "learning_rate": 2.8171334431630973e-05,
      "loss": 0.3048,
      "step": 336
    },
    {
      "epoch": 1.49528563505269,
      "grad_norm": 1.0070902284052874,
      "learning_rate": 2.808896210873147e-05,
      "loss": 0.2816,
      "step": 337
    },
    {
      "epoch": 1.4997226844148641,
      "grad_norm": 1.0785926689088374,
      "learning_rate": 2.800658978583196e-05,
      "loss": 0.2806,
      "step": 338
    },
    {
      "epoch": 1.5041597337770383,
      "grad_norm": 0.8276318215194345,
      "learning_rate": 2.7924217462932456e-05,
      "loss": 0.2752,
      "step": 339
    },
    {
      "epoch": 1.5085967831392124,
      "grad_norm": 0.9757991401247934,
      "learning_rate": 2.7841845140032952e-05,
      "loss": 0.2357,
      "step": 340
    },
    {
      "epoch": 1.5130338325013866,
      "grad_norm": 1.8061211906359669,
      "learning_rate": 2.775947281713344e-05,
      "loss": 0.3929,
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.5174708818635607, | |
| "grad_norm": 0.8440439630511615, | |
| "learning_rate": 2.7677100494233936e-05, | |
| "loss": 0.2631, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.5219079312257349, | |
| "grad_norm": 0.8785557026624231, | |
| "learning_rate": 2.759472817133443e-05, | |
| "loss": 0.2579, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.526344980587909, | |
| "grad_norm": 0.9168099414234008, | |
| "learning_rate": 2.7512355848434927e-05, | |
| "loss": 0.3854, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.5307820299500832, | |
| "grad_norm": 0.9170947195302683, | |
| "learning_rate": 2.742998352553542e-05, | |
| "loss": 0.3013, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.5352190793122573, | |
| "grad_norm": 0.8619100460371237, | |
| "learning_rate": 2.7347611202635915e-05, | |
| "loss": 0.3073, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.5396561286744315, | |
| "grad_norm": 0.9818485779002657, | |
| "learning_rate": 2.726523887973641e-05, | |
| "loss": 0.3411, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.5440931780366056, | |
| "grad_norm": 0.9725562967318679, | |
| "learning_rate": 2.7182866556836905e-05, | |
| "loss": 0.2584, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.5485302273987798, | |
| "grad_norm": 1.0259249212520478, | |
| "learning_rate": 2.7100494233937394e-05, | |
| "loss": 0.3058, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.552967276760954, | |
| "grad_norm": 1.0405768351821034, | |
| "learning_rate": 2.7018121911037893e-05, | |
| "loss": 0.2496, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.557404326123128, | |
| "grad_norm": 0.953800399868498, | |
| "learning_rate": 2.693574958813839e-05, | |
| "loss": 0.3542, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5618413754853022, | |
| "grad_norm": 0.8755003656509656, | |
| "learning_rate": 2.6853377265238884e-05, | |
| "loss": 0.3325, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5662784248474764, | |
| "grad_norm": 1.1163203468668474, | |
| "learning_rate": 2.6771004942339373e-05, | |
| "loss": 0.31, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.5707154742096505, | |
| "grad_norm": 0.7249833861453431, | |
| "learning_rate": 2.6688632619439868e-05, | |
| "loss": 0.2298, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.5751525235718247, | |
| "grad_norm": 0.8321543857634233, | |
| "learning_rate": 2.6606260296540363e-05, | |
| "loss": 0.3034, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.5795895729339988, | |
| "grad_norm": 0.9329128575612846, | |
| "learning_rate": 2.652388797364086e-05, | |
| "loss": 0.3591, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.584026622296173, | |
| "grad_norm": 0.7888539104661904, | |
| "learning_rate": 2.644151565074135e-05, | |
| "loss": 0.2383, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.5884636716583471, | |
| "grad_norm": 0.8482609363146597, | |
| "learning_rate": 2.6359143327841846e-05, | |
| "loss": 0.3259, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.5929007210205213, | |
| "grad_norm": 0.8128427369485877, | |
| "learning_rate": 2.6276771004942342e-05, | |
| "loss": 0.2471, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.5973377703826954, | |
| "grad_norm": 1.1424089877149042, | |
| "learning_rate": 2.6194398682042837e-05, | |
| "loss": 0.3053, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.6017748197448696, | |
| "grad_norm": 0.797133684244717, | |
| "learning_rate": 2.6112026359143326e-05, | |
| "loss": 0.3085, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.6062118691070437, | |
| "grad_norm": 1.0599037313830935, | |
| "learning_rate": 2.602965403624382e-05, | |
| "loss": 0.3954, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.6106489184692179, | |
| "grad_norm": 0.9704756594970683, | |
| "learning_rate": 2.5947281713344317e-05, | |
| "loss": 0.29, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.615085967831392, | |
| "grad_norm": 0.8971954716853453, | |
| "learning_rate": 2.5864909390444812e-05, | |
| "loss": 0.2839, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.6195230171935662, | |
| "grad_norm": 0.8698279245703869, | |
| "learning_rate": 2.5782537067545305e-05, | |
| "loss": 0.2789, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.6239600665557403, | |
| "grad_norm": 1.0854765098802885, | |
| "learning_rate": 2.57001647446458e-05, | |
| "loss": 0.3104, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.6283971159179145, | |
| "grad_norm": 0.8386426987212547, | |
| "learning_rate": 2.5617792421746295e-05, | |
| "loss": 0.2445, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.6328341652800886, | |
| "grad_norm": 0.8092227277052146, | |
| "learning_rate": 2.553542009884679e-05, | |
| "loss": 0.2425, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.6372712146422628, | |
| "grad_norm": 1.0627030571275184, | |
| "learning_rate": 2.545304777594728e-05, | |
| "loss": 0.2771, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.641708264004437, | |
| "grad_norm": 0.7848551917908129, | |
| "learning_rate": 2.5370675453047775e-05, | |
| "loss": 0.2914, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.646145313366611, | |
| "grad_norm": 0.9757022578302557, | |
| "learning_rate": 2.528830313014827e-05, | |
| "loss": 0.2373, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.6505823627287852, | |
| "grad_norm": 1.027171131325082, | |
| "learning_rate": 2.520593080724877e-05, | |
| "loss": 0.2909, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.6550194120909594, | |
| "grad_norm": 0.8259234404332476, | |
| "learning_rate": 2.5123558484349258e-05, | |
| "loss": 0.2698, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6594564614531335, | |
| "grad_norm": 0.9243264336301544, | |
| "learning_rate": 2.5041186161449753e-05, | |
| "loss": 0.3099, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6638935108153077, | |
| "grad_norm": 0.7192692746737803, | |
| "learning_rate": 2.495881383855025e-05, | |
| "loss": 0.2423, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.6683305601774818, | |
| "grad_norm": 0.9214747901796418, | |
| "learning_rate": 2.487644151565074e-05, | |
| "loss": 0.2449, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.672767609539656, | |
| "grad_norm": 0.8653940436672877, | |
| "learning_rate": 2.4794069192751236e-05, | |
| "loss": 0.2449, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.6772046589018301, | |
| "grad_norm": 0.8719790283817912, | |
| "learning_rate": 2.4711696869851732e-05, | |
| "loss": 0.3459, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.6816417082640043, | |
| "grad_norm": 0.884484185262341, | |
| "learning_rate": 2.4629324546952227e-05, | |
| "loss": 0.2936, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.6860787576261784, | |
| "grad_norm": 0.9556852567947143, | |
| "learning_rate": 2.454695222405272e-05, | |
| "loss": 0.258, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.6905158069883528, | |
| "grad_norm": 0.9006107747994894, | |
| "learning_rate": 2.4464579901153215e-05, | |
| "loss": 0.2868, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.694952856350527, | |
| "grad_norm": 1.0573331639957975, | |
| "learning_rate": 2.4382207578253707e-05, | |
| "loss": 0.2647, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.699389905712701, | |
| "grad_norm": 0.8786757442415969, | |
| "learning_rate": 2.4299835255354202e-05, | |
| "loss": 0.2474, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.7038269550748752, | |
| "grad_norm": 1.034443390305425, | |
| "learning_rate": 2.4217462932454694e-05, | |
| "loss": 0.3275, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.7082640044370494, | |
| "grad_norm": 1.0336686295354667, | |
| "learning_rate": 2.413509060955519e-05, | |
| "loss": 0.2602, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.7127010537992235, | |
| "grad_norm": 0.9454431364731563, | |
| "learning_rate": 2.4052718286655685e-05, | |
| "loss": 0.2715, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.7171381031613977, | |
| "grad_norm": 0.8850143116801344, | |
| "learning_rate": 2.397034596375618e-05, | |
| "loss": 0.2814, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.7215751525235718, | |
| "grad_norm": 0.8386031102638419, | |
| "learning_rate": 2.3887973640856673e-05, | |
| "loss": 0.263, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.726012201885746, | |
| "grad_norm": 1.061756996212574, | |
| "learning_rate": 2.380560131795717e-05, | |
| "loss": 0.3846, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.7304492512479202, | |
| "grad_norm": 0.8173241087183969, | |
| "learning_rate": 2.372322899505766e-05, | |
| "loss": 0.2794, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.7348863006100943, | |
| "grad_norm": 0.7094044767464892, | |
| "learning_rate": 2.3640856672158156e-05, | |
| "loss": 0.24, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.7393233499722685, | |
| "grad_norm": 1.0181711412890253, | |
| "learning_rate": 2.355848434925865e-05, | |
| "loss": 0.2293, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.7437603993344426, | |
| "grad_norm": 0.7974819041384473, | |
| "learning_rate": 2.3476112026359147e-05, | |
| "loss": 0.299, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.7481974486966168, | |
| "grad_norm": 1.0842945651693483, | |
| "learning_rate": 2.339373970345964e-05, | |
| "loss": 0.4007, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.752634498058791, | |
| "grad_norm": 1.0029823415690775, | |
| "learning_rate": 2.3311367380560134e-05, | |
| "loss": 0.2896, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.757071547420965, | |
| "grad_norm": 0.8037712287769038, | |
| "learning_rate": 2.3228995057660626e-05, | |
| "loss": 0.2463, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.7615085967831392, | |
| "grad_norm": 0.8595851294504453, | |
| "learning_rate": 2.3146622734761122e-05, | |
| "loss": 0.2451, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.7659456461453134, | |
| "grad_norm": 0.8154261057883538, | |
| "learning_rate": 2.3064250411861614e-05, | |
| "loss": 0.2669, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.7703826955074875, | |
| "grad_norm": 0.8657303673690703, | |
| "learning_rate": 2.298187808896211e-05, | |
| "loss": 0.33, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.7748197448696617, | |
| "grad_norm": 0.8175680846114033, | |
| "learning_rate": 2.2899505766062605e-05, | |
| "loss": 0.2579, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.7792567942318358, | |
| "grad_norm": 0.8055971799890129, | |
| "learning_rate": 2.28171334431631e-05, | |
| "loss": 0.2466, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.78369384359401, | |
| "grad_norm": 0.9723359465312749, | |
| "learning_rate": 2.2734761120263592e-05, | |
| "loss": 0.2923, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.788130892956184, | |
| "grad_norm": 0.7871930477829718, | |
| "learning_rate": 2.2652388797364088e-05, | |
| "loss": 0.245, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.7925679423183583, | |
| "grad_norm": 0.9587060093143946, | |
| "learning_rate": 2.257001647446458e-05, | |
| "loss": 0.327, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.7970049916805324, | |
| "grad_norm": 0.7518155058726878, | |
| "learning_rate": 2.2487644151565075e-05, | |
| "loss": 0.248, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.8014420410427066, | |
| "grad_norm": 0.8452159720710262, | |
| "learning_rate": 2.240527182866557e-05, | |
| "loss": 0.2898, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.8058790904048807, | |
| "grad_norm": 0.8077753320403622, | |
| "learning_rate": 2.2322899505766066e-05, | |
| "loss": 0.3047, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.8103161397670549, | |
| "grad_norm": 0.883161222736862, | |
| "learning_rate": 2.224052718286656e-05, | |
| "loss": 0.2765, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.8147531891292292, | |
| "grad_norm": 1.019587665034733, | |
| "learning_rate": 2.215815485996705e-05, | |
| "loss": 0.3072, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.8191902384914034, | |
| "grad_norm": 0.973902291030218, | |
| "learning_rate": 2.2075782537067546e-05, | |
| "loss": 0.2474, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.8236272878535775, | |
| "grad_norm": 0.8247021166224197, | |
| "learning_rate": 2.1993410214168038e-05, | |
| "loss": 0.2281, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.8280643372157517, | |
| "grad_norm": 0.8467350889953564, | |
| "learning_rate": 2.1911037891268533e-05, | |
| "loss": 0.2551, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.8325013865779258, | |
| "grad_norm": 0.9629678753241201, | |
| "learning_rate": 2.182866556836903e-05, | |
| "loss": 0.319, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.8369384359401, | |
| "grad_norm": 0.9004484807622022, | |
| "learning_rate": 2.1746293245469524e-05, | |
| "loss": 0.3164, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.8413754853022741, | |
| "grad_norm": 0.9259354668143355, | |
| "learning_rate": 2.1663920922570016e-05, | |
| "loss": 0.255, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.8458125346644483, | |
| "grad_norm": 1.0792016996983913, | |
| "learning_rate": 2.1581548599670512e-05, | |
| "loss": 0.2869, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.8502495840266224, | |
| "grad_norm": 0.7780691624718593, | |
| "learning_rate": 2.1499176276771004e-05, | |
| "loss": 0.2631, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.8546866333887966, | |
| "grad_norm": 1.0150115287764934, | |
| "learning_rate": 2.14168039538715e-05, | |
| "loss": 0.2905, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.8591236827509707, | |
| "grad_norm": 0.9414552541301019, | |
| "learning_rate": 2.1334431630971995e-05, | |
| "loss": 0.2175, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.8635607321131449, | |
| "grad_norm": 1.022383285240764, | |
| "learning_rate": 2.125205930807249e-05, | |
| "loss": 0.2797, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.867997781475319, | |
| "grad_norm": 0.8661251950323628, | |
| "learning_rate": 2.1169686985172982e-05, | |
| "loss": 0.3511, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.8724348308374932, | |
| "grad_norm": 1.0114859530406477, | |
| "learning_rate": 2.1087314662273478e-05, | |
| "loss": 0.2536, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.8768718801996673, | |
| "grad_norm": 0.8061046528471709, | |
| "learning_rate": 2.100494233937397e-05, | |
| "loss": 0.3064, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.8813089295618415, | |
| "grad_norm": 0.8968122294188483, | |
| "learning_rate": 2.0922570016474465e-05, | |
| "loss": 0.2891, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.8857459789240156, | |
| "grad_norm": 0.9586335140521199, | |
| "learning_rate": 2.0840197693574957e-05, | |
| "loss": 0.2613, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.8901830282861898, | |
| "grad_norm": 0.9543990078587583, | |
| "learning_rate": 2.0757825370675453e-05, | |
| "loss": 0.3193, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.894620077648364, | |
| "grad_norm": 0.964414122524041, | |
| "learning_rate": 2.067545304777595e-05, | |
| "loss": 0.2762, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.899057127010538, | |
| "grad_norm": 0.7175021327050176, | |
| "learning_rate": 2.0593080724876444e-05, | |
| "loss": 0.3267, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.9034941763727122, | |
| "grad_norm": 1.0475358727442101, | |
| "learning_rate": 2.0510708401976936e-05, | |
| "loss": 0.2623, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9079312257348864, | |
| "grad_norm": 0.9951684880533599, | |
| "learning_rate": 2.042833607907743e-05, | |
| "loss": 0.3282, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.9123682750970605, | |
| "grad_norm": 0.8125336713416275, | |
| "learning_rate": 2.0345963756177923e-05, | |
| "loss": 0.4212, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.9168053244592347, | |
| "grad_norm": 0.89389068456606, | |
| "learning_rate": 2.026359143327842e-05, | |
| "loss": 0.2463, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.9212423738214088, | |
| "grad_norm": 0.8712168721998037, | |
| "learning_rate": 2.0181219110378914e-05, | |
| "loss": 0.2754, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.925679423183583, | |
| "grad_norm": 0.8748220535261405, | |
| "learning_rate": 2.009884678747941e-05, | |
| "loss": 0.2562, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.9301164725457571, | |
| "grad_norm": 0.9056723496594914, | |
| "learning_rate": 2.0016474464579902e-05, | |
| "loss": 0.3349, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.9345535219079313, | |
| "grad_norm": 0.767429638349272, | |
| "learning_rate": 1.9934102141680397e-05, | |
| "loss": 0.2061, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.9389905712701054, | |
| "grad_norm": 0.8320915665949056, | |
| "learning_rate": 1.985172981878089e-05, | |
| "loss": 0.3067, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.9434276206322796, | |
| "grad_norm": 0.7555880983869324, | |
| "learning_rate": 1.9769357495881385e-05, | |
| "loss": 0.2607, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.9478646699944537, | |
| "grad_norm": 0.7105796667986897, | |
| "learning_rate": 1.9686985172981877e-05, | |
| "loss": 0.2464, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.9523017193566279, | |
| "grad_norm": 0.9767889752038426, | |
| "learning_rate": 1.9604612850082372e-05, | |
| "loss": 0.2808, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.956738768718802, | |
| "grad_norm": 0.7118591789785065, | |
| "learning_rate": 1.9522240527182868e-05, | |
| "loss": 0.2548, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.9611758180809762, | |
| "grad_norm": 0.9356831429456314, | |
| "learning_rate": 1.9439868204283363e-05, | |
| "loss": 0.2863, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.9656128674431503, | |
| "grad_norm": 0.9251371952073134, | |
| "learning_rate": 1.9357495881383855e-05, | |
| "loss": 0.2446, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.9700499168053245, | |
| "grad_norm": 0.9581332032312233, | |
| "learning_rate": 1.927512355848435e-05, | |
| "loss": 0.2707, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.9744869661674986, | |
| "grad_norm": 0.8201545926653439, | |
| "learning_rate": 1.9192751235584843e-05, | |
| "loss": 0.2358, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.9789240155296728, | |
| "grad_norm": 0.8917348538527614, | |
| "learning_rate": 1.911037891268534e-05, | |
| "loss": 0.2327, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.983361064891847, | |
| "grad_norm": 0.793870009713998, | |
| "learning_rate": 1.9028006589785834e-05, | |
| "loss": 0.2351, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.987798114254021, | |
| "grad_norm": 0.7696144330140515, | |
| "learning_rate": 1.894563426688633e-05, | |
| "loss": 0.2435, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.9922351636161952, | |
| "grad_norm": 0.8950314518394308, | |
| "learning_rate": 1.886326194398682e-05, | |
| "loss": 0.2186, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.9966722129783694, | |
| "grad_norm": 0.937115749450349, | |
| "learning_rate": 1.8780889621087317e-05, | |
| "loss": 0.2396, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.0011092623405435, | |
| "grad_norm": 0.7675418990810471, | |
| "learning_rate": 1.869851729818781e-05, | |
| "loss": 0.2239, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.0055463117027177, | |
| "grad_norm": 0.7037873856826813, | |
| "learning_rate": 1.8616144975288304e-05, | |
| "loss": 0.0884, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.009983361064892, | |
| "grad_norm": 1.0684471303545413, | |
| "learning_rate": 1.8533772652388796e-05, | |
| "loss": 0.0896, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.014420410427066, | |
| "grad_norm": 0.4397641924768517, | |
| "learning_rate": 1.8451400329489292e-05, | |
| "loss": 0.0987, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.01885745978924, | |
| "grad_norm": 0.753924710536916, | |
| "learning_rate": 1.8369028006589787e-05, | |
| "loss": 0.108, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.0232945091514143, | |
| "grad_norm": 0.7470844342262113, | |
| "learning_rate": 1.8286655683690283e-05, | |
| "loss": 0.0982, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.0277315585135884, | |
| "grad_norm": 0.4854531440890792, | |
| "learning_rate": 1.8204283360790775e-05, | |
| "loss": 0.0931, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.0321686078757626, | |
| "grad_norm": 1.039503591889987, | |
| "learning_rate": 1.812191103789127e-05, | |
| "loss": 0.1163, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.0366056572379367, | |
| "grad_norm": 0.9666652397167154, | |
| "learning_rate": 1.8039538714991762e-05, | |
| "loss": 0.1476, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.041042706600111, | |
| "grad_norm": 0.4388264574776795, | |
| "learning_rate": 1.7957166392092258e-05, | |
| "loss": 0.0621, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.045479755962285, | |
| "grad_norm": 0.5621865511807232, | |
| "learning_rate": 1.7874794069192753e-05, | |
| "loss": 0.0812, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.049916805324459, | |
| "grad_norm": 0.41065498184531973, | |
| "learning_rate": 1.779242174629325e-05, | |
| "loss": 0.0753, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.0543538546866333, | |
| "grad_norm": 0.5116349654001895, | |
| "learning_rate": 1.771004942339374e-05, | |
| "loss": 0.0839, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.0587909040488075, | |
| "grad_norm": 0.5144947264005313, | |
| "learning_rate": 1.7627677100494236e-05, | |
| "loss": 0.095, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.0632279534109816, | |
| "grad_norm": 0.5109739926339697, | |
| "learning_rate": 1.754530477759473e-05, | |
| "loss": 0.0632, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.067665002773156, | |
| "grad_norm": 0.6532249894330738, | |
| "learning_rate": 1.7462932454695224e-05, | |
| "loss": 0.0853, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.07210205213533, | |
| "grad_norm": 1.5461723966868925, | |
| "learning_rate": 1.7380560131795716e-05, | |
| "loss": 0.1146, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.076539101497504, | |
| "grad_norm": 0.7694204027923975, | |
| "learning_rate": 1.729818780889621e-05, | |
| "loss": 0.0851, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.0809761508596782, | |
| "grad_norm": 0.781619518629895, | |
| "learning_rate": 1.7215815485996707e-05, | |
| "loss": 0.1058, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.0854132002218524, | |
| "grad_norm": 0.48288750667703845, | |
| "learning_rate": 1.7133443163097202e-05, | |
| "loss": 0.0711, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.0898502495840265, | |
| "grad_norm": 0.6270734249696698, | |
| "learning_rate": 1.7051070840197694e-05, | |
| "loss": 0.0771, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.0942872989462007, | |
| "grad_norm": 0.5509850686277749, | |
| "learning_rate": 1.696869851729819e-05, | |
| "loss": 0.076, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.098724348308375, | |
| "grad_norm": 0.6074745179942609, | |
| "learning_rate": 1.6886326194398682e-05, | |
| "loss": 0.0692, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.103161397670549, | |
| "grad_norm": 0.4698384998207071, | |
| "learning_rate": 1.6803953871499177e-05, | |
| "loss": 0.0677, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.107598447032723, | |
| "grad_norm": 0.5091839976360598, | |
| "learning_rate": 1.6721581548599673e-05, | |
| "loss": 0.082, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.1120354963948973, | |
| "grad_norm": 0.6306552852091889, | |
| "learning_rate": 1.6639209225700165e-05, | |
| "loss": 0.0799, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.1164725457570714, | |
| "grad_norm": 0.5453074741951704, | |
| "learning_rate": 1.655683690280066e-05, | |
| "loss": 0.087, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.1209095951192456, | |
| "grad_norm": 0.8333096421370929, | |
| "learning_rate": 1.6474464579901152e-05, | |
| "loss": 0.1027, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.1253466444814197, | |
| "grad_norm": 0.6588226255524119, | |
| "learning_rate": 1.6392092257001648e-05, | |
| "loss": 0.0946, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.129783693843594, | |
| "grad_norm": 0.5146614087549416, | |
| "learning_rate": 1.630971993410214e-05, | |
| "loss": 0.0849, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.134220743205768, | |
| "grad_norm": 0.6728810828478047, | |
| "learning_rate": 1.6227347611202635e-05, | |
| "loss": 0.1113, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.138657792567942, | |
| "grad_norm": 0.6846333038038229, | |
| "learning_rate": 1.614497528830313e-05, | |
| "loss": 0.079, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.1430948419301163, | |
| "grad_norm": 0.5156807435854919, | |
| "learning_rate": 1.6062602965403626e-05, | |
| "loss": 0.0666, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.1475318912922905, | |
| "grad_norm": 0.44976733338742386, | |
| "learning_rate": 1.598023064250412e-05, | |
| "loss": 0.0717, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.1519689406544646, | |
| "grad_norm": 0.4132489182745962, | |
| "learning_rate": 1.5897858319604614e-05, | |
| "loss": 0.0589, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.156405990016639, | |
| "grad_norm": 1.332057382097956, | |
| "learning_rate": 1.5815485996705106e-05, | |
| "loss": 0.1179, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.160843039378813, | |
| "grad_norm": 0.8673309345016713, | |
| "learning_rate": 1.57331136738056e-05, | |
| "loss": 0.0864, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.165280088740987, | |
| "grad_norm": 0.5207558160952449, | |
| "learning_rate": 1.5650741350906097e-05, | |
| "loss": 0.0828, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.1697171381031612, | |
| "grad_norm": 0.5697716016101573, | |
| "learning_rate": 1.5568369028006592e-05, | |
| "loss": 0.0721, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.1741541874653354, | |
| "grad_norm": 0.6143986429843966, | |
| "learning_rate": 1.5485996705107084e-05, | |
| "loss": 0.089, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.1785912368275095, | |
| "grad_norm": 0.6728794883834863, | |
| "learning_rate": 1.540362438220758e-05, | |
| "loss": 0.0724, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.1830282861896837, | |
| "grad_norm": 0.5646442235039268, | |
| "learning_rate": 1.5321252059308072e-05, | |
| "loss": 0.0903, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.187465335551858, | |
| "grad_norm": 0.5635051753086566, | |
| "learning_rate": 1.5238879736408567e-05, | |
| "loss": 0.063, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.191902384914032, | |
| "grad_norm": 0.5104055662959266, | |
| "learning_rate": 1.5156507413509061e-05, | |
| "loss": 0.0493, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.196339434276206, | |
| "grad_norm": 0.4011144468810002, | |
| "learning_rate": 1.5074135090609557e-05, | |
| "loss": 0.0451, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.2007764836383803, | |
| "grad_norm": 0.3690753051448543, | |
| "learning_rate": 1.4991762767710049e-05, | |
| "loss": 0.0542, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.2052135330005544, | |
| "grad_norm": 0.646653951203632, | |
| "learning_rate": 1.4909390444810544e-05, | |
| "loss": 0.0957, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.2096505823627286, | |
| "grad_norm": 0.6739316998918211, | |
| "learning_rate": 1.4827018121911038e-05, | |
| "loss": 0.0931, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.2140876317249027, | |
| "grad_norm": 0.45490674242141194, | |
| "learning_rate": 1.4744645799011533e-05, | |
| "loss": 0.0595, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.218524681087077, | |
| "grad_norm": 0.5700139871353489, | |
| "learning_rate": 1.4662273476112027e-05, | |
| "loss": 0.0583, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.222961730449251, | |
| "grad_norm": 0.6867079073209452, | |
| "learning_rate": 1.4579901153212522e-05, | |
| "loss": 0.1612, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.227398779811425, | |
| "grad_norm": 0.5329755919416289, | |
| "learning_rate": 1.4497528830313015e-05, | |
| "loss": 0.077, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.2318358291735994, | |
| "grad_norm": 0.8264761290105969, | |
| "learning_rate": 1.441515650741351e-05, | |
| "loss": 0.1016, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.2362728785357735, | |
| "grad_norm": 0.8494572645802404, | |
| "learning_rate": 1.4332784184514004e-05, | |
| "loss": 0.1514, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.2407099278979477, | |
| "grad_norm": 0.7267350842992685, | |
| "learning_rate": 1.42504118616145e-05, | |
| "loss": 0.09, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.245146977260122, | |
| "grad_norm": 0.42344046588154616, | |
| "learning_rate": 1.4168039538714991e-05, | |
| "loss": 0.066, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.249584026622296, | |
| "grad_norm": 0.48137381063317836, | |
| "learning_rate": 1.4085667215815487e-05, | |
| "loss": 0.0738, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.25402107598447, | |
| "grad_norm": 0.5369929239439096, | |
| "learning_rate": 1.400329489291598e-05, | |
| "loss": 0.0838, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.2584581253466443, | |
| "grad_norm": 0.5755565396332807, | |
| "learning_rate": 1.3920922570016476e-05, | |
| "loss": 0.1193, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.2628951747088184, | |
| "grad_norm": 0.7425527376654243, | |
| "learning_rate": 1.3838550247116968e-05, | |
| "loss": 0.159, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.2673322240709926, | |
| "grad_norm": 0.6678896851621685, | |
| "learning_rate": 1.3756177924217463e-05, | |
| "loss": 0.0704, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.2717692734331667, | |
| "grad_norm": 0.6407874761956576, | |
| "learning_rate": 1.3673805601317957e-05, | |
| "loss": 0.0673, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.276206322795341, | |
| "grad_norm": 0.606625884811036, | |
| "learning_rate": 1.3591433278418453e-05, | |
| "loss": 0.053, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.280643372157515, | |
| "grad_norm": 0.43153445634045084, | |
| "learning_rate": 1.3509060955518946e-05, | |
| "loss": 0.05, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.2850804215196896, | |
| "grad_norm": 0.5168983746170788, | |
| "learning_rate": 1.3426688632619442e-05, | |
| "loss": 0.0562, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.2895174708818637, | |
| "grad_norm": 0.6170294354031257, | |
| "learning_rate": 1.3344316309719934e-05, | |
| "loss": 0.0699, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.293954520244038, | |
| "grad_norm": 0.4461452327489392, | |
| "learning_rate": 1.326194398682043e-05, | |
| "loss": 0.0585, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.298391569606212, | |
| "grad_norm": 0.47529950612582456, | |
| "learning_rate": 1.3179571663920923e-05, | |
| "loss": 0.0743, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.302828618968386, | |
| "grad_norm": 0.6245852524785371, | |
| "learning_rate": 1.3097199341021419e-05, | |
| "loss": 0.0913, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.3072656683305603, | |
| "grad_norm": 0.5049104959656808, | |
| "learning_rate": 1.301482701812191e-05, | |
| "loss": 0.0693, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.3117027176927345, | |
| "grad_norm": 0.5192151075769776, | |
| "learning_rate": 1.2932454695222406e-05, | |
| "loss": 0.063, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.3161397670549086, | |
| "grad_norm": 0.8438188030306165, | |
| "learning_rate": 1.28500823723229e-05, | |
| "loss": 0.0765, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.320576816417083, | |
| "grad_norm": 0.7458599183514235, | |
| "learning_rate": 1.2767710049423395e-05, | |
| "loss": 0.0883, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.325013865779257, | |
| "grad_norm": 0.4773691261151876, | |
| "learning_rate": 1.2685337726523888e-05, | |
| "loss": 0.0825, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.329450915141431, | |
| "grad_norm": 0.6631184767996374, | |
| "learning_rate": 1.2602965403624385e-05, | |
| "loss": 0.0678, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.3338879645036053, | |
| "grad_norm": 0.48514902659274023, | |
| "learning_rate": 1.2520593080724877e-05, | |
| "loss": 0.0629, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.3383250138657794, | |
| "grad_norm": 0.6363169332495404, | |
| "learning_rate": 1.243822075782537e-05, | |
| "loss": 0.076, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.3427620632279536, | |
| "grad_norm": 0.39497748718207804, | |
| "learning_rate": 1.2355848434925866e-05, | |
| "loss": 0.082, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.3471991125901277, | |
| "grad_norm": 0.520740707426262, | |
| "learning_rate": 1.227347611202636e-05, | |
| "loss": 0.0696, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.351636161952302, | |
| "grad_norm": 0.46006806051497967, | |
| "learning_rate": 1.2191103789126853e-05, | |
| "loss": 0.0608, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.356073211314476, | |
| "grad_norm": 0.450141042884803, | |
| "learning_rate": 1.2108731466227347e-05, | |
| "loss": 0.0719, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.36051026067665, | |
| "grad_norm": 0.4490820967102202, | |
| "learning_rate": 1.2026359143327843e-05, | |
| "loss": 0.0582, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.3649473100388243, | |
| "grad_norm": 0.3785612364255548, | |
| "learning_rate": 1.1943986820428336e-05, | |
| "loss": 0.0371, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.3693843594009985, | |
| "grad_norm": 0.597793712889077, | |
| "learning_rate": 1.186161449752883e-05, | |
| "loss": 0.0704, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.3738214087631726, | |
| "grad_norm": 0.5339908021820534, | |
| "learning_rate": 1.1779242174629326e-05, | |
| "loss": 0.064, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.3782584581253468, | |
| "grad_norm": 0.545507738200127, | |
| "learning_rate": 1.169686985172982e-05, | |
| "loss": 0.0823, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.382695507487521, | |
| "grad_norm": 0.683046307023061, | |
| "learning_rate": 1.1614497528830313e-05, | |
| "loss": 0.0811, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.387132556849695, | |
| "grad_norm": 0.6888771352760809, | |
| "learning_rate": 1.1532125205930807e-05, | |
| "loss": 0.0437, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.391569606211869, | |
| "grad_norm": 0.4496631580203644, | |
| "learning_rate": 1.1449752883031302e-05, | |
| "loss": 0.078, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.3960066555740434, | |
| "grad_norm": 0.5088280426732856, | |
| "learning_rate": 1.1367380560131796e-05, | |
| "loss": 0.0561, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.4004437049362175, | |
| "grad_norm": 0.4440276029808717, | |
| "learning_rate": 1.128500823723229e-05, | |
| "loss": 0.0646, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.4048807542983917, | |
| "grad_norm": 0.4119316549510094, | |
| "learning_rate": 1.1202635914332785e-05, | |
| "loss": 0.0766, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.409317803660566, | |
| "grad_norm": 0.4575063078607008, | |
| "learning_rate": 1.112026359143328e-05, | |
| "loss": 0.0566, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.41375485302274, | |
| "grad_norm": 0.5761313731295636, | |
| "learning_rate": 1.1037891268533773e-05, | |
| "loss": 0.0532, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.418191902384914, | |
| "grad_norm": 0.5218773265032963, | |
| "learning_rate": 1.0955518945634267e-05, | |
| "loss": 0.0776, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.4226289517470883, | |
| "grad_norm": 0.4300647466728647, | |
| "learning_rate": 1.0873146622734762e-05, | |
| "loss": 0.0613, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.4270660011092624, | |
| "grad_norm": 0.6984842374384344, | |
| "learning_rate": 1.0790774299835256e-05, | |
| "loss": 0.0872, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.4315030504714366, | |
| "grad_norm": 0.407745232277647, | |
| "learning_rate": 1.070840197693575e-05, | |
| "loss": 0.0655, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.4359400998336107, | |
| "grad_norm": 0.4468363238317229, | |
| "learning_rate": 1.0626029654036245e-05, | |
| "loss": 0.0545, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.440377149195785, | |
| "grad_norm": 0.41888501993897503, | |
| "learning_rate": 1.0543657331136739e-05, | |
| "loss": 0.0506, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.444814198557959, | |
| "grad_norm": 0.614258637298855, | |
| "learning_rate": 1.0461285008237233e-05, | |
| "loss": 0.0638, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.449251247920133, | |
| "grad_norm": 0.5383878715788591, | |
| "learning_rate": 1.0378912685337726e-05, | |
| "loss": 0.0714, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.4536882972823073, | |
| "grad_norm": 0.49440474858341443, | |
| "learning_rate": 1.0296540362438222e-05, | |
| "loss": 0.0774, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.4581253466444815, | |
| "grad_norm": 0.5396107083695306, | |
| "learning_rate": 1.0214168039538716e-05, | |
| "loss": 0.0605, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.4625623960066556, | |
| "grad_norm": 0.6227835454493174, | |
| "learning_rate": 1.013179571663921e-05, | |
| "loss": 0.0591, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.4669994453688298, | |
| "grad_norm": 0.6709203813676109, | |
| "learning_rate": 1.0049423393739705e-05, | |
| "loss": 0.0636, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.471436494731004, | |
| "grad_norm": 0.884123107839097, | |
| "learning_rate": 9.967051070840199e-06, | |
| "loss": 0.0967, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.475873544093178, | |
| "grad_norm": 0.557280216515346, | |
| "learning_rate": 9.884678747940692e-06, | |
| "loss": 0.0527, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.480310593455352, | |
| "grad_norm": 0.4521203578197039, | |
| "learning_rate": 9.802306425041186e-06, | |
| "loss": 0.0624, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.4847476428175264, | |
| "grad_norm": 0.47421592133383833, | |
| "learning_rate": 9.719934102141682e-06, | |
| "loss": 0.0638, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.4891846921797005, | |
| "grad_norm": 0.44367777948603515, | |
| "learning_rate": 9.637561779242175e-06, | |
| "loss": 0.0435, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.4936217415418747, | |
| "grad_norm": 0.5122972378060052, | |
| "learning_rate": 9.55518945634267e-06, | |
| "loss": 0.0616, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.498058790904049, | |
| "grad_norm": 0.6076303106035497, | |
| "learning_rate": 9.472817133443165e-06, | |
| "loss": 0.0891, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.502495840266223, | |
| "grad_norm": 0.45183570543231943, | |
| "learning_rate": 9.390444810543658e-06, | |
| "loss": 0.0706, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.506932889628397, | |
| "grad_norm": 0.5822435030412974, | |
| "learning_rate": 9.308072487644152e-06, | |
| "loss": 0.0862, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.5113699389905713, | |
| "grad_norm": 0.34218349933462927, | |
| "learning_rate": 9.225700164744646e-06, | |
| "loss": 0.0488, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.5158069883527454, | |
| "grad_norm": 0.5334752544681166, | |
| "learning_rate": 9.143327841845141e-06, | |
| "loss": 0.0732, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.5202440377149196, | |
| "grad_norm": 0.7441077437858243, | |
| "learning_rate": 9.060955518945635e-06, | |
| "loss": 0.0945, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.5246810870770937, | |
| "grad_norm": 0.5803895364880287, | |
| "learning_rate": 8.978583196046129e-06, | |
| "loss": 0.0628, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.529118136439268, | |
| "grad_norm": 0.7023908809340375, | |
| "learning_rate": 8.896210873146624e-06, | |
| "loss": 0.0808, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.533555185801442, | |
| "grad_norm": 0.7513922246654061, | |
| "learning_rate": 8.813838550247118e-06, | |
| "loss": 0.0675, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.537992235163616, | |
| "grad_norm": 0.5272545067896169, | |
| "learning_rate": 8.731466227347612e-06, | |
| "loss": 0.0663, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.5424292845257903, | |
| "grad_norm": 0.34520250916196227, | |
| "learning_rate": 8.649093904448106e-06, | |
| "loss": 0.0467, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.5468663338879645, | |
| "grad_norm": 0.5582395644470308, | |
| "learning_rate": 8.566721581548601e-06, | |
| "loss": 0.0945, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.5513033832501386, | |
| "grad_norm": 0.47035592281087324, | |
| "learning_rate": 8.484349258649095e-06, | |
| "loss": 0.0594, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.5557404326123128, | |
| "grad_norm": 0.5237242354563192, | |
| "learning_rate": 8.401976935749589e-06, | |
| "loss": 0.0719, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.560177481974487, | |
| "grad_norm": 0.6226672507813605, | |
| "learning_rate": 8.319604612850082e-06, | |
| "loss": 0.0737, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.564614531336661, | |
| "grad_norm": 0.6005362637454409, | |
| "learning_rate": 8.237232289950576e-06, | |
| "loss": 0.066, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.569051580698835, | |
| "grad_norm": 0.8960722597267446, | |
| "learning_rate": 8.15485996705107e-06, | |
| "loss": 0.0523, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.5734886300610094, | |
| "grad_norm": 0.5358038554925384, | |
| "learning_rate": 8.072487644151565e-06, | |
| "loss": 0.0819, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.5779256794231835, | |
| "grad_norm": 0.6011761750085439, | |
| "learning_rate": 7.99011532125206e-06, | |
| "loss": 0.0983, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.5823627287853577, | |
| "grad_norm": 0.5709607346457392, | |
| "learning_rate": 7.907742998352553e-06, | |
| "loss": 0.0593, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.586799778147532, | |
| "grad_norm": 0.5049364662806708, | |
| "learning_rate": 7.825370675453048e-06, | |
| "loss": 0.0704, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.591236827509706, | |
| "grad_norm": 0.4517422988015621, | |
| "learning_rate": 7.742998352553542e-06, | |
| "loss": 0.0471, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.59567387687188, | |
| "grad_norm": 0.6858449217380069, | |
| "learning_rate": 7.660626029654036e-06, | |
| "loss": 0.0919, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.6001109262340543, | |
| "grad_norm": 0.5095035566275075, | |
| "learning_rate": 7.5782537067545305e-06, | |
| "loss": 0.07, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.6045479755962284, | |
| "grad_norm": 0.40903359219934565, | |
| "learning_rate": 7.495881383855024e-06, | |
| "loss": 0.05, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.6089850249584026, | |
| "grad_norm": 0.6623676887680296, | |
| "learning_rate": 7.413509060955519e-06, | |
| "loss": 0.0607, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.6134220743205767, | |
| "grad_norm": 0.44794662138853475, | |
| "learning_rate": 7.3311367380560135e-06, | |
| "loss": 0.0428, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.617859123682751, | |
| "grad_norm": 0.4648021230571587, | |
| "learning_rate": 7.248764415156507e-06, | |
| "loss": 0.0683, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.622296173044925, | |
| "grad_norm": 0.41227107920751704, | |
| "learning_rate": 7.166392092257002e-06, | |
| "loss": 0.0601, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.626733222407099, | |
| "grad_norm": 0.682180480536477, | |
| "learning_rate": 7.084019769357496e-06, | |
| "loss": 0.061, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.6311702717692733, | |
| "grad_norm": 0.4296358597675649, | |
| "learning_rate": 7.00164744645799e-06, | |
| "loss": 0.059, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.6356073211314475, | |
| "grad_norm": 0.39973459230303815, | |
| "learning_rate": 6.919275123558484e-06, | |
| "loss": 0.0422, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.6400443704936216, | |
| "grad_norm": 0.3776539035356485, | |
| "learning_rate": 6.836902800658979e-06, | |
| "loss": 0.0564, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.6444814198557958, | |
| "grad_norm": 0.448340677315833, | |
| "learning_rate": 6.754530477759473e-06, | |
| "loss": 0.0733, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.64891846921797, | |
| "grad_norm": 0.44326019858338933, | |
| "learning_rate": 6.672158154859967e-06, | |
| "loss": 0.0585, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.653355518580144, | |
| "grad_norm": 0.4393932216930158, | |
| "learning_rate": 6.589785831960462e-06, | |
| "loss": 0.0585, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.657792567942318, | |
| "grad_norm": 0.6003175957864102, | |
| "learning_rate": 6.507413509060955e-06, | |
| "loss": 0.0578, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.6622296173044924, | |
| "grad_norm": 0.6888817210512455, | |
| "learning_rate": 6.42504118616145e-06, | |
| "loss": 0.0622, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.5133451866147913, | |
| "learning_rate": 6.342668863261944e-06, | |
| "loss": 0.085, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.6711037160288407, | |
| "grad_norm": 0.6264705999619853, | |
| "learning_rate": 6.260296540362438e-06, | |
| "loss": 0.0774, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.675540765391015, | |
| "grad_norm": 0.5217647870120197, | |
| "learning_rate": 6.177924217462933e-06, | |
| "loss": 0.0605, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.679977814753189, | |
| "grad_norm": 0.5906476738023915, | |
| "learning_rate": 6.095551894563427e-06, | |
| "loss": 0.0725, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.684414864115363, | |
| "grad_norm": 0.5933742338006437, | |
| "learning_rate": 6.013179571663921e-06, | |
| "loss": 0.0651, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.6888519134775377, | |
| "grad_norm": 0.4941534410103864, | |
| "learning_rate": 5.930807248764415e-06, | |
| "loss": 0.0642, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.693288962839712, | |
| "grad_norm": 0.5281822602419853, | |
| "learning_rate": 5.84843492586491e-06, | |
| "loss": 0.0638, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.697726012201886, | |
| "grad_norm": 0.7662992816902268, | |
| "learning_rate": 5.7660626029654035e-06, | |
| "loss": 0.0564, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.70216306156406, | |
| "grad_norm": 0.4372832919599054, | |
| "learning_rate": 5.683690280065898e-06, | |
| "loss": 0.0352, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.7066001109262343, | |
| "grad_norm": 0.5487768889163155, | |
| "learning_rate": 5.601317957166393e-06, | |
| "loss": 0.0675, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.7110371602884085, | |
| "grad_norm": 0.4992506103616956, | |
| "learning_rate": 5.5189456342668865e-06, | |
| "loss": 0.0675, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.7154742096505826, | |
| "grad_norm": 0.5265814463662151, | |
| "learning_rate": 5.436573311367381e-06, | |
| "loss": 0.0737, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.7199112590127568, | |
| "grad_norm": 0.49921904610160917, | |
| "learning_rate": 5.354200988467875e-06, | |
| "loss": 0.0367, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.724348308374931, | |
| "grad_norm": 0.5023294366722539, | |
| "learning_rate": 5.2718286655683695e-06, | |
| "loss": 0.0661, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.728785357737105, | |
| "grad_norm": 0.585411853250103, | |
| "learning_rate": 5.189456342668863e-06, | |
| "loss": 0.0718, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.733222407099279, | |
| "grad_norm": 0.46309840693552984, | |
| "learning_rate": 5.107084019769358e-06, | |
| "loss": 0.0589, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.7376594564614534, | |
| "grad_norm": 0.42236199351208137, | |
| "learning_rate": 5.0247116968698524e-06, | |
| "loss": 0.0442, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.7420965058236275, | |
| "grad_norm": 0.6546125646782767, | |
| "learning_rate": 4.942339373970346e-06, | |
| "loss": 0.0558, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.7465335551858017, | |
| "grad_norm": 0.8323427432114843, | |
| "learning_rate": 4.859967051070841e-06, | |
| "loss": 0.0953, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.750970604547976, | |
| "grad_norm": 0.5960678234936589, | |
| "learning_rate": 4.777594728171335e-06, | |
| "loss": 0.0846, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.75540765391015, | |
| "grad_norm": 0.6092122667608001, | |
| "learning_rate": 4.695222405271829e-06, | |
| "loss": 0.0847, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.759844703272324, | |
| "grad_norm": 0.48532761451387557, | |
| "learning_rate": 4.612850082372323e-06, | |
| "loss": 0.0688, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.7642817526344983, | |
| "grad_norm": 0.4948244150935661, | |
| "learning_rate": 4.5304777594728176e-06, | |
| "loss": 0.0752, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.7687188019966724, | |
| "grad_norm": 0.5544094678362775, | |
| "learning_rate": 4.448105436573312e-06, | |
| "loss": 0.0774, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.7731558513588466, | |
| "grad_norm": 0.6077609572338848, | |
| "learning_rate": 4.365733113673806e-06, | |
| "loss": 0.0862, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.7775929007210207, | |
| "grad_norm": 0.5485551637243942, | |
| "learning_rate": 4.2833607907743006e-06, | |
| "loss": 0.0688, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.782029950083195, | |
| "grad_norm": 0.6388005118357853, | |
| "learning_rate": 4.200988467874794e-06, | |
| "loss": 0.0735, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.786466999445369, | |
| "grad_norm": 0.45218177161710815, | |
| "learning_rate": 4.118616144975288e-06, | |
| "loss": 0.0545, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.790904048807543, | |
| "grad_norm": 0.4346717630330878, | |
| "learning_rate": 4.036243822075783e-06, | |
| "loss": 0.0505, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.7953410981697173, | |
| "grad_norm": 0.5175320298883663, | |
| "learning_rate": 3.9538714991762765e-06, | |
| "loss": 0.0534, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.7997781475318915, | |
| "grad_norm": 0.44823500683586665, | |
| "learning_rate": 3.871499176276771e-06, | |
| "loss": 0.0517, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.8042151968940656, | |
| "grad_norm": 0.350213801698505, | |
| "learning_rate": 3.7891268533772653e-06, | |
| "loss": 0.051, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.8086522462562398, | |
| "grad_norm": 0.6663699731646126, | |
| "learning_rate": 3.7067545304777594e-06, | |
| "loss": 0.0942, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.813089295618414, | |
| "grad_norm": 0.6932216453765204, | |
| "learning_rate": 3.6243822075782536e-06, | |
| "loss": 0.1499, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.817526344980588, | |
| "grad_norm": 0.4397429125368117, | |
| "learning_rate": 3.542009884678748e-06, | |
| "loss": 0.0616, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.821963394342762, | |
| "grad_norm": 0.48755041742041916, | |
| "learning_rate": 3.459637561779242e-06, | |
| "loss": 0.056, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.8264004437049364, | |
| "grad_norm": 0.4803019598534695, | |
| "learning_rate": 3.3772652388797366e-06, | |
| "loss": 0.046, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.8308374930671105, | |
| "grad_norm": 0.48878390570205, | |
| "learning_rate": 3.294892915980231e-06, | |
| "loss": 0.0455, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.8352745424292847, | |
| "grad_norm": 0.4355736625884251, | |
| "learning_rate": 3.212520593080725e-06, | |
| "loss": 0.0513, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.839711591791459, | |
| "grad_norm": 0.4806382415296703, | |
| "learning_rate": 3.130148270181219e-06, | |
| "loss": 0.0691, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.844148641153633, | |
| "grad_norm": 0.7085503544920213, | |
| "learning_rate": 3.0477759472817134e-06, | |
| "loss": 0.092, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.848585690515807, | |
| "grad_norm": 0.4539527895470607, | |
| "learning_rate": 2.9654036243822076e-06, | |
| "loss": 0.0771, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.8530227398779813, | |
| "grad_norm": 0.4315965379665383, | |
| "learning_rate": 2.8830313014827017e-06, | |
| "loss": 0.0546, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.8574597892401554, | |
| "grad_norm": 0.5233086384678962, | |
| "learning_rate": 2.8006589785831964e-06, | |
| "loss": 0.0658, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.8618968386023296, | |
| "grad_norm": 0.6225381047486279, | |
| "learning_rate": 2.7182866556836905e-06, | |
| "loss": 0.0572, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.8663338879645037, | |
| "grad_norm": 0.7265111023145802, | |
| "learning_rate": 2.6359143327841847e-06, | |
| "loss": 0.0801, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.870770937326678, | |
| "grad_norm": 0.478098847415718, | |
| "learning_rate": 2.553542009884679e-06, | |
| "loss": 0.0653, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.875207986688852, | |
| "grad_norm": 0.46170161646311497, | |
| "learning_rate": 2.471169686985173e-06, | |
| "loss": 0.0676, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.879645036051026, | |
| "grad_norm": 0.5335652166420076, | |
| "learning_rate": 2.3887973640856673e-06, | |
| "loss": 0.051, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.8840820854132003, | |
| "grad_norm": 0.43710767662120653, | |
| "learning_rate": 2.3064250411861615e-06, | |
| "loss": 0.065, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.8885191347753745, | |
| "grad_norm": 0.593841244563058, | |
| "learning_rate": 2.224052718286656e-06, | |
| "loss": 0.0567, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.8929561841375486, | |
| "grad_norm": 0.4648433332681791, | |
| "learning_rate": 2.1416803953871503e-06, | |
| "loss": 0.0609, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.8973932334997228, | |
| "grad_norm": 0.6244898382891827, | |
| "learning_rate": 2.059308072487644e-06, | |
| "loss": 0.0546, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.901830282861897, | |
| "grad_norm": 0.4721974124032116, | |
| "learning_rate": 1.9769357495881382e-06, | |
| "loss": 0.0655, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.906267332224071, | |
| "grad_norm": 0.4897283701099957, | |
| "learning_rate": 1.8945634266886326e-06, | |
| "loss": 0.0615, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.9107043815862452, | |
| "grad_norm": 0.4779585729427744, | |
| "learning_rate": 1.8121911037891268e-06, | |
| "loss": 0.0584, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.9151414309484194, | |
| "grad_norm": 0.6361478340546802, | |
| "learning_rate": 1.729818780889621e-06, | |
| "loss": 0.0599, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.9195784803105935, | |
| "grad_norm": 0.5243759283482696, | |
| "learning_rate": 1.6474464579901154e-06, | |
| "loss": 0.0605, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.9240155296727677, | |
| "grad_norm": 0.546129345040773, | |
| "learning_rate": 1.5650741350906096e-06, | |
| "loss": 0.0705, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.928452579034942, | |
| "grad_norm": 0.4469212701491516, | |
| "learning_rate": 1.4827018121911038e-06, | |
| "loss": 0.0598, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.932889628397116, | |
| "grad_norm": 0.5182427511559341, | |
| "learning_rate": 1.4003294892915982e-06, | |
| "loss": 0.0606, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.93732667775929, | |
| "grad_norm": 0.4125175985281365, | |
| "learning_rate": 1.3179571663920924e-06, | |
| "loss": 0.0569, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.9417637271214643, | |
| "grad_norm": 0.4624383382674929, | |
| "learning_rate": 1.2355848434925866e-06, | |
| "loss": 0.0631, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.9462007764836384, | |
| "grad_norm": 0.6619390758156715, | |
| "learning_rate": 1.1532125205930807e-06, | |
| "loss": 0.0629, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.9506378258458126, | |
| "grad_norm": 0.4984503364703818, | |
| "learning_rate": 1.0708401976935751e-06, | |
| "loss": 0.0575, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.9550748752079867, | |
| "grad_norm": 0.4221022311828792, | |
| "learning_rate": 9.884678747940691e-07, | |
| "loss": 0.0546, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.959511924570161, | |
| "grad_norm": 0.812292219150222, | |
| "learning_rate": 9.060955518945634e-07, | |
| "loss": 0.0711, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.963948973932335, | |
| "grad_norm": 0.5341856849794002, | |
| "learning_rate": 8.237232289950577e-07, | |
| "loss": 0.0639, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.968386023294509, | |
| "grad_norm": 0.36564535589692126, | |
| "learning_rate": 7.413509060955519e-07, | |
| "loss": 0.0495, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.9728230726566833, | |
| "grad_norm": 0.5936093916793973, | |
| "learning_rate": 6.589785831960462e-07, | |
| "loss": 0.0703, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.9772601220188575, | |
| "grad_norm": 0.4459384400665918, | |
| "learning_rate": 5.766062602965404e-07, | |
| "loss": 0.0477, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.9816971713810316, | |
| "grad_norm": 0.5018832384433306, | |
| "learning_rate": 4.942339373970346e-07, | |
| "loss": 0.0561, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.986134220743206, | |
| "grad_norm": 0.582965689563923, | |
| "learning_rate": 4.1186161449752885e-07, | |
| "loss": 0.0746, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.99057127010538, | |
| "grad_norm": 0.9209740325547386, | |
| "learning_rate": 3.294892915980231e-07, | |
| "loss": 0.075, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.995008319467554, | |
| "grad_norm": 0.5632266014139005, | |
| "learning_rate": 2.471169686985173e-07, | |
| "loss": 0.0554, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.995008319467554, | |
| "step": 675, | |
| "total_flos": 116490238033920.0, | |
| "train_loss": 0.36549280296873166, | |
| "train_runtime": 96569.5267, | |
| "train_samples_per_second": 0.224, | |
| "train_steps_per_second": 0.007 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 675, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 116490238033920.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
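The state above follows the Hugging Face Trainer's `trainer_state.json` schema: per-step entries in `log_history` (each with `loss`, `grad_norm`, `learning_rate`), a closing summary entry carrying run totals (`train_loss`, `train_runtime`, ...), and trainer metadata. Below is a minimal sketch of how one might load and summarize such a file; the filename `trainer_state.json` and the printed summary are illustrative assumptions, not part of the log itself.

```python
# Minimal sketch: load a Trainer state file and summarize the run.
# Assumes the JSON above is saved as "trainer_state.json" (the
# conventional filename the Hugging Face Trainer uses for this schema).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"; the final log entry holds run-level
# totals instead (train_loss, train_runtime, ...) and is filtered out.
steps = [e for e in state["log_history"] if "loss" in e]
first, last = steps[0], steps[-1]

print(f"steps logged: {len(steps)} (global_step={state['global_step']})")
print(f"first loss:   {first['loss']:.4f} at step {first['step']}")
print(f"final loss:   {last['loss']:.4f} at step {last['step']}")
print(f"peak lr:      {max(e['learning_rate'] for e in steps):.3e}")
```

Run against this log, the sketch would report 675 logged steps, a final per-step loss of 0.0554, and a peak learning rate of about 2.47e-05 at the top of the warmup, matching the values recorded above.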