{ "best_metric": null, "best_model_checkpoint": null, "epoch": 5.0, "eval_steps": 500, "global_step": 450, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.01, "learning_rate": 1.0000000000000002e-06, "loss": 1.3015, "step": 1 }, { "epoch": 0.02, "learning_rate": 2.0000000000000003e-06, "loss": 1.4281, "step": 2 }, { "epoch": 0.03, "learning_rate": 3e-06, "loss": 1.4498, "step": 3 }, { "epoch": 0.04, "learning_rate": 4.000000000000001e-06, "loss": 1.3943, "step": 4 }, { "epoch": 0.06, "learning_rate": 5e-06, "loss": 1.455, "step": 5 }, { "epoch": 0.07, "learning_rate": 6e-06, "loss": 1.4377, "step": 6 }, { "epoch": 0.08, "learning_rate": 7e-06, "loss": 1.5221, "step": 7 }, { "epoch": 0.09, "learning_rate": 8.000000000000001e-06, "loss": 1.4089, "step": 8 }, { "epoch": 0.1, "learning_rate": 9e-06, "loss": 1.4574, "step": 9 }, { "epoch": 0.11, "learning_rate": 1e-05, "loss": 1.3287, "step": 10 }, { "epoch": 0.12, "learning_rate": 1.1000000000000001e-05, "loss": 1.4322, "step": 11 }, { "epoch": 0.13, "learning_rate": 1.2e-05, "loss": 1.4901, "step": 12 }, { "epoch": 0.14, "learning_rate": 1.3000000000000001e-05, "loss": 1.3765, "step": 13 }, { "epoch": 0.16, "learning_rate": 1.4e-05, "loss": 1.4277, "step": 14 }, { "epoch": 0.17, "learning_rate": 1.5000000000000002e-05, "loss": 1.4893, "step": 15 }, { "epoch": 0.18, "learning_rate": 1.6000000000000003e-05, "loss": 1.2648, "step": 16 }, { "epoch": 0.19, "learning_rate": 1.7e-05, "loss": 1.4851, "step": 17 }, { "epoch": 0.2, "learning_rate": 1.8e-05, "loss": 1.3235, "step": 18 }, { "epoch": 0.21, "learning_rate": 1.9e-05, "loss": 1.3674, "step": 19 }, { "epoch": 0.22, "learning_rate": 2e-05, "loss": 1.3619, "step": 20 }, { "epoch": 0.23, "learning_rate": 2e-05, "loss": 1.3814, "step": 21 }, { "epoch": 0.24, "learning_rate": 2e-05, "loss": 1.3902, "step": 22 }, { "epoch": 0.26, "learning_rate": 2e-05, "loss": 1.4724, "step": 23 }, { "epoch": 0.27, "learning_rate": 2e-05, "loss": 1.4082, "step": 24 }, { "epoch": 0.28, "learning_rate": 2e-05, "loss": 1.4753, "step": 25 }, { "epoch": 0.29, "learning_rate": 2e-05, "loss": 1.3142, "step": 26 }, { "epoch": 0.3, "learning_rate": 2e-05, "loss": 1.3124, "step": 27 }, { "epoch": 0.31, "learning_rate": 2e-05, "loss": 1.3489, "step": 28 }, { "epoch": 0.32, "learning_rate": 2e-05, "loss": 1.3424, "step": 29 }, { "epoch": 0.33, "learning_rate": 2e-05, "loss": 1.4087, "step": 30 }, { "epoch": 0.34, "learning_rate": 2e-05, "loss": 1.4061, "step": 31 }, { "epoch": 0.36, "learning_rate": 2e-05, "loss": 1.4727, "step": 32 }, { "epoch": 0.37, "learning_rate": 2e-05, "loss": 1.3032, "step": 33 }, { "epoch": 0.38, "learning_rate": 2e-05, "loss": 1.3527, "step": 34 }, { "epoch": 0.39, "learning_rate": 2e-05, "loss": 1.4014, "step": 35 }, { "epoch": 0.4, "learning_rate": 2e-05, "loss": 1.3887, "step": 36 }, { "epoch": 0.41, "learning_rate": 2e-05, "loss": 1.3897, "step": 37 }, { "epoch": 0.42, "learning_rate": 2e-05, "loss": 1.3574, "step": 38 }, { "epoch": 0.43, "learning_rate": 2e-05, "loss": 1.4434, "step": 39 }, { "epoch": 0.44, "learning_rate": 2e-05, "loss": 1.2705, "step": 40 }, { "epoch": 0.46, "learning_rate": 2e-05, "loss": 1.4504, "step": 41 }, { "epoch": 0.47, "learning_rate": 2e-05, "loss": 1.5292, "step": 42 }, { "epoch": 0.48, "learning_rate": 2e-05, "loss": 1.3911, "step": 43 }, { "epoch": 0.49, "learning_rate": 2e-05, "loss": 1.2904, "step": 44 }, { "epoch": 0.5, "learning_rate": 2e-05, "loss": 1.3152, "step": 45 }, { "epoch": 
0.51, "learning_rate": 2e-05, "loss": 1.2907, "step": 46 }, { "epoch": 0.52, "learning_rate": 2e-05, "loss": 1.3705, "step": 47 }, { "epoch": 0.53, "learning_rate": 2e-05, "loss": 1.3643, "step": 48 }, { "epoch": 0.54, "learning_rate": 2e-05, "loss": 1.4056, "step": 49 }, { "epoch": 0.56, "learning_rate": 2e-05, "loss": 1.502, "step": 50 }, { "epoch": 0.57, "learning_rate": 2e-05, "loss": 1.4266, "step": 51 }, { "epoch": 0.58, "learning_rate": 2e-05, "loss": 1.3296, "step": 52 }, { "epoch": 0.59, "learning_rate": 2e-05, "loss": 1.3355, "step": 53 }, { "epoch": 0.6, "learning_rate": 2e-05, "loss": 1.3942, "step": 54 }, { "epoch": 0.61, "learning_rate": 2e-05, "loss": 1.3474, "step": 55 }, { "epoch": 0.62, "learning_rate": 2e-05, "loss": 1.3985, "step": 56 }, { "epoch": 0.63, "learning_rate": 2e-05, "loss": 1.4883, "step": 57 }, { "epoch": 0.64, "learning_rate": 2e-05, "loss": 1.1958, "step": 58 }, { "epoch": 0.66, "learning_rate": 2e-05, "loss": 1.5296, "step": 59 }, { "epoch": 0.67, "learning_rate": 2e-05, "loss": 1.3338, "step": 60 }, { "epoch": 0.68, "learning_rate": 2e-05, "loss": 1.2165, "step": 61 }, { "epoch": 0.69, "learning_rate": 2e-05, "loss": 1.3481, "step": 62 }, { "epoch": 0.7, "learning_rate": 2e-05, "loss": 1.3667, "step": 63 }, { "epoch": 0.71, "learning_rate": 2e-05, "loss": 1.3844, "step": 64 }, { "epoch": 0.72, "learning_rate": 2e-05, "loss": 1.3475, "step": 65 }, { "epoch": 0.73, "learning_rate": 2e-05, "loss": 1.3695, "step": 66 }, { "epoch": 0.74, "learning_rate": 2e-05, "loss": 1.212, "step": 67 }, { "epoch": 0.76, "learning_rate": 2e-05, "loss": 1.2033, "step": 68 }, { "epoch": 0.77, "learning_rate": 2e-05, "loss": 1.3812, "step": 69 }, { "epoch": 0.78, "learning_rate": 2e-05, "loss": 1.5949, "step": 70 }, { "epoch": 0.79, "learning_rate": 2e-05, "loss": 1.3994, "step": 71 }, { "epoch": 0.8, "learning_rate": 2e-05, "loss": 1.3861, "step": 72 }, { "epoch": 0.81, "learning_rate": 2e-05, "loss": 1.2532, "step": 73 }, { "epoch": 0.82, "learning_rate": 2e-05, "loss": 1.3334, "step": 74 }, { "epoch": 0.83, "learning_rate": 2e-05, "loss": 1.3905, "step": 75 }, { "epoch": 0.84, "learning_rate": 2e-05, "loss": 1.2549, "step": 76 }, { "epoch": 0.86, "learning_rate": 2e-05, "loss": 1.3188, "step": 77 }, { "epoch": 0.87, "learning_rate": 2e-05, "loss": 1.2879, "step": 78 }, { "epoch": 0.88, "learning_rate": 2e-05, "loss": 1.3814, "step": 79 }, { "epoch": 0.89, "learning_rate": 2e-05, "loss": 1.4786, "step": 80 }, { "epoch": 0.9, "learning_rate": 2e-05, "loss": 1.2379, "step": 81 }, { "epoch": 0.91, "learning_rate": 2e-05, "loss": 1.2746, "step": 82 }, { "epoch": 0.92, "learning_rate": 2e-05, "loss": 1.341, "step": 83 }, { "epoch": 0.93, "learning_rate": 2e-05, "loss": 1.4664, "step": 84 }, { "epoch": 0.94, "learning_rate": 2e-05, "loss": 1.2359, "step": 85 }, { "epoch": 0.96, "learning_rate": 2e-05, "loss": 1.3863, "step": 86 }, { "epoch": 0.97, "learning_rate": 2e-05, "loss": 1.3409, "step": 87 }, { "epoch": 0.98, "learning_rate": 2e-05, "loss": 1.261, "step": 88 }, { "epoch": 0.99, "learning_rate": 2e-05, "loss": 1.4486, "step": 89 }, { "epoch": 1.0, "learning_rate": 2e-05, "loss": 1.3002, "step": 90 }, { "epoch": 1.01, "learning_rate": 2e-05, "loss": 1.3119, "step": 91 }, { "epoch": 1.02, "learning_rate": 2e-05, "loss": 1.3295, "step": 92 }, { "epoch": 1.03, "learning_rate": 2e-05, "loss": 1.3181, "step": 93 }, { "epoch": 1.04, "learning_rate": 2e-05, "loss": 1.2759, "step": 94 }, { "epoch": 1.06, "learning_rate": 2e-05, "loss": 1.309, "step": 95 }, { "epoch": 1.07, 
"learning_rate": 2e-05, "loss": 1.3587, "step": 96 }, { "epoch": 1.08, "learning_rate": 2e-05, "loss": 1.2882, "step": 97 }, { "epoch": 1.09, "learning_rate": 2e-05, "loss": 1.4208, "step": 98 }, { "epoch": 1.1, "learning_rate": 2e-05, "loss": 1.139, "step": 99 }, { "epoch": 1.11, "learning_rate": 2e-05, "loss": 1.1894, "step": 100 }, { "epoch": 1.12, "learning_rate": 2e-05, "loss": 1.3628, "step": 101 }, { "epoch": 1.13, "learning_rate": 2e-05, "loss": 1.27, "step": 102 }, { "epoch": 1.14, "learning_rate": 2e-05, "loss": 1.2595, "step": 103 }, { "epoch": 1.16, "learning_rate": 2e-05, "loss": 1.33, "step": 104 }, { "epoch": 1.17, "learning_rate": 2e-05, "loss": 1.4013, "step": 105 }, { "epoch": 1.18, "learning_rate": 2e-05, "loss": 1.1994, "step": 106 }, { "epoch": 1.19, "learning_rate": 2e-05, "loss": 1.3142, "step": 107 }, { "epoch": 1.2, "learning_rate": 2e-05, "loss": 1.4185, "step": 108 }, { "epoch": 1.21, "learning_rate": 2e-05, "loss": 1.2608, "step": 109 }, { "epoch": 1.22, "learning_rate": 2e-05, "loss": 1.3811, "step": 110 }, { "epoch": 1.23, "learning_rate": 2e-05, "loss": 1.3882, "step": 111 }, { "epoch": 1.24, "learning_rate": 2e-05, "loss": 1.2802, "step": 112 }, { "epoch": 1.26, "learning_rate": 2e-05, "loss": 1.1823, "step": 113 }, { "epoch": 1.27, "learning_rate": 2e-05, "loss": 1.2833, "step": 114 }, { "epoch": 1.28, "learning_rate": 2e-05, "loss": 1.4519, "step": 115 }, { "epoch": 1.29, "learning_rate": 2e-05, "loss": 1.2855, "step": 116 }, { "epoch": 1.3, "learning_rate": 2e-05, "loss": 1.2367, "step": 117 }, { "epoch": 1.31, "learning_rate": 2e-05, "loss": 1.3691, "step": 118 }, { "epoch": 1.32, "learning_rate": 2e-05, "loss": 1.2726, "step": 119 }, { "epoch": 1.33, "learning_rate": 2e-05, "loss": 1.2895, "step": 120 }, { "epoch": 1.34, "learning_rate": 2e-05, "loss": 1.3119, "step": 121 }, { "epoch": 1.36, "learning_rate": 2e-05, "loss": 1.279, "step": 122 }, { "epoch": 1.37, "learning_rate": 2e-05, "loss": 1.2333, "step": 123 }, { "epoch": 1.38, "learning_rate": 2e-05, "loss": 1.2206, "step": 124 }, { "epoch": 1.39, "learning_rate": 2e-05, "loss": 1.3423, "step": 125 }, { "epoch": 1.4, "learning_rate": 2e-05, "loss": 1.2588, "step": 126 }, { "epoch": 1.41, "learning_rate": 2e-05, "loss": 1.1811, "step": 127 }, { "epoch": 1.42, "learning_rate": 2e-05, "loss": 1.3719, "step": 128 }, { "epoch": 1.43, "learning_rate": 2e-05, "loss": 1.4495, "step": 129 }, { "epoch": 1.44, "learning_rate": 2e-05, "loss": 1.4182, "step": 130 }, { "epoch": 1.46, "learning_rate": 2e-05, "loss": 1.3553, "step": 131 }, { "epoch": 1.47, "learning_rate": 2e-05, "loss": 1.3562, "step": 132 }, { "epoch": 1.48, "learning_rate": 2e-05, "loss": 1.2997, "step": 133 }, { "epoch": 1.49, "learning_rate": 2e-05, "loss": 1.2518, "step": 134 }, { "epoch": 1.5, "learning_rate": 2e-05, "loss": 1.3145, "step": 135 }, { "epoch": 1.51, "learning_rate": 2e-05, "loss": 1.3927, "step": 136 }, { "epoch": 1.52, "learning_rate": 2e-05, "loss": 1.4503, "step": 137 }, { "epoch": 1.53, "learning_rate": 2e-05, "loss": 1.4147, "step": 138 }, { "epoch": 1.54, "learning_rate": 2e-05, "loss": 1.2696, "step": 139 }, { "epoch": 1.56, "learning_rate": 2e-05, "loss": 1.2751, "step": 140 }, { "epoch": 1.57, "learning_rate": 2e-05, "loss": 1.303, "step": 141 }, { "epoch": 1.58, "learning_rate": 2e-05, "loss": 1.295, "step": 142 }, { "epoch": 1.59, "learning_rate": 2e-05, "loss": 1.3347, "step": 143 }, { "epoch": 1.6, "learning_rate": 2e-05, "loss": 1.3195, "step": 144 }, { "epoch": 1.61, "learning_rate": 2e-05, "loss": 1.2815, 
"step": 145 }, { "epoch": 1.62, "learning_rate": 2e-05, "loss": 1.3128, "step": 146 }, { "epoch": 1.63, "learning_rate": 2e-05, "loss": 1.1768, "step": 147 }, { "epoch": 1.64, "learning_rate": 2e-05, "loss": 1.3368, "step": 148 }, { "epoch": 1.66, "learning_rate": 2e-05, "loss": 1.3746, "step": 149 }, { "epoch": 1.67, "learning_rate": 2e-05, "loss": 1.3906, "step": 150 }, { "epoch": 1.68, "learning_rate": 2e-05, "loss": 1.321, "step": 151 }, { "epoch": 1.69, "learning_rate": 2e-05, "loss": 1.2435, "step": 152 }, { "epoch": 1.7, "learning_rate": 2e-05, "loss": 1.3075, "step": 153 }, { "epoch": 1.71, "learning_rate": 2e-05, "loss": 1.3707, "step": 154 }, { "epoch": 1.72, "learning_rate": 2e-05, "loss": 1.3653, "step": 155 }, { "epoch": 1.73, "learning_rate": 2e-05, "loss": 1.3269, "step": 156 }, { "epoch": 1.74, "learning_rate": 2e-05, "loss": 1.2515, "step": 157 }, { "epoch": 1.76, "learning_rate": 2e-05, "loss": 1.3876, "step": 158 }, { "epoch": 1.77, "learning_rate": 2e-05, "loss": 1.1814, "step": 159 }, { "epoch": 1.78, "learning_rate": 2e-05, "loss": 1.3542, "step": 160 }, { "epoch": 1.79, "learning_rate": 2e-05, "loss": 1.2409, "step": 161 }, { "epoch": 1.8, "learning_rate": 2e-05, "loss": 1.4083, "step": 162 }, { "epoch": 1.81, "learning_rate": 2e-05, "loss": 1.3068, "step": 163 }, { "epoch": 1.82, "learning_rate": 2e-05, "loss": 1.3397, "step": 164 }, { "epoch": 1.83, "learning_rate": 2e-05, "loss": 1.3989, "step": 165 }, { "epoch": 1.84, "learning_rate": 2e-05, "loss": 1.1801, "step": 166 }, { "epoch": 1.86, "learning_rate": 2e-05, "loss": 1.231, "step": 167 }, { "epoch": 1.87, "learning_rate": 2e-05, "loss": 1.2747, "step": 168 }, { "epoch": 1.88, "learning_rate": 2e-05, "loss": 1.3697, "step": 169 }, { "epoch": 1.89, "learning_rate": 2e-05, "loss": 1.1737, "step": 170 }, { "epoch": 1.9, "learning_rate": 2e-05, "loss": 1.4005, "step": 171 }, { "epoch": 1.91, "learning_rate": 2e-05, "loss": 1.3068, "step": 172 }, { "epoch": 1.92, "learning_rate": 2e-05, "loss": 1.314, "step": 173 }, { "epoch": 1.93, "learning_rate": 2e-05, "loss": 1.2888, "step": 174 }, { "epoch": 1.94, "learning_rate": 2e-05, "loss": 1.4259, "step": 175 }, { "epoch": 1.96, "learning_rate": 2e-05, "loss": 1.346, "step": 176 }, { "epoch": 1.97, "learning_rate": 2e-05, "loss": 1.1118, "step": 177 }, { "epoch": 1.98, "learning_rate": 2e-05, "loss": 1.3533, "step": 178 }, { "epoch": 1.99, "learning_rate": 2e-05, "loss": 1.2803, "step": 179 }, { "epoch": 2.0, "learning_rate": 2e-05, "loss": 1.2263, "step": 180 }, { "epoch": 2.01, "learning_rate": 2e-05, "loss": 1.211, "step": 181 }, { "epoch": 2.02, "learning_rate": 2e-05, "loss": 1.3963, "step": 182 }, { "epoch": 2.03, "learning_rate": 2e-05, "loss": 1.3386, "step": 183 }, { "epoch": 2.04, "learning_rate": 2e-05, "loss": 1.2399, "step": 184 }, { "epoch": 2.06, "learning_rate": 2e-05, "loss": 1.2932, "step": 185 }, { "epoch": 2.07, "learning_rate": 2e-05, "loss": 1.3952, "step": 186 }, { "epoch": 2.08, "learning_rate": 2e-05, "loss": 1.3003, "step": 187 }, { "epoch": 2.09, "learning_rate": 2e-05, "loss": 1.2899, "step": 188 }, { "epoch": 2.1, "learning_rate": 2e-05, "loss": 1.363, "step": 189 }, { "epoch": 2.11, "learning_rate": 2e-05, "loss": 1.2344, "step": 190 }, { "epoch": 2.12, "learning_rate": 2e-05, "loss": 1.1305, "step": 191 }, { "epoch": 2.13, "learning_rate": 2e-05, "loss": 1.186, "step": 192 }, { "epoch": 2.14, "learning_rate": 2e-05, "loss": 1.2804, "step": 193 }, { "epoch": 2.16, "learning_rate": 2e-05, "loss": 1.1893, "step": 194 }, { "epoch": 2.17, 
"learning_rate": 2e-05, "loss": 1.2607, "step": 195 }, { "epoch": 2.18, "learning_rate": 2e-05, "loss": 1.2015, "step": 196 }, { "epoch": 2.19, "learning_rate": 2e-05, "loss": 1.3948, "step": 197 }, { "epoch": 2.2, "learning_rate": 2e-05, "loss": 1.2904, "step": 198 }, { "epoch": 2.21, "learning_rate": 2e-05, "loss": 1.4623, "step": 199 }, { "epoch": 2.22, "learning_rate": 2e-05, "loss": 1.3776, "step": 200 }, { "epoch": 2.23, "learning_rate": 2e-05, "loss": 1.2358, "step": 201 }, { "epoch": 2.24, "learning_rate": 2e-05, "loss": 1.2964, "step": 202 }, { "epoch": 2.26, "learning_rate": 2e-05, "loss": 1.095, "step": 203 }, { "epoch": 2.27, "learning_rate": 2e-05, "loss": 1.3107, "step": 204 }, { "epoch": 2.28, "learning_rate": 2e-05, "loss": 1.1573, "step": 205 }, { "epoch": 2.29, "learning_rate": 2e-05, "loss": 1.2963, "step": 206 }, { "epoch": 2.3, "learning_rate": 2e-05, "loss": 1.212, "step": 207 }, { "epoch": 2.31, "learning_rate": 2e-05, "loss": 1.3345, "step": 208 }, { "epoch": 2.32, "learning_rate": 2e-05, "loss": 1.5012, "step": 209 }, { "epoch": 2.33, "learning_rate": 2e-05, "loss": 1.2649, "step": 210 }, { "epoch": 2.34, "learning_rate": 2e-05, "loss": 1.1612, "step": 211 }, { "epoch": 2.36, "learning_rate": 2e-05, "loss": 1.2271, "step": 212 }, { "epoch": 2.37, "learning_rate": 2e-05, "loss": 1.3223, "step": 213 }, { "epoch": 2.38, "learning_rate": 2e-05, "loss": 1.343, "step": 214 }, { "epoch": 2.39, "learning_rate": 2e-05, "loss": 1.3141, "step": 215 }, { "epoch": 2.4, "learning_rate": 2e-05, "loss": 1.3062, "step": 216 }, { "epoch": 2.41, "learning_rate": 2e-05, "loss": 1.3975, "step": 217 }, { "epoch": 2.42, "learning_rate": 2e-05, "loss": 1.1237, "step": 218 }, { "epoch": 2.43, "learning_rate": 2e-05, "loss": 1.2495, "step": 219 }, { "epoch": 2.44, "learning_rate": 2e-05, "loss": 1.419, "step": 220 }, { "epoch": 2.46, "learning_rate": 2e-05, "loss": 1.2452, "step": 221 }, { "epoch": 2.47, "learning_rate": 2e-05, "loss": 1.4173, "step": 222 }, { "epoch": 2.48, "learning_rate": 2e-05, "loss": 1.4226, "step": 223 }, { "epoch": 2.49, "learning_rate": 2e-05, "loss": 1.303, "step": 224 }, { "epoch": 2.5, "learning_rate": 2e-05, "loss": 1.3202, "step": 225 }, { "epoch": 2.51, "learning_rate": 2e-05, "loss": 1.3554, "step": 226 }, { "epoch": 2.52, "learning_rate": 2e-05, "loss": 1.1988, "step": 227 }, { "epoch": 2.53, "learning_rate": 2e-05, "loss": 1.2966, "step": 228 }, { "epoch": 2.54, "learning_rate": 2e-05, "loss": 1.1834, "step": 229 }, { "epoch": 2.56, "learning_rate": 2e-05, "loss": 1.204, "step": 230 }, { "epoch": 2.57, "learning_rate": 2e-05, "loss": 1.2432, "step": 231 }, { "epoch": 2.58, "learning_rate": 2e-05, "loss": 1.2509, "step": 232 }, { "epoch": 2.59, "learning_rate": 2e-05, "loss": 1.2474, "step": 233 }, { "epoch": 2.6, "learning_rate": 2e-05, "loss": 1.2901, "step": 234 }, { "epoch": 2.61, "learning_rate": 2e-05, "loss": 1.2185, "step": 235 }, { "epoch": 2.62, "learning_rate": 2e-05, "loss": 1.1084, "step": 236 }, { "epoch": 2.63, "learning_rate": 2e-05, "loss": 1.4245, "step": 237 }, { "epoch": 2.64, "learning_rate": 2e-05, "loss": 1.2993, "step": 238 }, { "epoch": 2.66, "learning_rate": 2e-05, "loss": 1.313, "step": 239 }, { "epoch": 2.67, "learning_rate": 2e-05, "loss": 1.3624, "step": 240 }, { "epoch": 2.68, "learning_rate": 2e-05, "loss": 1.1682, "step": 241 }, { "epoch": 2.69, "learning_rate": 2e-05, "loss": 1.2933, "step": 242 }, { "epoch": 2.7, "learning_rate": 2e-05, "loss": 1.243, "step": 243 }, { "epoch": 2.71, "learning_rate": 2e-05, "loss": 1.1929, 
"step": 244 }, { "epoch": 2.72, "learning_rate": 2e-05, "loss": 1.3383, "step": 245 }, { "epoch": 2.73, "learning_rate": 2e-05, "loss": 1.1987, "step": 246 }, { "epoch": 2.74, "learning_rate": 2e-05, "loss": 1.2616, "step": 247 }, { "epoch": 2.76, "learning_rate": 2e-05, "loss": 1.2453, "step": 248 }, { "epoch": 2.77, "learning_rate": 2e-05, "loss": 1.2939, "step": 249 }, { "epoch": 2.78, "learning_rate": 2e-05, "loss": 1.2291, "step": 250 }, { "epoch": 2.79, "learning_rate": 2e-05, "loss": 1.223, "step": 251 }, { "epoch": 2.8, "learning_rate": 2e-05, "loss": 1.2218, "step": 252 }, { "epoch": 2.81, "learning_rate": 2e-05, "loss": 1.325, "step": 253 }, { "epoch": 2.82, "learning_rate": 2e-05, "loss": 1.1948, "step": 254 }, { "epoch": 2.83, "learning_rate": 2e-05, "loss": 1.3043, "step": 255 }, { "epoch": 2.84, "learning_rate": 2e-05, "loss": 1.2954, "step": 256 }, { "epoch": 2.86, "learning_rate": 2e-05, "loss": 1.2569, "step": 257 }, { "epoch": 2.87, "learning_rate": 2e-05, "loss": 1.3089, "step": 258 }, { "epoch": 2.88, "learning_rate": 2e-05, "loss": 1.2996, "step": 259 }, { "epoch": 2.89, "learning_rate": 2e-05, "loss": 1.299, "step": 260 }, { "epoch": 2.9, "learning_rate": 2e-05, "loss": 1.2845, "step": 261 }, { "epoch": 2.91, "learning_rate": 2e-05, "loss": 1.1922, "step": 262 }, { "epoch": 2.92, "learning_rate": 2e-05, "loss": 1.1089, "step": 263 }, { "epoch": 2.93, "learning_rate": 2e-05, "loss": 1.2926, "step": 264 }, { "epoch": 2.94, "learning_rate": 2e-05, "loss": 1.2205, "step": 265 }, { "epoch": 2.96, "learning_rate": 2e-05, "loss": 1.2509, "step": 266 }, { "epoch": 2.97, "learning_rate": 2e-05, "loss": 1.3576, "step": 267 }, { "epoch": 2.98, "learning_rate": 2e-05, "loss": 1.3584, "step": 268 }, { "epoch": 2.99, "learning_rate": 2e-05, "loss": 1.2712, "step": 269 }, { "epoch": 3.0, "learning_rate": 2e-05, "loss": 1.3389, "step": 270 }, { "epoch": 3.01, "learning_rate": 2e-05, "loss": 1.217, "step": 271 }, { "epoch": 3.02, "learning_rate": 2e-05, "loss": 1.2596, "step": 272 }, { "epoch": 3.03, "learning_rate": 2e-05, "loss": 1.2238, "step": 273 }, { "epoch": 3.04, "learning_rate": 2e-05, "loss": 1.1841, "step": 274 }, { "epoch": 3.06, "learning_rate": 2e-05, "loss": 1.3192, "step": 275 }, { "epoch": 3.07, "learning_rate": 2e-05, "loss": 1.3277, "step": 276 }, { "epoch": 3.08, "learning_rate": 2e-05, "loss": 1.3483, "step": 277 }, { "epoch": 3.09, "learning_rate": 2e-05, "loss": 1.2256, "step": 278 }, { "epoch": 3.1, "learning_rate": 2e-05, "loss": 1.1878, "step": 279 }, { "epoch": 3.11, "learning_rate": 2e-05, "loss": 1.2736, "step": 280 }, { "epoch": 3.12, "learning_rate": 2e-05, "loss": 1.2108, "step": 281 }, { "epoch": 3.13, "learning_rate": 2e-05, "loss": 1.1168, "step": 282 }, { "epoch": 3.14, "learning_rate": 2e-05, "loss": 1.2841, "step": 283 }, { "epoch": 3.16, "learning_rate": 2e-05, "loss": 1.215, "step": 284 }, { "epoch": 3.17, "learning_rate": 2e-05, "loss": 1.1552, "step": 285 }, { "epoch": 3.18, "learning_rate": 2e-05, "loss": 1.2161, "step": 286 }, { "epoch": 3.19, "learning_rate": 2e-05, "loss": 1.3477, "step": 287 }, { "epoch": 3.2, "learning_rate": 2e-05, "loss": 1.2334, "step": 288 }, { "epoch": 3.21, "learning_rate": 2e-05, "loss": 1.3001, "step": 289 }, { "epoch": 3.22, "learning_rate": 2e-05, "loss": 1.0189, "step": 290 }, { "epoch": 3.23, "learning_rate": 2e-05, "loss": 1.254, "step": 291 }, { "epoch": 3.24, "learning_rate": 2e-05, "loss": 1.0806, "step": 292 }, { "epoch": 3.26, "learning_rate": 2e-05, "loss": 1.3138, "step": 293 }, { "epoch": 3.27, 
"learning_rate": 2e-05, "loss": 1.2069, "step": 294 }, { "epoch": 3.28, "learning_rate": 2e-05, "loss": 1.2564, "step": 295 }, { "epoch": 3.29, "learning_rate": 2e-05, "loss": 1.2159, "step": 296 }, { "epoch": 3.3, "learning_rate": 2e-05, "loss": 1.035, "step": 297 }, { "epoch": 3.31, "learning_rate": 2e-05, "loss": 1.3348, "step": 298 }, { "epoch": 3.32, "learning_rate": 2e-05, "loss": 1.1858, "step": 299 }, { "epoch": 3.33, "learning_rate": 2e-05, "loss": 1.2503, "step": 300 }, { "epoch": 3.34, "learning_rate": 2e-05, "loss": 1.2748, "step": 301 }, { "epoch": 3.36, "learning_rate": 2e-05, "loss": 1.2131, "step": 302 }, { "epoch": 3.37, "learning_rate": 2e-05, "loss": 1.2046, "step": 303 }, { "epoch": 3.38, "learning_rate": 2e-05, "loss": 1.2716, "step": 304 }, { "epoch": 3.39, "learning_rate": 2e-05, "loss": 1.3449, "step": 305 }, { "epoch": 3.4, "learning_rate": 2e-05, "loss": 1.1789, "step": 306 }, { "epoch": 3.41, "learning_rate": 2e-05, "loss": 1.2985, "step": 307 }, { "epoch": 3.42, "learning_rate": 2e-05, "loss": 1.2788, "step": 308 }, { "epoch": 3.43, "learning_rate": 2e-05, "loss": 1.2677, "step": 309 }, { "epoch": 3.44, "learning_rate": 2e-05, "loss": 1.2218, "step": 310 }, { "epoch": 3.46, "learning_rate": 2e-05, "loss": 1.271, "step": 311 }, { "epoch": 3.47, "learning_rate": 2e-05, "loss": 1.2715, "step": 312 }, { "epoch": 3.48, "learning_rate": 2e-05, "loss": 1.2005, "step": 313 }, { "epoch": 3.49, "learning_rate": 2e-05, "loss": 1.1717, "step": 314 }, { "epoch": 3.5, "learning_rate": 2e-05, "loss": 1.3096, "step": 315 }, { "epoch": 3.51, "learning_rate": 2e-05, "loss": 1.3051, "step": 316 }, { "epoch": 3.52, "learning_rate": 2e-05, "loss": 1.1878, "step": 317 }, { "epoch": 3.53, "learning_rate": 2e-05, "loss": 1.2418, "step": 318 }, { "epoch": 3.54, "learning_rate": 2e-05, "loss": 1.3962, "step": 319 }, { "epoch": 3.56, "learning_rate": 2e-05, "loss": 1.299, "step": 320 }, { "epoch": 3.57, "learning_rate": 2e-05, "loss": 1.3695, "step": 321 }, { "epoch": 3.58, "learning_rate": 2e-05, "loss": 1.1061, "step": 322 }, { "epoch": 3.59, "learning_rate": 2e-05, "loss": 1.2401, "step": 323 }, { "epoch": 3.6, "learning_rate": 2e-05, "loss": 1.3242, "step": 324 }, { "epoch": 3.61, "learning_rate": 2e-05, "loss": 1.2457, "step": 325 }, { "epoch": 3.62, "learning_rate": 2e-05, "loss": 1.2978, "step": 326 }, { "epoch": 3.63, "learning_rate": 2e-05, "loss": 1.3897, "step": 327 }, { "epoch": 3.64, "learning_rate": 2e-05, "loss": 1.2661, "step": 328 }, { "epoch": 3.66, "learning_rate": 2e-05, "loss": 1.2118, "step": 329 }, { "epoch": 3.67, "learning_rate": 2e-05, "loss": 1.1694, "step": 330 }, { "epoch": 3.68, "learning_rate": 2e-05, "loss": 1.3988, "step": 331 }, { "epoch": 3.69, "learning_rate": 2e-05, "loss": 1.2731, "step": 332 }, { "epoch": 3.7, "learning_rate": 2e-05, "loss": 1.2065, "step": 333 }, { "epoch": 3.71, "learning_rate": 2e-05, "loss": 1.2534, "step": 334 }, { "epoch": 3.72, "learning_rate": 2e-05, "loss": 1.27, "step": 335 }, { "epoch": 3.73, "learning_rate": 2e-05, "loss": 1.2694, "step": 336 }, { "epoch": 3.74, "learning_rate": 2e-05, "loss": 1.3454, "step": 337 }, { "epoch": 3.76, "learning_rate": 2e-05, "loss": 1.2267, "step": 338 }, { "epoch": 3.77, "learning_rate": 2e-05, "loss": 1.218, "step": 339 }, { "epoch": 3.78, "learning_rate": 2e-05, "loss": 1.2273, "step": 340 }, { "epoch": 3.79, "learning_rate": 2e-05, "loss": 1.2489, "step": 341 }, { "epoch": 3.8, "learning_rate": 2e-05, "loss": 1.2881, "step": 342 }, { "epoch": 3.81, "learning_rate": 2e-05, "loss": 
1.2662, "step": 343 }, { "epoch": 3.82, "learning_rate": 2e-05, "loss": 1.2939, "step": 344 }, { "epoch": 3.83, "learning_rate": 2e-05, "loss": 1.2775, "step": 345 }, { "epoch": 3.84, "learning_rate": 2e-05, "loss": 1.2644, "step": 346 }, { "epoch": 3.86, "learning_rate": 2e-05, "loss": 1.2018, "step": 347 }, { "epoch": 3.87, "learning_rate": 2e-05, "loss": 1.2024, "step": 348 }, { "epoch": 3.88, "learning_rate": 2e-05, "loss": 1.1545, "step": 349 }, { "epoch": 3.89, "learning_rate": 2e-05, "loss": 1.2889, "step": 350 }, { "epoch": 3.9, "learning_rate": 2e-05, "loss": 1.325, "step": 351 }, { "epoch": 3.91, "learning_rate": 2e-05, "loss": 1.2361, "step": 352 }, { "epoch": 3.92, "learning_rate": 2e-05, "loss": 1.1623, "step": 353 }, { "epoch": 3.93, "learning_rate": 2e-05, "loss": 1.2293, "step": 354 }, { "epoch": 3.94, "learning_rate": 2e-05, "loss": 1.2276, "step": 355 }, { "epoch": 3.96, "learning_rate": 2e-05, "loss": 1.3927, "step": 356 }, { "epoch": 3.97, "learning_rate": 2e-05, "loss": 1.2086, "step": 357 }, { "epoch": 3.98, "learning_rate": 2e-05, "loss": 1.2342, "step": 358 }, { "epoch": 3.99, "learning_rate": 2e-05, "loss": 1.3277, "step": 359 }, { "epoch": 4.0, "learning_rate": 2e-05, "loss": 1.095, "step": 360 }, { "epoch": 4.01, "learning_rate": 2e-05, "loss": 1.2089, "step": 361 }, { "epoch": 4.02, "learning_rate": 2e-05, "loss": 1.1913, "step": 362 }, { "epoch": 4.03, "learning_rate": 2e-05, "loss": 1.2266, "step": 363 }, { "epoch": 4.04, "learning_rate": 2e-05, "loss": 1.2277, "step": 364 }, { "epoch": 4.06, "learning_rate": 2e-05, "loss": 1.1979, "step": 365 }, { "epoch": 4.07, "learning_rate": 2e-05, "loss": 1.0823, "step": 366 }, { "epoch": 4.08, "learning_rate": 2e-05, "loss": 1.154, "step": 367 }, { "epoch": 4.09, "learning_rate": 2e-05, "loss": 1.0512, "step": 368 }, { "epoch": 4.1, "learning_rate": 2e-05, "loss": 1.2045, "step": 369 }, { "epoch": 4.11, "learning_rate": 2e-05, "loss": 1.014, "step": 370 }, { "epoch": 4.12, "learning_rate": 2e-05, "loss": 1.3316, "step": 371 }, { "epoch": 4.13, "learning_rate": 2e-05, "loss": 1.203, "step": 372 }, { "epoch": 4.14, "learning_rate": 2e-05, "loss": 1.12, "step": 373 }, { "epoch": 4.16, "learning_rate": 2e-05, "loss": 1.1264, "step": 374 }, { "epoch": 4.17, "learning_rate": 2e-05, "loss": 1.1057, "step": 375 }, { "epoch": 4.18, "learning_rate": 2e-05, "loss": 1.2399, "step": 376 }, { "epoch": 4.19, "learning_rate": 2e-05, "loss": 1.1301, "step": 377 }, { "epoch": 4.2, "learning_rate": 2e-05, "loss": 1.3355, "step": 378 }, { "epoch": 4.21, "learning_rate": 2e-05, "loss": 1.1899, "step": 379 }, { "epoch": 4.22, "learning_rate": 2e-05, "loss": 1.2065, "step": 380 }, { "epoch": 4.23, "learning_rate": 2e-05, "loss": 1.136, "step": 381 }, { "epoch": 4.24, "learning_rate": 2e-05, "loss": 1.1253, "step": 382 }, { "epoch": 4.26, "learning_rate": 2e-05, "loss": 1.0939, "step": 383 }, { "epoch": 4.27, "learning_rate": 2e-05, "loss": 1.1914, "step": 384 }, { "epoch": 4.28, "learning_rate": 2e-05, "loss": 1.0254, "step": 385 }, { "epoch": 4.29, "learning_rate": 2e-05, "loss": 1.2813, "step": 386 }, { "epoch": 4.3, "learning_rate": 2e-05, "loss": 1.33, "step": 387 }, { "epoch": 4.31, "learning_rate": 2e-05, "loss": 1.2726, "step": 388 }, { "epoch": 4.32, "learning_rate": 2e-05, "loss": 1.285, "step": 389 }, { "epoch": 4.33, "learning_rate": 2e-05, "loss": 1.2668, "step": 390 }, { "epoch": 4.34, "learning_rate": 2e-05, "loss": 1.1492, "step": 391 }, { "epoch": 4.36, "learning_rate": 2e-05, "loss": 1.3813, "step": 392 }, { "epoch": 4.37, 
"learning_rate": 2e-05, "loss": 1.2568, "step": 393 }, { "epoch": 4.38, "learning_rate": 2e-05, "loss": 1.2187, "step": 394 }, { "epoch": 4.39, "learning_rate": 2e-05, "loss": 1.2138, "step": 395 }, { "epoch": 4.4, "learning_rate": 2e-05, "loss": 1.1566, "step": 396 }, { "epoch": 4.41, "learning_rate": 2e-05, "loss": 1.1737, "step": 397 }, { "epoch": 4.42, "learning_rate": 2e-05, "loss": 1.0587, "step": 398 }, { "epoch": 4.43, "learning_rate": 2e-05, "loss": 1.2795, "step": 399 }, { "epoch": 4.44, "learning_rate": 2e-05, "loss": 1.1618, "step": 400 }, { "epoch": 4.46, "learning_rate": 2e-05, "loss": 1.34, "step": 401 }, { "epoch": 4.47, "learning_rate": 2e-05, "loss": 1.209, "step": 402 }, { "epoch": 4.48, "learning_rate": 2e-05, "loss": 1.2528, "step": 403 }, { "epoch": 4.49, "learning_rate": 2e-05, "loss": 1.2385, "step": 404 }, { "epoch": 4.5, "learning_rate": 2e-05, "loss": 1.1926, "step": 405 }, { "epoch": 4.51, "learning_rate": 2e-05, "loss": 1.1949, "step": 406 }, { "epoch": 4.52, "learning_rate": 2e-05, "loss": 1.2489, "step": 407 }, { "epoch": 4.53, "learning_rate": 2e-05, "loss": 1.2361, "step": 408 }, { "epoch": 4.54, "learning_rate": 2e-05, "loss": 1.186, "step": 409 }, { "epoch": 4.56, "learning_rate": 2e-05, "loss": 1.2088, "step": 410 }, { "epoch": 4.57, "learning_rate": 2e-05, "loss": 1.2573, "step": 411 }, { "epoch": 4.58, "learning_rate": 2e-05, "loss": 1.3231, "step": 412 }, { "epoch": 4.59, "learning_rate": 2e-05, "loss": 1.1717, "step": 413 }, { "epoch": 4.6, "learning_rate": 2e-05, "loss": 1.2754, "step": 414 }, { "epoch": 4.61, "learning_rate": 2e-05, "loss": 1.1537, "step": 415 }, { "epoch": 4.62, "learning_rate": 2e-05, "loss": 1.3393, "step": 416 }, { "epoch": 4.63, "learning_rate": 2e-05, "loss": 1.3616, "step": 417 }, { "epoch": 4.64, "learning_rate": 2e-05, "loss": 1.2624, "step": 418 }, { "epoch": 4.66, "learning_rate": 2e-05, "loss": 1.3203, "step": 419 }, { "epoch": 4.67, "learning_rate": 2e-05, "loss": 1.1883, "step": 420 }, { "epoch": 4.68, "learning_rate": 2e-05, "loss": 1.1471, "step": 421 }, { "epoch": 4.69, "learning_rate": 2e-05, "loss": 1.2844, "step": 422 }, { "epoch": 4.7, "learning_rate": 2e-05, "loss": 1.1279, "step": 423 }, { "epoch": 4.71, "learning_rate": 2e-05, "loss": 1.1775, "step": 424 }, { "epoch": 4.72, "learning_rate": 2e-05, "loss": 1.172, "step": 425 }, { "epoch": 4.73, "learning_rate": 2e-05, "loss": 1.1898, "step": 426 }, { "epoch": 4.74, "learning_rate": 2e-05, "loss": 1.3115, "step": 427 }, { "epoch": 4.76, "learning_rate": 2e-05, "loss": 1.1819, "step": 428 }, { "epoch": 4.77, "learning_rate": 2e-05, "loss": 1.4133, "step": 429 }, { "epoch": 4.78, "learning_rate": 2e-05, "loss": 1.0906, "step": 430 }, { "epoch": 4.79, "learning_rate": 2e-05, "loss": 1.1073, "step": 431 }, { "epoch": 4.8, "learning_rate": 2e-05, "loss": 1.2781, "step": 432 }, { "epoch": 4.81, "learning_rate": 2e-05, "loss": 1.2681, "step": 433 }, { "epoch": 4.82, "learning_rate": 2e-05, "loss": 1.1153, "step": 434 }, { "epoch": 4.83, "learning_rate": 2e-05, "loss": 1.3497, "step": 435 }, { "epoch": 4.84, "learning_rate": 2e-05, "loss": 1.2134, "step": 436 }, { "epoch": 4.86, "learning_rate": 2e-05, "loss": 1.3535, "step": 437 }, { "epoch": 4.87, "learning_rate": 2e-05, "loss": 1.3021, "step": 438 }, { "epoch": 4.88, "learning_rate": 2e-05, "loss": 1.1148, "step": 439 }, { "epoch": 4.89, "learning_rate": 2e-05, "loss": 1.0422, "step": 440 }, { "epoch": 4.9, "learning_rate": 2e-05, "loss": 1.3359, "step": 441 }, { "epoch": 4.91, "learning_rate": 2e-05, "loss": 
1.128, "step": 442 }, { "epoch": 4.92, "learning_rate": 2e-05, "loss": 1.2882, "step": 443 }, { "epoch": 4.93, "learning_rate": 2e-05, "loss": 1.2593, "step": 444 }, { "epoch": 4.94, "learning_rate": 2e-05, "loss": 1.2965, "step": 445 }, { "epoch": 4.96, "learning_rate": 2e-05, "loss": 1.2561, "step": 446 }, { "epoch": 4.97, "learning_rate": 2e-05, "loss": 1.195, "step": 447 }, { "epoch": 4.98, "learning_rate": 2e-05, "loss": 1.1439, "step": 448 }, { "epoch": 4.99, "learning_rate": 2e-05, "loss": 1.2452, "step": 449 }, { "epoch": 5.0, "learning_rate": 2e-05, "loss": 1.3703, "step": 450 }, { "epoch": 5.0, "step": 450, "total_flos": 1.347899379867648e+17, "train_loss": 1.2846886356671652, "train_runtime": 1802.7215, "train_samples_per_second": 1.997, "train_steps_per_second": 0.25 } ], "logging_steps": 1, "max_steps": 450, "num_train_epochs": 5, "save_steps": 500, "total_flos": 1.347899379867648e+17, "trial_name": null, "trial_params": null }