|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.998097567604294, |
|
"eval_steps": 500, |
|
"global_step": 1838, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 7.142857142857143e-07, |
|
"loss": 2.422, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 2.4497, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.1428571428571427e-06, |
|
"loss": 2.3952, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 2.4029, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5714285714285718e-06, |
|
"loss": 2.3259, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 2.3632, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5e-06, |
|
"loss": 2.2819, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.7142857142857145e-06, |
|
"loss": 2.3343, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.4285714285714295e-06, |
|
"loss": 2.3087, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.1428571428571436e-06, |
|
"loss": 2.2965, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.857142857142858e-06, |
|
"loss": 2.256, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 2.2867, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.285714285714288e-06, |
|
"loss": 2.2479, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1e-05, |
|
"loss": 2.3395, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0714285714285714e-05, |
|
"loss": 2.2321, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1428571428571429e-05, |
|
"loss": 2.2262, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2142857142857142e-05, |
|
"loss": 2.2642, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2857142857142859e-05, |
|
"loss": 2.2324, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3571428571428574e-05, |
|
"loss": 2.2856, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 2.2749, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 2.2005, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5714285714285715e-05, |
|
"loss": 2.1922, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.642857142857143e-05, |
|
"loss": 2.2262, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7142857142857142e-05, |
|
"loss": 2.2273, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.785714285714286e-05, |
|
"loss": 2.097, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8571428571428575e-05, |
|
"loss": 2.2123, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.928571428571429e-05, |
|
"loss": 2.1893, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2e-05, |
|
"loss": 2.2119, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.999993783958694e-05, |
|
"loss": 2.2133, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9999751359120544e-05, |
|
"loss": 2.1557, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9999440560919153e-05, |
|
"loss": 2.1473, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.999900544884663e-05, |
|
"loss": 2.1789, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9998446028312334e-05, |
|
"loss": 2.2071, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.999776230627102e-05, |
|
"loss": 2.1029, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9996954291222778e-05, |
|
"loss": 2.1411, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.999602199321292e-05, |
|
"loss": 2.1792, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9994965423831853e-05, |
|
"loss": 2.1753, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9993784596214932e-05, |
|
"loss": 2.1749, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9992479525042305e-05, |
|
"loss": 2.1159, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.999105022653872e-05, |
|
"loss": 2.125, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.998949671847334e-05, |
|
"loss": 2.1453, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9987819020159503e-05, |
|
"loss": 2.1158, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9986017152454497e-05, |
|
"loss": 2.1194, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.998409113775928e-05, |
|
"loss": 2.1211, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9982041000018237e-05, |
|
"loss": 2.1652, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9979866764718846e-05, |
|
"loss": 2.1269, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9977568458891377e-05, |
|
"loss": 2.0816, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.997514611110856e-05, |
|
"loss": 2.0884, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9972599751485225e-05, |
|
"loss": 2.0982, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.996992941167792e-05, |
|
"loss": 2.0934, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.996713512488454e-05, |
|
"loss": 2.0704, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9964216925843876e-05, |
|
"loss": 2.1215, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9961174850835224e-05, |
|
"loss": 2.1173, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.995800893767792e-05, |
|
"loss": 2.039, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9954719225730847e-05, |
|
"loss": 2.077, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9951305755891982e-05, |
|
"loss": 2.0687, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9947768570597865e-05, |
|
"loss": 2.0389, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9944107713823068e-05, |
|
"loss": 2.0584, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9940323231079674e-05, |
|
"loss": 2.0666, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.993641516941668e-05, |
|
"loss": 2.0508, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9932383577419432e-05, |
|
"loss": 2.129, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9928228505209017e-05, |
|
"loss": 2.0056, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.992395000444163e-05, |
|
"loss": 2.0473, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9919548128307954e-05, |
|
"loss": 1.9965, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.991502293153247e-05, |
|
"loss": 2.0373, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9910374470372805e-05, |
|
"loss": 2.0406, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.990560280261901e-05, |
|
"loss": 2.0437, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9900707987592847e-05, |
|
"loss": 2.0367, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9895690086147065e-05, |
|
"loss": 1.9801, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9890549160664633e-05, |
|
"loss": 2.0334, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9885285275057954e-05, |
|
"loss": 1.9997, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9879898494768093e-05, |
|
"loss": 2.0198, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9874388886763944e-05, |
|
"loss": 2.0302, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9868756519541416e-05, |
|
"loss": 2.0155, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9863001463122557e-05, |
|
"loss": 1.9745, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9857123789054707e-05, |
|
"loss": 2.0129, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9851123570409595e-05, |
|
"loss": 2.0115, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9845000881782434e-05, |
|
"loss": 1.9893, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9838755799290993e-05, |
|
"loss": 1.954, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9832388400574658e-05, |
|
"loss": 2.0076, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9825898764793455e-05, |
|
"loss": 1.9959, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9819286972627066e-05, |
|
"loss": 1.9442, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9812553106273848e-05, |
|
"loss": 2.0281, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9805697249449774e-05, |
|
"loss": 1.9917, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.979871948738743e-05, |
|
"loss": 1.9503, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9791619906834928e-05, |
|
"loss": 2.0444, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9784398596054833e-05, |
|
"loss": 1.9531, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9777055644823087e-05, |
|
"loss": 1.9387, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.976959114442786e-05, |
|
"loss": 1.9421, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9762005187668442e-05, |
|
"loss": 2.0183, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9754297868854075e-05, |
|
"loss": 1.9711, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9746469283802773e-05, |
|
"loss": 1.9696, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9738519529840162e-05, |
|
"loss": 1.9482, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.973044870579824e-05, |
|
"loss": 1.9837, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9722256912014153e-05, |
|
"loss": 1.9391, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.971394425032896e-05, |
|
"loss": 1.9329, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.970551082408636e-05, |
|
"loss": 1.9301, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9696956738131406e-05, |
|
"loss": 1.9736, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9688282098809195e-05, |
|
"loss": 1.95, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9679487013963566e-05, |
|
"loss": 1.9916, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9670571592935733e-05, |
|
"loss": 1.9198, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9661535946562954e-05, |
|
"loss": 1.9665, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9652380187177128e-05, |
|
"loss": 1.9197, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.964310442860341e-05, |
|
"loss": 1.9704, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9633708786158803e-05, |
|
"loss": 1.9304, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9624193376650708e-05, |
|
"loss": 1.958, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.961455831837548e-05, |
|
"loss": 1.893, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9604803731116953e-05, |
|
"loss": 1.9692, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9594929736144978e-05, |
|
"loss": 1.9248, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.958493645621386e-05, |
|
"loss": 1.9701, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.957482401556089e-05, |
|
"loss": 1.932, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.956459253990476e-05, |
|
"loss": 1.9386, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.955424215644403e-05, |
|
"loss": 1.8804, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.954377299385551e-05, |
|
"loss": 1.9025, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9533185182292705e-05, |
|
"loss": 1.8824, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9522478853384154e-05, |
|
"loss": 1.9541, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.951165414023183e-05, |
|
"loss": 1.9414, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9500711177409456e-05, |
|
"loss": 1.8997, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.948965010096085e-05, |
|
"loss": 1.9041, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9478471048398233e-05, |
|
"loss": 1.8728, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9467174158700507e-05, |
|
"loss": 1.9314, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9455759572311535e-05, |
|
"loss": 1.8404, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.94442274311384e-05, |
|
"loss": 1.8847, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9432577878549635e-05, |
|
"loss": 1.939, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9420811059373444e-05, |
|
"loss": 1.8801, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9408927119895888e-05, |
|
"loss": 1.9351, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9396926207859085e-05, |
|
"loss": 1.9459, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.938480847245937e-05, |
|
"loss": 1.9396, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9372574064345424e-05, |
|
"loss": 1.8986, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9360223135616423e-05, |
|
"loss": 1.8859, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9347755839820138e-05, |
|
"loss": 1.9002, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9335172331951014e-05, |
|
"loss": 1.9257, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.932247276844826e-05, |
|
"loss": 1.9266, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.93096573071939e-05, |
|
"loss": 1.9285, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.929672610751081e-05, |
|
"loss": 1.8927, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9283679330160726e-05, |
|
"loss": 1.8301, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.927051713734227e-05, |
|
"loss": 1.8755, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9257239692688907e-05, |
|
"loss": 1.848, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9243847161266924e-05, |
|
"loss": 1.8943, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9230339709573376e-05, |
|
"loss": 1.9041, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9216717505534025e-05, |
|
"loss": 1.8478, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.920298071850123e-05, |
|
"loss": 1.8383, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9189129519251873e-05, |
|
"loss": 1.9098, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9175164079985196e-05, |
|
"loss": 1.8996, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9161084574320696e-05, |
|
"loss": 1.9136, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9146891177295956e-05, |
|
"loss": 1.8666, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9132584065364455e-05, |
|
"loss": 1.8936, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9118163416393392e-05, |
|
"loss": 1.8678, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9103629409661468e-05, |
|
"loss": 1.8798, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9088982225856653e-05, |
|
"loss": 1.8753, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9074222047073945e-05, |
|
"loss": 1.8768, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.905934905681311e-05, |
|
"loss": 1.8844, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9044363439976393e-05, |
|
"loss": 1.8549, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9029265382866216e-05, |
|
"loss": 1.8632, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9014055073182872e-05, |
|
"loss": 1.9043, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8998732700022186e-05, |
|
"loss": 1.9065, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8983298453873172e-05, |
|
"loss": 1.8886, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.896775252661565e-05, |
|
"loss": 1.8515, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8952095111517875e-05, |
|
"loss": 1.8296, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8936326403234125e-05, |
|
"loss": 1.8673, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8920446597802276e-05, |
|
"loss": 1.9119, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8904455892641394e-05, |
|
"loss": 1.8574, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8888354486549238e-05, |
|
"loss": 1.8388, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8872142579699818e-05, |
|
"loss": 1.8829, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.88558203736409e-05, |
|
"loss": 1.8707, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8839388071291506e-05, |
|
"loss": 1.8343, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8822845876939365e-05, |
|
"loss": 1.8576, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.880619399623841e-05, |
|
"loss": 1.8613, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8789432636206197e-05, |
|
"loss": 1.7953, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8772562005221338e-05, |
|
"loss": 1.87, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8755582313020912e-05, |
|
"loss": 1.8275, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.873849377069785e-05, |
|
"loss": 1.8532, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.872129659069833e-05, |
|
"loss": 1.8742, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.870399098681911e-05, |
|
"loss": 1.8423, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8686577174204887e-05, |
|
"loss": 1.8021, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8669055369345617e-05, |
|
"loss": 1.8343, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.865142579007383e-05, |
|
"loss": 1.8753, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.863368865556191e-05, |
|
"loss": 1.8335, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8615844186319377e-05, |
|
"loss": 1.8022, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8597892604190148e-05, |
|
"loss": 1.8993, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8579834132349773e-05, |
|
"loss": 1.8056, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8561668995302668e-05, |
|
"loss": 1.8487, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8543397418879315e-05, |
|
"loss": 1.8424, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8525019630233463e-05, |
|
"loss": 1.8354, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8506535857839304e-05, |
|
"loss": 1.8145, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8487946331488613e-05, |
|
"loss": 1.7753, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8469251282287925e-05, |
|
"loss": 1.8, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.845045094265563e-05, |
|
"loss": 1.787, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.843154554631911e-05, |
|
"loss": 1.809, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8412535328311813e-05, |
|
"loss": 1.8421, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8393420524970337e-05, |
|
"loss": 1.8344, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8374201373931497e-05, |
|
"loss": 1.7844, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8354878114129368e-05, |
|
"loss": 1.8122, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8335450985792307e-05, |
|
"loss": 1.8689, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8315920230439985e-05, |
|
"loss": 1.802, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8296286090880362e-05, |
|
"loss": 1.8247, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8276548811206684e-05, |
|
"loss": 1.8199, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8256708636794446e-05, |
|
"loss": 1.8626, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8236765814298328e-05, |
|
"loss": 1.8371, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8216720591649152e-05, |
|
"loss": 1.7781, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8196573218050784e-05, |
|
"loss": 1.8401, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8176323943977034e-05, |
|
"loss": 1.8454, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8155973021168546e-05, |
|
"loss": 1.8241, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8135520702629677e-05, |
|
"loss": 1.8396, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8114967242625342e-05, |
|
"loss": 1.7954, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8094312896677853e-05, |
|
"loss": 1.8167, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8073557921563747e-05, |
|
"loss": 1.7939, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8052702575310588e-05, |
|
"loss": 1.7459, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.803174711719376e-05, |
|
"loss": 1.7744, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.8010691807733253e-05, |
|
"loss": 1.7739, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7989536908690413e-05, |
|
"loss": 1.8295, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.796828268306469e-05, |
|
"loss": 1.8341, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7946929395090382e-05, |
|
"loss": 1.7919, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.792547731023332e-05, |
|
"loss": 1.8405, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7903926695187595e-05, |
|
"loss": 1.7965, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7882277817872236e-05, |
|
"loss": 1.8004, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7860530947427878e-05, |
|
"loss": 1.7944, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.78386863542134e-05, |
|
"loss": 1.8024, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7816744309802604e-05, |
|
"loss": 1.7964, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.779470508698079e-05, |
|
"loss": 1.8238, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.77725689597414e-05, |
|
"loss": 1.8253, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7750336203282588e-05, |
|
"loss": 1.7792, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.772800709400383e-05, |
|
"loss": 1.767, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7705581909502457e-05, |
|
"loss": 1.8158, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.768306092857021e-05, |
|
"loss": 1.8087, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.766044443118978e-05, |
|
"loss": 1.7894, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.763773269853134e-05, |
|
"loss": 1.7663, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7614926012949027e-05, |
|
"loss": 1.793, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7592024657977432e-05, |
|
"loss": 1.8305, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.75690289183281e-05, |
|
"loss": 1.8158, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7545939079885962e-05, |
|
"loss": 1.7655, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.75227554297058e-05, |
|
"loss": 1.7749, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7499478256008658e-05, |
|
"loss": 1.7694, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7476107848178298e-05, |
|
"loss": 1.775, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.745264449675755e-05, |
|
"loss": 1.7868, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7429088493444738e-05, |
|
"loss": 1.7893, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.740544013109005e-05, |
|
"loss": 1.7614, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7381699703691866e-05, |
|
"loss": 1.7538, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7357867506393154e-05, |
|
"loss": 1.7761, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7333943835477752e-05, |
|
"loss": 1.7411, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.730992898836672e-05, |
|
"loss": 1.7823, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7285823263614613e-05, |
|
"loss": 1.8391, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7261626960905795e-05, |
|
"loss": 1.8099, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.72373403810507e-05, |
|
"loss": 1.7953, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7212963825982103e-05, |
|
"loss": 1.7979, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7188497598751343e-05, |
|
"loss": 1.8395, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7163942003524574e-05, |
|
"loss": 1.7335, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7139297345578992e-05, |
|
"loss": 1.7415, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7114563931299016e-05, |
|
"loss": 1.7761, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.70897420681725e-05, |
|
"loss": 1.8035, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.7064832064786894e-05, |
|
"loss": 1.7779, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.703983423082542e-05, |
|
"loss": 1.7921, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.7014748877063212e-05, |
|
"loss": 1.7611, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6989576315363467e-05, |
|
"loss": 1.763, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6964316858673538e-05, |
|
"loss": 1.8226, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.693897082102109e-05, |
|
"loss": 1.7751, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6913538517510155e-05, |
|
"loss": 1.7234, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6888020264317227e-05, |
|
"loss": 1.7529, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.686241637868734e-05, |
|
"loss": 1.7853, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.683672717893011e-05, |
|
"loss": 1.7837, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6810952984415804e-05, |
|
"loss": 1.7382, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6785094115571323e-05, |
|
"loss": 1.7846, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6759150893876272e-05, |
|
"loss": 1.7314, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6733123641858915e-05, |
|
"loss": 1.7938, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.670701268309221e-05, |
|
"loss": 1.8066, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6680818342189748e-05, |
|
"loss": 1.7837, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.665454094480174e-05, |
|
"loss": 1.7621, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6628180817610963e-05, |
|
"loss": 1.6982, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6601738288328692e-05, |
|
"loss": 1.7988, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.657521368569064e-05, |
|
"loss": 1.7403, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 1.7521, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.652191958038763e-05, |
|
"loss": 1.7478, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.649515074027939e-05, |
|
"loss": 1.806, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6468301151920576e-05, |
|
"loss": 1.7771, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6441371149107478e-05, |
|
"loss": 1.7893, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6414361066636123e-05, |
|
"loss": 1.7405, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6387271240298082e-05, |
|
"loss": 1.7464, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6360102006876317e-05, |
|
"loss": 1.741, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.633285370414098e-05, |
|
"loss": 1.7354, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6305526670845225e-05, |
|
"loss": 1.7579, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.627812124672099e-05, |
|
"loss": 1.7034, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6250637772474764e-05, |
|
"loss": 1.7857, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6223076589783368e-05, |
|
"loss": 1.8225, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.619543804128971e-05, |
|
"loss": 1.7726, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6167722470598506e-05, |
|
"loss": 1.7834, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.613993022227202e-05, |
|
"loss": 1.7551, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.611206164182578e-05, |
|
"loss": 1.7747, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.608411707572428e-05, |
|
"loss": 1.731, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.6056096871376667e-05, |
|
"loss": 1.7758, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.6028001377132447e-05, |
|
"loss": 1.7286, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5999830942277124e-05, |
|
"loss": 1.8262, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5971585917027864e-05, |
|
"loss": 1.6955, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5943266652529162e-05, |
|
"loss": 1.7457, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5914873500848446e-05, |
|
"loss": 1.7036, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5886406814971728e-05, |
|
"loss": 1.741, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5857866948799202e-05, |
|
"loss": 1.713, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.582925425714084e-05, |
|
"loss": 1.7516, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5800569095711983e-05, |
|
"loss": 1.7651, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5771811821128932e-05, |
|
"loss": 1.7862, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.57429827909045e-05, |
|
"loss": 1.7699, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5714082363443576e-05, |
|
"loss": 1.7597, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5685110898038653e-05, |
|
"loss": 1.7237, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5656068754865388e-05, |
|
"loss": 1.7492, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5626956294978103e-05, |
|
"loss": 1.7847, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5597773880305307e-05, |
|
"loss": 1.7614, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5568521873645183e-05, |
|
"loss": 1.7552, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5539200638661106e-05, |
|
"loss": 1.7548, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.550981053987708e-05, |
|
"loss": 1.7262, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5480351942673248e-05, |
|
"loss": 1.7185, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5450825213281317e-05, |
|
"loss": 1.7144, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5421230718780036e-05, |
|
"loss": 1.7541, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.53915688270906e-05, |
|
"loss": 1.7444, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5361839906972095e-05, |
|
"loss": 1.7496, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5332044328016916e-05, |
|
"loss": 1.736, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5302182460646158e-05, |
|
"loss": 1.767, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5272254676105026e-05, |
|
"loss": 1.7799, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.524226134645821e-05, |
|
"loss": 1.7511, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5212202844585258e-05, |
|
"loss": 1.7502, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5182079544175957e-05, |
|
"loss": 1.7569, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5151891819725657e-05, |
|
"loss": 1.7103, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5121640046530651e-05, |
|
"loss": 1.7018, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5091324600683472e-05, |
|
"loss": 1.7335, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5060945859068252e-05, |
|
"loss": 1.8015, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5030504199356022e-05, |
|
"loss": 1.7309, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 1.6938, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4969433640230922e-05, |
|
"loss": 1.7381, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4938805500052294e-05, |
|
"loss": 1.7376, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4908115960235683e-05, |
|
"loss": 1.7744, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4877365402315986e-05, |
|
"loss": 1.7562, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4846554208586676e-05, |
|
"loss": 1.7368, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4815682762095065e-05, |
|
"loss": 1.7065, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4784751446637522e-05, |
|
"loss": 1.7647, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4753760646754714e-05, |
|
"loss": 1.7098, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.472271074772683e-05, |
|
"loss": 1.6901, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4691602135568776e-05, |
|
"loss": 1.7465, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4660435197025391e-05, |
|
"loss": 1.7038, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4629210319566626e-05, |
|
"loss": 1.7026, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4597927891382742e-05, |
|
"loss": 1.7556, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.456658830137947e-05, |
|
"loss": 1.7338, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4535191939173179e-05, |
|
"loss": 1.698, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4503739195086038e-05, |
|
"loss": 1.7293, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4472230460141159e-05, |
|
"loss": 1.6856, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4440666126057743e-05, |
|
"loss": 1.7491, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4409046585246193e-05, |
|
"loss": 1.7427, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4377372230803257e-05, |
|
"loss": 1.7147, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4345643456507126e-05, |
|
"loss": 1.697, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.4313860656812537e-05, |
|
"loss": 1.7302, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4282024226845885e-05, |
|
"loss": 1.7183, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4250134562400301e-05, |
|
"loss": 1.7126, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4218192059930724e-05, |
|
"loss": 1.7619, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.4186197116548987e-05, |
|
"loss": 1.6905, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 1.6881, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4122051498751144e-05, |
|
"loss": 1.6719, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4089901621798663e-05, |
|
"loss": 1.738, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.405770089885134e-05, |
|
"loss": 1.7501, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.402544973023123e-05, |
|
"loss": 1.744, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3993148516887522e-05, |
|
"loss": 1.7005, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.396079766039157e-05, |
|
"loss": 1.6991, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3928397562931895e-05, |
|
"loss": 1.7411, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3895948627309184e-05, |
|
"loss": 1.7382, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3863451256931286e-05, |
|
"loss": 1.6855, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3830905855808197e-05, |
|
"loss": 1.7052, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3798312828547028e-05, |
|
"loss": 1.6984, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3765672580346986e-05, |
|
"loss": 1.6992, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3732985516994341e-05, |
|
"loss": 1.7465, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3700252044857356e-05, |
|
"loss": 1.6975, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3667472570881264e-05, |
|
"loss": 1.7907, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3634647502583196e-05, |
|
"loss": 1.7531, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3601777248047105e-05, |
|
"loss": 1.6898, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.356886221591872e-05, |
|
"loss": 1.6602, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.3535902815400435e-05, |
|
"loss": 1.6931, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.350289945624624e-05, |
|
"loss": 1.74, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.3469852548756626e-05, |
|
"loss": 1.7237, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.3436762503773473e-05, |
|
"loss": 1.6722, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.3403629732674954e-05, |
|
"loss": 1.7103, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.3370454647370418e-05, |
|
"loss": 1.7433, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.3337237660295269e-05, |
|
"loss": 1.6665, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.3303979184405826e-05, |
|
"loss": 1.7245, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.3270679633174219e-05, |
|
"loss": 1.7018, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3237339420583213e-05, |
|
"loss": 1.7226, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3203958961121087e-05, |
|
"loss": 1.6809, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3170538669776469e-05, |
|
"loss": 1.7042, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3137078962033186e-05, |
|
"loss": 1.7106, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3103580253865086e-05, |
|
"loss": 1.7325, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3070042961730878e-05, |
|
"loss": 1.7218, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3036467502568945e-05, |
|
"loss": 1.6903, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3002854293792175e-05, |
|
"loss": 1.7216, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.296920375328275e-05, |
|
"loss": 1.735, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2935516299386972e-05, |
|
"loss": 1.6757, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2901792350910053e-05, |
|
"loss": 1.7137, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2868032327110904e-05, |
|
"loss": 1.7286, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.283423664769693e-05, |
|
"loss": 1.7215, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.280040573281881e-05, |
|
"loss": 1.7133, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2766540003065272e-05, |
|
"loss": 1.7211, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2732639879457867e-05, |
|
"loss": 1.7219, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2698705783445733e-05, |
|
"loss": 1.7033, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.266473813690035e-05, |
|
"loss": 1.7126, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2630737362110314e-05, |
|
"loss": 1.7169, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.259670388177606e-05, |
|
"loss": 1.6949, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2562638119004627e-05, |
|
"loss": 1.6698, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2528540497304398e-05, |
|
"loss": 1.6932, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2494411440579814e-05, |
|
"loss": 1.6716, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2460251373126136e-05, |
|
"loss": 1.6726, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2426060719624139e-05, |
|
"loss": 1.6908, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2391839905134853e-05, |
|
"loss": 1.7084, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2357589355094275e-05, |
|
"loss": 1.7016, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2323309495308066e-05, |
|
"loss": 1.7306, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.228900075194628e-05, |
|
"loss": 1.6963, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2254663551538047e-05, |
|
"loss": 1.7015, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2220298320966278e-05, |
|
"loss": 1.6627, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.2185905487462359e-05, |
|
"loss": 1.7094, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.215148547860084e-05, |
|
"loss": 1.7147, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.211703872229411e-05, |
|
"loss": 1.7279, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.2082565646787092e-05, |
|
"loss": 1.6936, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.2048066680651908e-05, |
|
"loss": 1.7198, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.2013542252782556e-05, |
|
"loss": 1.6956, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1978992792389574e-05, |
|
"loss": 1.7093, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.194441872899471e-05, |
|
"loss": 1.6571, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1909820492425574e-05, |
|
"loss": 1.6882, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1875198512810301e-05, |
|
"loss": 1.698, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1840553220572204e-05, |
|
"loss": 1.6806, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.180588504642442e-05, |
|
"loss": 1.725, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1771194421364546e-05, |
|
"loss": 1.726, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1736481776669307e-05, |
|
"loss": 1.7191, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1701747543889161e-05, |
|
"loss": 1.6475, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.166699215484297e-05, |
|
"loss": 1.6848, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1632216041612595e-05, |
|
"loss": 1.67, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.159741963653755e-05, |
|
"loss": 1.6746, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1562603372209616e-05, |
|
"loss": 1.6875, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1527767681467472e-05, |
|
"loss": 1.6981, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1492912997391296e-05, |
|
"loss": 1.679, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1458039753297408e-05, |
|
"loss": 1.6867, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 1.6921, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1388239319470035e-05, |
|
"loss": 1.7129, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1353312997501313e-05, |
|
"loss": 1.6651, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1318369851033604e-05, |
|
"loss": 1.7309, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1283410314482994e-05, |
|
"loss": 1.6894, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1248434822469334e-05, |
|
"loss": 1.6943, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.121344380981082e-05, |
|
"loss": 1.7303, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.117843771151862e-05, |
|
"loss": 1.7122, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.1143416962791437e-05, |
|
"loss": 1.6939, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.1108381999010111e-05, |
|
"loss": 1.6969, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.1073333255732208e-05, |
|
"loss": 1.7352, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.1038271168686603e-05, |
|
"loss": 1.6713, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.1003196173768051e-05, |
|
"loss": 1.7177, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0968108707031792e-05, |
|
"loss": 1.6492, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.093300920468811e-05, |
|
"loss": 1.6801, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0897898103096917e-05, |
|
"loss": 1.6938, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0862775838762332e-05, |
|
"loss": 1.7084, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.082764284832724e-05, |
|
"loss": 1.6669, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0792499568567885e-05, |
|
"loss": 1.6981, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0757346436388427e-05, |
|
"loss": 1.683, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0722183888815503e-05, |
|
"loss": 1.7075, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.068701236299281e-05, |
|
"loss": 1.6859, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0651832296175671e-05, |
|
"loss": 1.7037, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0616644125725573e-05, |
|
"loss": 1.7035, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0581448289104759e-05, |
|
"loss": 1.6886, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0546245223870783e-05, |
|
"loss": 1.6434, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0511035367671057e-05, |
|
"loss": 1.6806, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0475819158237426e-05, |
|
"loss": 1.7277, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0440597033380706e-05, |
|
"loss": 1.6252, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.040536943098527e-05, |
|
"loss": 1.649, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0370136789003582e-05, |
|
"loss": 1.6915, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0334899545450753e-05, |
|
"loss": 1.6248, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.029965813839911e-05, |
|
"loss": 1.7067, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0264413005972736e-05, |
|
"loss": 1.6492, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0229164586342024e-05, |
|
"loss": 1.6859, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0193913317718245e-05, |
|
"loss": 1.658, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.015865963834808e-05, |
|
"loss": 1.6379, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0123403986508184e-05, |
|
"loss": 1.6257, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0088146800499734e-05, |
|
"loss": 1.661, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0052888518642978e-05, |
|
"loss": 1.6839, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0017629579271789e-05, |
|
"loss": 1.6268, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.982370420728213e-06, |
|
"loss": 1.6114, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.947111481357023e-06, |
|
"loss": 1.6545, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.911853199500268e-06, |
|
"loss": 1.6429, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.876596013491817e-06, |
|
"loss": 1.635, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.841340361651921e-06, |
|
"loss": 1.6508, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.806086682281759e-06, |
|
"loss": 1.6623, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.770835413657978e-06, |
|
"loss": 1.6282, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.735586994027267e-06, |
|
"loss": 1.6316, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.700341861600894e-06, |
|
"loss": 1.6269, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.665100454549249e-06, |
|
"loss": 1.626, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.62986321099642e-06, |
|
"loss": 1.6456, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.594630569014733e-06, |
|
"loss": 1.6367, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.559402966619296e-06, |
|
"loss": 1.6145, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.524180841762577e-06, |
|
"loss": 1.6695, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.488964632328945e-06, |
|
"loss": 1.656, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.453754776129219e-06, |
|
"loss": 1.6895, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.418551710895243e-06, |
|
"loss": 1.6528, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.383355874274434e-06, |
|
"loss": 1.6276, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.348167703824334e-06, |
|
"loss": 1.6301, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.312987637007191e-06, |
|
"loss": 1.6541, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.277816111184504e-06, |
|
"loss": 1.6725, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.24265356361158e-06, |
|
"loss": 1.6714, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.207500431432115e-06, |
|
"loss": 1.6509, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.17235715167276e-06, |
|
"loss": 1.5701, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.137224161237672e-06, |
|
"loss": 1.6494, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.102101896903084e-06, |
|
"loss": 1.591, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.066990795311891e-06, |
|
"loss": 1.6227, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.03189129296821e-06, |
|
"loss": 1.6277, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.99680382623195e-06, |
|
"loss": 1.6702, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.9617288313134e-06, |
|
"loss": 1.6227, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.926666744267794e-06, |
|
"loss": 1.6527, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.89161800098989e-06, |
|
"loss": 1.6507, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.856583037208566e-06, |
|
"loss": 1.6088, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.821562288481383e-06, |
|
"loss": 1.5843, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.786556190189183e-06, |
|
"loss": 1.6324, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.751565177530669e-06, |
|
"loss": 1.5882, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.716589685517007e-06, |
|
"loss": 1.6663, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.681630148966397e-06, |
|
"loss": 1.609, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.646687002498692e-06, |
|
"loss": 1.6324, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.61176068052997e-06, |
|
"loss": 1.6329, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 1.6288, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.541960246702597e-06, |
|
"loss": 1.6064, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.507087002608707e-06, |
|
"loss": 1.6498, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.472232318532531e-06, |
|
"loss": 1.6407, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.437396627790384e-06, |
|
"loss": 1.6374, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.402580363462451e-06, |
|
"loss": 1.618, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.367783958387407e-06, |
|
"loss": 1.6261, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.333007845157032e-06, |
|
"loss": 1.6398, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.298252456110839e-06, |
|
"loss": 1.628, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.263518223330698e-06, |
|
"loss": 1.6068, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.228805578635457e-06, |
|
"loss": 1.6232, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.194114953575584e-06, |
|
"loss": 1.6444, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.159446779427798e-06, |
|
"loss": 1.6266, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.1248014871897e-06, |
|
"loss": 1.6385, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.090179507574428e-06, |
|
"loss": 1.6566, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.055581271005292e-06, |
|
"loss": 1.6199, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.021007207610427e-06, |
|
"loss": 1.6047, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.986457747217448e-06, |
|
"loss": 1.5977, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.951933319348095e-06, |
|
"loss": 1.6626, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.917434353212911e-06, |
|
"loss": 1.6085, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.882961277705897e-06, |
|
"loss": 1.6528, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.848514521399167e-06, |
|
"loss": 1.6223, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.814094512537645e-06, |
|
"loss": 1.5814, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.779701679033729e-06, |
|
"loss": 1.6478, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.745336448461958e-06, |
|
"loss": 1.6188, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.710999248053722e-06, |
|
"loss": 1.6254, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.676690504691935e-06, |
|
"loss": 1.694, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.642410644905726e-06, |
|
"loss": 1.6248, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.608160094865148e-06, |
|
"loss": 1.6333, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.573939280375864e-06, |
|
"loss": 1.5986, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.539748626873866e-06, |
|
"loss": 1.6283, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.505588559420188e-06, |
|
"loss": 1.6012, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.471459502695606e-06, |
|
"loss": 1.5643, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.4373618809953755e-06, |
|
"loss": 1.6646, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.403296118223944e-06, |
|
"loss": 1.6241, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.369262637889689e-06, |
|
"loss": 1.6218, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 7.335261863099652e-06, |
|
"loss": 1.6436, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 7.301294216554271e-06, |
|
"loss": 1.6144, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 7.267360120542135e-06, |
|
"loss": 1.6411, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 7.233459996934731e-06, |
|
"loss": 1.6185, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 7.199594267181193e-06, |
|
"loss": 1.6463, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 7.165763352303072e-06, |
|
"loss": 1.6129, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 7.131967672889101e-06, |
|
"loss": 1.6221, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 7.09820764908995e-06, |
|
"loss": 1.6203, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 7.0644837006130295e-06, |
|
"loss": 1.6553, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 7.0307962467172555e-06, |
|
"loss": 1.6053, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.99714570620783e-06, |
|
"loss": 1.6477, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.963532497431056e-06, |
|
"loss": 1.6295, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.929957038269123e-06, |
|
"loss": 1.5975, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.896419746134915e-06, |
|
"loss": 1.6256, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.862921037966815e-06, |
|
"loss": 1.6324, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.8294613302235325e-06, |
|
"loss": 1.6103, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.796041038878918e-06, |
|
"loss": 1.5883, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.762660579416791e-06, |
|
"loss": 1.6604, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.729320366825785e-06, |
|
"loss": 1.6339, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.696020815594176e-06, |
|
"loss": 1.5994, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.6627623397047355e-06, |
|
"loss": 1.6027, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.629545352629583e-06, |
|
"loss": 1.6474, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.59637026732505e-06, |
|
"loss": 1.6322, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.563237496226531e-06, |
|
"loss": 1.6382, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.530147451243377e-06, |
|
"loss": 1.6061, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.497100543753763e-06, |
|
"loss": 1.6787, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.4640971845995685e-06, |
|
"loss": 1.6073, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.431137784081283e-06, |
|
"loss": 1.6591, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.3982227519528986e-06, |
|
"loss": 1.6769, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.3653524974168105e-06, |
|
"loss": 1.6325, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.33252742911874e-06, |
|
"loss": 1.651, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.299747955142648e-06, |
|
"loss": 1.6184, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.267014483005664e-06, |
|
"loss": 1.5922, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.234327419653013e-06, |
|
"loss": 1.5832, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.2016871714529725e-06, |
|
"loss": 1.6537, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.169094144191805e-06, |
|
"loss": 1.6424, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 6.136548743068713e-06, |
|
"loss": 1.5831, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.1040513726908154e-06, |
|
"loss": 1.6628, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.071602437068108e-06, |
|
"loss": 1.6093, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.039202339608432e-06, |
|
"loss": 1.6575, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 6.00685148311248e-06, |
|
"loss": 1.6236, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.974550269768775e-06, |
|
"loss": 1.6254, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.9422991011486635e-06, |
|
"loss": 1.6757, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.910098378201341e-06, |
|
"loss": 1.6367, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.877948501248858e-06, |
|
"loss": 1.6179, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 1.6268, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.813802883451014e-06, |
|
"loss": 1.6498, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.781807940069277e-06, |
|
"loss": 1.6222, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.749865437599703e-06, |
|
"loss": 1.6241, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.717975773154116e-06, |
|
"loss": 1.6186, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.686139343187468e-06, |
|
"loss": 1.6451, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.654356543492883e-06, |
|
"loss": 1.6549, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.622627769196746e-06, |
|
"loss": 1.5829, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.59095341475381e-06, |
|
"loss": 1.5941, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.559333873942259e-06, |
|
"loss": 1.6329, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.527769539858839e-06, |
|
"loss": 1.638, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.4962608049139646e-06, |
|
"loss": 1.6026, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.464808060826825e-06, |
|
"loss": 1.6985, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.43341169862053e-06, |
|
"loss": 1.62, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.402072108617258e-06, |
|
"loss": 1.6195, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.370789680433376e-06, |
|
"loss": 1.5992, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.339564802974615e-06, |
|
"loss": 1.5754, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.308397864431226e-06, |
|
"loss": 1.6545, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.277289252273175e-06, |
|
"loss": 1.6625, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.24623935324529e-06, |
|
"loss": 1.6156, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.215248553362482e-06, |
|
"loss": 1.6134, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.184317237904939e-06, |
|
"loss": 1.5809, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.153445791413327e-06, |
|
"loss": 1.6192, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 5.122634597684016e-06, |
|
"loss": 1.667, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.091884039764321e-06, |
|
"loss": 1.6199, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.061194499947712e-06, |
|
"loss": 1.5865, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.03056635976908e-06, |
|
"loss": 1.6202, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 1.596, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.969495800643985e-06, |
|
"loss": 1.6162, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.939054140931751e-06, |
|
"loss": 1.6511, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.908675399316534e-06, |
|
"loss": 1.5728, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.878359953469354e-06, |
|
"loss": 1.6091, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.848108180274345e-06, |
|
"loss": 1.601, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.817920455824045e-06, |
|
"loss": 1.5653, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.787797155414742e-06, |
|
"loss": 1.5522, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.757738653541795e-06, |
|
"loss": 1.6041, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.727745323894976e-06, |
|
"loss": 1.5904, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.697817539353845e-06, |
|
"loss": 1.6125, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.66795567198309e-06, |
|
"loss": 1.5485, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.638160093027908e-06, |
|
"loss": 1.6128, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.608431172909405e-06, |
|
"loss": 1.6421, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.578769281219969e-06, |
|
"loss": 1.6406, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.549174786718684e-06, |
|
"loss": 1.617, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.519648057326757e-06, |
|
"loss": 1.632, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.490189460122926e-06, |
|
"loss": 1.6389, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.460799361338898e-06, |
|
"loss": 1.6284, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.431478126354818e-06, |
|
"loss": 1.6267, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.4022261196946995e-06, |
|
"loss": 1.6062, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.373043705021899e-06, |
|
"loss": 1.6064, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.343931245134616e-06, |
|
"loss": 1.5933, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.314889101961353e-06, |
|
"loss": 1.5707, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.2859176365564294e-06, |
|
"loss": 1.5996, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.257017209095503e-06, |
|
"loss": 1.6112, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.228188178871069e-06, |
|
"loss": 1.6038, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.19943090428802e-06, |
|
"loss": 1.6185, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.1707457428591615e-06, |
|
"loss": 1.6087, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.142133051200799e-06, |
|
"loss": 1.6118, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.113593185028273e-06, |
|
"loss": 1.6334, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.085126499151555e-06, |
|
"loss": 1.5767, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.056733347470842e-06, |
|
"loss": 1.657, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.028414082972141e-06, |
|
"loss": 1.6121, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.000169057722879e-06, |
|
"loss": 1.6357, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.9719986228675546e-06, |
|
"loss": 1.5881, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.943903128623336e-06, |
|
"loss": 1.6296, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.915882924275725e-06, |
|
"loss": 1.6005, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.887938358174224e-06, |
|
"loss": 1.5584, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.860069777727983e-06, |
|
"loss": 1.5739, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.832277529401494e-06, |
|
"loss": 1.5829, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.804561958710292e-06, |
|
"loss": 1.6117, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.7769234102166365e-06, |
|
"loss": 1.6283, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.7493622275252417e-06, |
|
"loss": 1.6123, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.7218787532790167e-06, |
|
"loss": 1.613, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.6944733291547784e-06, |
|
"loss": 1.5981, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.667146295859021e-06, |
|
"loss": 1.5747, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.6398979931236866e-06, |
|
"loss": 1.5758, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.612728759701919e-06, |
|
"loss": 1.6561, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.5856389333638795e-06, |
|
"loss": 1.6034, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.5586288508925203e-06, |
|
"loss": 1.5936, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.5316988480794255e-06, |
|
"loss": 1.6436, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.5048492597206107e-06, |
|
"loss": 1.5742, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.478080419612372e-06, |
|
"loss": 1.6403, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 1.5285, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.424786314309365e-06, |
|
"loss": 1.6122, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.398261711671309e-06, |
|
"loss": 1.6457, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.37181918238904e-06, |
|
"loss": 1.6318, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.3454590551982623e-06, |
|
"loss": 1.6204, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.3191816578102533e-06, |
|
"loss": 1.5928, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.292987316907792e-06, |
|
"loss": 1.5786, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.2668763581410877e-06, |
|
"loss": 1.6392, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.2408491061237325e-06, |
|
"loss": 1.6181, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.2149058844286796e-06, |
|
"loss": 1.6025, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.1890470155842023e-06, |
|
"loss": 1.6043, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.1632728210698902e-06, |
|
"loss": 1.6213, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.1375836213126653e-06, |
|
"loss": 1.6274, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.1119797356827796e-06, |
|
"loss": 1.6218, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.0864614824898487e-06, |
|
"loss": 1.6166, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.0610291789789094e-06, |
|
"loss": 1.6601, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.035683141326462e-06, |
|
"loss": 1.6031, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.0104236846365376e-06, |
|
"loss": 1.5814, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.9852511229367862e-06, |
|
"loss": 1.5826, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.96016576917458e-06, |
|
"loss": 1.5896, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.935167935213107e-06, |
|
"loss": 1.651, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.9102579318274994e-06, |
|
"loss": 1.5741, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.885436068700984e-06, |
|
"loss": 1.6375, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.8607026544210115e-06, |
|
"loss": 1.589, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.8360579964754277e-06, |
|
"loss": 1.6302, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.8115024012486624e-06, |
|
"loss": 1.6499, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.7870361740179007e-06, |
|
"loss": 1.614, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.7626596189492983e-06, |
|
"loss": 1.5965, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.7383730390942075e-06, |
|
"loss": 1.595, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.714176736385392e-06, |
|
"loss": 1.6051, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.690071011633284e-06, |
|
"loss": 1.5999, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.666056164522249e-06, |
|
"loss": 1.5581, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.6421324936068492e-06, |
|
"loss": 1.567, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.618300296308135e-06, |
|
"loss": 1.6331, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.594559868909956e-06, |
|
"loss": 1.624, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.5709115065552647e-06, |
|
"loss": 1.5888, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.5473555032424534e-06, |
|
"loss": 1.6057, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.5238921518217042e-06, |
|
"loss": 1.6054, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.500521743991342e-06, |
|
"loss": 1.6118, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.477244570294206e-06, |
|
"loss": 1.583, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4540609201140386e-06, |
|
"loss": 1.679, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4309710816719014e-06, |
|
"loss": 1.6098, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4079753420225694e-06, |
|
"loss": 1.5828, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3850739870509745e-06, |
|
"loss": 1.6061, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.362267301468659e-06, |
|
"loss": 1.6345, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.339555568810221e-06, |
|
"loss": 1.5623, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3169390714297936e-06, |
|
"loss": 1.5693, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.2944180904975457e-06, |
|
"loss": 1.6289, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.27199290599617e-06, |
|
"loss": 1.6167, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2496637967174104e-06, |
|
"loss": 1.6036, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2274310402586042e-06, |
|
"loss": 1.598, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2052949130192136e-06, |
|
"loss": 1.6045, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.1832556901973967e-06, |
|
"loss": 1.6274, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.161313645786599e-06, |
|
"loss": 1.6466, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.1394690525721275e-06, |
|
"loss": 1.5997, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.117722182127765e-06, |
|
"loss": 1.6361, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0960733048124082e-06, |
|
"loss": 1.5858, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0745226897666858e-06, |
|
"loss": 1.6085, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.0530706049096206e-06, |
|
"loss": 1.6317, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.0317173169353066e-06, |
|
"loss": 1.6084, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.010463091309587e-06, |
|
"loss": 1.5833, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.989308192266748e-06, |
|
"loss": 1.6618, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9682528828062397e-06, |
|
"loss": 1.5823, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.947297424689414e-06, |
|
"loss": 1.65, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.926442078436255e-06, |
|
"loss": 1.6057, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.9056871033221458e-06, |
|
"loss": 1.5702, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8850327573746584e-06, |
|
"loss": 1.6414, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.8644792973703252e-06, |
|
"loss": 1.6103, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.844026978831457e-06, |
|
"loss": 1.5989, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.8236760560229715e-06, |
|
"loss": 1.5267, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.803426781949219e-06, |
|
"loss": 1.5732, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7832794083508476e-06, |
|
"loss": 1.6437, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.7632341857016733e-06, |
|
"loss": 1.6356, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.7432913632055582e-06, |
|
"loss": 1.645, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.7234511887933159e-06, |
|
"loss": 1.6653, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.7037139091196396e-06, |
|
"loss": 1.6185, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.6840797695600187e-06, |
|
"loss": 1.6301, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.6645490142076948e-06, |
|
"loss": 1.5717, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.6451218858706374e-06, |
|
"loss": 1.5834, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.6257986260685077e-06, |
|
"loss": 1.5836, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.6065794750296648e-06, |
|
"loss": 1.5904, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 1.6012, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.56845445368089e-06, |
|
"loss": 1.6069, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5495490573443705e-06, |
|
"loss": 1.5809, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5307487177120773e-06, |
|
"loss": 1.5976, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5120536685113895e-06, |
|
"loss": 1.5876, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.493464142160701e-06, |
|
"loss": 1.6259, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4749803697665366e-06, |
|
"loss": 1.5831, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4566025811206875e-06, |
|
"loss": 1.6355, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4383310046973365e-06, |
|
"loss": 1.6095, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.4201658676502294e-06, |
|
"loss": 1.6515, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.4021073958098553e-06, |
|
"loss": 1.5863, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3841558136806254e-06, |
|
"loss": 1.6162, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3663113444380905e-06, |
|
"loss": 1.6111, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.34857420992617e-06, |
|
"loss": 1.5941, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.3309446306543827e-06, |
|
"loss": 1.5376, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.3134228257951142e-06, |
|
"loss": 1.5601, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2960090131808923e-06, |
|
"loss": 1.6233, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2787034093016726e-06, |
|
"loss": 1.5793, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2615062293021508e-06, |
|
"loss": 1.6271, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2444176869790925e-06, |
|
"loss": 1.611, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2274379947786662e-06, |
|
"loss": 1.6106, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2105673637938054e-06, |
|
"loss": 1.5766, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1938060037615906e-06, |
|
"loss": 1.6424, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1771541230606364e-06, |
|
"loss": 1.6204, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1606119287084982e-06, |
|
"loss": 1.568, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1441796263590988e-06, |
|
"loss": 1.5711, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1278574203001846e-06, |
|
"loss": 1.6569, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1116455134507665e-06, |
|
"loss": 1.62, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.095544107358607e-06, |
|
"loss": 1.6469, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0795534021977229e-06, |
|
"loss": 1.5438, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0636735967658785e-06, |
|
"loss": 1.6138, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0479048884821253e-06, |
|
"loss": 1.5924, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.03224747338435e-06, |
|
"loss": 1.5761, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.0167015461268303e-06, |
|
"loss": 1.5651, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.001267299977815e-06, |
|
"loss": 1.6368, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.859449268171328e-07, |
|
"loss": 1.6616, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.707346171337895e-07, |
|
"loss": 1.656, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.556365600236107e-07, |
|
"loss": 1.5951, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.406509431868893e-07, |
|
"loss": 1.6588, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.257779529260558e-07, |
|
"loss": 1.6394, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.110177741433501e-07, |
|
"loss": 1.5994, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.963705903385344e-07, |
|
"loss": 1.5851, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.818365836066101e-07, |
|
"loss": 1.5949, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.67415934635546e-07, |
|
"loss": 1.6209, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.53108822704044e-07, |
|
"loss": 1.5985, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.389154256793042e-07, |
|
"loss": 1.6188, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.248359200148059e-07, |
|
"loss": 1.5641, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.108704807481282e-07, |
|
"loss": 1.6266, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.970192814987676e-07, |
|
"loss": 1.5811, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.832824944659768e-07, |
|
"loss": 1.6541, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.696602904266237e-07, |
|
"loss": 1.5432, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.561528387330797e-07, |
|
"loss": 1.5934, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.427603073110967e-07, |
|
"loss": 1.6044, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.294828626577321e-07, |
|
"loss": 1.5923, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.163206698392744e-07, |
|
"loss": 1.5758, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.032738924891936e-07, |
|
"loss": 1.6244, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.903426928061019e-07, |
|
"loss": 1.6335, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.775272315517423e-07, |
|
"loss": 1.5721, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.648276680489896e-07, |
|
"loss": 1.5646, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.522441601798646e-07, |
|
"loss": 1.5941, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.397768643835755e-07, |
|
"loss": 1.6154, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.274259356545775e-07, |
|
"loss": 1.6103, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.151915275406339e-07, |
|
"loss": 1.6158, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.030737921409169e-07, |
|
"loss": 1.649, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.910728801041166e-07, |
|
"loss": 1.5911, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.791889406265583e-07, |
|
"loss": 1.6096, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.674221214503639e-07, |
|
"loss": 1.5872, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.557725688616001e-07, |
|
"loss": 1.5873, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.442404276884683e-07, |
|
"loss": 1.5804, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.328258412994958e-07, |
|
"loss": 1.6198, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.215289516017685e-07, |
|
"loss": 1.6101, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.103498990391509e-07, |
|
"loss": 1.625, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.992888225905467e-07, |
|
"loss": 1.5625, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.883458597681722e-07, |
|
"loss": 1.587, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.775211466158469e-07, |
|
"loss": 1.6131, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.6681481770729844e-07, |
|
"loss": 1.6255, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.562270061444907e-07, |
|
"loss": 1.6489, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.4575784355597284e-07, |
|
"loss": 1.5978, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.354074600952407e-07, |
|
"loss": 1.6011, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.2517598443911235e-07, |
|
"loss": 1.632, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.1506354378614277e-07, |
|
"loss": 1.6592, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 1.5987, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9519626888304684e-07, |
|
"loss": 1.62, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.8544168162452544e-07, |
|
"loss": 1.5558, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.7580662334929517e-07, |
|
"loss": 1.5665, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.662912138411967e-07, |
|
"loss": 1.5468, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.5689557139658824e-07, |
|
"loss": 1.6209, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.476198128228736e-07, |
|
"loss": 1.629, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.3846405343704757e-07, |
|
"loss": 1.6102, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.2942840706426725e-07, |
|
"loss": 1.6254, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.2051298603643754e-07, |
|
"loss": 1.61, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.117179011908067e-07, |
|
"loss": 1.6095, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.0304326186859525e-07, |
|
"loss": 1.5745, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.9448917591363923e-07, |
|
"loss": 1.6298, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.8605574967103924e-07, |
|
"loss": 1.6412, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.777430879858478e-07, |
|
"loss": 1.5913, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.6955129420176193e-07, |
|
"loss": 1.5864, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.6148047015983834e-07, |
|
"loss": 1.6392, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.535307161972278e-07, |
|
"loss": 1.5669, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.4570213114592957e-07, |
|
"loss": 1.5771, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.3799481233156008e-07, |
|
"loss": 1.6162, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.3040885557214065e-07, |
|
"loss": 1.6131, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.2294435517691504e-07, |
|
"loss": 1.6432, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.1560140394516815e-07, |
|
"loss": 1.6298, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.083800931650759e-07, |
|
"loss": 1.6646, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.0128051261257165e-07, |
|
"loss": 1.5325, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.9430275055022597e-07, |
|
"loss": 1.5823, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.874468937261531e-07, |
|
"loss": 1.6312, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8071302737293294e-07, |
|
"loss": 1.6128, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.7410123520654743e-07, |
|
"loss": 1.6452, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.6761159942534356e-07, |
|
"loss": 1.5895, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.612442007090076e-07, |
|
"loss": 1.6007, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.5499911821756918e-07, |
|
"loss": 1.5869, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4887642959040881e-07, |
|
"loss": 1.6045, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4287621094529524e-07, |
|
"loss": 1.6119, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.3699853687744514e-07, |
|
"loss": 1.5636, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.3124348045858716e-07, |
|
"loss": 1.5762, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.2561111323605714e-07, |
|
"loss": 1.5763, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.201015052319099e-07, |
|
"loss": 1.5797, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1471472494204994e-07, |
|
"loss": 1.58, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0945083933537104e-07, |
|
"loss": 1.6223, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0430991385293576e-07, |
|
"loss": 1.6336, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.929201240715503e-08, |
|
"loss": 1.6027, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.439719738099318e-08, |
|
"loss": 1.5486, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.962552962719528e-08, |
|
"loss": 1.6065, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.497706846752885e-08, |
|
"loss": 1.5629, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.04518716920466e-08, |
|
"loss": 1.5595, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.604999555837045e-08, |
|
"loss": 1.5957, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.177149479098644e-08, |
|
"loss": 1.5925, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.761642258056977e-08, |
|
"loss": 1.5969, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.358483058332199e-08, |
|
"loss": 1.5818, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.967676892032814e-08, |
|
"loss": 1.6095, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.5892286176932875e-08, |
|
"loss": 1.7004, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.223142940213865e-08, |
|
"loss": 1.5483, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.869424410801848e-08, |
|
"loss": 1.5991, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.528077426915412e-08, |
|
"loss": 1.605, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.1991062322082057e-08, |
|
"loss": 1.6146, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.88251491647762e-08, |
|
"loss": 1.6491, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.578307415612714e-08, |
|
"loss": 1.6435, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.2864875115464765e-08, |
|
"loss": 1.5878, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.0070588322079765e-08, |
|
"loss": 1.5822, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.7400248514776184e-08, |
|
"loss": 1.6123, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.4853888891439582e-08, |
|
"loss": 1.5567, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.2431541108624e-08, |
|
"loss": 1.6457, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.013323528115674e-08, |
|
"loss": 1.6099, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7958999981764203e-08, |
|
"loss": 1.598, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.590886224071997e-08, |
|
"loss": 1.5856, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3982847545507271e-08, |
|
"loss": 1.5992, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.2180979840498153e-08, |
|
"loss": 1.5829, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0503281526662579e-08, |
|
"loss": 1.5946, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 8.949773461282008e-09, |
|
"loss": 1.6109, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 7.520474957699586e-09, |
|
"loss": 1.6202, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.2154037850703505e-09, |
|
"loss": 1.63, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.034576168149175e-09, |
|
"loss": 1.5909, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.978006787079824e-09, |
|
"loss": 1.5918, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.0457087772228777e-09, |
|
"loss": 1.5599, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.237693728981416e-09, |
|
"loss": 1.6168, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.553971687667799e-09, |
|
"loss": 1.5127, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.945511533693275e-10, |
|
"loss": 1.5519, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.594390808494332e-10, |
|
"loss": 1.6029, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.4864087945664085e-10, |
|
"loss": 1.6068, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.216041306017495e-11, |
|
"loss": 1.6181, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.6149, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1838, |
|
"total_flos": 2.6076119341989888e+17, |
|
"train_loss": 1.732028792680153, |
|
"train_runtime": 22667.7018, |
|
"train_samples_per_second": 5.194, |
|
"train_steps_per_second": 0.081 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 1838, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 1000, |
|
"total_flos": 2.6076119341989888e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |