|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.9984550019312475, |
|
"eval_steps": 500, |
|
"global_step": 2588, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.5697, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.564102564102564e-08, |
|
"loss": 2.3369, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.128205128205128e-08, |
|
"loss": 2.3348, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0256410256410256e-07, |
|
"loss": 2.3988, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5384615384615385e-07, |
|
"loss": 2.4363, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.0512820512820512e-07, |
|
"loss": 2.3588, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.5641025641025636e-07, |
|
"loss": 2.3657, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.076923076923077e-07, |
|
"loss": 2.3402, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.5897435897435896e-07, |
|
"loss": 2.3937, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.1025641025641024e-07, |
|
"loss": 2.4769, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.6153846153846156e-07, |
|
"loss": 2.1873, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.128205128205127e-07, |
|
"loss": 2.3047, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.641025641025641e-07, |
|
"loss": 2.161, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.153846153846154e-07, |
|
"loss": 2.0211, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.666666666666666e-07, |
|
"loss": 2.1124, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.179487179487179e-07, |
|
"loss": 2.2242, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.692307692307693e-07, |
|
"loss": 2.286, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 8.205128205128205e-07, |
|
"loss": 2.2227, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 8.717948717948718e-07, |
|
"loss": 2.0711, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.230769230769231e-07, |
|
"loss": 2.1506, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.743589743589742e-07, |
|
"loss": 2.14, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.0256410256410255e-06, |
|
"loss": 2.1143, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.0769230769230769e-06, |
|
"loss": 2.036, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1282051282051281e-06, |
|
"loss": 2.1153, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.1794871794871795e-06, |
|
"loss": 2.0553, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2307692307692308e-06, |
|
"loss": 2.2378, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2820512820512822e-06, |
|
"loss": 1.8404, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.3333333333333332e-06, |
|
"loss": 2.1071, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.3846153846153844e-06, |
|
"loss": 2.0214, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4358974358974359e-06, |
|
"loss": 1.9517, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.487179487179487e-06, |
|
"loss": 1.9885, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.5384615384615385e-06, |
|
"loss": 2.0129, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.5897435897435895e-06, |
|
"loss": 1.9631, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.641025641025641e-06, |
|
"loss": 1.9949, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.6923076923076922e-06, |
|
"loss": 2.031, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.7435897435897436e-06, |
|
"loss": 2.0105, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.7948717948717948e-06, |
|
"loss": 1.984, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.8461538461538462e-06, |
|
"loss": 1.9213, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.8974358974358973e-06, |
|
"loss": 2.0379, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9487179487179485e-06, |
|
"loss": 1.955, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2e-06, |
|
"loss": 1.8524, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9999968668436226e-06, |
|
"loss": 2.0125, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999987467394123e-06, |
|
"loss": 2.1746, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9999718017104015e-06, |
|
"loss": 1.9293, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9999498698906243e-06, |
|
"loss": 1.9394, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9999216720722226e-06, |
|
"loss": 2.0692, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999887208431893e-06, |
|
"loss": 2.0711, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.999846479185596e-06, |
|
"loss": 1.847, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9997994845885525e-06, |
|
"loss": 1.9367, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9997462249352464e-06, |
|
"loss": 1.9585, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.999686700559419e-06, |
|
"loss": 2.0348, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9996209118340687e-06, |
|
"loss": 1.9504, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9995488591714483e-06, |
|
"loss": 1.9144, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9994705430230622e-06, |
|
"loss": 1.9307, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.999385963879664e-06, |
|
"loss": 2.0213, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9992951222712525e-06, |
|
"loss": 2.0064, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9991980187670706e-06, |
|
"loss": 2.0478, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9990946539755986e-06, |
|
"loss": 1.9109, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9989850285445526e-06, |
|
"loss": 1.9247, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9988691431608804e-06, |
|
"loss": 1.9108, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9987469985507553e-06, |
|
"loss": 2.1015, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.998618595479574e-06, |
|
"loss": 1.9171, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.99848393475195e-06, |
|
"loss": 1.9811, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9983430172117105e-06, |
|
"loss": 2.0563, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.998195843741888e-06, |
|
"loss": 1.8801, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9980424152647173e-06, |
|
"loss": 1.8937, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.99788273274163e-06, |
|
"loss": 1.8312, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9977167971732456e-06, |
|
"loss": 2.0657, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9975446095993697e-06, |
|
"loss": 1.9725, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9973661710989825e-06, |
|
"loss": 1.8266, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9971814827902357e-06, |
|
"loss": 1.9097, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9969905458304442e-06, |
|
"loss": 1.8952, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9967933614160787e-06, |
|
"loss": 1.9183, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.996589930782758e-06, |
|
"loss": 1.8918, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9963802552052426e-06, |
|
"loss": 1.8186, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.996164335997425e-06, |
|
"loss": 1.975, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9959421745123216e-06, |
|
"loss": 1.934, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9957137721420675e-06, |
|
"loss": 1.9189, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9954791303179026e-06, |
|
"loss": 2.0058, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9952382505101654e-06, |
|
"loss": 1.8476, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9949911342282845e-06, |
|
"loss": 1.9127, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9947377830207685e-06, |
|
"loss": 1.9451, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.994478198475194e-06, |
|
"loss": 1.8618, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9942123822182e-06, |
|
"loss": 1.8733, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.993940335915474e-06, |
|
"loss": 1.8766, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9936620612717426e-06, |
|
"loss": 1.8367, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.993377560030763e-06, |
|
"loss": 1.9046, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9930868339753076e-06, |
|
"loss": 1.9289, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.992789884927158e-06, |
|
"loss": 1.9739, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.992486714747089e-06, |
|
"loss": 1.9757, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.99217732533486e-06, |
|
"loss": 1.8021, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.991861718629202e-06, |
|
"loss": 1.8618, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9915398966078052e-06, |
|
"loss": 2.0311, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9912118612873066e-06, |
|
"loss": 1.9473, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.990877614723279e-06, |
|
"loss": 1.8588, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9905371590102153e-06, |
|
"loss": 1.8763, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.990190496281517e-06, |
|
"loss": 1.9192, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9898376287094825e-06, |
|
"loss": 1.9306, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9894785585052896e-06, |
|
"loss": 1.8198, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9891132879189843e-06, |
|
"loss": 1.9363, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9887418192394667e-06, |
|
"loss": 1.961, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9883641547944755e-06, |
|
"loss": 1.987, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9879802969505746e-06, |
|
"loss": 1.9039, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9875902481131365e-06, |
|
"loss": 1.803, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.98719401072633e-06, |
|
"loss": 1.8843, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9867915872731027e-06, |
|
"loss": 1.9404, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.986382980275165e-06, |
|
"loss": 1.6843, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9859681922929764e-06, |
|
"loss": 1.8471, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9855472259257282e-06, |
|
"loss": 1.9118, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9851200838113275e-06, |
|
"loss": 1.9658, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.98468676862638e-06, |
|
"loss": 1.9117, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9842472830861743e-06, |
|
"loss": 1.9864, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9838016299446648e-06, |
|
"loss": 1.9184, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9833498119944523e-06, |
|
"loss": 1.9284, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.982891832066771e-06, |
|
"loss": 1.8806, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9824276930314646e-06, |
|
"loss": 1.9573, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9819573977969748e-06, |
|
"loss": 1.9234, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.981480949310318e-06, |
|
"loss": 2.039, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9809983505570693e-06, |
|
"loss": 1.7877, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.980509604561344e-06, |
|
"loss": 1.8714, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9800147143857772e-06, |
|
"loss": 1.9176, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.979513683131505e-06, |
|
"loss": 1.9746, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9790065139381464e-06, |
|
"loss": 1.8056, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.978493209983782e-06, |
|
"loss": 1.8755, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.977973774484935e-06, |
|
"loss": 1.8937, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9774482106965513e-06, |
|
"loss": 1.9763, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9769165219119764e-06, |
|
"loss": 1.8513, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.97637871146294e-06, |
|
"loss": 1.8992, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.97583478271953e-06, |
|
"loss": 1.843, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.975284739090174e-06, |
|
"loss": 1.9599, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.974728584021618e-06, |
|
"loss": 1.9457, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9741663209989025e-06, |
|
"loss": 1.9473, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.973597953545344e-06, |
|
"loss": 1.9482, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9730234852225113e-06, |
|
"loss": 2.0576, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9724429196302017e-06, |
|
"loss": 1.9497, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.971856260406421e-06, |
|
"loss": 1.8798, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9712635112273597e-06, |
|
"loss": 1.82, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9706646758073693e-06, |
|
"loss": 1.8849, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.97005975789894e-06, |
|
"loss": 1.9943, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.969448761292676e-06, |
|
"loss": 1.8151, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.968831689817274e-06, |
|
"loss": 1.9047, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9682085473394966e-06, |
|
"loss": 1.9864, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.967579337764149e-06, |
|
"loss": 1.9399, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9669440650340556e-06, |
|
"loss": 1.918, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.966302733130034e-06, |
|
"loss": 2.0827, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9656553460708705e-06, |
|
"loss": 1.8794, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9650019079132947e-06, |
|
"loss": 1.971, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9643424227519547e-06, |
|
"loss": 1.8937, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.96367689471939e-06, |
|
"loss": 1.9809, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9630053279860086e-06, |
|
"loss": 1.8516, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.962327726760057e-06, |
|
"loss": 1.8671, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9616440952875964e-06, |
|
"loss": 1.8827, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9609544378524757e-06, |
|
"loss": 2.0144, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.960258758776304e-06, |
|
"loss": 1.9509, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.959557062418424e-06, |
|
"loss": 1.8525, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.958849353175884e-06, |
|
"loss": 1.8554, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.958135635483412e-06, |
|
"loss": 1.9764, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9574159138133864e-06, |
|
"loss": 1.9485, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9566901926758076e-06, |
|
"loss": 1.7491, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9559584766182724e-06, |
|
"loss": 1.8623, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.955220770225941e-06, |
|
"loss": 2.0079, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9544770781215134e-06, |
|
"loss": 1.9188, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9537274049651965e-06, |
|
"loss": 1.7707, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.952971755454677e-06, |
|
"loss": 1.7864, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.952210134325091e-06, |
|
"loss": 1.977, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9514425463489945e-06, |
|
"loss": 1.841, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.950668996336334e-06, |
|
"loss": 1.8996, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.949889489134416e-06, |
|
"loss": 1.9262, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.949104029627876e-06, |
|
"loss": 1.8152, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9483126227386495e-06, |
|
"loss": 1.9181, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.947515273425939e-06, |
|
"loss": 1.7915, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9467119866861844e-06, |
|
"loss": 1.8844, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9459027675530327e-06, |
|
"loss": 1.9846, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9450876210973034e-06, |
|
"loss": 1.8323, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.944266552426959e-06, |
|
"loss": 1.8946, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9434395666870734e-06, |
|
"loss": 2.0483, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.942606669059797e-06, |
|
"loss": 1.857, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9417678647643266e-06, |
|
"loss": 1.8837, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9409231590568734e-06, |
|
"loss": 1.8, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.940072557230627e-06, |
|
"loss": 1.9081, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.939216064615724e-06, |
|
"loss": 1.8239, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.938353686579216e-06, |
|
"loss": 1.8054, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9374854285250323e-06, |
|
"loss": 1.7697, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9366112958939503e-06, |
|
"loss": 1.9451, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.935731294163558e-06, |
|
"loss": 1.9249, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.934845428848222e-06, |
|
"loss": 1.9443, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9339537054990508e-06, |
|
"loss": 1.8793, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.933056129703862e-06, |
|
"loss": 1.8155, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9321527070871465e-06, |
|
"loss": 1.8284, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9312434433100327e-06, |
|
"loss": 1.9193, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.930328344070252e-06, |
|
"loss": 1.8524, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.929407415102102e-06, |
|
"loss": 1.7907, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9284806621764126e-06, |
|
"loss": 1.9347, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9275480911005066e-06, |
|
"loss": 1.7719, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9266097077181663e-06, |
|
"loss": 1.8312, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.925665517909595e-06, |
|
"loss": 1.6917, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9247155275913824e-06, |
|
"loss": 1.8541, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9237597427164636e-06, |
|
"loss": 1.9176, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.922798169274087e-06, |
|
"loss": 1.8734, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9218308132897715e-06, |
|
"loss": 1.7785, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.920857680825272e-06, |
|
"loss": 1.8514, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9198787779785425e-06, |
|
"loss": 1.7701, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9188941108836925e-06, |
|
"loss": 1.9361, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9179036857109554e-06, |
|
"loss": 1.8618, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.916907508666644e-06, |
|
"loss": 1.8191, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.915905585993116e-06, |
|
"loss": 1.8981, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.914897923968732e-06, |
|
"loss": 1.8425, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.913884528907817e-06, |
|
"loss": 1.8637, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.912865407160622e-06, |
|
"loss": 1.7149, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.911840565113282e-06, |
|
"loss": 1.8883, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9108100091877785e-06, |
|
"loss": 1.9168, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9097737458418964e-06, |
|
"loss": 1.7796, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.908731781569187e-06, |
|
"loss": 1.7504, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.907684122898923e-06, |
|
"loss": 1.9456, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9066307763960628e-06, |
|
"loss": 1.9743, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9055717486612037e-06, |
|
"loss": 1.8858, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9045070463305452e-06, |
|
"loss": 1.7597, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9034366760758456e-06, |
|
"loss": 2.0285, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9023606446043787e-06, |
|
"loss": 1.951, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9012789586588949e-06, |
|
"loss": 1.7775, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9001916250175762e-06, |
|
"loss": 1.8796, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8990986504939959e-06, |
|
"loss": 1.8969, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.898000041937074e-06, |
|
"loss": 1.9649, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8968958062310344e-06, |
|
"loss": 1.9044, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8957859502953648e-06, |
|
"loss": 1.8616, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8946704810847687e-06, |
|
"loss": 1.8935, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8935494055891253e-06, |
|
"loss": 1.9628, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8924227308334442e-06, |
|
"loss": 1.7305, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.891290463877822e-06, |
|
"loss": 1.917, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8901526118173972e-06, |
|
"loss": 1.875, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8890091817823071e-06, |
|
"loss": 1.7643, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8878601809376415e-06, |
|
"loss": 2.0231, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8867056164833995e-06, |
|
"loss": 1.8586, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8855454956544428e-06, |
|
"loss": 1.829, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8843798257204512e-06, |
|
"loss": 1.9326, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8832086139858774e-06, |
|
"loss": 1.894, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8820318677899004e-06, |
|
"loss": 1.9465, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8808495945063793e-06, |
|
"loss": 1.7266, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8796618015438091e-06, |
|
"loss": 2.029, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8784684963452718e-06, |
|
"loss": 1.7227, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8772696863883904e-06, |
|
"loss": 1.9609, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8760653791852836e-06, |
|
"loss": 1.9502, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8748555822825169e-06, |
|
"loss": 1.7898, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.873640303261056e-06, |
|
"loss": 1.9254, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.8724195497362192e-06, |
|
"loss": 1.7824, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.87119332935763e-06, |
|
"loss": 1.9062, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.869961649809169e-06, |
|
"loss": 1.8907, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.868724518808925e-06, |
|
"loss": 1.898, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8674819441091483e-06, |
|
"loss": 1.6844, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8662339334962001e-06, |
|
"loss": 1.9793, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8649804947905055e-06, |
|
"loss": 1.9557, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8637216358465034e-06, |
|
"loss": 2.1077, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8624573645525978e-06, |
|
"loss": 1.8411, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8611876888311078e-06, |
|
"loss": 1.7176, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8599126166382186e-06, |
|
"loss": 2.0167, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8586321559639315e-06, |
|
"loss": 2.0379, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8573463148320134e-06, |
|
"loss": 1.8084, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.856055101299947e-06, |
|
"loss": 1.8404, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8547585234588803e-06, |
|
"loss": 1.9097, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8534565894335755e-06, |
|
"loss": 1.9473, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.852149307382358e-06, |
|
"loss": 1.8777, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8508366854970668e-06, |
|
"loss": 1.9101, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8495187320030002e-06, |
|
"loss": 1.9262, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8481954551588679e-06, |
|
"loss": 1.8021, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8468668632567357e-06, |
|
"loss": 1.9045, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8455329646219764e-06, |
|
"loss": 1.8201, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.844193767613216e-06, |
|
"loss": 1.9146, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8428492806222819e-06, |
|
"loss": 1.924, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8414995120741497e-06, |
|
"loss": 1.9159, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8401444704268916e-06, |
|
"loss": 1.9255, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8387841641716222e-06, |
|
"loss": 2.0718, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.837418601832446e-06, |
|
"loss": 1.8706, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8360477919664037e-06, |
|
"loss": 1.7503, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.8346717431634186e-06, |
|
"loss": 1.7474, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.833290464046243e-06, |
|
"loss": 1.9686, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.831903963270404e-06, |
|
"loss": 1.8571, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.8305122495241486e-06, |
|
"loss": 1.9247, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.8291153315283904e-06, |
|
"loss": 1.8846, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.8277132180366551e-06, |
|
"loss": 1.8882, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8263059178350237e-06, |
|
"loss": 1.7906, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.82489343974208e-06, |
|
"loss": 1.9279, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8234757926088528e-06, |
|
"loss": 1.8469, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8220529853187632e-06, |
|
"loss": 1.8738, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.820625026787566e-06, |
|
"loss": 1.8807, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.8191919259632962e-06, |
|
"loss": 2.0734, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.817753691826212e-06, |
|
"loss": 1.7294, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.8163103333887377e-06, |
|
"loss": 1.7365, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.8148618596954092e-06, |
|
"loss": 1.8011, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.8134082798228158e-06, |
|
"loss": 1.9163, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8119496028795432e-06, |
|
"loss": 1.8797, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8104858380061176e-06, |
|
"loss": 1.7912, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.8090169943749474e-06, |
|
"loss": 1.8681, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.8075430811902662e-06, |
|
"loss": 1.8047, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.806064107688075e-06, |
|
"loss": 1.7869, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.804580083136084e-06, |
|
"loss": 1.9826, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.8030910168336555e-06, |
|
"loss": 1.8603, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.8015969181117447e-06, |
|
"loss": 1.8858, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.8000977963328416e-06, |
|
"loss": 1.8022, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7985936608909121e-06, |
|
"loss": 1.7757, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7970845212113387e-06, |
|
"loss": 1.9535, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.795570386750863e-06, |
|
"loss": 1.7578, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7940512669975254e-06, |
|
"loss": 1.882, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7925271714706053e-06, |
|
"loss": 1.8345, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7909981097205615e-06, |
|
"loss": 1.9188, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7894640913289735e-06, |
|
"loss": 1.8421, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7879251259084802e-06, |
|
"loss": 1.852, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7863812231027202e-06, |
|
"loss": 1.8908, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7848323925862715e-06, |
|
"loss": 1.935, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7832786440645907e-06, |
|
"loss": 1.9136, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7817199872739514e-06, |
|
"loss": 1.6953, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.780156431981385e-06, |
|
"loss": 1.8422, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.778587987984618e-06, |
|
"loss": 1.9945, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7770146651120106e-06, |
|
"loss": 1.93, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7754364732224963e-06, |
|
"loss": 1.7876, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7738534222055189e-06, |
|
"loss": 1.8509, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7722655219809714e-06, |
|
"loss": 1.8929, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7706727824991332e-06, |
|
"loss": 1.7261, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7690752137406078e-06, |
|
"loss": 1.7547, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7674728257162608e-06, |
|
"loss": 1.8251, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7658656284671566e-06, |
|
"loss": 1.6881, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7642536320644962e-06, |
|
"loss": 1.784, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7626368466095532e-06, |
|
"loss": 1.7579, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.7610152822336103e-06, |
|
"loss": 1.835, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.7593889490978976e-06, |
|
"loss": 1.9273, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.757757857393527e-06, |
|
"loss": 1.7032, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.7561220173414296e-06, |
|
"loss": 1.8224, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.75448143919229e-06, |
|
"loss": 1.8265, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.7528361332264844e-06, |
|
"loss": 1.7415, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.7511861097540147e-06, |
|
"loss": 1.8503, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.749531379114444e-06, |
|
"loss": 1.8644, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.747871951676832e-06, |
|
"loss": 1.8244, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.7462078378396702e-06, |
|
"loss": 1.7677, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.7445390480308154e-06, |
|
"loss": 1.7912, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.7428655927074277e-06, |
|
"loss": 1.9271, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.7411874823559009e-06, |
|
"loss": 1.972, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.7395047274917993e-06, |
|
"loss": 1.7755, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.7378173386597916e-06, |
|
"loss": 1.8354, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.7361253264335836e-06, |
|
"loss": 1.6089, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.734428701415853e-06, |
|
"loss": 1.7224, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.732727474238183e-06, |
|
"loss": 1.6833, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.731021655560995e-06, |
|
"loss": 1.7555, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.7293112560734823e-06, |
|
"loss": 1.8383, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.7275962864935433e-06, |
|
"loss": 1.8652, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.7258767575677138e-06, |
|
"loss": 1.8083, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.7241526800710995e-06, |
|
"loss": 1.8201, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.7224240648073095e-06, |
|
"loss": 1.96, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.7206909226083871e-06, |
|
"loss": 1.8835, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.718953264334744e-06, |
|
"loss": 1.7952, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.7172111008750908e-06, |
|
"loss": 1.7536, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.715464443146367e-06, |
|
"loss": 1.9064, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.713713302093678e-06, |
|
"loss": 1.8183, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.7119576886902204e-06, |
|
"loss": 1.7305, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.710197613937217e-06, |
|
"loss": 1.7559, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.7084330888638463e-06, |
|
"loss": 1.7539, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.706664124527175e-06, |
|
"loss": 1.8263, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.7048907320120863e-06, |
|
"loss": 1.8781, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.703112922431213e-06, |
|
"loss": 1.7234, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.7013307069248652e-06, |
|
"loss": 1.7359, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.699544096660963e-06, |
|
"loss": 1.7359, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.697753102834965e-06, |
|
"loss": 1.7533, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.6959577366697989e-06, |
|
"loss": 1.863, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.6941580094157905e-06, |
|
"loss": 1.8307, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.6923539323505933e-06, |
|
"loss": 1.9062, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.690545516779119e-06, |
|
"loss": 1.712, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.6887327740334647e-06, |
|
"loss": 1.8202, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.6869157154728435e-06, |
|
"loss": 1.8079, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.685094352483513e-06, |
|
"loss": 1.7445, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.6832686964787026e-06, |
|
"loss": 1.7642, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.6814387588985443e-06, |
|
"loss": 1.8107, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.6796045512099992e-06, |
|
"loss": 1.7574, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.6777660849067867e-06, |
|
"loss": 1.7368, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.675923371509311e-06, |
|
"loss": 1.7956, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.674076422564591e-06, |
|
"loss": 1.6367, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.6722252496461865e-06, |
|
"loss": 1.8135, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.6703698643541255e-06, |
|
"loss": 1.7462, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.6685102783148328e-06, |
|
"loss": 1.7503, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.6666465031810561e-06, |
|
"loss": 1.7548, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.6647785506317934e-06, |
|
"loss": 1.8592, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.6629064323722196e-06, |
|
"loss": 1.7668, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.6610301601336127e-06, |
|
"loss": 1.6886, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.6591497456732824e-06, |
|
"loss": 1.8148, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.6572652007744928e-06, |
|
"loss": 1.7362, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.6553765372463921e-06, |
|
"loss": 1.8556, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.6534837669239367e-06, |
|
"loss": 1.857, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.6515869016678173e-06, |
|
"loss": 1.9135, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.649685953364385e-06, |
|
"loss": 1.8161, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.6477809339255759e-06, |
|
"loss": 1.8621, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.645871855288838e-06, |
|
"loss": 1.7759, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.6439587294170557e-06, |
|
"loss": 1.7955, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.6420415682984725e-06, |
|
"loss": 1.827, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.640120383946621e-06, |
|
"loss": 1.928, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.638195188400243e-06, |
|
"loss": 1.8544, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.636265993723215e-06, |
|
"loss": 1.8115, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.6343328120044753e-06, |
|
"loss": 1.8244, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.6323956553579445e-06, |
|
"loss": 1.7569, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.630454535922452e-06, |
|
"loss": 1.7995, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.62850946586166e-06, |
|
"loss": 1.7745, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.626560457363985e-06, |
|
"loss": 1.848, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.6246075226425239e-06, |
|
"loss": 1.8783, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.622650673934977e-06, |
|
"loss": 1.7377, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.62068992350357e-06, |
|
"loss": 1.6871, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.618725283634978e-06, |
|
"loss": 1.8517, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6167567666402495e-06, |
|
"loss": 1.7776, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6147843848547273e-06, |
|
"loss": 1.7212, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6128081506379727e-06, |
|
"loss": 1.8177, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.610828076373687e-06, |
|
"loss": 1.7746, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6088441744696354e-06, |
|
"loss": 1.7181, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6068564573575673e-06, |
|
"loss": 1.8437, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6048649374931395e-06, |
|
"loss": 1.665, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.6028696273558394e-06, |
|
"loss": 1.7477, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.600870539448903e-06, |
|
"loss": 1.9075, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5988676862992416e-06, |
|
"loss": 1.8513, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5968610804573586e-06, |
|
"loss": 1.8466, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5948507344972738e-06, |
|
"loss": 1.7391, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5928366610164442e-06, |
|
"loss": 1.7106, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5908188726356842e-06, |
|
"loss": 1.7513, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5887973819990862e-06, |
|
"loss": 1.9148, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5867722017739434e-06, |
|
"loss": 1.79, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.584743344650668e-06, |
|
"loss": 1.9419, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.5827108233427139e-06, |
|
"loss": 1.7563, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.5806746505864944e-06, |
|
"loss": 1.932, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.5786348391413057e-06, |
|
"loss": 1.8348, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.5765914017892436e-06, |
|
"loss": 1.8044, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.574544351335126e-06, |
|
"loss": 1.7619, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.5724937006064114e-06, |
|
"loss": 1.7901, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.5704394624531184e-06, |
|
"loss": 1.7489, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.5683816497477454e-06, |
|
"loss": 1.6718, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.566320275385191e-06, |
|
"loss": 1.7883, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5642553522826717e-06, |
|
"loss": 1.8084, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5621868933796408e-06, |
|
"loss": 1.8455, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5601149116377092e-06, |
|
"loss": 1.8094, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.5580394200405627e-06, |
|
"loss": 1.918, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.5559604315938802e-06, |
|
"loss": 1.8383, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.5538779593252537e-06, |
|
"loss": 1.7645, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.5517920162841062e-06, |
|
"loss": 1.7629, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.5497026155416086e-06, |
|
"loss": 1.7574, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.5476097701905999e-06, |
|
"loss": 1.7994, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.5455134933455032e-06, |
|
"loss": 1.85, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.5434137981422452e-06, |
|
"loss": 1.7758, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.5413106977381724e-06, |
|
"loss": 1.7414, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.5392042053119698e-06, |
|
"loss": 1.7693, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.5370943340635777e-06, |
|
"loss": 1.7338, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.5349810972141097e-06, |
|
"loss": 1.7876, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.532864508005768e-06, |
|
"loss": 1.841, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.5307445797017632e-06, |
|
"loss": 1.8523, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.5286213255862292e-06, |
|
"loss": 1.7955, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.5264947589641394e-06, |
|
"loss": 1.7693, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.5243648931612266e-06, |
|
"loss": 1.7764, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.522231741523895e-06, |
|
"loss": 1.7133, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.5200953174191402e-06, |
|
"loss": 1.7142, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.5179556342344641e-06, |
|
"loss": 1.7553, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.5158127053777907e-06, |
|
"loss": 1.6906, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.5136665442773823e-06, |
|
"loss": 1.8748, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.5115171643817553e-06, |
|
"loss": 1.7746, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.5093645791595967e-06, |
|
"loss": 1.7909, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.507208802099679e-06, |
|
"loss": 1.7669, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.5050498467107747e-06, |
|
"loss": 1.7813, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.502887726521574e-06, |
|
"loss": 1.7797, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.5007224550805985e-06, |
|
"loss": 1.5573, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.4985540459561157e-06, |
|
"loss": 1.7936, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.4963825127360558e-06, |
|
"loss": 1.7777, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.4942078690279244e-06, |
|
"loss": 1.6408, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.49203012845872e-06, |
|
"loss": 1.7945, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.4898493046748458e-06, |
|
"loss": 1.8931, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.4876654113420253e-06, |
|
"loss": 1.8076, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.4854784621452175e-06, |
|
"loss": 1.866, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.48328847078853e-06, |
|
"loss": 1.7224, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.4810954509951336e-06, |
|
"loss": 1.8615, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.4788994165071762e-06, |
|
"loss": 1.6081, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.4767003810856966e-06, |
|
"loss": 1.6602, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.4744983585105385e-06, |
|
"loss": 1.8131, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.4722933625802645e-06, |
|
"loss": 1.7316, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.4700854071120678e-06, |
|
"loss": 1.6491, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.467874505941689e-06, |
|
"loss": 1.7703, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.4656606729233257e-06, |
|
"loss": 1.7789, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.4634439219295477e-06, |
|
"loss": 1.8016, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.461224266851211e-06, |
|
"loss": 1.8745, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.4590017215973674e-06, |
|
"loss": 1.7951, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.456776300095181e-06, |
|
"loss": 1.8513, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.4545480162898395e-06, |
|
"loss": 1.7876, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.4523168841444656e-06, |
|
"loss": 1.8696, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.450082917640031e-06, |
|
"loss": 1.7017, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.4478461307752687e-06, |
|
"loss": 1.724, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.445606537566585e-06, |
|
"loss": 1.7305, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.4433641520479709e-06, |
|
"loss": 1.8398, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.4411189882709158e-06, |
|
"loss": 1.8815, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.438871060304318e-06, |
|
"loss": 1.8124, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.436620382234397e-06, |
|
"loss": 1.8293, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.4343669681646056e-06, |
|
"loss": 1.9203, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.432110832215541e-06, |
|
"loss": 1.8347, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.4298519885248572e-06, |
|
"loss": 1.5941, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.4275904512471747e-06, |
|
"loss": 1.7325, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.4253262345539934e-06, |
|
"loss": 1.8463, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.4230593526336034e-06, |
|
"loss": 1.8358, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.420789819690996e-06, |
|
"loss": 1.7777, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.418517649947774e-06, |
|
"loss": 1.7727, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.4162428576420641e-06, |
|
"loss": 1.7962, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.4139654570284259e-06, |
|
"loss": 1.6427, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.4116854623777642e-06, |
|
"loss": 1.7686, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.4094028879772384e-06, |
|
"loss": 1.7569, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4071177481301738e-06, |
|
"loss": 1.7376, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4048300571559711e-06, |
|
"loss": 1.8214, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4025398293900175e-06, |
|
"loss": 1.7069, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.4002470791835965e-06, |
|
"loss": 1.7283, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.3979518209037983e-06, |
|
"loss": 1.8163, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.3956540689334285e-06, |
|
"loss": 1.8732, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.39335383767092e-06, |
|
"loss": 1.8257, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.3910511415302414e-06, |
|
"loss": 1.7225, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.3887459949408062e-06, |
|
"loss": 1.7624, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.3864384123473847e-06, |
|
"loss": 1.7821, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.384128408210011e-06, |
|
"loss": 1.8441, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.381815997003893e-06, |
|
"loss": 1.7825, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.379501193219323e-06, |
|
"loss": 1.77, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.377184011361585e-06, |
|
"loss": 1.7741, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.374864465950866e-06, |
|
"loss": 1.7832, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.3725425715221623e-06, |
|
"loss": 1.8521, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.3702183426251909e-06, |
|
"loss": 1.6873, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.367891793824297e-06, |
|
"loss": 1.8806, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.3655629396983624e-06, |
|
"loss": 1.717, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.3632317948407165e-06, |
|
"loss": 1.6907, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.3608983738590412e-06, |
|
"loss": 1.7584, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.3585626913752825e-06, |
|
"loss": 1.7731, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.3562247620255574e-06, |
|
"loss": 1.9717, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.3538846004600625e-06, |
|
"loss": 1.6558, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.3515422213429813e-06, |
|
"loss": 1.7423, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.3491976393523949e-06, |
|
"loss": 1.7268, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.3468508691801867e-06, |
|
"loss": 1.7645, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.3445019255319533e-06, |
|
"loss": 1.8005, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.342150823126909e-06, |
|
"loss": 1.7992, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.3397975766977982e-06, |
|
"loss": 1.7183, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.3374422009907982e-06, |
|
"loss": 1.6962, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.33508471076543e-06, |
|
"loss": 1.7685, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.3327251207944646e-06, |
|
"loss": 1.6572, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.3303634458638307e-06, |
|
"loss": 1.7565, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.327999700772522e-06, |
|
"loss": 1.784, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3256339003325051e-06, |
|
"loss": 1.7148, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3232660593686246e-06, |
|
"loss": 1.7926, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3208961927185131e-06, |
|
"loss": 1.6588, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.318524315232496e-06, |
|
"loss": 1.7512, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3161504417734998e-06, |
|
"loss": 1.8485, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3137745872169576e-06, |
|
"loss": 1.7469, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3113967664507172e-06, |
|
"loss": 1.6873, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.3090169943749473e-06, |
|
"loss": 1.8121, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.3066352859020442e-06, |
|
"loss": 1.7396, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.3042516559565376e-06, |
|
"loss": 1.6467, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.3018661194749985e-06, |
|
"loss": 1.7991, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2994786914059447e-06, |
|
"loss": 1.6634, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.2970893867097468e-06, |
|
"loss": 1.7896, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.2946982203585357e-06, |
|
"loss": 1.675, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.2923052073361074e-06, |
|
"loss": 1.789, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.2899103626378298e-06, |
|
"loss": 1.8316, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.2875137012705493e-06, |
|
"loss": 1.8196, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.285115238252495e-06, |
|
"loss": 1.7433, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.2827149886131866e-06, |
|
"loss": 1.7304, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.280312967393339e-06, |
|
"loss": 1.7185, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.277909189644768e-06, |
|
"loss": 1.6746, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.2755036704302977e-06, |
|
"loss": 1.866, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.2730964248236628e-06, |
|
"loss": 1.7399, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.2706874679094181e-06, |
|
"loss": 1.5624, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.2682768147828403e-06, |
|
"loss": 1.6956, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.265864480549836e-06, |
|
"loss": 1.8807, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.2634504803268465e-06, |
|
"loss": 1.6775, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.2610348292407512e-06, |
|
"loss": 1.8222, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.2586175424287763e-06, |
|
"loss": 1.7631, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.2561986350383964e-06, |
|
"loss": 1.7953, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.253778122227242e-06, |
|
"loss": 1.8682, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.2513560191630032e-06, |
|
"loss": 1.7159, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.2489323410233355e-06, |
|
"loss": 1.7469, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.2465071029957637e-06, |
|
"loss": 1.8322, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.2440803202775886e-06, |
|
"loss": 1.6617, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.241652008075789e-06, |
|
"loss": 1.8599, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.2392221816069292e-06, |
|
"loss": 1.7623, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.2367908560970613e-06, |
|
"loss": 1.7148, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.2343580467816313e-06, |
|
"loss": 1.7697, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.2319237689053838e-06, |
|
"loss": 1.8303, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.2294880377222647e-06, |
|
"loss": 1.672, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.2270508684953279e-06, |
|
"loss": 1.7788, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.2246122764966376e-06, |
|
"loss": 1.7533, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.222172277007174e-06, |
|
"loss": 1.7463, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.2197308853167377e-06, |
|
"loss": 1.8569, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2172881167238513e-06, |
|
"loss": 1.7632, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2148439865356682e-06, |
|
"loss": 1.7108, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2123985100678714e-06, |
|
"loss": 1.7541, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2099517026445819e-06, |
|
"loss": 1.6897, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.20750357959826e-06, |
|
"loss": 1.6967, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.2050541562696108e-06, |
|
"loss": 1.8045, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.2026034480074864e-06, |
|
"loss": 1.6859, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.2001514701687917e-06, |
|
"loss": 1.7066, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1976982381183866e-06, |
|
"loss": 1.7508, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1952437672289901e-06, |
|
"loss": 1.6706, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.192788072881085e-06, |
|
"loss": 1.7158, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.1903311704628189e-06, |
|
"loss": 1.6929, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.1878730753699126e-06, |
|
"loss": 1.6978, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.185413803005557e-06, |
|
"loss": 1.9111, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1829533687803234e-06, |
|
"loss": 1.7479, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1804917881120606e-06, |
|
"loss": 1.6894, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.1780290764258043e-06, |
|
"loss": 1.745, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.1755652491536753e-06, |
|
"loss": 1.8222, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.1731003217347863e-06, |
|
"loss": 1.6884, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.170634309615143e-06, |
|
"loss": 1.8879, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.1681672282475494e-06, |
|
"loss": 1.724, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.165699093091508e-06, |
|
"loss": 1.7324, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.1632299196131264e-06, |
|
"loss": 1.6713, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.1607597232850171e-06, |
|
"loss": 1.6585, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.1582885195862035e-06, |
|
"loss": 1.7721, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.1558163240020207e-06, |
|
"loss": 1.8438, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.1533431520240192e-06, |
|
"loss": 1.7953, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.1508690191498683e-06, |
|
"loss": 1.7349, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.1483939408832581e-06, |
|
"loss": 1.7855, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.1459179327338039e-06, |
|
"loss": 1.8372, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1434410102169461e-06, |
|
"loss": 1.8718, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1409631888538565e-06, |
|
"loss": 1.8053, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.1384844841713385e-06, |
|
"loss": 1.7346, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1360049117017312e-06, |
|
"loss": 1.8633, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1335244869828106e-06, |
|
"loss": 1.6126, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1310432255576944e-06, |
|
"loss": 1.7821, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.128561142974742e-06, |
|
"loss": 1.8506, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.1260782547874599e-06, |
|
"loss": 1.7426, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.1235945765544014e-06, |
|
"loss": 1.7479, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.1211101238390716e-06, |
|
"loss": 1.7548, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.1186249122098282e-06, |
|
"loss": 1.7333, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.116138957239784e-06, |
|
"loss": 1.6402, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.1136522745067105e-06, |
|
"loss": 1.7524, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.11116487959294e-06, |
|
"loss": 1.728, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.1086767880852663e-06, |
|
"loss": 1.7604, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.1061880155748496e-06, |
|
"loss": 1.7521, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.103698577657116e-06, |
|
"loss": 1.7241, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.1012084899316627e-06, |
|
"loss": 1.7249, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0987177680021583e-06, |
|
"loss": 1.5836, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0962264274762447e-06, |
|
"loss": 1.8434, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0937344839654414e-06, |
|
"loss": 1.7286, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0912419530850456e-06, |
|
"loss": 1.5997, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0887488504540355e-06, |
|
"loss": 1.7912, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0862551916949715e-06, |
|
"loss": 1.8044, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0837609924338999e-06, |
|
"loss": 1.771, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0812662683002526e-06, |
|
"loss": 1.785, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0787710349267522e-06, |
|
"loss": 1.7252, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0762753079493106e-06, |
|
"loss": 1.7104, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0737791030069341e-06, |
|
"loss": 1.6688, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.071282435741623e-06, |
|
"loss": 1.773, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0687853217982759e-06, |
|
"loss": 1.7298, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0662877768245893e-06, |
|
"loss": 1.764, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0637898164709613e-06, |
|
"loss": 1.9291, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0612914563903927e-06, |
|
"loss": 1.8433, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0587927122383891e-06, |
|
"loss": 1.6675, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0562935996728628e-06, |
|
"loss": 1.8455, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.053794134354035e-06, |
|
"loss": 1.8364, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0512943319443368e-06, |
|
"loss": 1.8785, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0487942081083124e-06, |
|
"loss": 1.6365, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.046293778512519e-06, |
|
"loss": 1.6798, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.043793058825431e-06, |
|
"loss": 1.6305, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.0412920647173398e-06, |
|
"loss": 1.7177, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0387908118602567e-06, |
|
"loss": 1.6787, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0362893159278147e-06, |
|
"loss": 1.7547, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.0337875925951694e-06, |
|
"loss": 1.6793, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0312856575389016e-06, |
|
"loss": 1.6289, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0287835264369186e-06, |
|
"loss": 1.6652, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.0262812149683574e-06, |
|
"loss": 1.6313, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0237787388134828e-06, |
|
"loss": 1.8522, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0212761136535945e-06, |
|
"loss": 1.7227, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.0187733551709235e-06, |
|
"loss": 1.7947, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0162704790485376e-06, |
|
"loss": 1.6084, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0137675009702415e-06, |
|
"loss": 1.5853, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0112644366204784e-06, |
|
"loss": 1.7474, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.0087613016842323e-06, |
|
"loss": 1.6336, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.0062581118469298e-06, |
|
"loss": 1.7304, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.003754882794341e-06, |
|
"loss": 1.6759, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.0012516302124826e-06, |
|
"loss": 1.7696, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.987483697875177e-07, |
|
"loss": 1.7956, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.962451172056591e-07, |
|
"loss": 1.6856, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.937418881530703e-07, |
|
"loss": 1.6988, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.912386983157679e-07, |
|
"loss": 1.7866, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.887355633795217e-07, |
|
"loss": 1.7231, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.862324990297588e-07, |
|
"loss": 1.6996, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.837295209514623e-07, |
|
"loss": 1.8524, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.812266448290766e-07, |
|
"loss": 1.6326, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.787238863464054e-07, |
|
"loss": 1.7171, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.762212611865169e-07, |
|
"loss": 1.72, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.73718785031643e-07, |
|
"loss": 1.6281, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.712164735630815e-07, |
|
"loss": 1.7401, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.687143424610986e-07, |
|
"loss": 1.7269, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.662124074048307e-07, |
|
"loss": 1.7733, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.637106840721852e-07, |
|
"loss": 1.8924, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.612091881397434e-07, |
|
"loss": 1.7352, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.587079352826603e-07, |
|
"loss": 1.7683, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.56206941174569e-07, |
|
"loss": 1.6946, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.53706221487481e-07, |
|
"loss": 1.6862, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.512057918916876e-07, |
|
"loss": 1.6457, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.48705668055663e-07, |
|
"loss": 1.6856, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.46205865645965e-07, |
|
"loss": 1.7311, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.437064003271372e-07, |
|
"loss": 1.6133, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.412072877616109e-07, |
|
"loss": 1.7517, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.387085436096074e-07, |
|
"loss": 1.9106, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.362101835290386e-07, |
|
"loss": 1.6265, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.33712223175411e-07, |
|
"loss": 1.6314, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.312146782017242e-07, |
|
"loss": 1.7148, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.287175642583771e-07, |
|
"loss": 1.7809, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.262208969930659e-07, |
|
"loss": 1.7089, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.237246920506894e-07, |
|
"loss": 1.7735, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.21228965073248e-07, |
|
"loss": 1.761, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.187337316997474e-07, |
|
"loss": 1.6108, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.162390075661001e-07, |
|
"loss": 1.763, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.137448083050285e-07, |
|
"loss": 1.7859, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.112511495459646e-07, |
|
"loss": 1.7017, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.087580469149547e-07, |
|
"loss": 1.7892, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.062655160345586e-07, |
|
"loss": 1.6811, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.037735725237553e-07, |
|
"loss": 1.8112, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.01282231997842e-07, |
|
"loss": 1.7801, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.98791510068337e-07, |
|
"loss": 1.6536, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.96301422342884e-07, |
|
"loss": 1.6774, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.938119844251506e-07, |
|
"loss": 1.8115, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.913232119147337e-07, |
|
"loss": 1.7447, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.8883512040706e-07, |
|
"loss": 1.6887, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.863477254932893e-07, |
|
"loss": 1.8121, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.838610427602164e-07, |
|
"loss": 1.6829, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.813750877901722e-07, |
|
"loss": 1.6187, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 8.788898761609284e-07, |
|
"loss": 1.5662, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 8.764054234455986e-07, |
|
"loss": 1.6959, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 8.739217452125401e-07, |
|
"loss": 1.6173, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 8.714388570252577e-07, |
|
"loss": 1.7312, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 8.689567744423059e-07, |
|
"loss": 1.7964, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 8.664755130171896e-07, |
|
"loss": 1.6815, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 8.63995088298269e-07, |
|
"loss": 1.6762, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 8.615155158286614e-07, |
|
"loss": 1.7176, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 8.590368111461435e-07, |
|
"loss": 1.7004, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 8.565589897830542e-07, |
|
"loss": 1.7482, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.540820672661964e-07, |
|
"loss": 1.7698, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.516060591167418e-07, |
|
"loss": 1.6477, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.491309808501318e-07, |
|
"loss": 1.7135, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.466568479759807e-07, |
|
"loss": 1.7285, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.441836759979795e-07, |
|
"loss": 1.7036, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.417114804137966e-07, |
|
"loss": 1.6844, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.392402767149829e-07, |
|
"loss": 1.6886, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.367700803868736e-07, |
|
"loss": 1.6005, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.343009069084919e-07, |
|
"loss": 1.7191, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 8.318327717524508e-07, |
|
"loss": 1.7151, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.293656903848569e-07, |
|
"loss": 1.7397, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.268996782652138e-07, |
|
"loss": 1.6916, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 8.244347508463248e-07, |
|
"loss": 1.7746, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.219709235741956e-07, |
|
"loss": 1.6656, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.195082118879396e-07, |
|
"loss": 1.6611, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 8.17046631219677e-07, |
|
"loss": 1.6897, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.145861969944429e-07, |
|
"loss": 1.6733, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.121269246300874e-07, |
|
"loss": 1.6586, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.096688295371807e-07, |
|
"loss": 1.8541, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.072119271189155e-07, |
|
"loss": 1.6834, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.047562327710101e-07, |
|
"loss": 1.6726, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 8.023017618816135e-07, |
|
"loss": 1.6608, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 7.998485298312082e-07, |
|
"loss": 1.6709, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.973965519925135e-07, |
|
"loss": 1.7702, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.949458437303891e-07, |
|
"loss": 1.6864, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.9249642040174e-07, |
|
"loss": 1.6366, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.900482973554182e-07, |
|
"loss": 1.6758, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.876014899321288e-07, |
|
"loss": 1.7948, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.851560134643319e-07, |
|
"loss": 1.6308, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.827118832761486e-07, |
|
"loss": 1.715, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.802691146832628e-07, |
|
"loss": 1.6451, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.77827722992826e-07, |
|
"loss": 1.7076, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.753877235033623e-07, |
|
"loss": 1.7195, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.729491315046721e-07, |
|
"loss": 1.7336, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.705119622777351e-07, |
|
"loss": 1.7725, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.680762310946165e-07, |
|
"loss": 1.7105, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 7.656419532183687e-07, |
|
"loss": 1.665, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 7.632091439029389e-07, |
|
"loss": 1.7018, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 7.607778183930707e-07, |
|
"loss": 1.7512, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 7.583479919242107e-07, |
|
"loss": 1.7185, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 7.559196797224115e-07, |
|
"loss": 1.6237, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 7.534928970042363e-07, |
|
"loss": 1.6201, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 7.510676589766646e-07, |
|
"loss": 1.6381, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 7.486439808369969e-07, |
|
"loss": 1.7057, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 7.46221877772758e-07, |
|
"loss": 1.6837, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.438013649616037e-07, |
|
"loss": 1.7134, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.413824575712238e-07, |
|
"loss": 1.8225, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.389651707592487e-07, |
|
"loss": 1.6405, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.365495196731537e-07, |
|
"loss": 1.6202, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 7.341355194501637e-07, |
|
"loss": 1.6675, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 7.317231852171598e-07, |
|
"loss": 1.6469, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 7.293125320905821e-07, |
|
"loss": 1.6377, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 7.269035751763371e-07, |
|
"loss": 1.73, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 7.244963295697023e-07, |
|
"loss": 1.7618, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 7.220908103552318e-07, |
|
"loss": 1.7123, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 7.196870326066608e-07, |
|
"loss": 1.6733, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 7.172850113868136e-07, |
|
"loss": 1.6417, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 7.148847617475051e-07, |
|
"loss": 1.6907, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 7.124862987294509e-07, |
|
"loss": 1.7232, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 7.100896373621699e-07, |
|
"loss": 1.7538, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 7.076947926638925e-07, |
|
"loss": 1.7497, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 7.053017796414644e-07, |
|
"loss": 1.6393, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 7.029106132902533e-07, |
|
"loss": 1.6977, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 7.005213085940553e-07, |
|
"loss": 1.6356, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 6.981338805250014e-07, |
|
"loss": 1.63, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 6.957483440434624e-07, |
|
"loss": 1.71, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 6.933647140979562e-07, |
|
"loss": 1.7004, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 6.909830056250526e-07, |
|
"loss": 1.848, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.886032335492827e-07, |
|
"loss": 1.6403, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.862254127830424e-07, |
|
"loss": 1.5791, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.838495582265e-07, |
|
"loss": 1.6469, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.814756847675038e-07, |
|
"loss": 1.7173, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.791038072814869e-07, |
|
"loss": 1.5871, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.767339406313755e-07, |
|
"loss": 1.7168, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.743660996674949e-07, |
|
"loss": 1.6357, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.720002992274778e-07, |
|
"loss": 1.734, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.696365541361695e-07, |
|
"loss": 1.6841, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.672748792055355e-07, |
|
"loss": 1.8149, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.649152892345702e-07, |
|
"loss": 1.745, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.625577990092019e-07, |
|
"loss": 1.6469, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.602024233022017e-07, |
|
"loss": 1.6683, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.578491768730907e-07, |
|
"loss": 1.6904, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.554980744680469e-07, |
|
"loss": 1.6226, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.531491308198133e-07, |
|
"loss": 1.6818, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 6.50802360647605e-07, |
|
"loss": 1.7171, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 6.484577786570186e-07, |
|
"loss": 1.6437, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 6.461153995399378e-07, |
|
"loss": 1.7015, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.437752379744427e-07, |
|
"loss": 1.6586, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.414373086247175e-07, |
|
"loss": 1.8193, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.39101626140959e-07, |
|
"loss": 1.7108, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.367682051592835e-07, |
|
"loss": 1.7348, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.344370603016374e-07, |
|
"loss": 1.7896, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.321082061757035e-07, |
|
"loss": 1.6431, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.297816573748094e-07, |
|
"loss": 1.6321, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.274574284778378e-07, |
|
"loss": 1.7821, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.251355340491342e-07, |
|
"loss": 1.7996, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.228159886384148e-07, |
|
"loss": 1.5545, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.204988067806774e-07, |
|
"loss": 1.6468, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.181840029961071e-07, |
|
"loss": 1.7018, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.158715917899892e-07, |
|
"loss": 1.7334, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.135615876526152e-07, |
|
"loss": 1.6751, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.112540050591935e-07, |
|
"loss": 1.6638, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.089488584697589e-07, |
|
"loss": 1.7124, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.0664616232908e-07, |
|
"loss": 1.6956, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 6.043459310665715e-07, |
|
"loss": 1.7315, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 6.020481790962017e-07, |
|
"loss": 1.6792, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 5.997529208164033e-07, |
|
"loss": 1.6754, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 5.974601706099827e-07, |
|
"loss": 1.6349, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 5.951699428440291e-07, |
|
"loss": 1.7346, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 5.928822518698262e-07, |
|
"loss": 1.6471, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 5.905971120227615e-07, |
|
"loss": 1.5821, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 5.883145376222356e-07, |
|
"loss": 1.7099, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 5.871742169348447e-07, |
|
"loss": 1.6813, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.848955175178135e-07, |
|
"loss": 1.6904, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.826194192752957e-07, |
|
"loss": 1.8445, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.803459364700347e-07, |
|
"loss": 1.6987, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 5.780750833483849e-07, |
|
"loss": 1.6935, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.758068741402222e-07, |
|
"loss": 1.7032, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.735413230588544e-07, |
|
"loss": 1.5476, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 5.712784443009334e-07, |
|
"loss": 1.7515, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 5.690182520463653e-07, |
|
"loss": 1.6147, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 5.667607604582226e-07, |
|
"loss": 1.6685, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 5.645059836826516e-07, |
|
"loss": 1.7202, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.622539358487901e-07, |
|
"loss": 1.5869, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.600046310686739e-07, |
|
"loss": 1.6195, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 5.577580834371505e-07, |
|
"loss": 1.7684, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.555143070317899e-07, |
|
"loss": 1.7422, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.532733159127962e-07, |
|
"loss": 1.6241, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.51035124122921e-07, |
|
"loss": 1.6719, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.487997456873747e-07, |
|
"loss": 1.7241, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.465671946137367e-07, |
|
"loss": 1.6806, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.44337484891871e-07, |
|
"loss": 1.6525, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.421106304938355e-07, |
|
"loss": 1.6618, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.398866453737971e-07, |
|
"loss": 1.6008, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.37665543467941e-07, |
|
"loss": 1.6272, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.354473386943872e-07, |
|
"loss": 1.6709, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.332320449531007e-07, |
|
"loss": 1.6866, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.310196761258047e-07, |
|
"loss": 1.7594, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.288102460758936e-07, |
|
"loss": 1.758, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.266037686483478e-07, |
|
"loss": 1.7901, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.255016414894615e-07, |
|
"loss": 1.6924, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.232996189143034e-07, |
|
"loss": 1.708, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.211005834928241e-07, |
|
"loss": 1.695, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.189045490048664e-07, |
|
"loss": 1.6833, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.167115292114701e-07, |
|
"loss": 1.7641, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.145215378547824e-07, |
|
"loss": 1.753, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.123345886579749e-07, |
|
"loss": 1.6521, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.101506953251544e-07, |
|
"loss": 1.6386, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.079698715412798e-07, |
|
"loss": 1.6495, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.05792130972075e-07, |
|
"loss": 1.5408, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.036174872639443e-07, |
|
"loss": 1.8072, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.014459540438844e-07, |
|
"loss": 1.7753, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.992775449194014e-07, |
|
"loss": 1.7086, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.971122734784255e-07, |
|
"loss": 1.7871, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.949501532892252e-07, |
|
"loss": 1.7265, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.927911979003213e-07, |
|
"loss": 1.7409, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.906354208404032e-07, |
|
"loss": 1.7654, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.884828356182445e-07, |
|
"loss": 1.8296, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.863334557226179e-07, |
|
"loss": 1.6161, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.841872946222091e-07, |
|
"loss": 1.6787, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.82044365765536e-07, |
|
"loss": 1.6647, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.799046825808597e-07, |
|
"loss": 1.6605, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.777682584761052e-07, |
|
"loss": 1.7025, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.756351068387735e-07, |
|
"loss": 1.7235, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.735052410358602e-07, |
|
"loss": 1.6723, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.7137867441377087e-07, |
|
"loss": 1.753, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.692554202982367e-07, |
|
"loss": 1.7467, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.6713549199423186e-07, |
|
"loss": 1.7785, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.650189027858902e-07, |
|
"loss": 1.626, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.629056659364221e-07, |
|
"loss": 1.7275, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.6079579468803045e-07, |
|
"loss": 1.7071, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.586893022618278e-07, |
|
"loss": 1.6504, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.5658620185775477e-07, |
|
"loss": 1.7962, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.544865066544968e-07, |
|
"loss": 1.5978, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.523902298094001e-07, |
|
"loss": 1.7927, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.502973844583914e-07, |
|
"loss": 1.5912, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.482079837158937e-07, |
|
"loss": 1.6378, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.4612204067474636e-07, |
|
"loss": 1.6782, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.4403956840611987e-07, |
|
"loss": 1.731, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.4196057995943724e-07, |
|
"loss": 1.5723, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.398850883622904e-07, |
|
"loss": 1.6989, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.3781310662035943e-07, |
|
"loss": 1.7962, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.3574464771732857e-07, |
|
"loss": 1.7285, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.336797246148088e-07, |
|
"loss": 1.665, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.3161835025225424e-07, |
|
"loss": 1.7412, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.295605375468817e-07, |
|
"loss": 1.7023, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.2750629939358886e-07, |
|
"loss": 1.675, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.2545564866487395e-07, |
|
"loss": 1.716, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.2340859821075614e-07, |
|
"loss": 1.7937, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.2136516085869446e-07, |
|
"loss": 1.6386, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.193253494135054e-07, |
|
"loss": 1.7234, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.1728917665728637e-07, |
|
"loss": 1.6263, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.1525665534933186e-07, |
|
"loss": 1.6959, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.132277982260567e-07, |
|
"loss": 1.6792, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.1120261800091373e-07, |
|
"loss": 1.6271, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.091811273643157e-07, |
|
"loss": 1.7233, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.071633389835556e-07, |
|
"loss": 1.6004, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.051492655027262e-07, |
|
"loss": 1.6996, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.031389195426416e-07, |
|
"loss": 1.7517, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.0113231370075837e-07, |
|
"loss": 1.8263, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.9912946055109684e-07, |
|
"loss": 1.7199, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.971303726441611e-07, |
|
"loss": 1.7281, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.9513506250686034e-07, |
|
"loss": 1.6784, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.9314354264243274e-07, |
|
"loss": 1.5936, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.9115582553036477e-07, |
|
"loss": 1.5532, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.891719236263128e-07, |
|
"loss": 1.7161, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.8719184936202744e-07, |
|
"loss": 1.7926, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.852156151452726e-07, |
|
"loss": 1.8328, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.832432333597505e-07, |
|
"loss": 1.6176, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.8127471636502173e-07, |
|
"loss": 1.6779, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.793100764964299e-07, |
|
"loss": 1.6335, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.773493260650229e-07, |
|
"loss": 1.7339, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.7539247735747613e-07, |
|
"loss": 1.7122, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.734395426360152e-07, |
|
"loss": 1.7544, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.7149053413834e-07, |
|
"loss": 1.7692, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.6954546407754796e-07, |
|
"loss": 1.6691, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.676043446420555e-07, |
|
"loss": 1.678, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.656671879955249e-07, |
|
"loss": 1.7866, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.6373400627678487e-07, |
|
"loss": 1.8046, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.618048115997574e-07, |
|
"loss": 1.5947, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.598796160533789e-07, |
|
"loss": 1.7178, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.579584317015272e-07, |
|
"loss": 1.6033, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.5604127058294465e-07, |
|
"loss": 1.6477, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.5412814471116203e-07, |
|
"loss": 1.7793, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.522190660744242e-07, |
|
"loss": 1.6408, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.5031404663561504e-07, |
|
"loss": 1.6641, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.4841309833218244e-07, |
|
"loss": 1.6488, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.4651623307606346e-07, |
|
"loss": 1.6658, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.446234627536079e-07, |
|
"loss": 1.6577, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.4273479922550705e-07, |
|
"loss": 1.5276, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.4085025432671745e-07, |
|
"loss": 1.5433, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.3896983986638704e-07, |
|
"loss": 1.6715, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.370935676277806e-07, |
|
"loss": 1.6532, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.352214493682065e-07, |
|
"loss": 1.5619, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.333534968189436e-07, |
|
"loss": 1.6104, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.314897216851673e-07, |
|
"loss": 1.6993, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.296301356458745e-07, |
|
"loss": 1.751, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.2777475035381376e-07, |
|
"loss": 1.6651, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.2592357743540887e-07, |
|
"loss": 1.6872, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.2407662849068907e-07, |
|
"loss": 1.6578, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.222339150932133e-07, |
|
"loss": 1.8125, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.2039544879000046e-07, |
|
"loss": 1.8527, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.1856124110145566e-07, |
|
"loss": 1.6272, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.167313035212976e-07, |
|
"loss": 1.6794, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.149056475164873e-07, |
|
"loss": 1.7241, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.1308428452715643e-07, |
|
"loss": 1.6428, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.112672259665354e-07, |
|
"loss": 1.7643, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.0945448322088096e-07, |
|
"loss": 1.6283, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.076460676494067e-07, |
|
"loss": 1.6474, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.0584199058420954e-07, |
|
"loss": 1.536, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.040422633302011e-07, |
|
"loss": 1.6241, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.022468971650348e-07, |
|
"loss": 1.6115, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.004559033390368e-07, |
|
"loss": 1.7143, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 2.986692930751349e-07, |
|
"loss": 1.6723, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 2.968870775687873e-07, |
|
"loss": 1.622, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 2.9510926798791357e-07, |
|
"loss": 1.6883, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 2.933358754728249e-07, |
|
"loss": 1.7071, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 2.9156691113615374e-07, |
|
"loss": 1.6615, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 2.898023860627834e-07, |
|
"loss": 1.6626, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 2.8804231130977974e-07, |
|
"loss": 1.7936, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 2.8628669790632185e-07, |
|
"loss": 1.7444, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 2.845355568536328e-07, |
|
"loss": 1.6659, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 2.8278889912490944e-07, |
|
"loss": 1.7632, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 2.810467356652559e-07, |
|
"loss": 1.7743, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 2.793090773916128e-07, |
|
"loss": 1.6388, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 2.7757593519269084e-07, |
|
"loss": 1.6374, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 2.7584731992890063e-07, |
|
"loss": 1.7458, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 2.7412324243228615e-07, |
|
"loss": 1.6739, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 2.724037135064566e-07, |
|
"loss": 1.6879, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 2.706887439265179e-07, |
|
"loss": 1.6265, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 2.6897834443900526e-07, |
|
"loss": 1.7095, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 2.6727252576181713e-07, |
|
"loss": 1.661, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 2.6557129858414697e-07, |
|
"loss": 1.7222, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 2.6387467356641644e-07, |
|
"loss": 1.6253, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 2.621826613402085e-07, |
|
"loss": 1.6533, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 2.604952725082005e-07, |
|
"loss": 1.5841, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 2.5881251764409883e-07, |
|
"loss": 1.7162, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 2.571344072925723e-07, |
|
"loss": 1.5638, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.5546095196918425e-07, |
|
"loss": 1.7073, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.5379216216033005e-07, |
|
"loss": 1.5829, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.5212804832316783e-07, |
|
"loss": 1.6375, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.5046862088555586e-07, |
|
"loss": 1.8227, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 2.48813890245985e-07, |
|
"loss": 1.6431, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 2.471638667735153e-07, |
|
"loss": 1.5948, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 2.4551856080770993e-07, |
|
"loss": 1.7022, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 2.4387798265857073e-07, |
|
"loss": 1.642, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 2.422421426064728e-07, |
|
"loss": 1.7252, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 2.406110509021021e-07, |
|
"loss": 1.7285, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 2.3898471776638964e-07, |
|
"loss": 1.7166, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 2.37363153390447e-07, |
|
"loss": 1.5705, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 2.3574636793550372e-07, |
|
"loss": 1.6352, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 2.341343715328431e-07, |
|
"loss": 1.7174, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 2.3252717428373925e-07, |
|
"loss": 1.5692, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 2.3092478625939217e-07, |
|
"loss": 1.5945, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 2.2932721750086704e-07, |
|
"loss": 1.7714, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 2.2773447801902858e-07, |
|
"loss": 1.63, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 2.2614657779448122e-07, |
|
"loss": 1.61, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 2.2456352677750366e-07, |
|
"loss": 1.588, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 2.2298533488798932e-07, |
|
"loss": 1.545, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 2.2141201201538205e-07, |
|
"loss": 1.6177, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 2.1984356801861505e-07, |
|
"loss": 1.5617, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 2.1828001272604846e-07, |
|
"loss": 1.6925, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 2.1672135593540918e-07, |
|
"loss": 1.6207, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 2.1516760741372807e-07, |
|
"loss": 1.6402, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 2.136187768972796e-07, |
|
"loss": 1.6566, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 2.120748740915198e-07, |
|
"loss": 1.6021, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 2.1053590867102645e-07, |
|
"loss": 1.6772, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 2.0900189027943827e-07, |
|
"loss": 1.6485, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 2.0747282852939475e-07, |
|
"loss": 1.6803, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 2.059487330024743e-07, |
|
"loss": 1.7312, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 2.0442961324913688e-07, |
|
"loss": 1.5705, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 2.029154787886612e-07, |
|
"loss": 1.8075, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 2.014063391090881e-07, |
|
"loss": 1.64, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.9990220366715826e-07, |
|
"loss": 1.6109, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.9840308188825495e-07, |
|
"loss": 1.6193, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.969089831663443e-07, |
|
"loss": 1.6262, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.954199168639161e-07, |
|
"loss": 1.6829, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.939358923119252e-07, |
|
"loss": 1.7136, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.924569188097338e-07, |
|
"loss": 1.6498, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.9098300562505264e-07, |
|
"loss": 1.6213, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.8951416199388248e-07, |
|
"loss": 1.6819, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.8805039712045677e-07, |
|
"loss": 1.5073, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.8659172017718404e-07, |
|
"loss": 1.6358, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.8513814030459062e-07, |
|
"loss": 1.7238, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.8368966661126218e-07, |
|
"loss": 1.6721, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.8224630817378827e-07, |
|
"loss": 1.7208, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.8080807403670373e-07, |
|
"loss": 1.6376, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.793749732124341e-07, |
|
"loss": 1.6332, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.7794701468123686e-07, |
|
"loss": 1.6381, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.7652420739114693e-07, |
|
"loss": 1.7215, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.7510656025792002e-07, |
|
"loss": 1.6126, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.7369408216497626e-07, |
|
"loss": 1.7069, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.72286781963345e-07, |
|
"loss": 1.5791, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.7088466847160944e-07, |
|
"loss": 1.6396, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.6948775047585163e-07, |
|
"loss": 1.5832, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.6809603672959617e-07, |
|
"loss": 1.6519, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.6670953595375703e-07, |
|
"loss": 1.7497, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.6532825683658126e-07, |
|
"loss": 1.553, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.6395220803359645e-07, |
|
"loss": 1.6504, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.62581398167554e-07, |
|
"loss": 1.7245, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.6121583582837772e-07, |
|
"loss": 1.5531, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.5985552957310843e-07, |
|
"loss": 1.6271, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.5850048792585046e-07, |
|
"loss": 1.4938, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.571507193777183e-07, |
|
"loss": 1.635, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.5580623238678392e-07, |
|
"loss": 1.695, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.5446703537802342e-07, |
|
"loss": 1.7068, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.5313313674326444e-07, |
|
"loss": 1.6036, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.5180454484113227e-07, |
|
"loss": 1.6802, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.5048126799699968e-07, |
|
"loss": 1.6124, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.4916331450293317e-07, |
|
"loss": 1.6829, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.4785069261764183e-07, |
|
"loss": 1.7414, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.465434105664246e-07, |
|
"loss": 1.637, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.4524147654111962e-07, |
|
"loss": 1.7293, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.4394489870005278e-07, |
|
"loss": 1.7024, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.426536851679866e-07, |
|
"loss": 1.633, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.4136784403606837e-07, |
|
"loss": 1.6426, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.400873833617814e-07, |
|
"loss": 1.5418, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.3881231116889213e-07, |
|
"loss": 1.8029, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.375426354474023e-07, |
|
"loss": 1.7329, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.3627836415349647e-07, |
|
"loss": 1.6314, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.3501950520949433e-07, |
|
"loss": 1.6311, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.3376606650379985e-07, |
|
"loss": 1.591, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.3251805589085185e-07, |
|
"loss": 1.59, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.31275481191075e-07, |
|
"loss": 1.517, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.3003835019083098e-07, |
|
"loss": 1.6197, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.2880667064237006e-07, |
|
"loss": 1.6633, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 1.2758045026378094e-07, |
|
"loss": 1.6858, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 1.263596967389441e-07, |
|
"loss": 1.7073, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 1.2514441771748307e-07, |
|
"loss": 1.6681, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.239346208147164e-07, |
|
"loss": 1.6052, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.2273031361160957e-07, |
|
"loss": 1.6936, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.215315036547282e-07, |
|
"loss": 1.753, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.2033819845619085e-07, |
|
"loss": 1.5724, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.1915040549362066e-07, |
|
"loss": 1.6851, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.1796813221009983e-07, |
|
"loss": 1.8028, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.1679138601412253e-07, |
|
"loss": 1.5791, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.1562017427954873e-07, |
|
"loss": 1.498, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.1445450434555736e-07, |
|
"loss": 1.5576, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.1329438351660048e-07, |
|
"loss": 1.7264, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.1213981906235825e-07, |
|
"loss": 1.6386, |
|
"step": 2212 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.1099081821769297e-07, |
|
"loss": 1.64, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.0984738818260264e-07, |
|
"loss": 1.6745, |
|
"step": 2216 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.0870953612217815e-07, |
|
"loss": 1.5745, |
|
"step": 2218 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.0757726916655574e-07, |
|
"loss": 1.6712, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.0645059441087479e-07, |
|
"loss": 1.7029, |
|
"step": 2222 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.0532951891523123e-07, |
|
"loss": 1.5144, |
|
"step": 2224 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.042140497046351e-07, |
|
"loss": 1.695, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.031041937689654e-07, |
|
"loss": 1.6943, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 1.0199995806292639e-07, |
|
"loss": 1.6339, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 1.0090134950600404e-07, |
|
"loss": 1.6216, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 9.980837498242356e-08, |
|
"loss": 1.7272, |
|
"step": 2234 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 9.872104134110492e-08, |
|
"loss": 1.6464, |
|
"step": 2236 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 9.76393553956214e-08, |
|
"loss": 1.6532, |
|
"step": 2238 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 9.656332392415457e-08, |
|
"loss": 1.7036, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 9.549295366945454e-08, |
|
"loss": 1.6433, |
|
"step": 2242 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 9.442825133879607e-08, |
|
"loss": 1.6603, |
|
"step": 2244 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 9.336922360393717e-08, |
|
"loss": 1.7932, |
|
"step": 2246 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 9.231587710107669e-08, |
|
"loss": 1.7465, |
|
"step": 2248 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 9.1268218430813e-08, |
|
"loss": 1.6804, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 9.022625415810325e-08, |
|
"loss": 1.7131, |
|
"step": 2252 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 8.918999081222156e-08, |
|
"loss": 1.5673, |
|
"step": 2254 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.815943488671785e-08, |
|
"loss": 1.6055, |
|
"step": 2256 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.713459283937796e-08, |
|
"loss": 1.6087, |
|
"step": 2258 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.611547109218298e-08, |
|
"loss": 1.6197, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.510207603126818e-08, |
|
"loss": 1.7124, |
|
"step": 2262 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.4094414006884e-08, |
|
"loss": 1.8889, |
|
"step": 2264 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.309249133335571e-08, |
|
"loss": 1.6678, |
|
"step": 2266 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.209631428904462e-08, |
|
"loss": 1.6357, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.110588911630733e-08, |
|
"loss": 1.5471, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.012122202145755e-08, |
|
"loss": 1.6633, |
|
"step": 2272 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 7.914231917472747e-08, |
|
"loss": 1.6271, |
|
"step": 2274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 7.816918671022865e-08, |
|
"loss": 1.7145, |
|
"step": 2276 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 7.720183072591302e-08, |
|
"loss": 1.7156, |
|
"step": 2278 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 7.624025728353622e-08, |
|
"loss": 1.6741, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 7.528447240861779e-08, |
|
"loss": 1.6336, |
|
"step": 2282 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 7.433448209040494e-08, |
|
"loss": 1.647, |
|
"step": 2284 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 7.33902922818339e-08, |
|
"loss": 1.7456, |
|
"step": 2286 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 7.245190889949338e-08, |
|
"loss": 1.7444, |
|
"step": 2288 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 7.151933782358743e-08, |
|
"loss": 1.5676, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 7.059258489789777e-08, |
|
"loss": 1.6802, |
|
"step": 2292 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 6.967165592974788e-08, |
|
"loss": 1.5163, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 6.875655668996705e-08, |
|
"loss": 1.7526, |
|
"step": 2296 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 6.78472929128534e-08, |
|
"loss": 1.579, |
|
"step": 2298 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 6.694387029613802e-08, |
|
"loss": 1.7303, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 6.604629450094922e-08, |
|
"loss": 1.7015, |
|
"step": 2302 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 6.515457115177803e-08, |
|
"loss": 1.6907, |
|
"step": 2304 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 6.426870583644194e-08, |
|
"loss": 1.6003, |
|
"step": 2306 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 6.338870410604969e-08, |
|
"loss": 1.5709, |
|
"step": 2308 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 6.251457147496775e-08, |
|
"loss": 1.6811, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 6.164631342078419e-08, |
|
"loss": 1.7674, |
|
"step": 2312 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 6.078393538427573e-08, |
|
"loss": 1.693, |
|
"step": 2314 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 5.992744276937312e-08, |
|
"loss": 1.626, |
|
"step": 2316 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 5.907684094312637e-08, |
|
"loss": 1.5828, |
|
"step": 2318 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 5.823213523567305e-08, |
|
"loss": 1.7222, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.739333094020315e-08, |
|
"loss": 1.7899, |
|
"step": 2322 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.6560433312926814e-08, |
|
"loss": 1.6139, |
|
"step": 2324 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.573344757304077e-08, |
|
"loss": 1.5863, |
|
"step": 2326 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 5.491237890269651e-08, |
|
"loss": 1.548, |
|
"step": 2328 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 5.4097232446967175e-08, |
|
"loss": 1.5414, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 5.328801331381549e-08, |
|
"loss": 1.6775, |
|
"step": 2332 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 5.248472657406122e-08, |
|
"loss": 1.7709, |
|
"step": 2334 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 5.168737726135053e-08, |
|
"loss": 1.6781, |
|
"step": 2336 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 5.08959703721239e-08, |
|
"loss": 1.7276, |
|
"step": 2338 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 5.0110510865583935e-08, |
|
"loss": 1.6942, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.933100366366583e-08, |
|
"loss": 1.5948, |
|
"step": 2342 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.855745365100539e-08, |
|
"loss": 1.6951, |
|
"step": 2344 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.7789865674909056e-08, |
|
"loss": 1.6364, |
|
"step": 2346 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.702824454532295e-08, |
|
"loss": 1.6649, |
|
"step": 2348 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.6272595034803294e-08, |
|
"loss": 1.6061, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.552292187848661e-08, |
|
"loss": 1.6826, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.4779229774059124e-08, |
|
"loss": 1.703, |
|
"step": 2354 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.404152338172795e-08, |
|
"loss": 1.7616, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.33098073241922e-08, |
|
"loss": 1.6377, |
|
"step": 2358 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.25840861866138e-08, |
|
"loss": 1.6814, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.186436451658804e-08, |
|
"loss": 1.6138, |
|
"step": 2362 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.115064682411606e-08, |
|
"loss": 1.5977, |
|
"step": 2364 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.044293758157624e-08, |
|
"loss": 1.7621, |
|
"step": 2366 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.974124122369604e-08, |
|
"loss": 1.6687, |
|
"step": 2368 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.904556214752419e-08, |
|
"loss": 1.6122, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.8355904712403465e-08, |
|
"loss": 1.6062, |
|
"step": 2372 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.767227323994293e-08, |
|
"loss": 1.6828, |
|
"step": 2374 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.6994672013991425e-08, |
|
"loss": 1.6297, |
|
"step": 2376 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.632310528060978e-08, |
|
"loss": 1.8162, |
|
"step": 2378 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.565757724804552e-08, |
|
"loss": 1.7914, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.4998092086705234e-08, |
|
"loss": 1.7396, |
|
"step": 2382 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.434465392912955e-08, |
|
"loss": 1.6963, |
|
"step": 2384 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.369726686996588e-08, |
|
"loss": 1.6747, |
|
"step": 2386 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.3055934965944164e-08, |
|
"loss": 1.6809, |
|
"step": 2388 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.2420662235850714e-08, |
|
"loss": 1.7711, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.179145266050332e-08, |
|
"loss": 1.6423, |
|
"step": 2392 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.116831018272581e-08, |
|
"loss": 1.692, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.055123870732368e-08, |
|
"loss": 1.6362, |
|
"step": 2396 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 2.9940242101059944e-08, |
|
"loss": 1.6349, |
|
"step": 2398 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 2.933532419263063e-08, |
|
"loss": 1.7177, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 2.873648877264012e-08, |
|
"loss": 1.6556, |
|
"step": 2402 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 2.8143739593578852e-08, |
|
"loss": 1.7477, |
|
"step": 2404 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.7557080369798202e-08, |
|
"loss": 1.6377, |
|
"step": 2406 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.6976514777488744e-08, |
|
"loss": 1.568, |
|
"step": 2408 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.6402046454655713e-08, |
|
"loss": 1.6903, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.5833679001097343e-08, |
|
"loss": 1.6987, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.5271415978382116e-08, |
|
"loss": 1.6481, |
|
"step": 2414 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.4715260909825786e-08, |
|
"loss": 1.7223, |
|
"step": 2416 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 2.416521728046983e-08, |
|
"loss": 1.7476, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 2.3621288537059804e-08, |
|
"loss": 1.4865, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 2.3083478088023355e-08, |
|
"loss": 1.7032, |
|
"step": 2422 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.2551789303449032e-08, |
|
"loss": 1.6166, |
|
"step": 2424 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.2026225515064834e-08, |
|
"loss": 1.6313, |
|
"step": 2426 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.1506790016217913e-08, |
|
"loss": 1.6748, |
|
"step": 2428 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.0993486061853583e-08, |
|
"loss": 1.6125, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 2.0486316868495002e-08, |
|
"loss": 1.6414, |
|
"step": 2432 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.998528561422297e-08, |
|
"loss": 1.6712, |
|
"step": 2434 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.9490395438655826e-08, |
|
"loss": 1.7687, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.900164944293048e-08, |
|
"loss": 1.7534, |
|
"step": 2438 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.8519050689681957e-08, |
|
"loss": 1.7519, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.8042602203025116e-08, |
|
"loss": 1.6051, |
|
"step": 2442 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.7572306968535176e-08, |
|
"loss": 1.6003, |
|
"step": 2444 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.7108167933229224e-08, |
|
"loss": 1.7708, |
|
"step": 2446 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.6650188005547405e-08, |
|
"loss": 1.687, |
|
"step": 2448 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.6198370055335196e-08, |
|
"loss": 1.6639, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.5752716913825513e-08, |
|
"loss": 1.4597, |
|
"step": 2452 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.531323137361995e-08, |
|
"loss": 1.6316, |
|
"step": 2454 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.487991618867257e-08, |
|
"loss": 1.6441, |
|
"step": 2456 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.4452774074271812e-08, |
|
"loss": 1.608, |
|
"step": 2458 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.4031807707023724e-08, |
|
"loss": 1.6827, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.3617019724835089e-08, |
|
"loss": 1.6442, |
|
"step": 2462 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.3208412726897322e-08, |
|
"loss": 1.6289, |
|
"step": 2464 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.2805989273669604e-08, |
|
"loss": 1.761, |
|
"step": 2466 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.2409751886863217e-08, |
|
"loss": 1.6628, |
|
"step": 2468 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.2019703049425455e-08, |
|
"loss": 1.6411, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.1635845205524297e-08, |
|
"loss": 1.6303, |
|
"step": 2472 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.1258180760533087e-08, |
|
"loss": 1.5626, |
|
"step": 2474 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.0886712081015548e-08, |
|
"loss": 1.6254, |
|
"step": 2476 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.0521441494710348e-08, |
|
"loss": 1.7046, |
|
"step": 2478 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.0162371290517224e-08, |
|
"loss": 1.5625, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 9.809503718482436e-09, |
|
"loss": 1.5123, |
|
"step": 2482 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 9.462840989784671e-09, |
|
"loss": 1.6324, |
|
"step": 2484 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 9.122385276720935e-09, |
|
"loss": 1.5698, |
|
"step": 2486 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 8.788138712693126e-09, |
|
"loss": 1.7139, |
|
"step": 2488 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 8.460103392194718e-09, |
|
"loss": 1.7076, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 8.138281370797861e-09, |
|
"loss": 1.6318, |
|
"step": 2492 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 7.82267466513975e-09, |
|
"loss": 1.6996, |
|
"step": 2494 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 7.513285252910839e-09, |
|
"loss": 1.8213, |
|
"step": 2496 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 7.2101150728418606e-09, |
|
"loss": 1.7231, |
|
"step": 2498 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.913166024692052e-09, |
|
"loss": 1.7138, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.622439969236948e-09, |
|
"loss": 1.6939, |
|
"step": 2502 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.337938728257053e-09, |
|
"loss": 1.5972, |
|
"step": 2504 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.059664084526073e-09, |
|
"loss": 1.6183, |
|
"step": 2506 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 5.787617781800036e-09, |
|
"loss": 1.7436, |
|
"step": 2508 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.521801524805858e-09, |
|
"loss": 1.6657, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.262216979231682e-09, |
|
"loss": 1.6205, |
|
"step": 2512 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 5.0088657717152205e-09, |
|
"loss": 1.6337, |
|
"step": 2514 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.761749489834544e-09, |
|
"loss": 1.697, |
|
"step": 2516 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.520869682097528e-09, |
|
"loss": 1.6544, |
|
"step": 2518 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.2862278579323115e-09, |
|
"loss": 1.6517, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.057825487678079e-09, |
|
"loss": 1.7255, |
|
"step": 2522 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.8356640025752895e-09, |
|
"loss": 1.6923, |
|
"step": 2524 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.619744794757462e-09, |
|
"loss": 1.7489, |
|
"step": 2526 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.410069217241962e-09, |
|
"loss": 1.647, |
|
"step": 2528 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.2066385839213393e-09, |
|
"loss": 1.7754, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.0094541695556695e-09, |
|
"loss": 1.5713, |
|
"step": 2532 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.818517209764115e-09, |
|
"loss": 1.7536, |
|
"step": 2534 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.6338289010174874e-09, |
|
"loss": 1.5791, |
|
"step": 2536 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.455390400630364e-09, |
|
"loss": 1.6787, |
|
"step": 2538 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.283202826754316e-09, |
|
"loss": 1.6484, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.117267258370359e-09, |
|
"loss": 1.7767, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.957584735282847e-09, |
|
"loss": 1.665, |
|
"step": 2544 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.804156258112255e-09, |
|
"loss": 1.7597, |
|
"step": 2546 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.6569827882896292e-09, |
|
"loss": 1.7102, |
|
"step": 2548 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.5160652480497028e-09, |
|
"loss": 1.6754, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.3814045204260105e-09, |
|
"loss": 1.5894, |
|
"step": 2552 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.2530014492446728e-09, |
|
"loss": 1.6795, |
|
"step": 2554 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.1308568391196205e-09, |
|
"loss": 1.7018, |
|
"step": 2556 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.0149714554471555e-09, |
|
"loss": 1.6882, |
|
"step": 2558 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 9.053460244013988e-10, |
|
"loss": 1.6995, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 8.019812329292941e-10, |
|
"loss": 1.6503, |
|
"step": 2562 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 7.048777287472773e-10, |
|
"loss": 1.6573, |
|
"step": 2564 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 6.140361203361699e-10, |
|
"loss": 1.6255, |
|
"step": 2566 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 5.294569769378476e-10, |
|
"loss": 1.5798, |
|
"step": 2568 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.511408285516882e-10, |
|
"loss": 1.5854, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.790881659312406e-10, |
|
"loss": 1.6944, |
|
"step": 2572 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.132994405808942e-10, |
|
"loss": 1.6082, |
|
"step": 2574 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.537750647535475e-10, |
|
"loss": 1.5678, |
|
"step": 2576 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.005154114474994e-10, |
|
"loss": 1.6283, |
|
"step": 2578 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.5352081440422881e-10, |
|
"loss": 1.6261, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.1279156810684032e-10, |
|
"loss": 1.7373, |
|
"step": 2582 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 7.83279277773996e-11, |
|
"loss": 1.6181, |
|
"step": 2584 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 5.013010937582329e-11, |
|
"loss": 1.6539, |
|
"step": 2586 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.8198289598435622e-11, |
|
"loss": 1.6629, |
|
"step": 2588 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"step": 2588, |
|
"total_flos": 6.967815910824346e+16, |
|
"train_loss": 1.7649236768253804, |
|
"train_runtime": 22517.8712, |
|
"train_samples_per_second": 7.358, |
|
"train_steps_per_second": 0.115 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 2588, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 9000, |
|
"total_flos": 6.967815910824346e+16, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|