{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0993816733561657,
  "global_step": 50000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4e-06,
      "loss": 10.285,
      "step": 100
    },
    {
      "epoch": 0.01,
      "learning_rate": 8e-06,
      "loss": 8.5629,
      "step": 200
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.2e-05,
      "loss": 7.7941,
      "step": 300
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.6e-05,
      "loss": 7.2214,
      "step": 400
    },
    {
      "epoch": 0.03,
      "learning_rate": 2e-05,
      "loss": 6.7445,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.4e-05,
      "loss": 6.5017,
      "step": 600
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.8e-05,
      "loss": 6.3973,
      "step": 700
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.2e-05,
      "loss": 6.3519,
      "step": 800
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.6e-05,
      "loss": 6.341,
      "step": 900
    },
    {
      "epoch": 0.06,
      "learning_rate": 4e-05,
      "loss": 6.3197,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.4e-05,
      "loss": 6.3186,
      "step": 1100
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8e-05,
      "loss": 6.3114,
      "step": 1200
    },
    {
      "epoch": 0.08,
      "learning_rate": 5.2e-05,
      "loss": 6.3023,
      "step": 1300
    },
    {
      "epoch": 0.09,
      "learning_rate": 5.6e-05,
      "loss": 6.2749,
      "step": 1400
    },
    {
      "epoch": 0.09,
      "learning_rate": 6e-05,
      "loss": 6.2705,
      "step": 1500
    },
    {
      "epoch": 0.1,
      "learning_rate": 6.4e-05,
      "loss": 6.2538,
      "step": 1600
    },
    {
      "epoch": 0.11,
      "learning_rate": 6.800000000000001e-05,
      "loss": 6.2464,
      "step": 1700
    },
    {
      "epoch": 0.11,
      "learning_rate": 7.2e-05,
      "loss": 6.2073,
      "step": 1800
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.6e-05,
      "loss": 6.101,
      "step": 1900
    },
    {
      "epoch": 0.12,
      "learning_rate": 8e-05,
      "loss": 5.9768,
      "step": 2000
    },
    {
      "epoch": 0.13,
      "learning_rate": 8.400000000000001e-05,
      "loss": 5.7986,
      "step": 2100
    },
    {
      "epoch": 0.14,
      "learning_rate": 8.8e-05,
      "loss": 5.6088,
      "step": 2200
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.2e-05,
      "loss": 5.4903,
      "step": 2300
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.6e-05,
      "loss": 5.3685,
      "step": 2400
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001,
      "loss": 5.282,
      "step": 2500
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000104,
      "loss": 5.1831,
      "step": 2600
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000108,
      "loss": 5.1007,
      "step": 2700
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.000112,
      "loss": 5.0023,
      "step": 2800
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00011600000000000001,
      "loss": 4.9239,
      "step": 2900
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00012,
      "loss": 4.8625,
      "step": 3000
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.000124,
      "loss": 4.7771,
      "step": 3100
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000128,
      "loss": 4.7051,
      "step": 3200
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.000132,
      "loss": 4.6168,
      "step": 3300
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00013600000000000003,
      "loss": 4.5591,
      "step": 3400
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00014000000000000001,
      "loss": 4.5026,
      "step": 3500
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.000144,
      "loss": 4.4251,
      "step": 3600
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.000148,
      "loss": 4.3889,
      "step": 3700
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.000152,
      "loss": 4.3326,
      "step": 3800
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.000156,
      "loss": 4.288,
      "step": 3900
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00016,
      "loss": 4.2549,
      "step": 4000
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.000164,
      "loss": 4.19,
      "step": 4100
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00016800000000000002,
      "loss": 4.1428,
      "step": 4200
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017199999999999998,
      "loss": 4.1085,
      "step": 4300
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.000176,
      "loss": 4.078,
      "step": 4400
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00017999999999999998,
      "loss": 4.0206,
      "step": 4500
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.000184,
      "loss": 4.0012,
      "step": 4600
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00018800000000000002,
      "loss": 3.98,
      "step": 4700
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.000192,
      "loss": 3.9526,
      "step": 4800
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00019600000000000002,
      "loss": 3.8986,
      "step": 4900
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0002,
      "loss": 3.8794,
      "step": 5000
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.000204,
      "loss": 3.84,
      "step": 5100
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.000208,
      "loss": 3.8412,
      "step": 5200
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.000212,
      "loss": 3.8021,
      "step": 5300
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.000216,
      "loss": 3.7633,
      "step": 5400
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00022,
      "loss": 3.7475,
      "step": 5500
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.000224,
      "loss": 3.7366,
      "step": 5600
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.000228,
      "loss": 3.7149,
      "step": 5700
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00023200000000000003,
      "loss": 3.6964,
      "step": 5800
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.000236,
      "loss": 3.6868,
      "step": 5900
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00024,
      "loss": 3.6684,
      "step": 6000
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.000244,
      "loss": 3.6452,
      "step": 6100
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.000248,
      "loss": 3.6353,
      "step": 6200
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.000252,
      "loss": 3.6115,
      "step": 6300
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.000256,
      "loss": 3.6012,
      "step": 6400
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00026000000000000003,
      "loss": 3.5799,
      "step": 6500
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.000264,
      "loss": 3.5517,
      "step": 6600
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.000268,
      "loss": 3.5509,
      "step": 6700
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00027200000000000005,
      "loss": 3.539,
      "step": 6800
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00027600000000000004,
      "loss": 3.5227,
      "step": 6900
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00028000000000000003,
      "loss": 3.5066,
      "step": 7000
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00028399999999999996,
      "loss": 3.5071,
      "step": 7100
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.000288,
      "loss": 3.5012,
      "step": 7200
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.000292,
      "loss": 3.4881,
      "step": 7300
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.000296,
      "loss": 3.483,
      "step": 7400
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0003,
      "loss": 3.4781,
      "step": 7500
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.000304,
      "loss": 3.4433,
      "step": 7600
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.000308,
      "loss": 3.4457,
      "step": 7700
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.000312,
      "loss": 3.4491,
      "step": 7800
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.000316,
      "loss": 3.4211,
      "step": 7900
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00032,
      "loss": 3.416,
      "step": 8000
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.000324,
      "loss": 3.4149,
      "step": 8100
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.000328,
      "loss": 3.3943,
      "step": 8200
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00033200000000000005,
      "loss": 3.3854,
      "step": 8300
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00033600000000000004,
      "loss": 3.389,
      "step": 8400
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00034,
      "loss": 3.3672,
      "step": 8500
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00034399999999999996,
      "loss": 3.3656,
      "step": 8600
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.000348,
      "loss": 3.3347,
      "step": 8700
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.000352,
      "loss": 3.3448,
      "step": 8800
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.000356,
      "loss": 3.3228,
      "step": 8900
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00035999999999999997,
      "loss": 3.3337,
      "step": 9000
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.000364,
      "loss": 3.3183,
      "step": 9100
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.000368,
      "loss": 3.33,
      "step": 9200
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.000372,
      "loss": 3.3125,
      "step": 9300
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00037600000000000003,
      "loss": 3.2962,
      "step": 9400
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00038,
      "loss": 3.2982,
      "step": 9500
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.000384,
      "loss": 3.295,
      "step": 9600
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.000388,
      "loss": 3.2839,
      "step": 9700
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00039200000000000004,
      "loss": 3.2859,
      "step": 9800
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00039600000000000003,
      "loss": 3.2746,
      "step": 9900
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0004,
      "loss": 3.2595,
      "step": 10000
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.000404,
      "loss": 3.2802,
      "step": 10100
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.000408,
      "loss": 3.2474,
      "step": 10200
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.000412,
      "loss": 3.2535,
      "step": 10300
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.000416,
      "loss": 3.2276,
      "step": 10400
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00042,
      "loss": 3.2365,
      "step": 10500
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.000424,
      "loss": 3.2413,
      "step": 10600
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.000428,
      "loss": 3.2175,
      "step": 10700
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.000432,
      "loss": 3.2231,
      "step": 10800
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.000436,
      "loss": 3.2403,
      "step": 10900
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00044,
      "loss": 3.2022,
      "step": 11000
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.000444,
      "loss": 3.2017,
      "step": 11100
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.000448,
      "loss": 3.2083,
      "step": 11200
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00045200000000000004,
      "loss": 3.1954,
      "step": 11300
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.000456,
      "loss": 3.1911,
      "step": 11400
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00046,
      "loss": 3.1823,
      "step": 11500
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00046400000000000006,
      "loss": 3.1863,
      "step": 11600
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00046800000000000005,
      "loss": 3.1878,
      "step": 11700
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.000472,
      "loss": 3.1734,
      "step": 11800
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00047599999999999997,
      "loss": 3.1782,
      "step": 11900
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00048,
      "loss": 3.1711,
      "step": 12000
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.000484,
      "loss": 3.1747,
      "step": 12100
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.000488,
      "loss": 3.1527,
      "step": 12200
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.000492,
      "loss": 3.15,
      "step": 12300
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.000496,
      "loss": 3.1416,
      "step": 12400
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0005,
      "loss": 3.1455,
      "step": 12500
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.000504,
      "loss": 3.1441,
      "step": 12600
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.000508,
      "loss": 3.1462,
      "step": 12700
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.000512,
      "loss": 3.1531,
      "step": 12800
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0005160000000000001,
      "loss": 3.1394,
      "step": 12900
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0005200000000000001,
      "loss": 3.1304,
      "step": 13000
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.000524,
      "loss": 3.1292,
      "step": 13100
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.000528,
      "loss": 3.1334,
      "step": 13200
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.000532,
      "loss": 3.1129,
      "step": 13300
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.000536,
      "loss": 3.1171,
      "step": 13400
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00054,
      "loss": 3.1147,
      "step": 13500
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0005440000000000001,
      "loss": 3.1122,
      "step": 13600
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0005480000000000001,
      "loss": 3.101,
      "step": 13700
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0005520000000000001,
      "loss": 3.1152,
      "step": 13800
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0005560000000000001,
      "loss": 3.0982,
      "step": 13900
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0005600000000000001,
      "loss": 3.1003,
      "step": 14000
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0005639999999999999,
      "loss": 3.1038,
      "step": 14100
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0005679999999999999,
      "loss": 3.1052,
      "step": 14200
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0005719999999999999,
      "loss": 3.0987,
      "step": 14300
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.000576,
      "loss": 3.0798,
      "step": 14400
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00058,
      "loss": 3.0898,
      "step": 14500
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.000584,
      "loss": 3.0645,
      "step": 14600
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.000588,
      "loss": 3.085,
      "step": 14700
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.000592,
      "loss": 3.0773,
      "step": 14800
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.000596,
      "loss": 3.081,
      "step": 14900
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0006,
      "loss": 3.0728,
      "step": 15000
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.000604,
      "loss": 3.0774,
      "step": 15100
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.000608,
      "loss": 3.0598,
      "step": 15200
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.000612,
      "loss": 3.064,
      "step": 15300
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.000616,
      "loss": 3.0875,
      "step": 15400
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00062,
      "loss": 3.0779,
      "step": 15500
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.000624,
      "loss": 3.0626,
      "step": 15600
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.000628,
      "loss": 3.0632,
      "step": 15700
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.000632,
      "loss": 3.0534,
      "step": 15800
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0006360000000000001,
      "loss": 3.0666,
      "step": 15900
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00064,
      "loss": 3.0665,
      "step": 16000
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.000644,
      "loss": 3.0614,
      "step": 16100
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.000648,
      "loss": 3.0527,
      "step": 16200
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.000652,
      "loss": 3.0384,
      "step": 16300
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.000656,
      "loss": 3.0633,
      "step": 16400
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00066,
      "loss": 3.0308,
      "step": 16500
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0006640000000000001,
      "loss": 3.0476,
      "step": 16600
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0006680000000000001,
      "loss": 3.0514,
      "step": 16700
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0006720000000000001,
      "loss": 3.057,
      "step": 16800
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0006760000000000001,
      "loss": 3.0202,
      "step": 16900
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00068,
      "loss": 3.0328,
      "step": 17000
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.000684,
      "loss": 3.0422,
      "step": 17100
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0006879999999999999,
      "loss": 3.0236,
      "step": 17200
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.000692,
      "loss": 3.0257,
      "step": 17300
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.000696,
      "loss": 3.0346,
      "step": 17400
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0007,
      "loss": 3.0428,
      "step": 17500
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.000704,
      "loss": 3.0297,
      "step": 17600
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.000708,
      "loss": 3.035,
      "step": 17700
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.000712,
      "loss": 3.0265,
      "step": 17800
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.000716,
      "loss": 3.0069,
      "step": 17900
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0007199999999999999,
      "loss": 3.044,
      "step": 18000
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.000724,
      "loss": 3.0174,
      "step": 18100
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.000728,
      "loss": 3.0153,
      "step": 18200
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.000732,
      "loss": 3.0149,
      "step": 18300
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.000736,
      "loss": 3.0094,
      "step": 18400
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00074,
      "loss": 3.0139,
      "step": 18500
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.000744,
      "loss": 3.0115,
      "step": 18600
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.000748,
      "loss": 3.0049,
      "step": 18700
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0007520000000000001,
      "loss": 3.0011,
      "step": 18800
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.000756,
      "loss": 3.0098,
      "step": 18900
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00076,
      "loss": 2.9991,
      "step": 19000
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.000764,
      "loss": 3.0096,
      "step": 19100
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.000768,
      "loss": 3.0174,
      "step": 19200
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.000772,
      "loss": 3.0131,
      "step": 19300
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.000776,
      "loss": 3.003,
      "step": 19400
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0007800000000000001,
      "loss": 3.0081,
      "step": 19500
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0007840000000000001,
      "loss": 2.9973,
      "step": 19600
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0007880000000000001,
      "loss": 3.0083,
      "step": 19700
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0007920000000000001,
      "loss": 3.0031,
      "step": 19800
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.000796,
      "loss": 2.9853,
      "step": 19900
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0008,
      "loss": 2.9972,
      "step": 20000
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.000804,
      "loss": 2.9945,
      "step": 20100
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.000808,
      "loss": 2.9802,
      "step": 20200
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0008120000000000001,
      "loss": 2.9833,
      "step": 20300
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.000816,
      "loss": 2.9901,
      "step": 20400
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00082,
      "loss": 2.9983,
      "step": 20500
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.000824,
      "loss": 2.9861,
      "step": 20600
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.000828,
      "loss": 2.9787,
      "step": 20700
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.000832,
      "loss": 2.9893,
      "step": 20800
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.0008359999999999999,
      "loss": 2.9919,
      "step": 20900
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00084,
      "loss": 2.9547,
      "step": 21000
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.000844,
      "loss": 2.9825,
      "step": 21100
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.000848,
      "loss": 2.9905,
      "step": 21200
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.000852,
      "loss": 2.982,
      "step": 21300
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.000856,
      "loss": 2.9726,
      "step": 21400
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.00086,
      "loss": 2.9559,
      "step": 21500
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.000864,
      "loss": 2.9818,
      "step": 21600
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.0008680000000000001,
      "loss": 2.978,
      "step": 21700
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.000872,
      "loss": 2.9851,
      "step": 21800
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.000876,
      "loss": 2.9761,
      "step": 21900
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00088,
      "loss": 2.9584,
      "step": 22000
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.000884,
      "loss": 2.9792,
      "step": 22100
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.000888,
      "loss": 2.9692,
      "step": 22200
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.000892,
      "loss": 2.9636,
      "step": 22300
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.000896,
      "loss": 2.9668,
      "step": 22400
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0009000000000000001,
      "loss": 2.9632,
      "step": 22500
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.0009040000000000001,
      "loss": 2.9857,
      "step": 22600
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0009080000000000001,
      "loss": 2.9574,
      "step": 22700
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.000912,
      "loss": 2.9548,
      "step": 22800
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.000916,
      "loss": 2.9772,
      "step": 22900
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.00092,
      "loss": 2.9661,
      "step": 23000
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.000924,
      "loss": 2.9418,
      "step": 23100
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.0009280000000000001,
      "loss": 2.9544,
      "step": 23200
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.0009320000000000001,
      "loss": 2.978,
      "step": 23300
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.0009360000000000001,
      "loss": 2.9759,
      "step": 23400
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00094,
      "loss": 2.9494,
      "step": 23500
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.000944,
      "loss": 2.9474,
      "step": 23600
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.000948,
      "loss": 2.9466,
      "step": 23700
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0009519999999999999,
      "loss": 2.9378,
      "step": 23800
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0009559999999999999,
      "loss": 3.0109,
      "step": 23900
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.00096,
      "loss": 2.969,
      "step": 24000
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.000964,
      "loss": 2.9366,
      "step": 24100
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.000968,
      "loss": 2.9381,
      "step": 24200
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.000972,
      "loss": 2.9382,
      "step": 24300
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.000976,
      "loss": 2.9519,
      "step": 24400
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.00098,
      "loss": 2.9452,
      "step": 24500
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.000984,
      "loss": 2.942,
      "step": 24600
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.000988,
      "loss": 2.9315,
      "step": 24700
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.000992,
      "loss": 2.938,
      "step": 24800
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.000996,
      "loss": 2.9331,
      "step": 24900
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.001,
      "loss": 2.9488,
      "step": 25000
    },
    {
      "epoch": 1.55,
      "eval_accuracy": 0.49347509891775837,
      "eval_loss": 2.7666990756988525,
      "eval_runtime": 6163.6406,
      "eval_samples_per_second": 35.265,
      "eval_steps_per_second": 2.204,
      "step": 25000
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.000996,
      "loss": 2.9298,
      "step": 25100
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.000992,
      "loss": 2.93,
      "step": 25200
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.000988,
      "loss": 2.9365,
      "step": 25300
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.000984,
      "loss": 2.9522,
      "step": 25400
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.00098,
      "loss": 2.9344,
      "step": 25500
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.000976,
      "loss": 2.9349,
      "step": 25600
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.000972,
      "loss": 2.922,
      "step": 25700
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.000968,
      "loss": 2.9094,
      "step": 25800
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.000964,
      "loss": 2.9079,
      "step": 25900
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.00096,
      "loss": 2.9033,
      "step": 26000
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.0009559999999999999,
      "loss": 2.8957,
      "step": 26100
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.0009519999999999999,
      "loss": 2.9106,
      "step": 26200
    },
    {
      "epoch": 1.63,
      "learning_rate": 0.000948,
      "loss": 2.9186,
      "step": 26300
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.000944,
      "loss": 2.8864,
      "step": 26400
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.00094,
      "loss": 2.9045,
      "step": 26500
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.0009360000000000001,
      "loss": 2.8999,
      "step": 26600
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.0009320000000000001,
      "loss": 2.8869,
      "step": 26700
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.0009280000000000001,
      "loss": 2.8616,
      "step": 26800
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.000924,
      "loss": 2.8826,
      "step": 26900
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.00092,
      "loss": 2.8858,
      "step": 27000
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.000916,
      "loss": 2.8868,
      "step": 27100
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.000912,
      "loss": 2.8586,
      "step": 27200
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.0009080000000000001,
      "loss": 2.872,
      "step": 27300
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0009040000000000001,
      "loss": 2.8575,
      "step": 27400
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0009000000000000001,
      "loss": 2.8648,
      "step": 27500
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.000896,
      "loss": 2.8639,
      "step": 27600
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.000892,
      "loss": 2.8573,
      "step": 27700
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.000888,
      "loss": 2.849,
      "step": 27800
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.000884,
      "loss": 2.8566,
      "step": 27900
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00088,
      "loss": 2.8574,
      "step": 28000
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.000876,
      "loss": 2.8519,
      "step": 28100
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.000872,
      "loss": 2.8346,
      "step": 28200
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.0008680000000000001,
      "loss": 2.8502,
      "step": 28300
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.000864,
      "loss": 2.8269,
      "step": 28400
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.00086,
      "loss": 2.842,
      "step": 28500
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.000856,
      "loss": 2.8386,
      "step": 28600
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.000852,
      "loss": 2.8188,
      "step": 28700
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.000848,
      "loss": 2.8147,
      "step": 28800
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.000844,
      "loss": 2.82,
      "step": 28900
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.00084,
      "loss": 2.8251,
      "step": 29000
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.0008359999999999999,
      "loss": 2.8202,
      "step": 29100
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.000832,
      "loss": 2.808,
      "step": 29200
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.000828,
      "loss": 2.819,
      "step": 29300
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.000824,
      "loss": 2.8127,
      "step": 29400
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.00082,
      "loss": 2.7945,
      "step": 29500
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.000816,
      "loss": 2.8057,
      "step": 29600
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.0008120000000000001,
      "loss": 2.8077,
      "step": 29700
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.000808,
      "loss": 2.809,
      "step": 29800
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.000804,
      "loss": 2.8029,
      "step": 29900
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.0008,
      "loss": 2.7945,
      "step": 30000
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.000796,
      "loss": 2.7991,
      "step": 30100
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.0007920000000000001,
      "loss": 2.7869,
      "step": 30200
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0007880000000000001,
      "loss": 2.7931,
      "step": 30300
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0007840000000000001,
      "loss": 2.7927,
      "step": 30400
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0007800000000000001,
      "loss": 2.787,
      "step": 30500
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.000776,
      "loss": 2.7747,
      "step": 30600
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.000772,
      "loss": 2.7916,
      "step": 30700
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.000768,
      "loss": 2.7759,
      "step": 30800
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.000764,
      "loss": 2.7599,
      "step": 30900
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.00076,
      "loss": 2.7578,
      "step": 31000
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.000756,
      "loss": 2.7688,
      "step": 31100
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.0007520000000000001,
      "loss": 2.7586,
      "step": 31200
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.000748,
      "loss": 2.7641,
      "step": 31300
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.000744,
      "loss": 2.7688,
      "step": 31400
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.00074,
      "loss": 2.761,
      "step": 31500
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.000736,
      "loss": 2.7543,
      "step": 31600
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.000732,
      "loss": 2.7434,
      "step": 31700
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.000728,
      "loss": 2.7458,
      "step": 31800
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.000724,
      "loss": 2.7583,
      "step": 31900
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.0007199999999999999,
      "loss": 2.7356,
      "step": 32000
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.000716,
      "loss": 2.7437,
      "step": 32100
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.000712,
      "loss": 2.7392,
      "step": 32200
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.000708,
      "loss": 2.7443,
      "step": 32300
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.000704,
      "loss": 2.7234,
      "step": 32400
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0007,
      "loss": 2.7302,
      "step": 32500
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.000696,
      "loss": 2.72,
      "step": 32600
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.000692,
      "loss": 2.7183,
      "step": 32700
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.0006879999999999999,
      "loss": 2.7129,
      "step": 32800
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.000684,
      "loss": 2.7253,
      "step": 32900
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.00068,
      "loss": 2.7299,
      "step": 33000
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.0006760000000000001,
      "loss": 2.7195,
      "step": 33100
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.0006720000000000001,
      "loss": 2.7026,
      "step": 33200
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.0006680000000000001,
      "loss": 2.7002,
      "step": 33300
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.0006640000000000001,
      "loss": 2.7065,
      "step": 33400
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.00066,
      "loss": 2.7092,
      "step": 33500
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.000656,
      "loss": 2.7014,
      "step": 33600
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.000652,
      "loss": 2.7083,
      "step": 33700
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.000648,
      "loss": 2.6784,
      "step": 33800
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.000644,
      "loss": 2.6933,
      "step": 33900
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.00064,
      "loss": 2.7003,
      "step": 34000
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0006360000000000001,
      "loss": 2.6866,
      "step": 34100
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.000632,
      "loss": 2.6984,
      "step": 34200
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.000628,
      "loss": 2.678,
      "step": 34300
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.000624,
      "loss": 2.698,
      "step": 34400
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.00062,
      "loss": 2.6927,
      "step": 34500
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.000616,
      "loss": 2.6787,
      "step": 34600
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.000612,
      "loss": 2.6916,
      "step": 34700
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.000608,
      "loss": 2.6794,
      "step": 34800
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.000604,
      "loss": 2.6723,
      "step": 34900
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.0006,
      "loss": 2.6582,
      "step": 35000
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.000596,
      "loss": 2.6683,
      "step": 35100
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.000592,
      "loss": 2.6748,
      "step": 35200
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.000588,
      "loss": 2.6711,
      "step": 35300
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.000584,
      "loss": 2.6566,
      "step": 35400
    },
    {
      "epoch": 2.2,
      "learning_rate": 0.00058,
      "loss": 2.6589,
      "step": 35500
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.000576,
      "loss": 2.6656,
      "step": 35600
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.0005719999999999999,
      "loss": 2.6423,
      "step": 35700
    },
    {
      "epoch": 2.22,
      "learning_rate": 0.0005679999999999999,
      "loss": 2.6561,
      "step": 35800
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0005639999999999999,
      "loss": 2.6579,
      "step": 35900
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0005600000000000001,
      "loss": 2.6596,
      "step": 36000
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0005560000000000001,
      "loss": 2.6419,
      "step": 36100
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0005520000000000001,
      "loss": 2.6477,
      "step": 36200
    },
    {
      "epoch": 2.25,
      "learning_rate": 0.0005480000000000001,
      "loss": 2.6391,
      "step": 36300
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.0005440000000000001,
      "loss": 2.6337,
      "step": 36400
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.00054,
      "loss": 2.6417,
      "step": 36500
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.000536,
      "loss": 2.6318,
      "step": 36600
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.000532,
      "loss": 2.6409,
      "step": 36700
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.000528,
      "loss": 2.6469,
      "step": 36800
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.000524,
      "loss": 2.6394,
      "step": 36900
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.0005200000000000001,
      "loss": 2.6342,
      "step": 37000
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.0005160000000000001,
      "loss": 2.6335,
      "step": 37100
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.000512,
      "loss": 2.6274,
      "step": 37200
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.000508,
      "loss": 2.6209,
      "step": 37300
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.000504,
      "loss": 2.6213,
      "step": 37400
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.0005,
      "loss": 2.623,
      "step": 37500
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.000496,
      "loss": 2.617,
      "step": 37600
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.000492,
      "loss": 2.6276,
      "step": 37700
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.000488,
      "loss": 2.627,
      "step": 37800
    },
    {
      "epoch": 2.35,
      "learning_rate": 0.000484,
      "loss": 2.6128,
      "step": 37900
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.00048,
      "loss": 2.5874,
      "step": 38000
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.00047599999999999997,
      "loss": 2.6047,
      "step": 38100
    },
    {
      "epoch": 2.37,
      "learning_rate": 0.000472,
      "loss": 2.6107,
      "step": 38200
    },
    {
      "epoch": 2.37,
      "learning_rate": 0.00046800000000000005,
      "loss": 2.6277,
      "step": 38300
    },
    {
      "epoch": 2.38,
      "learning_rate": 0.00046400000000000006,
      "loss": 2.6046,
      "step": 38400
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.00046,
      "loss": 2.6021,
      "step": 38500
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.000456,
      "loss": 2.5964,
      "step": 38600
    },
    {
      "epoch": 2.4,
      "learning_rate": 0.00045200000000000004,
      "loss": 2.5984,
      "step": 38700
    },
    {
      "epoch": 2.41,
      "learning_rate": 0.000448,
      "loss": 2.6017,
      "step": 38800
    },
    {
      "epoch": 2.41,
      "learning_rate": 0.000444,
      "loss": 2.5829,
      "step": 38900
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.00044,
      "loss": 2.6043,
      "step": 39000
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.000436,
      "loss": 2.5952,
      "step": 39100
    },
    {
      "epoch": 2.43,
      "learning_rate": 0.000432,
      "loss": 2.5914,
      "step": 39200
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.000428,
      "loss": 2.5807,
      "step": 39300
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.000424,
      "loss": 2.5741,
      "step": 39400
    },
    {
      "epoch": 2.45,
      "learning_rate": 0.00042,
      "loss": 2.5896,
      "step": 39500
    },
    {
      "epoch": 2.45,
      "learning_rate": 0.000416,
      "loss": 2.5787,
      "step": 39600
    },
    {
      "epoch": 2.46,
      "learning_rate": 0.000412,
      "loss": 2.5855,
      "step": 39700
    },
    {
      "epoch": 2.47,
      "learning_rate": 0.000408,
      "loss": 2.5841,
      "step": 39800
    },
    {
      "epoch": 2.47,
      "learning_rate": 0.000404,
      "loss": 2.5796,
      "step": 39900
    },
    {
      "epoch": 2.48,
      "learning_rate": 0.0004,
      "loss": 2.5741,
      "step": 40000
    },
    {
      "epoch": 2.49,
      "learning_rate": 0.00039600000000000003,
      "loss": 2.5621,
      "step": 40100
    },
    {
      "epoch": 2.49,
      "learning_rate": 0.00039200000000000004,
      "loss": 2.5595,
      "step": 40200
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.000388,
      "loss": 2.5762,
      "step": 40300
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.000384,
      "loss": 2.557,
      "step": 40400
    },
    {
      "epoch": 2.51,
      "learning_rate": 0.00038,
      "loss": 2.572,
      "step": 40500
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.00037600000000000003,
      "loss": 2.5602,
      "step": 40600
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.000372,
      "loss": 2.5448,
      "step": 40700
    },
    {
      "epoch": 2.53,
      "learning_rate": 0.000368,
      "loss": 2.5611,
      "step": 40800
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.000364,
      "loss": 2.5426,
      "step": 40900
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.00035999999999999997,
      "loss": 2.5544,
      "step": 41000
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.000356,
      "loss": 2.5621,
      "step": 41100
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.000352,
      "loss": 2.5451,
      "step": 41200
    },
    {
      "epoch": 2.56,
      "learning_rate": 0.000348,
      "loss": 2.5358,
      "step": 41300
    },
    {
      "epoch": 2.57,
      "learning_rate": 0.00034399999999999996,
      "loss": 2.5481,
      "step": 41400
    },
    {
      "epoch": 2.57,
      "learning_rate": 0.00034,
      "loss": 2.5407,
      "step": 41500
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.00033600000000000004,
      "loss": 2.5451,
      "step": 41600
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.00033200000000000005,
      "loss": 2.5317,
      "step": 41700
    },
    {
      "epoch": 2.59,
      "learning_rate": 0.000328,
      "loss": 2.5421,
      "step": 41800
    },
    {
      "epoch": 2.6,
      "learning_rate": 0.000324,
      "loss": 2.5286,
      "step": 41900
    },
    {
      "epoch": 2.6,
      "learning_rate": 0.00032,
      "loss": 2.5366,
      "step": 42000
    },
    {
      "epoch": 2.61,
      "learning_rate": 0.000316,
      "loss": 2.5288,
      "step": 42100
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.000312,
      "loss": 2.5221,
      "step": 42200
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.000308,
      "loss": 2.5278,
      "step": 42300
    },
    {
      "epoch": 2.63,
      "learning_rate": 0.000304,
      "loss": 2.545,
      "step": 42400
    },
    {
      "epoch": 2.63,
      "learning_rate": 0.0003,
      "loss": 2.5246,
      "step": 42500
    },
    {
      "epoch": 2.64,
      "learning_rate": 0.000296,
      "loss": 2.5333,
      "step": 42600
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.000292,
      "loss": 2.5258,
      "step": 42700
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.000288,
      "loss": 2.5334,
      "step": 42800
    },
    {
      "epoch": 2.66,
      "learning_rate": 0.00028399999999999996,
      "loss": 2.5101,
      "step": 42900
    },
    {
      "epoch": 2.67,
      "learning_rate": 0.00028000000000000003,
      "loss": 2.5168,
      "step": 43000
    },
    {
      "epoch": 2.67,
      "learning_rate": 0.00027600000000000004,
      "loss": 2.5184,
      "step": 43100
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.00027200000000000005,
      "loss": 2.5236,
      "step": 43200
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.000268,
      "loss": 2.5015,
      "step": 43300
    },
    {
      "epoch": 2.69,
      "learning_rate": 0.000264,
      "loss": 2.5106,
      "step": 43400
    },
    {
      "epoch": 2.7,
      "learning_rate": 0.00026000000000000003,
      "loss": 2.5095,
      "step": 43500
    },
    {
      "epoch": 2.7,
      "learning_rate": 0.000256,
      "loss": 2.5089,
      "step": 43600
    },
    {
      "epoch": 2.71,
      "learning_rate": 0.000252,
      "loss": 2.5086,
      "step": 43700
    },
    {
      "epoch": 2.72,
      "learning_rate": 0.000248,
      "loss": 2.4996,
      "step": 43800
    },
    {
      "epoch": 2.72,
      "learning_rate": 0.000244,
      "loss": 2.5202,
      "step": 43900
    },
    {
      "epoch": 2.73,
      "learning_rate": 0.00024,
      "loss": 2.5012,
      "step": 44000
    },
    {
      "epoch": 2.73,
      "learning_rate": 0.000236,
      "loss": 2.51,
      "step": 44100
    },
    {
      "epoch": 2.74,
      "learning_rate": 0.00023200000000000003,
      "loss": 2.5011,
      "step": 44200
    },
    {
      "epoch": 2.75,
      "learning_rate": 0.000228,
      "loss": 2.4794,
      "step": 44300
    },
    {
      "epoch": 2.75,
      "learning_rate": 0.000224,
      "loss": 2.4736,
      "step": 44400
    },
    {
      "epoch": 2.76,
      "learning_rate": 0.00022,
      "loss": 2.5016,
      "step": 44500
    },
    {
      "epoch": 2.76,
      "learning_rate": 0.000216,
      "loss": 2.4874,
      "step": 44600
    },
    {
      "epoch": 2.77,
      "learning_rate": 0.000212,
      "loss": 2.4932,
      "step": 44700
    },
    {
      "epoch": 2.78,
      "learning_rate": 0.000208,
      "loss": 2.5032,
      "step": 44800
    },
    {
      "epoch": 2.78,
      "learning_rate": 0.000204,
      "loss": 2.4956,
      "step": 44900
    },
    {
      "epoch": 2.79,
      "learning_rate": 0.0002,
      "loss": 2.5014,
      "step": 45000
    },
    {
      "epoch": 2.8,
      "learning_rate": 0.00019600000000000002,
      "loss": 2.4986,
      "step": 45100
    },
    {
      "epoch": 2.8,
      "learning_rate": 0.000192,
      "loss": 2.4685,
      "step": 45200
    },
    {
      "epoch": 2.81,
      "learning_rate": 0.00018800000000000002,
      "loss": 2.4901,
      "step": 45300
    },
    {
      "epoch": 2.81,
      "learning_rate": 0.000184,
      "loss": 2.4783,
      "step": 45400
    },
    {
      "epoch": 2.82,
      "learning_rate": 0.00017999999999999998,
      "loss": 2.4947,
      "step": 45500
    },
    {
      "epoch": 2.83,
      "learning_rate": 0.000176,
      "loss": 2.4696,
      "step": 45600
    },
    {
      "epoch": 2.83,
      "learning_rate": 0.00017199999999999998,
      "loss": 2.4728,
      "step": 45700
    },
    {
      "epoch": 2.84,
      "learning_rate": 0.00016800000000000002,
      "loss": 2.4791,
      "step": 45800
    },
    {
      "epoch": 2.85,
      "learning_rate": 0.000164,
      "loss": 2.4764,
      "step": 45900
    },
    {
      "epoch": 2.85,
      "learning_rate": 0.00016,
      "loss": 2.4861,
      "step": 46000
    },
    {
      "epoch": 2.86,
      "learning_rate": 0.000156,
      "loss": 2.4619,
      "step": 46100
    },
    {
      "epoch": 2.86,
      "learning_rate": 0.000152,
      "loss": 2.4698,
      "step": 46200
    },
    {
      "epoch": 2.87,
      "learning_rate": 0.000148,
      "loss": 2.489,
      "step": 46300
    },
    {
      "epoch": 2.88,
      "learning_rate": 0.000144,
      "loss": 2.4607,
      "step": 46400
    },
    {
      "epoch": 2.88,
      "learning_rate": 0.00014000000000000001,
      "loss": 2.4727,
      "step": 46500
    },
    {
      "epoch": 2.89,
      "learning_rate": 0.00013600000000000003,
      "loss": 2.4548,
      "step": 46600
    },
    {
      "epoch": 2.89,
      "learning_rate": 0.000132,
      "loss": 2.4553,
      "step": 46700
    },
    {
      "epoch": 2.9,
      "learning_rate": 0.000128,
      "loss": 2.465,
      "step": 46800
    },
    {
      "epoch": 2.91,
      "learning_rate": 0.000124,
      "loss": 2.4601,
      "step": 46900
    },
    {
      "epoch": 2.91,
      "learning_rate": 0.00012,
      "loss": 2.4554,
      "step": 47000
    },
    {
      "epoch": 2.92,
      "learning_rate": 0.00011600000000000001,
      "loss": 2.4575,
      "step": 47100
    },
    {
      "epoch": 2.93,
      "learning_rate": 0.000112,
      "loss": 2.4515,
      "step": 47200
    },
    {
      "epoch": 2.93,
      "learning_rate": 0.000108,
      "loss": 2.4613,
      "step": 47300
    },
    {
      "epoch": 2.94,
      "learning_rate": 0.000104,
      "loss": 2.4583,
      "step": 47400
    },
    {
      "epoch": 2.94,
      "learning_rate": 0.0001,
      "loss": 2.4455,
      "step": 47500
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.6e-05,
      "loss": 2.4446,
      "step": 47600
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.2e-05,
      "loss": 2.4488,
      "step": 47700
    },
    {
      "epoch": 2.96,
      "learning_rate": 8.8e-05,
      "loss": 2.4454,
      "step": 47800
    },
    {
      "epoch": 2.97,
      "learning_rate": 8.400000000000001e-05,
      "loss": 2.4445,
      "step": 47900
    },
    {
      "epoch": 2.98,
      "learning_rate": 8e-05,
      "loss": 2.4388,
      "step": 48000
    },
    {
      "epoch": 2.98,
      "learning_rate": 7.6e-05,
      "loss": 2.4506,
      "step": 48100
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.2e-05,
      "loss": 2.4392,
      "step": 48200
    },
    {
      "epoch": 2.99,
      "learning_rate": 6.800000000000001e-05,
      "loss": 2.4408,
      "step": 48300
    },
    {
      "epoch": 3.0,
      "learning_rate": 6.4e-05,
      "loss": 2.4391,
      "step": 48400
    },
    {
      "epoch": 3.01,
      "learning_rate": 6e-05,
      "loss": 2.4281,
      "step": 48500
    },
    {
      "epoch": 3.01,
      "learning_rate": 5.6e-05,
      "loss": 2.4451,
      "step": 48600
    },
    {
      "epoch": 3.02,
      "learning_rate": 5.2e-05,
      "loss": 2.4277,
      "step": 48700
    },
    {
      "epoch": 3.02,
      "learning_rate": 4.8e-05,
      "loss": 2.4388,
      "step": 48800
    },
    {
      "epoch": 3.03,
      "learning_rate": 4.4e-05,
      "loss": 2.4368,
      "step": 48900
    },
    {
      "epoch": 3.04,
      "learning_rate": 4e-05,
      "loss": 2.423,
      "step": 49000
    },
    {
      "epoch": 3.04,
      "learning_rate": 3.6e-05,
      "loss": 2.4458,
      "step": 49100
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.2e-05,
      "loss": 2.4218,
      "step": 49200
    },
    {
      "epoch": 3.06,
      "learning_rate": 2.8e-05,
      "loss": 2.4226,
      "step": 49300
    },
    {
      "epoch": 3.06,
      "learning_rate": 2.4e-05,
      "loss": 2.4287,
      "step": 49400
    },
    {
      "epoch": 3.07,
      "learning_rate": 2e-05,
      "loss": 2.4237,
      "step": 49500
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.6e-05,
      "loss": 2.421,
      "step": 49600
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.2e-05,
      "loss": 2.4214,
      "step": 49700
    },
    {
      "epoch": 3.09,
      "learning_rate": 8e-06,
      "loss": 2.413,
      "step": 49800
    },
    {
      "epoch": 3.09,
      "learning_rate": 4e-06,
      "loss": 2.4309,
      "step": 49900
    },
    {
      "epoch": 3.1,
      "learning_rate": 0.0,
      "loss": 2.4233,
      "step": 50000
    },
    {
      "epoch": 3.1,
      "eval_accuracy": 0.5611543501568519,
      "eval_loss": 2.2921805381774902,
      "eval_runtime": 6159.6175,
      "eval_samples_per_second": 35.288,
      "eval_steps_per_second": 2.206,
      "step": 50000
    },
    {
      "epoch": 3.1,
      "step": 50000,
      "total_flos": 7.554075721728e+17,
      "train_loss": 3.1285950134277343,
      "train_runtime": 151231.41,
      "train_samples_per_second": 84.639,
      "train_steps_per_second": 0.331
    }
  ],
  "max_steps": 50000,
  "num_train_epochs": 4,
  "total_flos": 7.554075721728e+17,
  "trial_name": null,
  "trial_params": null
}