{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005,
      "grad_norm": 99.5,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3438,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 110.5,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.3195,
      "step": 20
    },
    {
      "epoch": 0.015,
      "grad_norm": 101.5,
      "learning_rate": 3e-06,
      "loss": 1.3234,
      "step": 30
    },
    {
      "epoch": 0.02,
      "grad_norm": 117.0,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.332,
      "step": 40
    },
    {
      "epoch": 0.025,
      "grad_norm": 182.0,
      "learning_rate": 5e-06,
      "loss": 1.2867,
      "step": 50
    },
    {
      "epoch": 0.03,
      "grad_norm": 133.0,
      "learning_rate": 6e-06,
      "loss": 1.1648,
      "step": 60
    },
    {
      "epoch": 0.035,
      "grad_norm": 45.75,
      "learning_rate": 7e-06,
      "loss": 0.991,
      "step": 70
    },
    {
      "epoch": 0.04,
      "grad_norm": 135.0,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.8773,
      "step": 80
    },
    {
      "epoch": 0.045,
      "grad_norm": 34.25,
      "learning_rate": 9e-06,
      "loss": 0.8332,
      "step": 90
    },
    {
      "epoch": 0.05,
      "grad_norm": 43.25,
      "learning_rate": 1e-05,
      "loss": 0.7129,
      "step": 100
    },
    {
      "epoch": 0.055,
      "grad_norm": 20.5,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.6535,
      "step": 110
    },
    {
      "epoch": 0.06,
      "grad_norm": 26.75,
      "learning_rate": 1.2e-05,
      "loss": 0.5713,
      "step": 120
    },
    {
      "epoch": 0.065,
      "grad_norm": 37.75,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.5668,
      "step": 130
    },
    {
      "epoch": 0.07,
      "grad_norm": 39.25,
      "learning_rate": 1.4e-05,
      "loss": 0.535,
      "step": 140
    },
    {
      "epoch": 0.075,
      "grad_norm": 139.0,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.5762,
      "step": 150
    },
    {
      "epoch": 0.08,
      "grad_norm": 20.5,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.51,
      "step": 160
    },
    {
      "epoch": 0.085,
      "grad_norm": 61.0,
      "learning_rate": 1.7e-05,
      "loss": 0.4625,
      "step": 170
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.796875,
      "learning_rate": 1.8e-05,
      "loss": 0.4006,
      "step": 180
    },
    {
      "epoch": 0.095,
      "grad_norm": 7.59375,
      "learning_rate": 1.9e-05,
      "loss": 0.4035,
      "step": 190
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.90625,
      "learning_rate": 2e-05,
      "loss": 0.3875,
      "step": 200
    },
    {
      "epoch": 0.105,
      "grad_norm": 4.5625,
      "learning_rate": 1.9998476951563914e-05,
      "loss": 0.351,
      "step": 210
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.609375,
      "learning_rate": 1.999390827019096e-05,
      "loss": 0.3271,
      "step": 220
    },
    {
      "epoch": 0.115,
      "grad_norm": 3.578125,
      "learning_rate": 1.9986295347545738e-05,
      "loss": 0.301,
      "step": 230
    },
    {
      "epoch": 0.12,
      "grad_norm": 12.9375,
      "learning_rate": 1.9975640502598243e-05,
      "loss": 0.2996,
      "step": 240
    },
    {
      "epoch": 0.125,
      "grad_norm": 4.84375,
      "learning_rate": 1.9961946980917457e-05,
      "loss": 0.2951,
      "step": 250
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.59375,
      "learning_rate": 1.9945218953682736e-05,
      "loss": 0.2805,
      "step": 260
    },
    {
      "epoch": 0.135,
      "grad_norm": 2.84375,
      "learning_rate": 1.9925461516413224e-05,
      "loss": 0.2726,
      "step": 270
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.109375,
      "learning_rate": 1.9902680687415704e-05,
      "loss": 0.2619,
      "step": 280
    },
    {
      "epoch": 0.145,
      "grad_norm": 3.734375,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.2633,
      "step": 290
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.875,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.2517,
      "step": 300
    },
    {
      "epoch": 0.155,
      "grad_norm": 3.796875,
      "learning_rate": 1.9816271834476642e-05,
      "loss": 0.2622,
      "step": 310
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.703125,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 0.2524,
      "step": 320
    },
    {
      "epoch": 0.165,
      "grad_norm": 3.109375,
      "learning_rate": 1.9743700647852356e-05,
      "loss": 0.2375,
      "step": 330
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.65625,
      "learning_rate": 1.9702957262759964e-05,
      "loss": 0.2414,
      "step": 340
    },
    {
      "epoch": 0.175,
      "grad_norm": 2.203125,
      "learning_rate": 1.9659258262890683e-05,
      "loss": 0.2405,
      "step": 350
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.734375,
      "learning_rate": 1.961261695938319e-05,
      "loss": 0.2604,
      "step": 360
    },
    {
      "epoch": 0.185,
      "grad_norm": 2.625,
      "learning_rate": 1.9563047559630356e-05,
      "loss": 0.2386,
      "step": 370
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.65625,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.2479,
      "step": 380
    },
    {
      "epoch": 0.195,
      "grad_norm": 4.0625,
      "learning_rate": 1.945518575599317e-05,
      "loss": 0.2427,
      "step": 390
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.84375,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.2357,
      "step": 400
    },
    {
      "epoch": 0.205,
      "grad_norm": 3.28125,
      "learning_rate": 1.9335804264972018e-05,
      "loss": 0.2371,
      "step": 410
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.8125,
      "learning_rate": 1.9271838545667876e-05,
      "loss": 0.2343,
      "step": 420
    },
    {
      "epoch": 0.215,
      "grad_norm": 3.75,
      "learning_rate": 1.9205048534524405e-05,
      "loss": 0.2278,
      "step": 430
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.15625,
      "learning_rate": 1.913545457642601e-05,
      "loss": 0.2275,
      "step": 440
    },
    {
      "epoch": 0.225,
      "grad_norm": 1.859375,
      "learning_rate": 1.9063077870366504e-05,
      "loss": 0.2349,
      "step": 450
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.28125,
      "learning_rate": 1.8987940462991673e-05,
      "loss": 0.2354,
      "step": 460
    },
    {
      "epoch": 0.235,
      "grad_norm": 3.078125,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.2296,
      "step": 470
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.921875,
      "learning_rate": 1.8829475928589272e-05,
      "loss": 0.2383,
      "step": 480
    },
    {
      "epoch": 0.245,
      "grad_norm": 1.9375,
      "learning_rate": 1.874619707139396e-05,
      "loss": 0.2272,
      "step": 490
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.421875,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.2134,
      "step": 500
    },
    {
      "epoch": 0.255,
      "grad_norm": 2.484375,
      "learning_rate": 1.8571673007021124e-05,
      "loss": 0.2243,
      "step": 510
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.109375,
      "learning_rate": 1.848048096156426e-05,
      "loss": 0.2193,
      "step": 520
    },
    {
      "epoch": 0.265,
      "grad_norm": 3.9375,
      "learning_rate": 1.8386705679454243e-05,
      "loss": 0.2273,
      "step": 530
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.21875,
      "learning_rate": 1.8290375725550417e-05,
      "loss": 0.2113,
      "step": 540
    },
    {
      "epoch": 0.275,
      "grad_norm": 2.1875,
      "learning_rate": 1.819152044288992e-05,
      "loss": 0.218,
      "step": 550
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.65625,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.2086,
      "step": 560
    },
    {
      "epoch": 0.285,
      "grad_norm": 1.65625,
      "learning_rate": 1.798635510047293e-05,
      "loss": 0.2122,
      "step": 570
    },
    {
      "epoch": 0.29,
      "grad_norm": 3.203125,
      "learning_rate": 1.788010753606722e-05,
      "loss": 0.2165,
      "step": 580
    },
    {
      "epoch": 0.295,
      "grad_norm": 1.5,
      "learning_rate": 1.777145961456971e-05,
      "loss": 0.2059,
      "step": 590
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.34375,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.2121,
      "step": 600
    },
    {
      "epoch": 0.305,
      "grad_norm": 2.34375,
      "learning_rate": 1.7547095802227723e-05,
      "loss": 0.2029,
      "step": 610
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.140625,
      "learning_rate": 1.7431448254773943e-05,
      "loss": 0.2092,
      "step": 620
    },
    {
      "epoch": 0.315,
      "grad_norm": 2.65625,
      "learning_rate": 1.7313537016191706e-05,
      "loss": 0.2082,
      "step": 630
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.265625,
      "learning_rate": 1.7193398003386514e-05,
      "loss": 0.2084,
      "step": 640
    },
    {
      "epoch": 0.325,
      "grad_norm": 1.75,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.2109,
      "step": 650
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.9140625,
      "learning_rate": 1.6946583704589973e-05,
      "loss": 0.2124,
      "step": 660
    },
    {
      "epoch": 0.335,
      "grad_norm": 1.9375,
      "learning_rate": 1.6819983600624986e-05,
      "loss": 0.2048,
      "step": 670
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.578125,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 0.2036,
      "step": 680
    },
    {
      "epoch": 0.345,
      "grad_norm": 1.8359375,
      "learning_rate": 1.6560590289905074e-05,
      "loss": 0.2068,
      "step": 690
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.8125,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.2078,
      "step": 700
    },
    {
      "epoch": 0.355,
      "grad_norm": 1.5703125,
      "learning_rate": 1.6293203910498375e-05,
      "loss": 0.202,
      "step": 710
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.9375,
      "learning_rate": 1.6156614753256583e-05,
      "loss": 0.2026,
      "step": 720
    },
    {
      "epoch": 0.365,
      "grad_norm": 3.53125,
      "learning_rate": 1.6018150231520486e-05,
      "loss": 0.2104,
      "step": 730
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.890625,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.2106,
      "step": 740
    },
    {
      "epoch": 0.375,
      "grad_norm": 1.9296875,
      "learning_rate": 1.573576436351046e-05,
      "loss": 0.1996,
      "step": 750
    },
    {
      "epoch": 0.38,
      "grad_norm": 4.5625,
      "learning_rate": 1.5591929034707468e-05,
      "loss": 0.1965,
      "step": 760
    },
    {
      "epoch": 0.385,
      "grad_norm": 1.703125,
      "learning_rate": 1.5446390350150272e-05,
      "loss": 0.1935,
      "step": 770
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.140625,
      "learning_rate": 1.529919264233205e-05,
      "loss": 0.1954,
      "step": 780
    },
    {
      "epoch": 0.395,
      "grad_norm": 4.1875,
      "learning_rate": 1.5150380749100545e-05,
      "loss": 0.2036,
      "step": 790
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.328125,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.2004,
      "step": 800
    },
    {
      "epoch": 0.405,
      "grad_norm": 2.03125,
      "learning_rate": 1.4848096202463373e-05,
      "loss": 0.1983,
      "step": 810
    },
    {
      "epoch": 0.41,
      "grad_norm": 3.15625,
      "learning_rate": 1.469471562785891e-05,
      "loss": 0.1967,
      "step": 820
    },
    {
      "epoch": 0.415,
      "grad_norm": 2.296875,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.1923,
      "step": 830
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.71875,
      "learning_rate": 1.4383711467890776e-05,
      "loss": 0.1982,
      "step": 840
    },
    {
      "epoch": 0.425,
      "grad_norm": 3.09375,
      "learning_rate": 1.4226182617406996e-05,
      "loss": 0.1933,
      "step": 850
    },
    {
      "epoch": 0.43,
      "grad_norm": 2.328125,
      "learning_rate": 1.4067366430758004e-05,
      "loss": 0.1886,
      "step": 860
    },
    {
      "epoch": 0.435,
      "grad_norm": 2.0625,
      "learning_rate": 1.3907311284892737e-05,
      "loss": 0.1909,
      "step": 870
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.140625,
      "learning_rate": 1.3746065934159123e-05,
      "loss": 0.1916,
      "step": 880
    },
    {
      "epoch": 0.445,
      "grad_norm": 1.8359375,
      "learning_rate": 1.3583679495453e-05,
      "loss": 0.1938,
      "step": 890
    },
    {
      "epoch": 0.45,
      "grad_norm": 4.625,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.1959,
      "step": 900
    },
    {
      "epoch": 0.455,
      "grad_norm": 1.7734375,
      "learning_rate": 1.3255681544571568e-05,
      "loss": 0.1952,
      "step": 910
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.671875,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.1925,
      "step": 920
    },
    {
      "epoch": 0.465,
      "grad_norm": 2.390625,
      "learning_rate": 1.2923717047227368e-05,
      "loss": 0.191,
      "step": 930
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.265625,
      "learning_rate": 1.2756373558169992e-05,
      "loss": 0.1938,
      "step": 940
    },
    {
      "epoch": 0.475,
      "grad_norm": 4.65625,
      "learning_rate": 1.2588190451025209e-05,
      "loss": 0.189,
      "step": 950
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.9375,
      "learning_rate": 1.2419218955996677e-05,
      "loss": 0.1918,
      "step": 960
    },
    {
      "epoch": 0.485,
      "grad_norm": 8.4375,
      "learning_rate": 1.2249510543438652e-05,
      "loss": 0.1886,
      "step": 970
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.8125,
      "learning_rate": 1.2079116908177592e-05,
      "loss": 0.1876,
      "step": 980
    },
    {
      "epoch": 0.495,
      "grad_norm": 2.78125,
      "learning_rate": 1.190808995376545e-05,
      "loss": 0.1943,
      "step": 990
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.90625,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.1953,
      "step": 1000
    },
    {
      "epoch": 0.505,
      "grad_norm": 2.921875,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.1946,
      "step": 1010
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.3125,
      "learning_rate": 1.1391731009600655e-05,
      "loss": 0.1862,
      "step": 1020
    },
    {
      "epoch": 0.515,
      "grad_norm": 1.59375,
      "learning_rate": 1.1218693434051475e-05,
      "loss": 0.1854,
      "step": 1030
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.8125,
      "learning_rate": 1.1045284632676535e-05,
      "loss": 0.1918,
      "step": 1040
    },
    {
      "epoch": 0.525,
      "grad_norm": 2.046875,
      "learning_rate": 1.0871557427476585e-05,
      "loss": 0.1848,
      "step": 1050
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.4765625,
      "learning_rate": 1.0697564737441254e-05,
      "loss": 0.1855,
      "step": 1060
    },
    {
      "epoch": 0.535,
      "grad_norm": 2.484375,
      "learning_rate": 1.0523359562429441e-05,
      "loss": 0.1846,
      "step": 1070
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.46875,
      "learning_rate": 1.0348994967025012e-05,
      "loss": 0.1818,
      "step": 1080
    },
    {
      "epoch": 0.545,
      "grad_norm": 1.7734375,
      "learning_rate": 1.0174524064372837e-05,
      "loss": 0.1812,
      "step": 1090
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.203125,
      "learning_rate": 1e-05,
      "loss": 0.1866,
      "step": 1100
    },
    {
      "epoch": 0.555,
      "grad_norm": 2.078125,
      "learning_rate": 9.825475935627165e-06,
      "loss": 0.1901,
      "step": 1110
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.9296875,
      "learning_rate": 9.651005032974994e-06,
      "loss": 0.1753,
      "step": 1120
    },
    {
      "epoch": 0.565,
      "grad_norm": 1.6875,
      "learning_rate": 9.476640437570562e-06,
      "loss": 0.1809,
      "step": 1130
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.8671875,
      "learning_rate": 9.302435262558748e-06,
      "loss": 0.1819,
      "step": 1140
    },
    {
      "epoch": 0.575,
      "grad_norm": 1.7890625,
      "learning_rate": 9.128442572523418e-06,
      "loss": 0.1832,
      "step": 1150
    },
    {
      "epoch": 0.58,
      "grad_norm": 3.25,
      "learning_rate": 8.954715367323468e-06,
      "loss": 0.1906,
      "step": 1160
    },
    {
      "epoch": 0.585,
      "grad_norm": 2.109375,
      "learning_rate": 8.781306565948528e-06,
      "loss": 0.1863,
      "step": 1170
    },
    {
      "epoch": 0.59,
      "grad_norm": 4.96875,
      "learning_rate": 8.60826899039935e-06,
      "loss": 0.1851,
      "step": 1180
    },
    {
      "epoch": 0.595,
      "grad_norm": 1.6328125,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.1855,
      "step": 1190
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.078125,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.1856,
      "step": 1200
    },
    {
      "epoch": 0.605,
      "grad_norm": 2.203125,
      "learning_rate": 8.091910046234552e-06,
      "loss": 0.1859,
      "step": 1210
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.640625,
      "learning_rate": 7.92088309182241e-06,
      "loss": 0.1792,
      "step": 1220
    },
    {
      "epoch": 0.615,
      "grad_norm": 2.53125,
      "learning_rate": 7.750489456561351e-06,
      "loss": 0.1798,
      "step": 1230
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.875,
      "learning_rate": 7.580781044003324e-06,
      "loss": 0.1848,
      "step": 1240
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.65625,
      "learning_rate": 7.411809548974792e-06,
      "loss": 0.1843,
      "step": 1250
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.953125,
      "learning_rate": 7.243626441830009e-06,
      "loss": 0.1804,
      "step": 1260
    },
    {
      "epoch": 0.635,
      "grad_norm": 5.96875,
      "learning_rate": 7.076282952772634e-06,
      "loss": 0.189,
      "step": 1270
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.671875,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.1835,
      "step": 1280
    },
    {
      "epoch": 0.645,
      "grad_norm": 2.03125,
      "learning_rate": 6.744318455428436e-06,
      "loss": 0.179,
      "step": 1290
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.84375,
      "learning_rate": 6.579798566743314e-06,
      "loss": 0.1777,
      "step": 1300
    },
    {
      "epoch": 0.655,
      "grad_norm": 2.15625,
      "learning_rate": 6.4163205045469975e-06,
      "loss": 0.1791,
      "step": 1310
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.78125,
      "learning_rate": 6.25393406584088e-06,
      "loss": 0.1857,
      "step": 1320
    },
    {
      "epoch": 0.665,
      "grad_norm": 3.1875,
      "learning_rate": 6.092688715107265e-06,
      "loss": 0.1775,
      "step": 1330
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.484375,
      "learning_rate": 5.932633569242e-06,
      "loss": 0.1835,
      "step": 1340
    },
    {
      "epoch": 0.675,
      "grad_norm": 2.625,
      "learning_rate": 5.773817382593008e-06,
      "loss": 0.1746,
      "step": 1350
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.8671875,
      "learning_rate": 5.616288532109225e-06,
      "loss": 0.1795,
      "step": 1360
    },
    {
      "epoch": 0.685,
      "grad_norm": 2.0,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.1795,
      "step": 1370
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.421875,
      "learning_rate": 5.305284372141095e-06,
      "loss": 0.1729,
      "step": 1380
    },
    {
      "epoch": 0.695,
      "grad_norm": 1.2578125,
      "learning_rate": 5.151903797536631e-06,
      "loss": 0.1793,
      "step": 1390
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4140625,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.174,
      "step": 1400
    },
    {
      "epoch": 0.705,
      "grad_norm": 2.25,
      "learning_rate": 4.849619250899458e-06,
      "loss": 0.1799,
      "step": 1410
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.765625,
      "learning_rate": 4.700807357667953e-06,
      "loss": 0.1791,
      "step": 1420
    },
    {
      "epoch": 0.715,
      "grad_norm": 4.5625,
      "learning_rate": 4.5536096498497295e-06,
      "loss": 0.1832,
      "step": 1430
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.6640625,
      "learning_rate": 4.408070965292534e-06,
      "loss": 0.1762,
      "step": 1440
    },
    {
      "epoch": 0.725,
      "grad_norm": 3.59375,
      "learning_rate": 4.264235636489542e-06,
      "loss": 0.1845,
      "step": 1450
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.0625,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.1778,
      "step": 1460
    },
    {
      "epoch": 0.735,
      "grad_norm": 2.25,
      "learning_rate": 3.981849768479516e-06,
      "loss": 0.1869,
      "step": 1470
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.5703125,
      "learning_rate": 3.8433852467434175e-06,
      "loss": 0.1803,
      "step": 1480
    },
    {
      "epoch": 0.745,
      "grad_norm": 2.078125,
      "learning_rate": 3.7067960895016277e-06,
      "loss": 0.1775,
      "step": 1490
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.796875,
      "learning_rate": 3.5721239031346067e-06,
      "loss": 0.1783,
      "step": 1500
    },
    {
      "epoch": 0.755,
      "grad_norm": 2.40625,
      "learning_rate": 3.4394097100949286e-06,
      "loss": 0.1835,
      "step": 1510
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.015625,
      "learning_rate": 3.308693936411421e-06,
      "loss": 0.1815,
      "step": 1520
    },
    {
      "epoch": 0.765,
      "grad_norm": 3.0625,
      "learning_rate": 3.1800163993750166e-06,
      "loss": 0.184,
      "step": 1530
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.859375,
      "learning_rate": 3.0534162954100264e-06,
      "loss": 0.179,
      "step": 1540
    },
    {
      "epoch": 0.775,
      "grad_norm": 2.09375,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.174,
      "step": 1550
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.03125,
      "learning_rate": 2.8066019966134907e-06,
      "loss": 0.1794,
      "step": 1560
    },
    {
      "epoch": 0.785,
      "grad_norm": 2.328125,
      "learning_rate": 2.6864629838082957e-06,
      "loss": 0.1722,
      "step": 1570
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.8984375,
      "learning_rate": 2.5685517452260566e-06,
      "loss": 0.1778,
      "step": 1580
    },
    {
      "epoch": 0.795,
      "grad_norm": 1.40625,
      "learning_rate": 2.45290419777228e-06,
      "loss": 0.1736,
      "step": 1590
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.15625,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.1803,
      "step": 1600
    },
    {
      "epoch": 0.805,
      "grad_norm": 1.921875,
      "learning_rate": 2.2285403854302912e-06,
      "loss": 0.1735,
      "step": 1610
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.78125,
      "learning_rate": 2.119892463932781e-06,
      "loss": 0.1791,
      "step": 1620
    },
    {
      "epoch": 0.815,
      "grad_norm": 1.796875,
      "learning_rate": 2.013644899527074e-06,
      "loss": 0.1762,
      "step": 1630
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.03125,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.1729,
      "step": 1640
    },
    {
      "epoch": 0.825,
      "grad_norm": 1.890625,
      "learning_rate": 1.808479557110081e-06,
      "loss": 0.1741,
      "step": 1650
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.859375,
      "learning_rate": 1.709624274449584e-06,
      "loss": 0.1695,
      "step": 1660
    },
    {
      "epoch": 0.835,
      "grad_norm": 1.9765625,
      "learning_rate": 1.6132943205457607e-06,
      "loss": 0.1768,
      "step": 1670
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.34375,
      "learning_rate": 1.5195190384357405e-06,
      "loss": 0.1798,
      "step": 1680
    },
    {
      "epoch": 0.845,
      "grad_norm": 1.984375,
      "learning_rate": 1.4283269929788779e-06,
      "loss": 0.1839,
      "step": 1690
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.5859375,
      "learning_rate": 1.339745962155613e-06,
      "loss": 0.1776,
      "step": 1700
    },
    {
      "epoch": 0.855,
      "grad_norm": 2.109375,
      "learning_rate": 1.2538029286060428e-06,
      "loss": 0.1751,
      "step": 1710
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.8671875,
      "learning_rate": 1.1705240714107301e-06,
      "loss": 0.1745,
      "step": 1720
    },
    {
      "epoch": 0.865,
      "grad_norm": 1.421875,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.1792,
      "step": 1730
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.859375,
      "learning_rate": 1.012059537008332e-06,
      "loss": 0.1764,
      "step": 1740
    },
    {
      "epoch": 0.875,
      "grad_norm": 1.9453125,
      "learning_rate": 9.369221296335007e-07,
      "loss": 0.1745,
      "step": 1750
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.765625,
      "learning_rate": 8.645454235739903e-07,
      "loss": 0.1772,
      "step": 1760
    },
    {
      "epoch": 0.885,
      "grad_norm": 2.453125,
      "learning_rate": 7.949514654755963e-07,
      "loss": 0.1753,
      "step": 1770
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.6171875,
      "learning_rate": 7.281614543321269e-07,
      "loss": 0.1791,
      "step": 1780
    },
    {
      "epoch": 0.895,
      "grad_norm": 1.9921875,
      "learning_rate": 6.641957350279838e-07,
      "loss": 0.184,
      "step": 1790
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.296875,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.1765,
      "step": 1800
    },
    {
      "epoch": 0.905,
      "grad_norm": 1.640625,
      "learning_rate": 5.448142440068316e-07,
      "loss": 0.176,
      "step": 1810
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.96875,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.1694,
      "step": 1820
    },
    {
      "epoch": 0.915,
      "grad_norm": 8.3125,
      "learning_rate": 4.3695244036964567e-07,
      "loss": 0.1781,
      "step": 1830
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.953125,
      "learning_rate": 3.8738304061681107e-07,
      "loss": 0.1757,
      "step": 1840
    },
    {
      "epoch": 0.925,
      "grad_norm": 3.859375,
      "learning_rate": 3.4074173710931804e-07,
      "loss": 0.1762,
      "step": 1850
    },
    {
      "epoch": 0.93,
      "grad_norm": 4.03125,
      "learning_rate": 2.970427372400353e-07,
      "loss": 0.1789,
      "step": 1860
    },
    {
      "epoch": 0.935,
      "grad_norm": 3.296875,
      "learning_rate": 2.5629935214764866e-07,
      "loss": 0.1734,
      "step": 1870
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.953125,
      "learning_rate": 2.1852399266194312e-07,
      "loss": 0.1754,
      "step": 1880
    },
    {
      "epoch": 0.945,
      "grad_norm": 1.90625,
      "learning_rate": 1.8372816552336025e-07,
      "loss": 0.1871,
      "step": 1890
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.8671875,
      "learning_rate": 1.519224698779198e-07,
      "loss": 0.176,
      "step": 1900
    },
    {
      "epoch": 0.955,
      "grad_norm": 1.421875,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.1753,
      "step": 1910
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.46875,
      "learning_rate": 9.731931258429638e-08,
      "loss": 0.1765,
      "step": 1920
    },
    {
      "epoch": 0.965,
      "grad_norm": 2.03125,
      "learning_rate": 7.453848358678018e-08,
      "loss": 0.1723,
      "step": 1930
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.953125,
      "learning_rate": 5.4781046317267103e-08,
      "loss": 0.1775,
      "step": 1940
    },
    {
      "epoch": 0.975,
      "grad_norm": 2.0,
      "learning_rate": 3.805301908254455e-08,
      "loss": 0.1854,
      "step": 1950
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.6171875,
      "learning_rate": 2.4359497401758026e-08,
      "loss": 0.1794,
      "step": 1960
    },
    {
      "epoch": 0.985,
      "grad_norm": 2.109375,
      "learning_rate": 1.370465245426167e-08,
      "loss": 0.1818,
      "step": 1970
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.9921875,
      "learning_rate": 6.091729809042379e-09,
      "loss": 0.1856,
      "step": 1980
    },
    {
      "epoch": 0.995,
      "grad_norm": 1.9375,
      "learning_rate": 1.5230484360873043e-09,
      "loss": 0.1736,
      "step": 1990
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.65625,
      "learning_rate": 0.0,
      "loss": 0.1816,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.2073827663872e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}