{
  "best_metric": 0.22680288553237915,
  "best_model_checkpoint": "./EulerMath-Mistral-7B-model/checkpoint-272",
  "epoch": 0.9990817263544536,
  "eval_steps": 68,
  "global_step": 272,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 19.19068191513093, "learning_rate": 5.000000000000001e-07, "loss": 0.707, "step": 1 },
    { "epoch": 0.0, "eval_loss": 0.9060535430908203, "eval_runtime": 1745.9683, "eval_samples_per_second": 1.324, "eval_steps_per_second": 0.074, "step": 1 },
    { "epoch": 0.01, "grad_norm": 20.035932532601844, "learning_rate": 1.0000000000000002e-06, "loss": 0.7236, "step": 2 },
    { "epoch": 0.01, "grad_norm": 19.31513317860667, "learning_rate": 1.5e-06, "loss": 0.7201, "step": 3 },
    { "epoch": 0.01, "grad_norm": 16.561326930760348, "learning_rate": 2.0000000000000003e-06, "loss": 0.6717, "step": 4 },
    { "epoch": 0.02, "grad_norm": 9.069275733221579, "learning_rate": 2.5e-06, "loss": 0.573, "step": 5 },
    { "epoch": 0.02, "grad_norm": 6.0702110208300475, "learning_rate": 3e-06, "loss": 0.4965, "step": 6 },
    { "epoch": 0.03, "grad_norm": 6.5389430446896055, "learning_rate": 3.5e-06, "loss": 0.5093, "step": 7 },
    { "epoch": 0.03, "grad_norm": 7.709934958779789, "learning_rate": 4.000000000000001e-06, "loss": 0.524, "step": 8 },
    { "epoch": 0.03, "grad_norm": 6.1640217934257135, "learning_rate": 4.5e-06, "loss": 0.503, "step": 9 },
    { "epoch": 0.04, "grad_norm": 4.079182690080823, "learning_rate": 5e-06, "loss": 0.4787, "step": 10 },
    { "epoch": 0.04, "grad_norm": 4.269731620276111, "learning_rate": 4.999956736067563e-06, "loss": 0.4545, "step": 11 },
    { "epoch": 0.04, "grad_norm": 4.059214670786909, "learning_rate": 4.999826945767665e-06, "loss": 0.4638, "step": 12 },
    { "epoch": 0.05, "grad_norm": 3.583247385116129, "learning_rate": 4.9996106335924965e-06, "loss": 0.4396, "step": 13 },
    { "epoch": 0.05, "grad_norm": 3.2077663599892405, "learning_rate": 4.999307807028872e-06, "loss": 0.4287, "step": 14 },
    { "epoch": 0.06, "grad_norm": 2.3678816023894513, "learning_rate": 4.998918476557964e-06, "loss": 0.4169, "step": 15 },
    { "epoch": 0.06, "grad_norm": 1.9925263681909064, "learning_rate": 4.998442655654946e-06, "loss": 0.4099, "step": 16 },
    { "epoch": 0.06, "grad_norm": 1.7706573910428134, "learning_rate": 4.997880360788527e-06, "loss": 0.4003, "step": 17 },
    { "epoch": 0.07, "grad_norm": 1.6789390301868525, "learning_rate": 4.997231611420374e-06, "loss": 0.399, "step": 18 },
    { "epoch": 0.07, "grad_norm": 1.5622054221426698, "learning_rate": 4.996496430004446e-06, "loss": 0.3885, "step": 19 },
    { "epoch": 0.07, "grad_norm": 1.5663787846468284, "learning_rate": 4.995674841986217e-06, "loss": 0.3987, "step": 20 },
    { "epoch": 0.08, "grad_norm": 1.4502330087611721, "learning_rate": 4.994766875801789e-06, "loss": 0.3962, "step": 21 },
    { "epoch": 0.08, "grad_norm": 1.4188997099391882, "learning_rate": 4.993772562876909e-06, "loss": 0.3845, "step": 22 },
    { "epoch": 0.08, "grad_norm": 1.4360806887465898, "learning_rate": 4.992691937625892e-06, "loss": 0.3764, "step": 23 },
    { "epoch": 0.09, "grad_norm": 1.4216582090099372, "learning_rate": 4.991525037450412e-06, "loss": 0.3712, "step": 24 },
    { "epoch": 0.09, "grad_norm": 1.2856499279799387, "learning_rate": 4.990271902738223e-06, "loss": 0.3603, "step": 25 },
    { "epoch": 0.1, "grad_norm": 1.247117404577534, "learning_rate": 4.988932576861754e-06, "loss": 0.3652, "step": 26 },
    { "epoch": 0.1, "grad_norm": 1.3197850379000642, "learning_rate": 4.987507106176606e-06, "loss": 0.371, "step": 27 },
    { "epoch": 0.1, "grad_norm": 1.243400495941476, "learning_rate": 4.985995540019956e-06, "loss": 0.3599, "step": 28 },
    { "epoch": 0.11, "grad_norm": 1.3278566257982103, "learning_rate": 4.984397930708838e-06, "loss": 0.3594, "step": 29 },
    { "epoch": 0.11, "grad_norm": 1.337022527470652, "learning_rate": 4.982714333538344e-06, "loss": 0.3477, "step": 30 },
    { "epoch": 0.11, "grad_norm": 1.2099362672151601, "learning_rate": 4.980944806779698e-06, "loss": 0.3425, "step": 31 },
    { "epoch": 0.12, "grad_norm": 1.2110593150023343, "learning_rate": 4.979089411678252e-06, "loss": 0.3567, "step": 32 },
    { "epoch": 0.12, "grad_norm": 1.2334965596913852, "learning_rate": 4.977148212451354e-06, "loss": 0.3526, "step": 33 },
    { "epoch": 0.12, "grad_norm": 1.1687161424016368, "learning_rate": 4.975121276286136e-06, "loss": 0.3496, "step": 34 },
    { "epoch": 0.13, "grad_norm": 1.1881954676378432, "learning_rate": 4.973008673337181e-06, "loss": 0.3321, "step": 35 },
    { "epoch": 0.13, "grad_norm": 1.2174270605971114, "learning_rate": 4.970810476724097e-06, "loss": 0.3446, "step": 36 },
    { "epoch": 0.14, "grad_norm": 1.1609330509652702, "learning_rate": 4.968526762528988e-06, "loss": 0.341, "step": 37 },
    { "epoch": 0.14, "grad_norm": 1.2149352568793006, "learning_rate": 4.9661576097938205e-06, "loss": 0.3459, "step": 38 },
    { "epoch": 0.14, "grad_norm": 1.1885081900677397, "learning_rate": 4.963703100517684e-06, "loss": 0.3425, "step": 39 },
    { "epoch": 0.15, "grad_norm": 1.113235885075549, "learning_rate": 4.961163319653959e-06, "loss": 0.339, "step": 40 },
    { "epoch": 0.15, "grad_norm": 1.0983562726057154, "learning_rate": 4.958538355107369e-06, "loss": 0.3298, "step": 41 },
    { "epoch": 0.15, "grad_norm": 1.1594289217865181, "learning_rate": 4.955828297730949e-06, "loss": 0.3187, "step": 42 },
    { "epoch": 0.16, "grad_norm": 1.1714548911644644, "learning_rate": 4.953033241322887e-06, "loss": 0.3373, "step": 43 },
    { "epoch": 0.16, "grad_norm": 1.1450397323165031, "learning_rate": 4.950153282623289e-06, "loss": 0.3232, "step": 44 },
    { "epoch": 0.17, "grad_norm": 1.1526363934692334, "learning_rate": 4.947188521310827e-06, "loss": 0.3243, "step": 45 },
    { "epoch": 0.17, "grad_norm": 1.2175235837438554, "learning_rate": 4.944139059999286e-06, "loss": 0.3252, "step": 46 },
    { "epoch": 0.17, "grad_norm": 1.099789045296574, "learning_rate": 4.941005004234019e-06, "loss": 0.3178, "step": 47 },
    { "epoch": 0.18, "grad_norm": 1.2219677196886505, "learning_rate": 4.937786462488284e-06, "loss": 0.3185, "step": 48 },
    { "epoch": 0.18, "grad_norm": 1.1806399387287625, "learning_rate": 4.9344835461595016e-06, "loss": 0.3131, "step": 49 },
    { "epoch": 0.18, "grad_norm": 1.1320527868188186, "learning_rate": 4.93109636956539e-06, "loss": 0.3198, "step": 50 },
    { "epoch": 0.19, "grad_norm": 1.2551253674231917, "learning_rate": 4.927625049940013e-06, "loss": 0.3063, "step": 51 },
    { "epoch": 0.19, "grad_norm": 1.1131050315591549, "learning_rate": 4.9240697074297205e-06, "loss": 0.3192, "step": 52 },
    { "epoch": 0.19, "grad_norm": 1.218025833644298, "learning_rate": 4.920430465088992e-06, "loss": 0.3083, "step": 53 },
    { "epoch": 0.2, "grad_norm": 1.090531576651011, "learning_rate": 4.916707448876173e-06, "loss": 0.3076, "step": 54 },
    { "epoch": 0.2, "grad_norm": 1.1865422414756877, "learning_rate": 4.912900787649124e-06, "loss": 0.3155, "step": 55 },
    { "epoch": 0.21, "grad_norm": 1.1236405558973956, "learning_rate": 4.909010613160751e-06, "loss": 0.306, "step": 56 },
    { "epoch": 0.21, "grad_norm": 1.222805799933775, "learning_rate": 4.90503706005445e-06, "loss": 0.3054, "step": 57 },
    { "epoch": 0.21, "grad_norm": 1.179814726076065, "learning_rate": 4.900980265859449e-06, "loss": 0.309, "step": 58 },
    { "epoch": 0.22, "grad_norm": 1.155763655177263, "learning_rate": 4.896840370986042e-06, "loss": 0.2974, "step": 59 },
    { "epoch": 0.22, "grad_norm": 1.1687171308842221, "learning_rate": 4.892617518720737e-06, "loss": 0.3018, "step": 60 },
    { "epoch": 0.22, "grad_norm": 1.2240587320323661, "learning_rate": 4.88831185522129e-06, "loss": 0.3066, "step": 61 },
    { "epoch": 0.23, "grad_norm": 1.1042960875500205, "learning_rate": 4.883923529511646e-06, "loss": 0.2977, "step": 62 },
    { "epoch": 0.23, "grad_norm": 1.1885949614868223, "learning_rate": 4.87945269347679e-06, "loss": 0.3087, "step": 63 },
    { "epoch": 0.24, "grad_norm": 1.1420656757477574, "learning_rate": 4.874899501857477e-06, "loss": 0.2904, "step": 64 },
    { "epoch": 0.24, "grad_norm": 1.1453980260713446, "learning_rate": 4.87026411224489e-06, "loss": 0.306, "step": 65 },
    { "epoch": 0.24, "grad_norm": 1.2729287210416769, "learning_rate": 4.865546685075174e-06, "loss": 0.2938, "step": 66 },
    { "epoch": 0.25, "grad_norm": 1.2052792222072466, "learning_rate": 4.860747383623889e-06, "loss": 0.2977, "step": 67 },
    { "epoch": 0.25, "grad_norm": 1.2657508580603682, "learning_rate": 4.85586637400036e-06, "loss": 0.3011, "step": 68 },
    { "epoch": 0.25, "eval_loss": 0.32630813121795654, "eval_runtime": 1744.5857, "eval_samples_per_second": 1.325, "eval_steps_per_second": 0.074, "step": 68 },
    { "epoch": 0.25, "grad_norm": 1.1832834131492187, "learning_rate": 4.85090382514192e-06, "loss": 0.2972, "step": 69 },
    { "epoch": 0.26, "grad_norm": 1.255475532117491, "learning_rate": 4.845859908808074e-06, "loss": 0.302, "step": 70 },
    { "epoch": 0.26, "grad_norm": 1.298818409489401, "learning_rate": 4.8407347995745465e-06, "loss": 0.2935, "step": 71 },
    { "epoch": 0.26, "grad_norm": 1.3499885398461409, "learning_rate": 4.8355286748272405e-06, "loss": 0.295, "step": 72 },
    { "epoch": 0.27, "grad_norm": 1.3446382549398914, "learning_rate": 4.830241714756099e-06, "loss": 0.2824, "step": 73 },
    { "epoch": 0.27, "grad_norm": 1.2082987304246777, "learning_rate": 4.8248741023488705e-06, "loss": 0.3026, "step": 74 },
    { "epoch": 0.28, "grad_norm": 1.3432457490726049, "learning_rate": 4.81942602338477e-06, "loss": 0.2985, "step": 75 },
    { "epoch": 0.28, "grad_norm": 1.170337150254348, "learning_rate": 4.813897666428054e-06, "loss": 0.2969, "step": 76 },
    { "epoch": 0.28, "grad_norm": 1.339414484466056, "learning_rate": 4.808289222821491e-06, "loss": 0.2985, "step": 77 },
    { "epoch": 0.29, "grad_norm": 1.1944077580462804, "learning_rate": 4.802600886679743e-06, "loss": 0.2852, "step": 78 },
    { "epoch": 0.29, "grad_norm": 1.357246876413576, "learning_rate": 4.79683285488264e-06, "loss": 0.2904, "step": 79 },
    { "epoch": 0.29, "grad_norm": 1.4115119936533302, "learning_rate": 4.790985327068376e-06, "loss": 0.3079, "step": 80 },
    { "epoch": 0.3, "grad_norm": 1.285315536324781, "learning_rate": 4.7850585056265866e-06, "loss": 0.2816, "step": 81 },
    { "epoch": 0.3, "grad_norm": 1.3631452273406317, "learning_rate": 4.779052595691355e-06, "loss": 0.2865, "step": 82 },
    { "epoch": 0.3, "grad_norm": 1.196518391890594, "learning_rate": 4.772967805134106e-06, "loss": 0.2793, "step": 83 },
    { "epoch": 0.31, "grad_norm": 1.2485622601747421, "learning_rate": 4.766804344556414e-06, "loss": 0.2827, "step": 84 },
    { "epoch": 0.31, "grad_norm": 1.2945099002171803, "learning_rate": 4.7605624272827125e-06, "loss": 0.2854, "step": 85 },
    { "epoch": 0.32, "grad_norm": 1.224576498812201, "learning_rate": 4.754242269352911e-06, "loss": 0.2875, "step": 86 },
    { "epoch": 0.32, "grad_norm": 1.2535747430861524, "learning_rate": 4.747844089514919e-06, "loss": 0.2807, "step": 87 },
    { "epoch": 0.32, "grad_norm": 1.171951212608294, "learning_rate": 4.741368109217072e-06, "loss": 0.2761, "step": 88 },
    { "epoch": 0.33, "grad_norm": 1.2123280755320154, "learning_rate": 4.734814552600469e-06, "loss": 0.2832, "step": 89 },
    { "epoch": 0.33, "grad_norm": 1.1358700523339582, "learning_rate": 4.728183646491215e-06, "loss": 0.2871, "step": 90 },
    { "epoch": 0.33, "grad_norm": 1.1484698203958048, "learning_rate": 4.721475620392567e-06, "loss": 0.2806, "step": 91 },
    { "epoch": 0.34, "grad_norm": 1.1887290775946084, "learning_rate": 4.714690706477e-06, "loss": 0.2858, "step": 92 },
    { "epoch": 0.34, "grad_norm": 1.1568061250650739, "learning_rate": 4.707829139578156e-06, "loss": 0.2888, "step": 93 },
    { "epoch": 0.35, "grad_norm": 1.176832058354239, "learning_rate": 4.700891157182729e-06, "loss": 0.2829, "step": 94 },
    { "epoch": 0.35, "grad_norm": 1.138549309431515, "learning_rate": 4.693876999422241e-06, "loss": 0.2763, "step": 95 },
    { "epoch": 0.35, "grad_norm": 1.1479926100837645, "learning_rate": 4.68678690906473e-06, "loss": 0.2686, "step": 96 },
    { "epoch": 0.36, "grad_norm": 1.1771516377197246, "learning_rate": 4.679621131506347e-06, "loss": 0.2814, "step": 97 },
    { "epoch": 0.36, "grad_norm": 1.2184996974539424, "learning_rate": 4.672379914762867e-06, "loss": 0.2822, "step": 98 },
    { "epoch": 0.36, "grad_norm": 1.1792108348242942, "learning_rate": 4.665063509461098e-06, "loss": 0.282, "step": 99 },
    { "epoch": 0.37, "grad_norm": 1.2850683815489914, "learning_rate": 4.657672168830211e-06, "loss": 0.2776, "step": 100 },
    { "epoch": 0.37, "grad_norm": 1.2508897770511975, "learning_rate": 4.650206148692977e-06, "loss": 0.2787, "step": 101 },
    { "epoch": 0.37, "grad_norm": 1.2031990746786907, "learning_rate": 4.642665707456908e-06, "loss": 0.2719, "step": 102 },
    { "epoch": 0.38, "grad_norm": 1.1842474930123255, "learning_rate": 4.635051106105316e-06, "loss": 0.2732, "step": 103 },
    { "epoch": 0.38, "grad_norm": 1.2596970412015132, "learning_rate": 4.627362608188281e-06, "loss": 0.2731, "step": 104 },
    { "epoch": 0.39, "grad_norm": 1.4294759311096437, "learning_rate": 4.619600479813524e-06, "loss": 0.2738, "step": 105 },
    { "epoch": 0.39, "grad_norm": 1.31619095423113, "learning_rate": 4.6117649896372055e-06, "loss": 0.2764, "step": 106 },
    { "epoch": 0.39, "grad_norm": 1.2349728666776751, "learning_rate": 4.6038564088546185e-06, "loss": 0.2722, "step": 107 },
    { "epoch": 0.4, "grad_norm": 1.2418477065252158, "learning_rate": 4.5958750111908065e-06, "loss": 0.271, "step": 108 },
    { "epoch": 0.4, "grad_norm": 1.3529322240859796, "learning_rate": 4.587821072891089e-06, "loss": 0.276, "step": 109 },
    { "epoch": 0.4, "grad_norm": 1.2671711562594927, "learning_rate": 4.579694872711501e-06, "loss": 0.2706, "step": 110 },
    { "epoch": 0.41, "grad_norm": 1.238356873891121, "learning_rate": 4.571496691909142e-06, "loss": 0.2749, "step": 111 },
    { "epoch": 0.41, "grad_norm": 1.2059912760303926, "learning_rate": 4.563226814232444e-06, "loss": 0.2676, "step": 112 },
    { "epoch": 0.42, "grad_norm": 1.1876458610423755, "learning_rate": 4.554885525911351e-06, "loss": 0.2743, "step": 113 },
    { "epoch": 0.42, "grad_norm": 1.1715592937521375, "learning_rate": 4.54647311564741e-06, "loss": 0.2734, "step": 114 },
    { "epoch": 0.42, "grad_norm": 1.236329928620471, "learning_rate": 4.53798987460378e-06, "loss": 0.2855, "step": 115 },
    { "epoch": 0.43, "grad_norm": 1.1717820999866062, "learning_rate": 4.529436096395157e-06, "loss": 0.2699, "step": 116 },
    { "epoch": 0.43, "grad_norm": 1.3490101744641771, "learning_rate": 4.520812077077604e-06, "loss": 0.2731, "step": 117 },
    { "epoch": 0.43, "grad_norm": 1.192962777526519, "learning_rate": 4.512118115138315e-06, "loss": 0.2719, "step": 118 },
    { "epoch": 0.44, "grad_norm": 1.2384657820337475, "learning_rate": 4.5033545114852734e-06, "loss": 0.2647, "step": 119 },
    { "epoch": 0.44, "grad_norm": 1.2128578058956592, "learning_rate": 4.494521569436845e-06, "loss": 0.2615, "step": 120 },
    { "epoch": 0.44, "grad_norm": 1.3237640584842072, "learning_rate": 4.485619594711278e-06, "loss": 0.2663, "step": 121 },
    { "epoch": 0.45, "grad_norm": 1.2691929068372239, "learning_rate": 4.476648895416116e-06, "loss": 0.2614, "step": 122 },
    { "epoch": 0.45, "grad_norm": 1.2606618599832538, "learning_rate": 4.467609782037543e-06, "loss": 0.2606, "step": 123 },
    { "epoch": 0.46, "grad_norm": 1.3048381409549332, "learning_rate": 4.4585025674296315e-06, "loss": 0.2601, "step": 124 },
    { "epoch": 0.46, "grad_norm": 1.3022768451107203, "learning_rate": 4.449327566803515e-06, "loss": 0.2683, "step": 125 },
    { "epoch": 0.46, "grad_norm": 1.3820289309230962, "learning_rate": 4.44008509771648e-06, "loss": 0.2681, "step": 126 },
    { "epoch": 0.47, "grad_norm": 1.2802354999925132, "learning_rate": 4.430775480060973e-06, "loss": 0.2648, "step": 127 },
    { "epoch": 0.47, "grad_norm": 1.3242106497833372, "learning_rate": 4.4213990360535274e-06, "loss": 0.268, "step": 128 },
    { "epoch": 0.47, "grad_norm": 1.3009976864959876, "learning_rate": 4.411956090223618e-06, "loss": 0.2662, "step": 129 },
    { "epoch": 0.48, "grad_norm": 1.3212829688401424, "learning_rate": 4.4024469694024194e-06, "loss": 0.2605, "step": 130 },
    { "epoch": 0.48, "grad_norm": 1.2123869956343973, "learning_rate": 4.3928720027115015e-06, "loss": 0.2604, "step": 131 },
    { "epoch": 0.48, "grad_norm": 1.284537459167204, "learning_rate": 4.383231521551432e-06, "loss": 0.2593, "step": 132 },
    { "epoch": 0.49, "grad_norm": 1.443338680183996, "learning_rate": 4.373525859590313e-06, "loss": 0.2561, "step": 133 },
    { "epoch": 0.49, "grad_norm": 1.2809230468289576, "learning_rate": 4.3637553527522265e-06, "loss": 0.2599, "step": 134 },
    { "epoch": 0.5, "grad_norm": 1.3669470609932883, "learning_rate": 4.3539203392056114e-06, "loss": 0.2587, "step": 135 },
    { "epoch": 0.5, "grad_norm": 1.4112940230474231, "learning_rate": 4.3440211593515556e-06, "loss": 0.2585, "step": 136 },
    { "epoch": 0.5, "eval_loss": 0.28355109691619873, "eval_runtime": 1744.5175, "eval_samples_per_second": 1.325, "eval_steps_per_second": 0.074, "step": 136 },
    { "epoch": 0.5, "grad_norm": 1.3061396480876788, "learning_rate": 4.33405815581202e-06, "loss": 0.2549, "step": 137 },
    { "epoch": 0.51, "grad_norm": 1.46460991921356, "learning_rate": 4.324031673417971e-06, "loss": 0.2639, "step": 138 },
    { "epoch": 0.51, "grad_norm": 1.211168578821325, "learning_rate": 4.313942059197457e-06, "loss": 0.2581, "step": 139 },
    { "epoch": 0.51, "grad_norm": 1.4657150585182341, "learning_rate": 4.303789662363587e-06, "loss": 0.2616, "step": 140 },
    { "epoch": 0.52, "grad_norm": 1.4251800081691455, "learning_rate": 4.29357483430245e-06, "loss": 0.2668, "step": 141 },
    { "epoch": 0.52, "grad_norm": 1.3599666478045191, "learning_rate": 4.283297928560951e-06, "loss": 0.2598, "step": 142 },
    { "epoch": 0.53, "grad_norm": 1.6103346253156021, "learning_rate": 4.272959300834574e-06, "loss": 0.2656, "step": 143 },
    { "epoch": 0.53, "grad_norm": 1.2184694580930981, "learning_rate": 4.262559308955072e-06, "loss": 0.2546, "step": 144 },
    { "epoch": 0.53, "grad_norm": 1.3362006281948362, "learning_rate": 4.252098312878083e-06, "loss": 0.2557, "step": 145 },
    { "epoch": 0.54, "grad_norm": 1.3369296531115935, "learning_rate": 4.241576674670668e-06, "loss": 0.2568, "step": 146 },
    { "epoch": 0.54, "grad_norm": 1.4747872641188995, "learning_rate": 4.230994758498783e-06, "loss": 0.2564, "step": 147 },
    { "epoch": 0.54, "grad_norm": 1.60778480089848, "learning_rate": 4.220352930614672e-06, "loss": 0.2573, "step": 148 },
    { "epoch": 0.55, "grad_norm": 1.188044808018822, "learning_rate": 4.209651559344195e-06, "loss": 0.2525, "step": 149 },
    { "epoch": 0.55, "grad_norm": 1.5856639134844415, "learning_rate": 4.198891015074074e-06, "loss": 0.2647, "step": 150 },
    { "epoch": 0.55, "grad_norm": 1.2859262024596512, "learning_rate": 4.1880716702390764e-06, "loss": 0.2471, "step": 151 },
    { "epoch": 0.56, "grad_norm": 1.4653590828956073, "learning_rate": 4.177193899309127e-06, "loss": 0.2575, "step": 152 },
    { "epoch": 0.56, "grad_norm": 1.1821237121686685, "learning_rate": 4.166258078776342e-06, "loss": 0.2493, "step": 153 },
    { "epoch": 0.57, "grad_norm": 1.575597475848357, "learning_rate": 4.155264587142002e-06, "loss": 0.2537, "step": 154 },
    { "epoch": 0.57, "grad_norm": 1.2702085752651588, "learning_rate": 4.144213804903449e-06, "loss": 0.2493, "step": 155 },
    { "epoch": 0.57, "grad_norm": 1.5026735427361002, "learning_rate": 4.133106114540923e-06, "loss": 0.2505, "step": 156 },
    { "epoch": 0.58, "grad_norm": 1.5297903686100347, "learning_rate": 4.121941900504316e-06, "loss": 0.2472, "step": 157 },
    { "epoch": 0.58, "grad_norm": 1.25258373375573, "learning_rate": 4.110721549199866e-06, "loss": 0.2487, "step": 158 },
    { "epoch": 0.58, "grad_norm": 1.5941545034573665, "learning_rate": 4.099445448976793e-06, "loss": 0.2497, "step": 159 },
    { "epoch": 0.59, "grad_norm": 1.3096080921873048, "learning_rate": 4.088113990113846e-06, "loss": 0.2439, "step": 160 },
    { "epoch": 0.59, "grad_norm": 1.6950266606195492, "learning_rate": 4.076727564805803e-06, "loss": 0.2538, "step": 161 },
    { "epoch": 0.6, "grad_norm": 1.440485526817555, "learning_rate": 4.065286567149891e-06, "loss": 0.2613, "step": 162 },
    { "epoch": 0.6, "grad_norm": 1.606032223752871, "learning_rate": 4.0537913931321495e-06, "loss": 0.2505, "step": 163 },
    { "epoch": 0.6, "grad_norm": 1.5319951141665498, "learning_rate": 4.042242440613724e-06, "loss": 0.256, "step": 164 },
    { "epoch": 0.61, "grad_norm": 1.3468098768373629, "learning_rate": 4.030640109317096e-06, "loss": 0.2424, "step": 165 },
    { "epoch": 0.61, "grad_norm": 1.6652562481471478, "learning_rate": 4.018984800812248e-06, "loss": 0.2396, "step": 166 },
    { "epoch": 0.61, "grad_norm": 1.302975081280886, "learning_rate": 4.007276918502763e-06, "loss": 0.2462, "step": 167 },
    { "epoch": 0.62, "grad_norm": 1.623125313268604, "learning_rate": 3.995516867611865e-06, "loss": 0.256, "step": 168 },
    { "epoch": 0.62, "grad_norm": 1.3069782036585045, "learning_rate": 3.983705055168391e-06, "loss": 0.2518, "step": 169 },
    { "epoch": 0.62, "grad_norm": 1.6527449270834242, "learning_rate": 3.971841889992706e-06, "loss": 0.2544, "step": 170 },
    { "epoch": 0.63, "grad_norm": 1.3586948189643275, "learning_rate": 3.959927782682551e-06, "loss": 0.2491, "step": 171 },
    { "epoch": 0.63, "grad_norm": 1.3440233460948727, "learning_rate": 3.947963145598833e-06, "loss": 0.2516, "step": 172 },
    { "epoch": 0.64, "grad_norm": 1.3389168317613516, "learning_rate": 3.935948392851354e-06, "loss": 0.2541, "step": 173 },
    { "epoch": 0.64, "grad_norm": 1.3142664585396417, "learning_rate": 3.923883940284472e-06, "loss": 0.2508, "step": 174 },
    { "epoch": 0.64, "grad_norm": 1.2767521320981983, "learning_rate": 3.911770205462717e-06, "loss": 0.2479, "step": 175 },
    { "epoch": 0.65, "grad_norm": 1.3281972191838929, "learning_rate": 3.899607607656334e-06, "loss": 0.2501, "step": 176 },
    { "epoch": 0.65, "grad_norm": 1.3793116543581005, "learning_rate": 3.887396567826769e-06, "loss": 0.2454, "step": 177 },
    { "epoch": 0.65, "grad_norm": 1.3293987156576104, "learning_rate": 3.875137508612104e-06, "loss": 0.249, "step": 178 },
    { "epoch": 0.66, "grad_norm": 1.4957835845929142, "learning_rate": 3.862830854312427e-06, "loss": 0.2445, "step": 179 },
    { "epoch": 0.66, "grad_norm": 1.2804679875446887, "learning_rate": 3.850477030875147e-06, "loss": 0.2411, "step": 180 },
    { "epoch": 0.66, "grad_norm": 1.5611119218300138, "learning_rate": 3.838076465880248e-06, "loss": 0.237, "step": 181 },
    { "epoch": 0.67, "grad_norm": 1.3387338916825537, "learning_rate": 3.825629588525498e-06, "loss": 0.2429, "step": 182 },
    { "epoch": 0.67, "grad_norm": 1.5091720406707172, "learning_rate": 3.813136829611583e-06, "loss": 0.2428, "step": 183 },
    { "epoch": 0.68, "grad_norm": 1.359116281666385, "learning_rate": 3.8005986215272056e-06, "loss": 0.2543, "step": 184 },
    { "epoch": 0.68, "grad_norm": 1.4094254259139338, "learning_rate": 3.7880153982341167e-06, "loss": 0.2502, "step": 185 },
    { "epoch": 0.68, "grad_norm": 1.2806047483095333, "learning_rate": 3.7753875952520943e-06, "loss": 0.2431, "step": 186 },
    { "epoch": 0.69, "grad_norm": 1.409218880016104, "learning_rate": 3.7627156496438686e-06, "loss": 0.2463, "step": 187 },
    { "epoch": 0.69, "grad_norm": 1.2466244404207094, "learning_rate": 3.7500000000000005e-06, "loss": 0.2372, "step": 188 },
    { "epoch": 0.69, "grad_norm": 1.4192484726979884, "learning_rate": 3.7372410864236954e-06, "loss": 0.2396, "step": 189 },
    { "epoch": 0.7, "grad_norm": 1.3260879207799772, "learning_rate": 3.7244393505155713e-06, "loss": 0.241, "step": 190 },
    { "epoch": 0.7, "grad_norm": 1.6407257220698948, "learning_rate": 3.7115952353583804e-06, "loss": 0.2552, "step": 191 },
    { "epoch": 0.71, "grad_norm": 1.4113760059054485, "learning_rate": 3.6987091855016667e-06, "loss": 0.2513, "step": 192 },
    { "epoch": 0.71, "grad_norm": 1.3008883773347888, "learning_rate": 3.6857816469463806e-06, "loss": 0.2361, "step": 193 },
    { "epoch": 0.71, "grad_norm": 1.3040857591494066, "learning_rate": 3.6728130671294485e-06, "loss": 0.2491, "step": 194 },
    { "epoch": 0.72, "grad_norm": 1.2543618451342111, "learning_rate": 3.6598038949082777e-06, "loss": 0.2309, "step": 195 },
    { "epoch": 0.72, "grad_norm": 1.3944108707435374, "learning_rate": 3.6467545805452266e-06, "loss": 0.2426, "step": 196 },
    { "epoch": 0.72, "grad_norm": 1.301851485207592, "learning_rate": 3.6336655756920198e-06, "loss": 0.2421, "step": 197 },
    { "epoch": 0.73, "grad_norm": 1.3562155385998595, "learning_rate": 3.620537333374114e-06, "loss": 0.2406, "step": 198 },
    { "epoch": 0.73, "grad_norm": 1.4263666275672418, "learning_rate": 3.6073703079750204e-06, "loss": 0.2418, "step": 199 },
    { "epoch": 0.73, "grad_norm": 1.2767612877970262, "learning_rate": 3.594164955220577e-06, "loss": 0.2353, "step": 200 },
    { "epoch": 0.74, "grad_norm": 1.3349267171117716, "learning_rate": 3.5809217321631745e-06, "loss": 0.2348, "step": 201 },
    { "epoch": 0.74, "grad_norm": 1.2217693484408796, "learning_rate": 3.5676410971659404e-06, "loss": 0.2287, "step": 202 },
    { "epoch": 0.75, "grad_norm": 1.4554473054976789, "learning_rate": 3.5543235098868702e-06, "loss": 0.241, "step": 203 },
    { "epoch": 0.75, "grad_norm": 1.184805169962002, "learning_rate": 3.5409694312629193e-06, "loss": 0.2352, "step": 204 },
    { "epoch": 0.75, "eval_loss": 0.25444912910461426, "eval_runtime": 1745.7708, "eval_samples_per_second": 1.324, "eval_steps_per_second": 0.074, "step": 204 },
    { "epoch": 0.75, "grad_norm": 1.2973792749867632, "learning_rate": 3.527579323494055e-06, "loss": 0.2404, "step": 205 },
    { "epoch": 0.76, "grad_norm": 1.390330195755624, "learning_rate": 3.5141536500272494e-06, "loss": 0.2397, "step": 206 },
    { "epoch": 0.76, "grad_norm": 1.2415077962351395, "learning_rate": 3.5006928755404467e-06, "loss": 0.2296, "step": 207 },
    { "epoch": 0.76, "grad_norm": 1.3223264932925407, "learning_rate": 3.4871974659264786e-06, "loss": 0.2332, "step": 208 },
    { "epoch": 0.77, "grad_norm": 1.4376836200586416, "learning_rate": 3.473667888276935e-06, "loss": 0.2361, "step": 209 },
    { "epoch": 0.77, "grad_norm": 1.2495709137167788, "learning_rate": 3.4601046108660036e-06, "loss": 0.2351, "step": 210 },
    { "epoch": 0.78, "grad_norm": 1.4449247677336339, "learning_rate": 3.446508103134259e-06, "loss": 0.2373, "step": 211 },
    { "epoch": 0.78, "grad_norm": 1.3961526866418432, "learning_rate": 3.4328788356724135e-06, "loss": 0.2383, "step": 212 },
    { "epoch": 0.78, "grad_norm": 1.2766356071702671, "learning_rate": 3.419217280205032e-06, "loss": 0.2348, "step": 213 },
    { "epoch": 0.79, "grad_norm": 1.2201985305952152, "learning_rate": 3.4055239095742067e-06, "loss": 0.236, "step": 214 },
    { "epoch": 0.79, "grad_norm": 1.3670381437866368, "learning_rate": 3.3917991977231855e-06, "loss": 0.228, "step": 215 },
    { "epoch": 0.79, "grad_norm": 1.2724648753569285, "learning_rate": 3.378043619679974e-06, "loss": 0.2386, "step": 216 },
    { "epoch": 0.8, "grad_norm": 1.2826844172302947, "learning_rate": 3.364257651540891e-06, "loss": 0.2366, "step": 217 },
    { "epoch": 0.8, "grad_norm": 1.1767059777022655, "learning_rate": 3.3504417704540925e-06, "loss": 0.2251, "step": 218 },
    { "epoch": 0.8, "grad_norm": 1.3111513963454882, "learning_rate": 3.3365964546030544e-06, "loss": 0.2396, "step": 219 },
    { "epoch": 0.81, "grad_norm": 1.2617225478707708, "learning_rate": 3.322722183190025e-06, "loss": 0.2412, "step": 220 },
    { "epoch": 0.81, "grad_norm": 1.2183220743609309, "learning_rate": 3.308819436419437e-06, "loss": 0.2276, "step": 221 },
    { "epoch": 0.82, "grad_norm": 1.31561824749082, "learning_rate": 3.2948886954812877e-06, "loss": 0.2404, "step": 222 },
    { "epoch": 0.82, "grad_norm": 1.250087552624437, "learning_rate": 3.280930442534486e-06, "loss": 0.2263, "step": 223 },
    { "epoch": 0.82, "grad_norm": 1.2524310598377044, "learning_rate": 3.26694516069016e-06, "loss": 0.2368, "step": 224 },
    { "epoch": 0.83, "grad_norm": 1.3487266981725987, "learning_rate": 3.252933333994942e-06, "loss": 0.2243, "step": 225 },
    { "epoch": 0.83, "grad_norm": 1.2427013509424278, "learning_rate": 3.238895447414211e-06, "loss": 0.2366, "step": 226 },
    { "epoch": 0.83, "grad_norm": 1.268723527146989, "learning_rate": 3.2248319868153067e-06, "loss": 0.2262, "step": 227 },
    { "epoch": 0.84, "grad_norm": 1.2476040692827028, "learning_rate": 3.210743438950718e-06, "loss": 0.234, "step": 228 },
    { "epoch": 0.84, "grad_norm": 1.2944243964732431, "learning_rate": 3.196630291441231e-06, "loss": 0.2261, "step": 229 },
    { "epoch": 0.84, "grad_norm": 1.2348938264581308, "learning_rate": 3.182493032759053e-06, "loss": 0.2368, "step": 230 },
    { "epoch": 0.85, "grad_norm": 1.3877133957904717, "learning_rate": 3.168332152210909e-06, "loss": 0.2342, "step": 231 },
    { "epoch": 0.85, "grad_norm": 1.2088837041711673, "learning_rate": 3.154148139921102e-06, "loss": 0.222, "step": 232 },
    { "epoch": 0.86, "grad_norm": 1.4750513048080165, "learning_rate": 3.1399414868145506e-06, "loss": 0.2301, "step": 233 },
    { "epoch": 0.86, "grad_norm": 1.2097458338635088, "learning_rate": 3.1257126845998e-06, "loss": 0.2365, "step": 234 },
    { "epoch": 0.86, "grad_norm": 1.3570468614316236, "learning_rate": 3.1114622257520004e-06, "loss": 0.2275, "step": 235 },
    { "epoch": 0.87, "grad_norm": 1.2331713108579336, "learning_rate": 3.0971906034958616e-06, "loss": 0.2193, "step": 236 },
    { "epoch": 0.87, "grad_norm": 1.330924002893457, "learning_rate": 3.0828983117885856e-06, "loss": 0.2258, "step": 237 },
    { "epoch": 0.87, "grad_norm": 1.2713775149937143, "learning_rate": 3.0685858453027668e-06, "loss": 0.2287, "step": 238 },
    { "epoch": 0.88, "grad_norm": 1.3460227514964078, "learning_rate": 3.05425369940927e-06, "loss": 0.2268, "step": 239 },
    { "epoch": 0.88, "grad_norm": 1.3124465221253792, "learning_rate": 3.0399023701600903e-06, "loss": 0.2237, "step": 240 },
    { "epoch": 0.89, "grad_norm": 1.2621420000416141, "learning_rate": 3.0255323542711784e-06, "loss": 0.221, "step": 241 },
    { "epoch": 0.89, "grad_norm": 1.3207975689997922, "learning_rate": 3.011144149105251e-06, "loss": 0.2177, "step": 242 },
    { "epoch": 0.89, "grad_norm": 1.3364690610440046, "learning_rate": 2.996738252654577e-06, "loss": 0.2266, "step": 243 },
    { "epoch": 0.9, "grad_norm": 1.3069082882086795, "learning_rate": 2.9823151635237424e-06, "loss": 0.2274, "step": 244 },
    { "epoch": 0.9, "grad_norm": 1.402608898892496, "learning_rate": 2.9678753809123884e-06, "loss": 0.233, "step": 245 },
    { "epoch": 0.9, "grad_norm": 1.3349783439901974, "learning_rate": 2.9534194045979397e-06, "loss": 0.2198, "step": 246 },
    { "epoch": 0.91, "grad_norm": 1.3319911413244738, "learning_rate": 2.938947734918302e-06, "loss": 0.2241, "step": 247 },
    { "epoch": 0.91, "grad_norm": 1.2836113523110935, "learning_rate": 2.924460872754547e-06, "loss": 0.2247, "step": 248 },
    { "epoch": 0.91, "grad_norm": 1.3420053396118825, "learning_rate": 2.9099593195135743e-06, "loss": 0.2245, "step": 249 },
    { "epoch": 0.92, "grad_norm": 1.3018957576647208, "learning_rate": 2.8954435771107604e-06, "loss": 0.2198, "step": 250 },
    { "epoch": 0.92, "grad_norm": 1.493108819116986, "learning_rate": 2.8809141479525843e-06, "loss": 0.2261, "step": 251 },
    { "epoch": 0.93, "grad_norm": 1.2240817395656585, "learning_rate": 2.8663715349192388e-06, "loss": 0.2182, "step": 252 },
    { "epoch": 0.93, "grad_norm": 1.3972966685231503, "learning_rate": 2.8518162413472266e-06, "loss": 0.2289, "step": 253 },
    { "epoch": 0.93, "grad_norm": 1.3158850314947335, "learning_rate": 2.8372487710119374e-06, "loss": 0.2286, "step": 254 },
    { "epoch": 0.94, "grad_norm": 1.295772538693981, "learning_rate": 2.8226696281102134e-06, "loss": 0.2157, "step": 255 },
    { "epoch": 0.94, "grad_norm": 1.34085577207588, "learning_rate": 2.8080793172428965e-06, "loss": 0.2223, "step": 256 },
    { "epoch": 0.94, "grad_norm": 1.3610764715193495, "learning_rate": 2.7934783433973672e-06, "loss": 0.2227, "step": 257 },
    { "epoch": 0.95, "grad_norm": 1.2629712566442401, "learning_rate": 2.778867211930061e-06, "loss": 0.2263, "step": 258 },
    { "epoch": 0.95, "grad_norm": 1.2782582856568219, "learning_rate": 2.764246428548983e-06, "loss": 0.2234, "step": 259 },
    { "epoch": 0.96, "grad_norm": 1.2621019245043847, "learning_rate": 2.7496164992961995e-06, "loss": 0.2177, "step": 260 },
    { "epoch": 0.96, "grad_norm": 1.2033350046761524, "learning_rate": 2.7349779305303263e-06, "loss": 0.2226, "step": 261 },
    { "epoch": 0.96, "grad_norm": 1.361220136423699, "learning_rate": 2.720331228909005e-06, "loss": 0.2179, "step": 262 },
    { "epoch": 0.97, "grad_norm": 1.3715434561254194, "learning_rate": 2.7056769013713623e-06, "loss": 0.2231, "step": 263 },
    { "epoch": 0.97, "grad_norm": 1.1330086039392537, "learning_rate": 2.691015455120468e-06, "loss": 0.2164, "step": 264 },
    { "epoch": 0.97, "grad_norm": 1.2694263709270768, "learning_rate": 2.6763473976057776e-06, "loss": 0.2127, "step": 265 },
    { "epoch": 0.98, "grad_norm": 1.3274231972419466, "learning_rate": 2.6616732365055713e-06, "loss": 0.2092, "step": 266 },
    { "epoch": 0.98, "grad_norm": 1.276485394682339, "learning_rate": 2.64699347970938e-06, "loss": 0.2206, "step": 267 },
    { "epoch": 0.98, "grad_norm": 1.33640777595863, "learning_rate": 2.6323086353004077e-06, "loss": 0.2201, "step": 268 },
    { "epoch": 0.99, "grad_norm": 1.2867150222472765, "learning_rate": 2.6176192115379494e-06, "loss": 0.2176, "step": 269 },
    { "epoch": 0.99, "grad_norm": 1.220258552427881, "learning_rate": 2.602925716839795e-06, "loss": 0.2131, "step": 270 },
    { "epoch": 1.0, "grad_norm": 1.3301323985426015, "learning_rate": 2.588228659764632e-06, "loss": 0.2244, "step": 271 },
    { "epoch": 1.0, "grad_norm": 1.2313785507924382, "learning_rate": 2.573528548994449e-06, "loss": 0.2192, "step": 272 },
    { "epoch": 1.0, "eval_loss": 0.22680288553237915, "eval_runtime": 1744.6696, "eval_samples_per_second": 1.325, "eval_steps_per_second": 0.074, "step": 272 }
  ],
  "logging_steps": 1,
  "max_steps": 544,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 272,
  "total_flos": 256045146439680.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}