|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 100, |
|
"global_step": 2310, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.021645021645021644, |
|
"grad_norm": 5.5244035720825195, |
|
"learning_rate": 1.8e-05, |
|
"loss": 5.7094, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04329004329004329, |
|
"grad_norm": 3.6613221168518066, |
|
"learning_rate": 3.8e-05, |
|
"loss": 5.4625, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06493506493506493, |
|
"grad_norm": 1.8148497343063354, |
|
"learning_rate": 5.8e-05, |
|
"loss": 5.3, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08658008658008658, |
|
"grad_norm": 1.9286500215530396, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 5.2125, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.10822510822510822, |
|
"grad_norm": 2.1979825496673584, |
|
"learning_rate": 9.8e-05, |
|
"loss": 5.0969, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12987012987012986, |
|
"grad_norm": 2.6625876426696777, |
|
"learning_rate": 0.000118, |
|
"loss": 4.9938, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.15151515151515152, |
|
"grad_norm": 2.470909595489502, |
|
"learning_rate": 0.000138, |
|
"loss": 4.9344, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.17316017316017315, |
|
"grad_norm": 2.0229310989379883, |
|
"learning_rate": 0.00015800000000000002, |
|
"loss": 4.7344, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.19480519480519481, |
|
"grad_norm": 3.0014655590057373, |
|
"learning_rate": 0.00017800000000000002, |
|
"loss": 4.675, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.21645021645021645, |
|
"grad_norm": 3.8314504623413086, |
|
"learning_rate": 0.00019800000000000002, |
|
"loss": 4.6344, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21645021645021645, |
|
"eval_loss": 3.2876250743865967, |
|
"eval_runtime": 6.4678, |
|
"eval_samples_per_second": 77.306, |
|
"eval_steps_per_second": 1.237, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.23809523809523808, |
|
"grad_norm": 2.9059081077575684, |
|
"learning_rate": 0.00019918552036199096, |
|
"loss": 4.475, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2597402597402597, |
|
"grad_norm": 3.183814764022827, |
|
"learning_rate": 0.00019828054298642536, |
|
"loss": 4.4594, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2813852813852814, |
|
"grad_norm": 3.528064012527466, |
|
"learning_rate": 0.00019737556561085976, |
|
"loss": 4.4094, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 2.935842752456665, |
|
"learning_rate": 0.00019647058823529413, |
|
"loss": 4.2938, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3246753246753247, |
|
"grad_norm": 2.8355188369750977, |
|
"learning_rate": 0.0001955656108597285, |
|
"loss": 4.3031, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3463203463203463, |
|
"grad_norm": 2.983397960662842, |
|
"learning_rate": 0.0001946606334841629, |
|
"loss": 4.2656, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.36796536796536794, |
|
"grad_norm": 3.0755467414855957, |
|
"learning_rate": 0.0001937556561085973, |
|
"loss": 4.1625, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.38961038961038963, |
|
"grad_norm": 2.8793647289276123, |
|
"learning_rate": 0.0001928506787330317, |
|
"loss": 4.1688, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.41125541125541126, |
|
"grad_norm": 2.5316121578216553, |
|
"learning_rate": 0.0001919457013574661, |
|
"loss": 4.1688, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.4329004329004329, |
|
"grad_norm": 4.25211763381958, |
|
"learning_rate": 0.00019104072398190046, |
|
"loss": 4.1969, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4329004329004329, |
|
"eval_loss": 2.902250051498413, |
|
"eval_runtime": 6.5111, |
|
"eval_samples_per_second": 76.792, |
|
"eval_steps_per_second": 1.229, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 2.9996044635772705, |
|
"learning_rate": 0.00019013574660633483, |
|
"loss": 4.0812, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 3.2504937648773193, |
|
"learning_rate": 0.00018923076923076923, |
|
"loss": 4.0406, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.49783549783549785, |
|
"grad_norm": 3.9455931186676025, |
|
"learning_rate": 0.00018832579185520362, |
|
"loss": 4.0187, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.5194805194805194, |
|
"grad_norm": 4.604526996612549, |
|
"learning_rate": 0.00018742081447963802, |
|
"loss": 4.0031, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5411255411255411, |
|
"grad_norm": 4.142082691192627, |
|
"learning_rate": 0.00018651583710407242, |
|
"loss": 4.0219, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5627705627705628, |
|
"grad_norm": 4.169658660888672, |
|
"learning_rate": 0.0001856108597285068, |
|
"loss": 3.9609, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5844155844155844, |
|
"grad_norm": 3.3557050228118896, |
|
"learning_rate": 0.0001847058823529412, |
|
"loss": 4.0078, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 2.9046578407287598, |
|
"learning_rate": 0.00018380090497737556, |
|
"loss": 3.9531, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.6277056277056277, |
|
"grad_norm": 3.487697124481201, |
|
"learning_rate": 0.00018289592760180996, |
|
"loss": 3.9312, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6493506493506493, |
|
"grad_norm": 3.1762382984161377, |
|
"learning_rate": 0.00018199095022624435, |
|
"loss": 3.9469, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6493506493506493, |
|
"eval_loss": 2.724375009536743, |
|
"eval_runtime": 6.3055, |
|
"eval_samples_per_second": 79.295, |
|
"eval_steps_per_second": 1.269, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.670995670995671, |
|
"grad_norm": 3.686875581741333, |
|
"learning_rate": 0.00018108597285067875, |
|
"loss": 3.9625, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6926406926406926, |
|
"grad_norm": 3.1810693740844727, |
|
"learning_rate": 0.00018018099547511315, |
|
"loss": 3.9109, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 3.2442595958709717, |
|
"learning_rate": 0.00017927601809954752, |
|
"loss": 3.8609, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.7359307359307359, |
|
"grad_norm": 3.7046546936035156, |
|
"learning_rate": 0.00017837104072398192, |
|
"loss": 3.9125, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7575757575757576, |
|
"grad_norm": 2.9039487838745117, |
|
"learning_rate": 0.0001774660633484163, |
|
"loss": 3.9406, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7792207792207793, |
|
"grad_norm": 3.293367862701416, |
|
"learning_rate": 0.00017656108597285068, |
|
"loss": 3.8234, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.8008658008658008, |
|
"grad_norm": 3.5505335330963135, |
|
"learning_rate": 0.00017565610859728508, |
|
"loss": 3.8047, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8225108225108225, |
|
"grad_norm": 3.220608949661255, |
|
"learning_rate": 0.00017475113122171948, |
|
"loss": 3.8516, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8441558441558441, |
|
"grad_norm": 3.3645002841949463, |
|
"learning_rate": 0.00017384615384615385, |
|
"loss": 3.8766, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8658008658008658, |
|
"grad_norm": 3.1517069339752197, |
|
"learning_rate": 0.00017294117647058825, |
|
"loss": 3.8516, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8658008658008658, |
|
"eval_loss": 2.6535000801086426, |
|
"eval_runtime": 6.3052, |
|
"eval_samples_per_second": 79.299, |
|
"eval_steps_per_second": 1.269, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8874458874458875, |
|
"grad_norm": 3.14215350151062, |
|
"learning_rate": 0.00017203619909502264, |
|
"loss": 3.7781, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 3.1268439292907715, |
|
"learning_rate": 0.00017113122171945702, |
|
"loss": 3.8203, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9307359307359307, |
|
"grad_norm": 3.415076494216919, |
|
"learning_rate": 0.0001702262443438914, |
|
"loss": 3.7625, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 3.4878830909729004, |
|
"learning_rate": 0.0001693212669683258, |
|
"loss": 3.8531, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.974025974025974, |
|
"grad_norm": 3.972776174545288, |
|
"learning_rate": 0.00016841628959276018, |
|
"loss": 3.8266, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9956709956709957, |
|
"grad_norm": 4.266445636749268, |
|
"learning_rate": 0.00016751131221719458, |
|
"loss": 3.7516, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.0173160173160174, |
|
"grad_norm": 4.685347080230713, |
|
"learning_rate": 0.00016660633484162898, |
|
"loss": 3.6875, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.0389610389610389, |
|
"grad_norm": 3.38425612449646, |
|
"learning_rate": 0.00016570135746606337, |
|
"loss": 3.6719, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.0606060606060606, |
|
"grad_norm": 3.108266592025757, |
|
"learning_rate": 0.00016479638009049774, |
|
"loss": 3.7156, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.0822510822510822, |
|
"grad_norm": 3.597536563873291, |
|
"learning_rate": 0.00016389140271493214, |
|
"loss": 3.6531, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.0822510822510822, |
|
"eval_loss": 2.546750068664551, |
|
"eval_runtime": 6.5359, |
|
"eval_samples_per_second": 76.501, |
|
"eval_steps_per_second": 1.224, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.103896103896104, |
|
"grad_norm": 3.6433794498443604, |
|
"learning_rate": 0.0001629864253393665, |
|
"loss": 3.6594, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.1255411255411256, |
|
"grad_norm": 3.4697842597961426, |
|
"learning_rate": 0.0001620814479638009, |
|
"loss": 3.6609, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.1471861471861473, |
|
"grad_norm": 3.0815842151641846, |
|
"learning_rate": 0.0001611764705882353, |
|
"loss": 3.6578, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.1688311688311688, |
|
"grad_norm": 3.8230438232421875, |
|
"learning_rate": 0.0001602714932126697, |
|
"loss": 3.6953, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.1904761904761905, |
|
"grad_norm": 3.399728298187256, |
|
"learning_rate": 0.00015936651583710408, |
|
"loss": 3.5969, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 3.2932376861572266, |
|
"learning_rate": 0.00015846153846153847, |
|
"loss": 3.65, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.2337662337662338, |
|
"grad_norm": 4.999845504760742, |
|
"learning_rate": 0.00015755656108597284, |
|
"loss": 3.5953, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.2554112554112553, |
|
"grad_norm": 4.074374675750732, |
|
"learning_rate": 0.00015665158371040724, |
|
"loss": 3.6891, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.277056277056277, |
|
"grad_norm": 4.196053504943848, |
|
"learning_rate": 0.00015574660633484164, |
|
"loss": 3.5922, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.2987012987012987, |
|
"grad_norm": 4.6053853034973145, |
|
"learning_rate": 0.00015484162895927604, |
|
"loss": 3.6859, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.2987012987012987, |
|
"eval_loss": 2.4678750038146973, |
|
"eval_runtime": 6.7301, |
|
"eval_samples_per_second": 74.293, |
|
"eval_steps_per_second": 1.189, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.3203463203463204, |
|
"grad_norm": 4.082015037536621, |
|
"learning_rate": 0.00015393665158371043, |
|
"loss": 3.6109, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.341991341991342, |
|
"grad_norm": 3.0748109817504883, |
|
"learning_rate": 0.0001530316742081448, |
|
"loss": 3.5875, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 3.769073486328125, |
|
"learning_rate": 0.00015212669683257917, |
|
"loss": 3.6469, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.3852813852813852, |
|
"grad_norm": 3.995649576187134, |
|
"learning_rate": 0.00015122171945701357, |
|
"loss": 3.5516, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.406926406926407, |
|
"grad_norm": 3.429675579071045, |
|
"learning_rate": 0.00015031674208144797, |
|
"loss": 3.6437, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 3.3972747325897217, |
|
"learning_rate": 0.00014941176470588237, |
|
"loss": 3.5578, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.4502164502164503, |
|
"grad_norm": 4.749250888824463, |
|
"learning_rate": 0.00014850678733031676, |
|
"loss": 3.5703, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.4718614718614718, |
|
"grad_norm": 5.253241062164307, |
|
"learning_rate": 0.00014760180995475113, |
|
"loss": 3.5125, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.4935064935064934, |
|
"grad_norm": 4.124709129333496, |
|
"learning_rate": 0.0001466968325791855, |
|
"loss": 3.5766, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"grad_norm": 4.052198886871338, |
|
"learning_rate": 0.0001457918552036199, |
|
"loss": 3.6094, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"eval_loss": 2.42787504196167, |
|
"eval_runtime": 6.6116, |
|
"eval_samples_per_second": 75.625, |
|
"eval_steps_per_second": 1.21, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.5367965367965368, |
|
"grad_norm": 5.0124616622924805, |
|
"learning_rate": 0.0001448868778280543, |
|
"loss": 3.4797, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.5584415584415585, |
|
"grad_norm": 4.067927360534668, |
|
"learning_rate": 0.0001439819004524887, |
|
"loss": 3.5406, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.5800865800865802, |
|
"grad_norm": 3.851562023162842, |
|
"learning_rate": 0.0001430769230769231, |
|
"loss": 3.5719, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.601731601731602, |
|
"grad_norm": 3.5033013820648193, |
|
"learning_rate": 0.00014217194570135747, |
|
"loss": 3.5531, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.6233766233766234, |
|
"grad_norm": 4.524363040924072, |
|
"learning_rate": 0.00014126696832579186, |
|
"loss": 3.5406, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.645021645021645, |
|
"grad_norm": 3.595323085784912, |
|
"learning_rate": 0.00014036199095022623, |
|
"loss": 3.525, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 5.544644832611084, |
|
"learning_rate": 0.00013945701357466063, |
|
"loss": 3.5438, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.6883116883116882, |
|
"grad_norm": 3.835862636566162, |
|
"learning_rate": 0.00013855203619909503, |
|
"loss": 3.5312, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.70995670995671, |
|
"grad_norm": 3.518122911453247, |
|
"learning_rate": 0.00013764705882352943, |
|
"loss": 3.5516, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.7316017316017316, |
|
"grad_norm": 6.213132381439209, |
|
"learning_rate": 0.0001367420814479638, |
|
"loss": 3.5328, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.7316017316017316, |
|
"eval_loss": 2.3873751163482666, |
|
"eval_runtime": 6.5472, |
|
"eval_samples_per_second": 76.369, |
|
"eval_steps_per_second": 1.222, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.7532467532467533, |
|
"grad_norm": 4.263932704925537, |
|
"learning_rate": 0.0001358371040723982, |
|
"loss": 3.5094, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.774891774891775, |
|
"grad_norm": 3.5482687950134277, |
|
"learning_rate": 0.0001349321266968326, |
|
"loss": 3.55, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.7965367965367967, |
|
"grad_norm": 4.2857346534729, |
|
"learning_rate": 0.00013402714932126696, |
|
"loss": 3.4828, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 3.9592478275299072, |
|
"learning_rate": 0.00013312217194570136, |
|
"loss": 3.5328, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.8398268398268398, |
|
"grad_norm": 3.499769449234009, |
|
"learning_rate": 0.00013221719457013576, |
|
"loss": 3.5, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.8614718614718615, |
|
"grad_norm": 3.270787477493286, |
|
"learning_rate": 0.00013131221719457013, |
|
"loss": 3.4516, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.883116883116883, |
|
"grad_norm": 4.917684078216553, |
|
"learning_rate": 0.00013040723981900453, |
|
"loss": 3.4656, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.9047619047619047, |
|
"grad_norm": 4.024767875671387, |
|
"learning_rate": 0.00012950226244343892, |
|
"loss": 3.4922, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.9264069264069263, |
|
"grad_norm": 4.4510650634765625, |
|
"learning_rate": 0.00012859728506787332, |
|
"loss": 3.4891, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.948051948051948, |
|
"grad_norm": 3.7398886680603027, |
|
"learning_rate": 0.0001276923076923077, |
|
"loss": 3.4438, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.948051948051948, |
|
"eval_loss": 2.3561251163482666, |
|
"eval_runtime": 6.5958, |
|
"eval_samples_per_second": 75.806, |
|
"eval_steps_per_second": 1.213, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.9696969696969697, |
|
"grad_norm": 4.9417219161987305, |
|
"learning_rate": 0.0001267873303167421, |
|
"loss": 3.4953, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.9913419913419914, |
|
"grad_norm": 4.173361301422119, |
|
"learning_rate": 0.0001258823529411765, |
|
"loss": 3.4813, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.012987012987013, |
|
"grad_norm": 3.6130735874176025, |
|
"learning_rate": 0.00012497737556561086, |
|
"loss": 3.4188, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.034632034632035, |
|
"grad_norm": 4.494176864624023, |
|
"learning_rate": 0.00012407239819004525, |
|
"loss": 3.3375, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.0562770562770565, |
|
"grad_norm": 4.0216593742370605, |
|
"learning_rate": 0.00012316742081447965, |
|
"loss": 3.3531, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.0779220779220777, |
|
"grad_norm": 4.224644184112549, |
|
"learning_rate": 0.00012226244343891405, |
|
"loss": 3.3859, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.0995670995670994, |
|
"grad_norm": 3.7151124477386475, |
|
"learning_rate": 0.0001213574660633484, |
|
"loss": 3.4297, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.121212121212121, |
|
"grad_norm": 4.0024847984313965, |
|
"learning_rate": 0.0001204524886877828, |
|
"loss": 3.3578, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.142857142857143, |
|
"grad_norm": 4.19533109664917, |
|
"learning_rate": 0.0001195475113122172, |
|
"loss": 3.4, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.1645021645021645, |
|
"grad_norm": 4.282599925994873, |
|
"learning_rate": 0.00011864253393665159, |
|
"loss": 3.4547, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.1645021645021645, |
|
"eval_loss": 2.300874948501587, |
|
"eval_runtime": 6.3309, |
|
"eval_samples_per_second": 78.978, |
|
"eval_steps_per_second": 1.264, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.186147186147186, |
|
"grad_norm": 3.4820311069488525, |
|
"learning_rate": 0.00011773755656108598, |
|
"loss": 3.3906, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.207792207792208, |
|
"grad_norm": 3.9142370223999023, |
|
"learning_rate": 0.00011683257918552038, |
|
"loss": 3.3203, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.2294372294372296, |
|
"grad_norm": 4.8086628913879395, |
|
"learning_rate": 0.00011592760180995476, |
|
"loss": 3.2969, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.2510822510822512, |
|
"grad_norm": 3.546926975250244, |
|
"learning_rate": 0.00011502262443438914, |
|
"loss": 3.3578, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.2727272727272725, |
|
"grad_norm": 4.613463878631592, |
|
"learning_rate": 0.00011411764705882353, |
|
"loss": 3.3609, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.2943722943722946, |
|
"grad_norm": 4.487024784088135, |
|
"learning_rate": 0.00011321266968325792, |
|
"loss": 3.3141, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.316017316017316, |
|
"grad_norm": 3.963923692703247, |
|
"learning_rate": 0.00011230769230769231, |
|
"loss": 3.3312, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.3376623376623376, |
|
"grad_norm": 4.741418361663818, |
|
"learning_rate": 0.00011140271493212671, |
|
"loss": 3.3937, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.3593073593073592, |
|
"grad_norm": 4.762458801269531, |
|
"learning_rate": 0.0001104977375565611, |
|
"loss": 3.3469, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.380952380952381, |
|
"grad_norm": 4.140308380126953, |
|
"learning_rate": 0.0001095927601809955, |
|
"loss": 3.4047, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.380952380952381, |
|
"eval_loss": 2.2771248817443848, |
|
"eval_runtime": 6.4697, |
|
"eval_samples_per_second": 77.284, |
|
"eval_steps_per_second": 1.237, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.4025974025974026, |
|
"grad_norm": 3.9786176681518555, |
|
"learning_rate": 0.00010868778280542986, |
|
"loss": 3.4047, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.4242424242424243, |
|
"grad_norm": 4.1307501792907715, |
|
"learning_rate": 0.00010778280542986425, |
|
"loss": 3.3984, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.445887445887446, |
|
"grad_norm": 5.001722812652588, |
|
"learning_rate": 0.00010687782805429865, |
|
"loss": 3.3969, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.4675324675324677, |
|
"grad_norm": 4.192739009857178, |
|
"learning_rate": 0.00010597285067873304, |
|
"loss": 3.3328, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.4891774891774894, |
|
"grad_norm": 4.113678932189941, |
|
"learning_rate": 0.00010506787330316743, |
|
"loss": 3.3609, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.5108225108225106, |
|
"grad_norm": 3.677635669708252, |
|
"learning_rate": 0.00010416289592760182, |
|
"loss": 3.3062, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.5324675324675323, |
|
"grad_norm": 4.670088291168213, |
|
"learning_rate": 0.00010325791855203621, |
|
"loss": 3.3344, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.554112554112554, |
|
"grad_norm": 4.220792770385742, |
|
"learning_rate": 0.00010235294117647058, |
|
"loss": 3.2266, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.5757575757575757, |
|
"grad_norm": 4.443659782409668, |
|
"learning_rate": 0.00010144796380090498, |
|
"loss": 3.3875, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.5974025974025974, |
|
"grad_norm": 4.4685187339782715, |
|
"learning_rate": 0.00010054298642533937, |
|
"loss": 3.3391, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.5974025974025974, |
|
"eval_loss": 2.250687599182129, |
|
"eval_runtime": 6.5637, |
|
"eval_samples_per_second": 76.177, |
|
"eval_steps_per_second": 1.219, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.619047619047619, |
|
"grad_norm": 4.526357650756836, |
|
"learning_rate": 9.963800904977376e-05, |
|
"loss": 3.2641, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.6406926406926408, |
|
"grad_norm": 4.371001720428467, |
|
"learning_rate": 9.873303167420816e-05, |
|
"loss": 3.3141, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.6623376623376624, |
|
"grad_norm": 3.9310309886932373, |
|
"learning_rate": 9.782805429864254e-05, |
|
"loss": 3.3391, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.683982683982684, |
|
"grad_norm": 4.313389778137207, |
|
"learning_rate": 9.692307692307692e-05, |
|
"loss": 3.2766, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.7056277056277054, |
|
"grad_norm": 4.895615100860596, |
|
"learning_rate": 9.601809954751132e-05, |
|
"loss": 3.2609, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.7272727272727275, |
|
"grad_norm": 3.97171688079834, |
|
"learning_rate": 9.51131221719457e-05, |
|
"loss": 3.2781, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.7489177489177488, |
|
"grad_norm": 4.3905029296875, |
|
"learning_rate": 9.420814479638009e-05, |
|
"loss": 3.2953, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.7705627705627704, |
|
"grad_norm": 4.197689533233643, |
|
"learning_rate": 9.330316742081449e-05, |
|
"loss": 3.2766, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.792207792207792, |
|
"grad_norm": 4.34577751159668, |
|
"learning_rate": 9.239819004524888e-05, |
|
"loss": 3.3141, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.813852813852814, |
|
"grad_norm": 3.8355207443237305, |
|
"learning_rate": 9.149321266968326e-05, |
|
"loss": 3.2859, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.813852813852814, |
|
"eval_loss": 2.2236249446868896, |
|
"eval_runtime": 6.5337, |
|
"eval_samples_per_second": 76.527, |
|
"eval_steps_per_second": 1.224, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.8354978354978355, |
|
"grad_norm": 3.9101665019989014, |
|
"learning_rate": 9.058823529411765e-05, |
|
"loss": 3.2984, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 4.794166564941406, |
|
"learning_rate": 8.968325791855205e-05, |
|
"loss": 3.3188, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.878787878787879, |
|
"grad_norm": 4.448386192321777, |
|
"learning_rate": 8.877828054298642e-05, |
|
"loss": 3.2797, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.9004329004329006, |
|
"grad_norm": 3.9681570529937744, |
|
"learning_rate": 8.787330316742082e-05, |
|
"loss": 3.3484, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.9220779220779223, |
|
"grad_norm": 4.010087966918945, |
|
"learning_rate": 8.696832579185522e-05, |
|
"loss": 3.3297, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.9437229437229435, |
|
"grad_norm": 3.9292078018188477, |
|
"learning_rate": 8.60633484162896e-05, |
|
"loss": 3.3344, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.965367965367965, |
|
"grad_norm": 4.463669776916504, |
|
"learning_rate": 8.515837104072398e-05, |
|
"loss": 3.275, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.987012987012987, |
|
"grad_norm": 3.4622855186462402, |
|
"learning_rate": 8.425339366515838e-05, |
|
"loss": 3.3078, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.0086580086580086, |
|
"grad_norm": 3.9492321014404297, |
|
"learning_rate": 8.334841628959277e-05, |
|
"loss": 3.3141, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.0303030303030303, |
|
"grad_norm": 4.027843475341797, |
|
"learning_rate": 8.244343891402715e-05, |
|
"loss": 3.2484, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.0303030303030303, |
|
"eval_loss": 2.197937488555908, |
|
"eval_runtime": 6.7816, |
|
"eval_samples_per_second": 73.728, |
|
"eval_steps_per_second": 1.18, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.051948051948052, |
|
"grad_norm": 4.027857780456543, |
|
"learning_rate": 8.153846153846155e-05, |
|
"loss": 3.1766, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.0735930735930737, |
|
"grad_norm": 5.006868362426758, |
|
"learning_rate": 8.063348416289593e-05, |
|
"loss": 3.1719, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.0952380952380953, |
|
"grad_norm": 4.768033027648926, |
|
"learning_rate": 7.972850678733033e-05, |
|
"loss": 3.1344, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.116883116883117, |
|
"grad_norm": 5.115345478057861, |
|
"learning_rate": 7.882352941176471e-05, |
|
"loss": 3.1719, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.1385281385281387, |
|
"grad_norm": 3.7323756217956543, |
|
"learning_rate": 7.79185520361991e-05, |
|
"loss": 3.1344, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.16017316017316, |
|
"grad_norm": 5.077029228210449, |
|
"learning_rate": 7.70135746606335e-05, |
|
"loss": 3.2687, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.1818181818181817, |
|
"grad_norm": 4.608593463897705, |
|
"learning_rate": 7.610859728506788e-05, |
|
"loss": 3.2031, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.2034632034632033, |
|
"grad_norm": 5.431169033050537, |
|
"learning_rate": 7.520361990950226e-05, |
|
"loss": 3.1984, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.225108225108225, |
|
"grad_norm": 4.630691051483154, |
|
"learning_rate": 7.429864253393666e-05, |
|
"loss": 3.2359, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.2467532467532467, |
|
"grad_norm": 4.724872589111328, |
|
"learning_rate": 7.339366515837104e-05, |
|
"loss": 3.2109, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.2467532467532467, |
|
"eval_loss": 2.1664373874664307, |
|
"eval_runtime": 6.556, |
|
"eval_samples_per_second": 76.266, |
|
"eval_steps_per_second": 1.22, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.2683982683982684, |
|
"grad_norm": 4.629438877105713, |
|
"learning_rate": 7.248868778280543e-05, |
|
"loss": 3.1844, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.29004329004329, |
|
"grad_norm": 5.131148815155029, |
|
"learning_rate": 7.158371040723983e-05, |
|
"loss": 3.1688, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.311688311688312, |
|
"grad_norm": 4.398173809051514, |
|
"learning_rate": 7.067873303167421e-05, |
|
"loss": 3.2266, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 4.557602405548096, |
|
"learning_rate": 6.97737556561086e-05, |
|
"loss": 3.1531, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.354978354978355, |
|
"grad_norm": 4.508275508880615, |
|
"learning_rate": 6.886877828054299e-05, |
|
"loss": 3.2391, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.3766233766233764, |
|
"grad_norm": 4.455708980560303, |
|
"learning_rate": 6.796380090497739e-05, |
|
"loss": 3.1078, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.398268398268398, |
|
"grad_norm": 4.379371643066406, |
|
"learning_rate": 6.705882352941176e-05, |
|
"loss": 3.1875, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.41991341991342, |
|
"grad_norm": 5.268296241760254, |
|
"learning_rate": 6.615384615384616e-05, |
|
"loss": 3.1094, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.4415584415584415, |
|
"grad_norm": 3.684109687805176, |
|
"learning_rate": 6.524886877828055e-05, |
|
"loss": 3.2156, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.463203463203463, |
|
"grad_norm": 4.146853923797607, |
|
"learning_rate": 6.434389140271494e-05, |
|
"loss": 3.1953, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.463203463203463, |
|
"eval_loss": 2.1688125133514404, |
|
"eval_runtime": 6.4486, |
|
"eval_samples_per_second": 77.537, |
|
"eval_steps_per_second": 1.241, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.484848484848485, |
|
"grad_norm": 4.214244365692139, |
|
"learning_rate": 6.343891402714932e-05, |
|
"loss": 3.1297, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.5064935064935066, |
|
"grad_norm": 4.9321184158325195, |
|
"learning_rate": 6.253393665158372e-05, |
|
"loss": 3.1531, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.5281385281385282, |
|
"grad_norm": 4.7060017585754395, |
|
"learning_rate": 6.16289592760181e-05, |
|
"loss": 3.1703, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.54978354978355, |
|
"grad_norm": 4.779506206512451, |
|
"learning_rate": 6.072398190045249e-05, |
|
"loss": 3.1172, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.571428571428571, |
|
"grad_norm": 4.440573692321777, |
|
"learning_rate": 5.981900452488688e-05, |
|
"loss": 3.1437, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.5930735930735933, |
|
"grad_norm": 4.986171722412109, |
|
"learning_rate": 5.891402714932127e-05, |
|
"loss": 3.1938, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.6147186147186146, |
|
"grad_norm": 4.413197994232178, |
|
"learning_rate": 5.800904977375567e-05, |
|
"loss": 3.1422, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.6363636363636362, |
|
"grad_norm": 4.145831108093262, |
|
"learning_rate": 5.7104072398190044e-05, |
|
"loss": 3.1297, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.658008658008658, |
|
"grad_norm": 4.501436710357666, |
|
"learning_rate": 5.6199095022624435e-05, |
|
"loss": 3.1172, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.6796536796536796, |
|
"grad_norm": 4.262230396270752, |
|
"learning_rate": 5.529411764705883e-05, |
|
"loss": 3.1875, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.6796536796536796, |
|
"eval_loss": 2.1631250381469727, |
|
"eval_runtime": 7.0489, |
|
"eval_samples_per_second": 70.933, |
|
"eval_steps_per_second": 1.135, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.7012987012987013, |
|
"grad_norm": 5.456052780151367, |
|
"learning_rate": 5.438914027149321e-05, |
|
"loss": 3.2078, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.722943722943723, |
|
"grad_norm": 4.408057689666748, |
|
"learning_rate": 5.34841628959276e-05, |
|
"loss": 3.1969, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.7445887445887447, |
|
"grad_norm": 3.87892484664917, |
|
"learning_rate": 5.2579185520362e-05, |
|
"loss": 3.1875, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.7662337662337664, |
|
"grad_norm": 4.127628326416016, |
|
"learning_rate": 5.167420814479639e-05, |
|
"loss": 3.1812, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.787878787878788, |
|
"grad_norm": 4.400271892547607, |
|
"learning_rate": 5.0769230769230766e-05, |
|
"loss": 3.1516, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.8095238095238093, |
|
"grad_norm": 4.7595601081848145, |
|
"learning_rate": 4.986425339366516e-05, |
|
"loss": 3.1484, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.8311688311688314, |
|
"grad_norm": 4.606581687927246, |
|
"learning_rate": 4.895927601809955e-05, |
|
"loss": 3.1375, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.8528138528138527, |
|
"grad_norm": 3.985682964324951, |
|
"learning_rate": 4.805429864253394e-05, |
|
"loss": 3.2391, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.8744588744588744, |
|
"grad_norm": 4.837043285369873, |
|
"learning_rate": 4.714932126696833e-05, |
|
"loss": 3.1375, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.896103896103896, |
|
"grad_norm": 5.133884429931641, |
|
"learning_rate": 4.624434389140272e-05, |
|
"loss": 3.1984, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.896103896103896, |
|
"eval_loss": 2.1586875915527344, |
|
"eval_runtime": 6.4439, |
|
"eval_samples_per_second": 77.593, |
|
"eval_steps_per_second": 1.241, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.9177489177489178, |
|
"grad_norm": 4.777352333068848, |
|
"learning_rate": 4.5339366515837104e-05, |
|
"loss": 3.0766, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.9393939393939394, |
|
"grad_norm": 4.657353401184082, |
|
"learning_rate": 4.4434389140271495e-05, |
|
"loss": 3.0906, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.961038961038961, |
|
"grad_norm": 4.391796588897705, |
|
"learning_rate": 4.3529411764705885e-05, |
|
"loss": 3.1688, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.982683982683983, |
|
"grad_norm": 4.547649383544922, |
|
"learning_rate": 4.262443438914027e-05, |
|
"loss": 3.1562, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.004329004329004, |
|
"grad_norm": 4.718013286590576, |
|
"learning_rate": 4.171945701357467e-05, |
|
"loss": 3.1313, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.025974025974026, |
|
"grad_norm": 5.101125240325928, |
|
"learning_rate": 4.081447963800905e-05, |
|
"loss": 3.0547, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.0476190476190474, |
|
"grad_norm": 4.414271831512451, |
|
"learning_rate": 3.990950226244344e-05, |
|
"loss": 3.0719, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.06926406926407, |
|
"grad_norm": 3.9164297580718994, |
|
"learning_rate": 3.900452488687783e-05, |
|
"loss": 3.1375, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.090909090909091, |
|
"grad_norm": 5.559360027313232, |
|
"learning_rate": 3.8099547511312216e-05, |
|
"loss": 3.1094, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.112554112554113, |
|
"grad_norm": 4.572648525238037, |
|
"learning_rate": 3.719457013574661e-05, |
|
"loss": 3.0641, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.112554112554113, |
|
"eval_loss": 2.1291251182556152, |
|
"eval_runtime": 6.5526, |
|
"eval_samples_per_second": 76.306, |
|
"eval_steps_per_second": 1.221, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.134199134199134, |
|
"grad_norm": 4.143476486206055, |
|
"learning_rate": 3.6289592760181e-05, |
|
"loss": 3.1281, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.1558441558441555, |
|
"grad_norm": 5.0819525718688965, |
|
"learning_rate": 3.538461538461539e-05, |
|
"loss": 3.1375, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.177489177489178, |
|
"grad_norm": 4.748097896575928, |
|
"learning_rate": 3.447963800904977e-05, |
|
"loss": 3.075, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.199134199134199, |
|
"grad_norm": 4.316249847412109, |
|
"learning_rate": 3.3574660633484164e-05, |
|
"loss": 2.9859, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.220779220779221, |
|
"grad_norm": 4.169147491455078, |
|
"learning_rate": 3.2669683257918554e-05, |
|
"loss": 3.1203, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.242424242424242, |
|
"grad_norm": 4.865535259246826, |
|
"learning_rate": 3.176470588235294e-05, |
|
"loss": 3.05, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.264069264069264, |
|
"grad_norm": 4.354024410247803, |
|
"learning_rate": 3.0859728506787336e-05, |
|
"loss": 3.0828, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.285714285714286, |
|
"grad_norm": 4.201045989990234, |
|
"learning_rate": 2.995475113122172e-05, |
|
"loss": 3.1562, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.307359307359308, |
|
"grad_norm": 4.673395156860352, |
|
"learning_rate": 2.904977375565611e-05, |
|
"loss": 3.0219, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.329004329004329, |
|
"grad_norm": 4.373682975769043, |
|
"learning_rate": 2.8144796380090498e-05, |
|
"loss": 3.1125, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.329004329004329, |
|
"eval_loss": 2.137437582015991, |
|
"eval_runtime": 6.3906, |
|
"eval_samples_per_second": 78.24, |
|
"eval_steps_per_second": 1.252, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.35064935064935, |
|
"grad_norm": 4.382651329040527, |
|
"learning_rate": 2.7239819004524886e-05, |
|
"loss": 3.0234, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.372294372294372, |
|
"grad_norm": 4.836797714233398, |
|
"learning_rate": 2.633484162895928e-05, |
|
"loss": 3.1156, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.393939393939394, |
|
"grad_norm": 4.31813907623291, |
|
"learning_rate": 2.5429864253393664e-05, |
|
"loss": 3.0484, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.415584415584416, |
|
"grad_norm": 4.9742350578308105, |
|
"learning_rate": 2.4524886877828054e-05, |
|
"loss": 3.0297, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.437229437229437, |
|
"grad_norm": 4.34569787979126, |
|
"learning_rate": 2.3619909502262445e-05, |
|
"loss": 3.0688, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.458874458874459, |
|
"grad_norm": 4.9268798828125, |
|
"learning_rate": 2.2714932126696833e-05, |
|
"loss": 3.0797, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.48051948051948, |
|
"grad_norm": 4.407137870788574, |
|
"learning_rate": 2.1809954751131223e-05, |
|
"loss": 3.0266, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.5021645021645025, |
|
"grad_norm": 4.480093479156494, |
|
"learning_rate": 2.090497737556561e-05, |
|
"loss": 3.0688, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.523809523809524, |
|
"grad_norm": 5.038318634033203, |
|
"learning_rate": 2e-05, |
|
"loss": 3.125, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.545454545454545, |
|
"grad_norm": 4.633235454559326, |
|
"learning_rate": 1.909502262443439e-05, |
|
"loss": 3.0562, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.545454545454545, |
|
"eval_loss": 2.1156249046325684, |
|
"eval_runtime": 6.6798, |
|
"eval_samples_per_second": 74.853, |
|
"eval_steps_per_second": 1.198, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.567099567099567, |
|
"grad_norm": 5.131459712982178, |
|
"learning_rate": 1.819004524886878e-05, |
|
"loss": 3.1156, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.588744588744589, |
|
"grad_norm": 4.459514141082764, |
|
"learning_rate": 1.7285067873303167e-05, |
|
"loss": 3.0562, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.6103896103896105, |
|
"grad_norm": 4.892005443572998, |
|
"learning_rate": 1.6380090497737558e-05, |
|
"loss": 3.0297, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.632034632034632, |
|
"grad_norm": 4.963741779327393, |
|
"learning_rate": 1.5475113122171945e-05, |
|
"loss": 3.0125, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.653679653679654, |
|
"grad_norm": 4.354393482208252, |
|
"learning_rate": 1.4570135746606336e-05, |
|
"loss": 3.0578, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.675324675324675, |
|
"grad_norm": 4.8484649658203125, |
|
"learning_rate": 1.3665158371040724e-05, |
|
"loss": 3.0953, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.696969696969697, |
|
"grad_norm": 5.201778411865234, |
|
"learning_rate": 1.2760180995475113e-05, |
|
"loss": 3.0828, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.7186147186147185, |
|
"grad_norm": 5.245253086090088, |
|
"learning_rate": 1.1855203619909502e-05, |
|
"loss": 2.9922, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.740259740259741, |
|
"grad_norm": 5.066870212554932, |
|
"learning_rate": 1.0950226244343893e-05, |
|
"loss": 3.0516, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.761904761904762, |
|
"grad_norm": 4.195698261260986, |
|
"learning_rate": 1.0045248868778282e-05, |
|
"loss": 3.0734, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.761904761904762, |
|
"eval_loss": 2.101749897003174, |
|
"eval_runtime": 6.5232, |
|
"eval_samples_per_second": 76.65, |
|
"eval_steps_per_second": 1.226, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.783549783549784, |
|
"grad_norm": 4.493175506591797, |
|
"learning_rate": 9.140271493212669e-06, |
|
"loss": 3.0187, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.805194805194805, |
|
"grad_norm": 4.933455944061279, |
|
"learning_rate": 8.23529411764706e-06, |
|
"loss": 3.0734, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.8268398268398265, |
|
"grad_norm": 4.937650203704834, |
|
"learning_rate": 7.330316742081449e-06, |
|
"loss": 3.0609, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.848484848484849, |
|
"grad_norm": 4.216976165771484, |
|
"learning_rate": 6.425339366515837e-06, |
|
"loss": 3.0156, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.87012987012987, |
|
"grad_norm": 4.135624408721924, |
|
"learning_rate": 5.520361990950226e-06, |
|
"loss": 3.0109, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.891774891774892, |
|
"grad_norm": 5.964848041534424, |
|
"learning_rate": 4.615384615384616e-06, |
|
"loss": 2.9969, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.913419913419913, |
|
"grad_norm": 4.022227764129639, |
|
"learning_rate": 3.710407239819005e-06, |
|
"loss": 3.0078, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.935064935064935, |
|
"grad_norm": 4.731805801391602, |
|
"learning_rate": 2.805429864253394e-06, |
|
"loss": 2.9844, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.956709956709957, |
|
"grad_norm": 4.872863292694092, |
|
"learning_rate": 1.9004524886877828e-06, |
|
"loss": 3.0484, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.978354978354979, |
|
"grad_norm": 4.713351249694824, |
|
"learning_rate": 9.954751131221719e-07, |
|
"loss": 3.0469, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.978354978354979, |
|
"eval_loss": 2.1040000915527344, |
|
"eval_runtime": 6.4203, |
|
"eval_samples_per_second": 77.878, |
|
"eval_steps_per_second": 1.246, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 5.077022075653076, |
|
"learning_rate": 9.04977375565611e-08, |
|
"loss": 3.0016, |
|
"step": 2310 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2310, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 256, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|