{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1900, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005263157894736842, |
|
"grad_norm": 17.523106343551206, |
|
"learning_rate": 1.0526315789473685e-07, |
|
"loss": 1.5333, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002631578947368421, |
|
"grad_norm": 18.121909431354247, |
|
"learning_rate": 5.263157894736843e-07, |
|
"loss": 1.5177, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005263157894736842, |
|
"grad_norm": 14.836011770372137, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 1.4389, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.007894736842105263, |
|
"grad_norm": 3.8440549265416597, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 1.4167, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.010526315789473684, |
|
"grad_norm": 2.4276964194723165, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 1.2742, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.013157894736842105, |
|
"grad_norm": 2.187369889390014, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 1.231, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.015789473684210527, |
|
"grad_norm": 2.0926997492754285, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 1.2264, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.018421052631578946, |
|
"grad_norm": 2.0987185493937197, |
|
"learning_rate": 3.6842105263157896e-06, |
|
"loss": 1.1576, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.021052631578947368, |
|
"grad_norm": 1.871475479211144, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 1.2529, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02368421052631579, |
|
"grad_norm": 2.2193852291586773, |
|
"learning_rate": 4.736842105263158e-06, |
|
"loss": 1.1641, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02631578947368421, |
|
"grad_norm": 1.9595885172581249, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 1.2251, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02894736842105263, |
|
"grad_norm": 1.9931664072286583, |
|
"learning_rate": 5.789473684210527e-06, |
|
"loss": 1.1715, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.031578947368421054, |
|
"grad_norm": 2.1154232990627757, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 1.1263, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.034210526315789476, |
|
"grad_norm": 1.712961421918351, |
|
"learning_rate": 6.842105263157896e-06, |
|
"loss": 1.1846, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03684210526315789, |
|
"grad_norm": 1.907897309472424, |
|
"learning_rate": 7.368421052631579e-06, |
|
"loss": 1.1599, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.039473684210526314, |
|
"grad_norm": 1.8618405884575073, |
|
"learning_rate": 7.894736842105265e-06, |
|
"loss": 1.1446, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.042105263157894736, |
|
"grad_norm": 2.003611558479045, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 1.2148, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04473684210526316, |
|
"grad_norm": 1.9509755028786422, |
|
"learning_rate": 8.947368421052632e-06, |
|
"loss": 1.2278, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.04736842105263158, |
|
"grad_norm": 2.227455915272786, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 1.1945, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.2089132290988966, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2186, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05263157894736842, |
|
"grad_norm": 2.0403472473994992, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 1.244, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05526315789473684, |
|
"grad_norm": 2.07699667956044, |
|
"learning_rate": 1.105263157894737e-05, |
|
"loss": 1.2287, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.05789473684210526, |
|
"grad_norm": 1.8269346866586127, |
|
"learning_rate": 1.1578947368421053e-05, |
|
"loss": 1.2289, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.060526315789473685, |
|
"grad_norm": 1.9416484545897956, |
|
"learning_rate": 1.2105263157894737e-05, |
|
"loss": 1.1656, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06315789473684211, |
|
"grad_norm": 1.8329146478042504, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 1.2144, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06578947368421052, |
|
"grad_norm": 1.9961976737672504, |
|
"learning_rate": 1.3157894736842108e-05, |
|
"loss": 1.1669, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.06842105263157895, |
|
"grad_norm": 1.817199311046236, |
|
"learning_rate": 1.3684210526315791e-05, |
|
"loss": 1.2309, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07105263157894737, |
|
"grad_norm": 1.7397043184426004, |
|
"learning_rate": 1.4210526315789475e-05, |
|
"loss": 1.1937, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07368421052631578, |
|
"grad_norm": 2.045890849321593, |
|
"learning_rate": 1.4736842105263159e-05, |
|
"loss": 1.2241, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07631578947368421, |
|
"grad_norm": 1.9435204128532164, |
|
"learning_rate": 1.5263157894736846e-05, |
|
"loss": 1.208, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.07894736842105263, |
|
"grad_norm": 3.270850219919583, |
|
"learning_rate": 1.578947368421053e-05, |
|
"loss": 1.1838, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08157894736842106, |
|
"grad_norm": 1.9095124068691054, |
|
"learning_rate": 1.6315789473684213e-05, |
|
"loss": 1.2043, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08421052631578947, |
|
"grad_norm": 2.2387627201274025, |
|
"learning_rate": 1.6842105263157896e-05, |
|
"loss": 1.2013, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.0868421052631579, |
|
"grad_norm": 1.9325778214196014, |
|
"learning_rate": 1.736842105263158e-05, |
|
"loss": 1.211, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.08947368421052632, |
|
"grad_norm": 2.0833292593807107, |
|
"learning_rate": 1.7894736842105264e-05, |
|
"loss": 1.2453, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09210526315789473, |
|
"grad_norm": 1.927275946654803, |
|
"learning_rate": 1.8421052631578947e-05, |
|
"loss": 1.2558, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09473684210526316, |
|
"grad_norm": 1.7879806074211535, |
|
"learning_rate": 1.894736842105263e-05, |
|
"loss": 1.2603, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09736842105263158, |
|
"grad_norm": 1.6245970203120101, |
|
"learning_rate": 1.9473684210526318e-05, |
|
"loss": 1.285, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.7551464083695452, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2406, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.10263157894736842, |
|
"grad_norm": 1.8960908258050249, |
|
"learning_rate": 1.9999578095183126e-05, |
|
"loss": 1.2636, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.10526315789473684, |
|
"grad_norm": 1.8238289222983086, |
|
"learning_rate": 1.999831241633323e-05, |
|
"loss": 1.2385, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.10789473684210527, |
|
"grad_norm": 1.7131092211626127, |
|
"learning_rate": 1.9996203070249516e-05, |
|
"loss": 1.2057, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11052631578947368, |
|
"grad_norm": 1.9814641064127423, |
|
"learning_rate": 1.9993250234920638e-05, |
|
"loss": 1.2105, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11315789473684211, |
|
"grad_norm": 1.764032181247943, |
|
"learning_rate": 1.998945415950969e-05, |
|
"loss": 1.2082, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11578947368421053, |
|
"grad_norm": 1.7110028028184467, |
|
"learning_rate": 1.9984815164333163e-05, |
|
"loss": 1.2292, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.11842105263157894, |
|
"grad_norm": 1.9617301301812387, |
|
"learning_rate": 1.9979333640833947e-05, |
|
"loss": 1.2204, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12105263157894737, |
|
"grad_norm": 1.7477888973288305, |
|
"learning_rate": 1.9973010051548274e-05, |
|
"loss": 1.249, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12368421052631579, |
|
"grad_norm": 1.7276840169609364, |
|
"learning_rate": 1.99658449300667e-05, |
|
"loss": 1.1936, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12631578947368421, |
|
"grad_norm": 1.858438572075226, |
|
"learning_rate": 1.9957838880989076e-05, |
|
"loss": 1.2914, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.12894736842105264, |
|
"grad_norm": 1.6258008392447836, |
|
"learning_rate": 1.9948992579873538e-05, |
|
"loss": 1.2453, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.13157894736842105, |
|
"grad_norm": 1.6589841824832805, |
|
"learning_rate": 1.9939306773179498e-05, |
|
"loss": 1.2091, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13421052631578947, |
|
"grad_norm": 1.6489987345538428, |
|
"learning_rate": 1.992878227820465e-05, |
|
"loss": 1.2223, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.1368421052631579, |
|
"grad_norm": 1.736814397762254, |
|
"learning_rate": 1.9917419983016025e-05, |
|
"loss": 1.2464, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1394736842105263, |
|
"grad_norm": 1.612061991991375, |
|
"learning_rate": 1.990522084637503e-05, |
|
"loss": 1.2334, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14210526315789473, |
|
"grad_norm": 1.634510448068168, |
|
"learning_rate": 1.989218589765658e-05, |
|
"loss": 1.2618, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.14473684210526316, |
|
"grad_norm": 1.7336110112785093, |
|
"learning_rate": 1.9878316236762195e-05, |
|
"loss": 1.2533, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.14736842105263157, |
|
"grad_norm": 1.8212242851158706, |
|
"learning_rate": 1.9863613034027224e-05, |
|
"loss": 1.2929, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.7068273105310745, |
|
"learning_rate": 1.9848077530122083e-05, |
|
"loss": 1.1776, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15263157894736842, |
|
"grad_norm": 1.6973152439927286, |
|
"learning_rate": 1.9831711035947552e-05, |
|
"loss": 1.2079, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15526315789473685, |
|
"grad_norm": 1.7272781391952396, |
|
"learning_rate": 1.981451493252418e-05, |
|
"loss": 1.2529, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.15789473684210525, |
|
"grad_norm": 1.8086226050669576, |
|
"learning_rate": 1.979649067087574e-05, |
|
"loss": 1.2245, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16052631578947368, |
|
"grad_norm": 1.7065793845674917, |
|
"learning_rate": 1.9777639771906795e-05, |
|
"loss": 1.2693, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.1631578947368421, |
|
"grad_norm": 1.5865552591333323, |
|
"learning_rate": 1.9757963826274357e-05, |
|
"loss": 1.2161, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16578947368421051, |
|
"grad_norm": 1.5842481812204574, |
|
"learning_rate": 1.973746449425368e-05, |
|
"loss": 1.1984, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.16842105263157894, |
|
"grad_norm": 1.6377526238069449, |
|
"learning_rate": 1.971614350559814e-05, |
|
"loss": 1.2238, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.17105263157894737, |
|
"grad_norm": 1.4997127407445148, |
|
"learning_rate": 1.9694002659393306e-05, |
|
"loss": 1.2261, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.1736842105263158, |
|
"grad_norm": 2.0809171217051583, |
|
"learning_rate": 1.967104382390511e-05, |
|
"loss": 1.2353, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1763157894736842, |
|
"grad_norm": 1.5420942514065212, |
|
"learning_rate": 1.9647268936422204e-05, |
|
"loss": 1.1808, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.17894736842105263, |
|
"grad_norm": 1.7331848458071615, |
|
"learning_rate": 1.9622680003092503e-05, |
|
"loss": 1.2808, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.18157894736842106, |
|
"grad_norm": 1.642676114735854, |
|
"learning_rate": 1.9597279098753893e-05, |
|
"loss": 1.1869, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.18421052631578946, |
|
"grad_norm": 1.7245853030106764, |
|
"learning_rate": 1.9571068366759143e-05, |
|
"loss": 1.272, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.1868421052631579, |
|
"grad_norm": 1.611354187249786, |
|
"learning_rate": 1.9544050018795076e-05, |
|
"loss": 1.2521, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.18947368421052632, |
|
"grad_norm": 1.604892439509072, |
|
"learning_rate": 1.951622633469592e-05, |
|
"loss": 1.2039, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.19210526315789472, |
|
"grad_norm": 1.5654642508374634, |
|
"learning_rate": 1.9487599662250945e-05, |
|
"loss": 1.2186, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.19473684210526315, |
|
"grad_norm": 1.5636346341285678, |
|
"learning_rate": 1.9458172417006347e-05, |
|
"loss": 1.2436, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.19736842105263158, |
|
"grad_norm": 1.7025910710749705, |
|
"learning_rate": 1.9427947082061432e-05, |
|
"loss": 1.2455, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.7656433674820964, |
|
"learning_rate": 1.9396926207859085e-05, |
|
"loss": 1.2548, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.2026315789473684, |
|
"grad_norm": 1.6243476505198133, |
|
"learning_rate": 1.936511241197055e-05, |
|
"loss": 1.3026, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.20526315789473684, |
|
"grad_norm": 1.5869818361559598, |
|
"learning_rate": 1.933250837887457e-05, |
|
"loss": 1.2697, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.20789473684210527, |
|
"grad_norm": 1.7074439932161543, |
|
"learning_rate": 1.929911685973088e-05, |
|
"loss": 1.2321, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.21052631578947367, |
|
"grad_norm": 1.66975384315729, |
|
"learning_rate": 1.9264940672148018e-05, |
|
"loss": 1.1919, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.2131578947368421, |
|
"grad_norm": 1.5614279544765697, |
|
"learning_rate": 1.922998269994563e-05, |
|
"loss": 1.21, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21578947368421053, |
|
"grad_norm": 1.5869104530304803, |
|
"learning_rate": 1.9194245892911077e-05, |
|
"loss": 1.2005, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.21842105263157896, |
|
"grad_norm": 1.5005378779293412, |
|
"learning_rate": 1.9157733266550577e-05, |
|
"loss": 1.2235, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.22105263157894736, |
|
"grad_norm": 1.7626666578807375, |
|
"learning_rate": 1.9120447901834708e-05, |
|
"loss": 1.2067, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2236842105263158, |
|
"grad_norm": 1.6204644035149893, |
|
"learning_rate": 1.9082392944938467e-05, |
|
"loss": 1.2733, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.22631578947368422, |
|
"grad_norm": 1.630060745845602, |
|
"learning_rate": 1.9043571606975776e-05, |
|
"loss": 1.2548, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.22894736842105262, |
|
"grad_norm": 1.5126387044839233, |
|
"learning_rate": 1.9003987163728535e-05, |
|
"loss": 1.2, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23157894736842105, |
|
"grad_norm": 1.6073465137634237, |
|
"learning_rate": 1.8963642955370203e-05, |
|
"loss": 1.2256, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.23421052631578948, |
|
"grad_norm": 1.697359488197219, |
|
"learning_rate": 1.8922542386183942e-05, |
|
"loss": 1.2801, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23684210526315788, |
|
"grad_norm": 1.4698043032405201, |
|
"learning_rate": 1.888068892427538e-05, |
|
"loss": 1.1803, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2394736842105263, |
|
"grad_norm": 1.505416525863708, |
|
"learning_rate": 1.8838086101279946e-05, |
|
"loss": 1.2403, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24210526315789474, |
|
"grad_norm": 1.7591893486363919, |
|
"learning_rate": 1.879473751206489e-05, |
|
"loss": 1.23, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.24473684210526317, |
|
"grad_norm": 1.6372396258203044, |
|
"learning_rate": 1.875064681442594e-05, |
|
"loss": 1.2144, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24736842105263157, |
|
"grad_norm": 1.5964554118260599, |
|
"learning_rate": 1.8705817728778626e-05, |
|
"loss": 1.2389, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.5889929908257716, |
|
"learning_rate": 1.866025403784439e-05, |
|
"loss": 1.1556, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.25263157894736843, |
|
"grad_norm": 1.4618065158181492, |
|
"learning_rate": 1.8613959586331364e-05, |
|
"loss": 1.2511, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.25526315789473686, |
|
"grad_norm": 1.5332613540370528, |
|
"learning_rate": 1.8566938280609965e-05, |
|
"loss": 1.2578, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.2578947368421053, |
|
"grad_norm": 1.5645156940351497, |
|
"learning_rate": 1.851919408838327e-05, |
|
"loss": 1.2014, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.26052631578947366, |
|
"grad_norm": 1.6458424744447175, |
|
"learning_rate": 1.847073103835222e-05, |
|
"loss": 1.251, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2631578947368421, |
|
"grad_norm": 1.6298298976442505, |
|
"learning_rate": 1.842155321987566e-05, |
|
"loss": 1.237, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2657894736842105, |
|
"grad_norm": 1.4753056372696693, |
|
"learning_rate": 1.8371664782625287e-05, |
|
"loss": 1.2234, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.26842105263157895, |
|
"grad_norm": 1.5302926579741902, |
|
"learning_rate": 1.8321069936235503e-05, |
|
"loss": 1.2159, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.2710526315789474, |
|
"grad_norm": 1.4566906526101497, |
|
"learning_rate": 1.8269772949948185e-05, |
|
"loss": 1.2219, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.2736842105263158, |
|
"grad_norm": 1.4798388569320753, |
|
"learning_rate": 1.821777815225245e-05, |
|
"loss": 1.2168, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.27631578947368424, |
|
"grad_norm": 1.578257631631102, |
|
"learning_rate": 1.816508993051943e-05, |
|
"loss": 1.2145, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2789473684210526, |
|
"grad_norm": 1.535918888434136, |
|
"learning_rate": 1.8111712730632024e-05, |
|
"loss": 1.2309, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.28157894736842104, |
|
"grad_norm": 1.56754973743785, |
|
"learning_rate": 1.8057651056609784e-05, |
|
"loss": 1.2554, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.28421052631578947, |
|
"grad_norm": 1.5522488842058326, |
|
"learning_rate": 1.800290947022884e-05, |
|
"loss": 1.2529, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.2868421052631579, |
|
"grad_norm": 1.475576507279166, |
|
"learning_rate": 1.7947492590636998e-05, |
|
"loss": 1.268, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.2894736842105263, |
|
"grad_norm": 1.5056156987526548, |
|
"learning_rate": 1.789140509396394e-05, |
|
"loss": 1.2427, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.29210526315789476, |
|
"grad_norm": 1.643462413142722, |
|
"learning_rate": 1.7834651712926662e-05, |
|
"loss": 1.2299, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.29473684210526313, |
|
"grad_norm": 1.636081381079421, |
|
"learning_rate": 1.777723723643014e-05, |
|
"loss": 1.2792, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.29736842105263156, |
|
"grad_norm": 1.6196644995812366, |
|
"learning_rate": 1.771916650916321e-05, |
|
"loss": 1.222, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4763314093013102, |
|
"learning_rate": 1.766044443118978e-05, |
|
"loss": 1.2511, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3026315789473684, |
|
"grad_norm": 1.555093791715219, |
|
"learning_rate": 1.7601075957535366e-05, |
|
"loss": 1.205, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.30526315789473685, |
|
"grad_norm": 1.524333754681362, |
|
"learning_rate": 1.7541066097768965e-05, |
|
"loss": 1.2326, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3078947368421053, |
|
"grad_norm": 1.4619146723611067, |
|
"learning_rate": 1.7480419915580357e-05, |
|
"loss": 1.2466, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3105263157894737, |
|
"grad_norm": 1.4785833344608155, |
|
"learning_rate": 1.7419142528352815e-05, |
|
"loss": 1.2773, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.3131578947368421, |
|
"grad_norm": 1.4743114052842876, |
|
"learning_rate": 1.735723910673132e-05, |
|
"loss": 1.224, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3157894736842105, |
|
"grad_norm": 1.6746586071080014, |
|
"learning_rate": 1.729471487418621e-05, |
|
"loss": 1.2031, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.31842105263157894, |
|
"grad_norm": 11.560581182062958, |
|
"learning_rate": 1.723157510657247e-05, |
|
"loss": 1.2641, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.32105263157894737, |
|
"grad_norm": 1.5785699977927574, |
|
"learning_rate": 1.7167825131684516e-05, |
|
"loss": 1.2406, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3236842105263158, |
|
"grad_norm": 1.4907215745859081, |
|
"learning_rate": 1.710347032880664e-05, |
|
"loss": 1.2393, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3263157894736842, |
|
"grad_norm": 1.4873798233212445, |
|
"learning_rate": 1.7038516128259118e-05, |
|
"loss": 1.2613, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.32894736842105265, |
|
"grad_norm": 1.5996710317262621, |
|
"learning_rate": 1.6972968010939953e-05, |
|
"loss": 1.226, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.33157894736842103, |
|
"grad_norm": 1.5258666469896836, |
|
"learning_rate": 1.6906831507862446e-05, |
|
"loss": 1.1765, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.33421052631578946, |
|
"grad_norm": 1.65626890509294, |
|
"learning_rate": 1.6840112199688432e-05, |
|
"loss": 1.2042, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.3368421052631579, |
|
"grad_norm": 1.5999899196235836, |
|
"learning_rate": 1.6772815716257414e-05, |
|
"loss": 1.2165, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3394736842105263, |
|
"grad_norm": 1.539114071232729, |
|
"learning_rate": 1.670494773611149e-05, |
|
"loss": 1.2325, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.34210526315789475, |
|
"grad_norm": 1.5498968903780033, |
|
"learning_rate": 1.6636513986016215e-05, |
|
"loss": 1.2489, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.3447368421052632, |
|
"grad_norm": 1.6929154737187613, |
|
"learning_rate": 1.6567520240477344e-05, |
|
"loss": 1.2733, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.3473684210526316, |
|
"grad_norm": 1.5186251712267134, |
|
"learning_rate": 1.64979723212536e-05, |
|
"loss": 1.2163, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5656728169955136, |
|
"learning_rate": 1.6427876096865394e-05, |
|
"loss": 1.1686, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3526315789473684, |
|
"grad_norm": 1.7024084700143047, |
|
"learning_rate": 1.6357237482099682e-05, |
|
"loss": 1.2607, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.35526315789473684, |
|
"grad_norm": 1.5453936941229016, |
|
"learning_rate": 1.6286062437510823e-05, |
|
"loss": 1.2788, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.35789473684210527, |
|
"grad_norm": 1.5519495902650802, |
|
"learning_rate": 1.621435696891765e-05, |
|
"loss": 1.2323, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.3605263157894737, |
|
"grad_norm": 1.4847378374853688, |
|
"learning_rate": 1.6142127126896682e-05, |
|
"loss": 1.2871, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.3631578947368421, |
|
"grad_norm": 1.426933507759982, |
|
"learning_rate": 1.606937900627157e-05, |
|
"loss": 1.1768, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.36578947368421055, |
|
"grad_norm": 1.4919758909790284, |
|
"learning_rate": 1.5996118745598817e-05, |
|
"loss": 1.2079, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.3684210526315789, |
|
"grad_norm": 1.4485312135827444, |
|
"learning_rate": 1.5922352526649803e-05, |
|
"loss": 1.2442, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.37105263157894736, |
|
"grad_norm": 1.4755986147337776, |
|
"learning_rate": 1.584808657388914e-05, |
|
"loss": 1.2329, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3736842105263158, |
|
"grad_norm": 1.5552070636270465, |
|
"learning_rate": 1.5773327153949465e-05, |
|
"loss": 1.2234, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.3763157894736842, |
|
"grad_norm": 1.9398418581511307, |
|
"learning_rate": 1.5698080575102662e-05, |
|
"loss": 1.2528, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.37894736842105264, |
|
"grad_norm": 1.5594003459102397, |
|
"learning_rate": 1.5622353186727542e-05, |
|
"loss": 1.2619, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3815789473684211, |
|
"grad_norm": 1.5755383497580695, |
|
"learning_rate": 1.554615137877409e-05, |
|
"loss": 1.2643, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.38421052631578945, |
|
"grad_norm": 1.5535461809815971, |
|
"learning_rate": 1.5469481581224274e-05, |
|
"loss": 1.1972, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3868421052631579, |
|
"grad_norm": 1.5115819913848945, |
|
"learning_rate": 1.5392350263549462e-05, |
|
"loss": 1.243, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.3894736842105263, |
|
"grad_norm": 1.527666218311888, |
|
"learning_rate": 1.531476393416456e-05, |
|
"loss": 1.2062, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.39210526315789473, |
|
"grad_norm": 1.4753990139855222, |
|
"learning_rate": 1.523672913987878e-05, |
|
"loss": 1.218, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.39473684210526316, |
|
"grad_norm": 1.4294571826626545, |
|
"learning_rate": 1.5158252465343242e-05, |
|
"loss": 1.1627, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3973684210526316, |
|
"grad_norm": 1.5436073392476704, |
|
"learning_rate": 1.5079340532495344e-05, |
|
"loss": 1.1995, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.498938650619184, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 1.2387, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4026315789473684, |
|
"grad_norm": 1.4966516858258112, |
|
"learning_rate": 1.4920237562687784e-05, |
|
"loss": 1.2343, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4052631578947368, |
|
"grad_norm": 1.600369428001101, |
|
"learning_rate": 1.4840059950989992e-05, |
|
"loss": 1.2335, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.40789473684210525, |
|
"grad_norm": 1.6146064486984097, |
|
"learning_rate": 1.4759473930370738e-05, |
|
"loss": 1.2445, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4105263157894737, |
|
"grad_norm": 1.4179121925460856, |
|
"learning_rate": 1.467848630075608e-05, |
|
"loss": 1.2005, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4131578947368421, |
|
"grad_norm": 1.4026365243596637, |
|
"learning_rate": 1.4597103895960228e-05, |
|
"loss": 1.1707, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.41578947368421054, |
|
"grad_norm": 1.608224849561234, |
|
"learning_rate": 1.4515333583108896e-05, |
|
"loss": 1.2172, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.41842105263157897, |
|
"grad_norm": 1.5892555814004448, |
|
"learning_rate": 1.4433182262059861e-05, |
|
"loss": 1.2437, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.42105263157894735, |
|
"grad_norm": 1.5528172119361345, |
|
"learning_rate": 1.4350656864820733e-05, |
|
"loss": 1.2458, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4236842105263158, |
|
"grad_norm": 1.4895924962034135, |
|
"learning_rate": 1.4267764354964038e-05, |
|
"loss": 1.217, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.4263157894736842, |
|
"grad_norm": 1.4853890475688154, |
|
"learning_rate": 1.4184511727039612e-05, |
|
"loss": 1.1886, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.42894736842105263, |
|
"grad_norm": 1.484648338587035, |
|
"learning_rate": 1.4100906005984404e-05, |
|
"loss": 1.1946, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.43157894736842106, |
|
"grad_norm": 1.4930103966077508, |
|
"learning_rate": 1.4016954246529697e-05, |
|
"loss": 1.168, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.4342105263157895, |
|
"grad_norm": 1.5631231114882895, |
|
"learning_rate": 1.3932663532605832e-05, |
|
"loss": 1.1962, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.4368421052631579, |
|
"grad_norm": 1.4246463220095698, |
|
"learning_rate": 1.3848040976744459e-05, |
|
"loss": 1.1903, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4394736842105263, |
|
"grad_norm": 1.652743816078508, |
|
"learning_rate": 1.3763093719478357e-05, |
|
"loss": 1.1791, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.4421052631578947, |
|
"grad_norm": 1.5195763177089594, |
|
"learning_rate": 1.3677828928738934e-05, |
|
"loss": 1.2274, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.44473684210526315, |
|
"grad_norm": 1.418736373104399, |
|
"learning_rate": 1.3592253799251377e-05, |
|
"loss": 1.203, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.4473684210526316, |
|
"grad_norm": 1.4260404389599979, |
|
"learning_rate": 1.3506375551927546e-05, |
|
"loss": 1.1997, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.6460529001511097, |
|
"learning_rate": 1.342020143325669e-05, |
|
"loss": 1.237, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.45263157894736844, |
|
"grad_norm": 1.486629400358489, |
|
"learning_rate": 1.3333738714693958e-05, |
|
"loss": 1.247, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.45526315789473687, |
|
"grad_norm": 1.3972348468520703, |
|
"learning_rate": 1.3246994692046837e-05, |
|
"loss": 1.2234, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.45789473684210524, |
|
"grad_norm": 1.6487432488778413, |
|
"learning_rate": 1.3159976684859528e-05, |
|
"loss": 1.2329, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.4605263157894737, |
|
"grad_norm": 1.3823799200592426, |
|
"learning_rate": 1.3072692035795305e-05, |
|
"loss": 1.1959, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.4631578947368421, |
|
"grad_norm": 1.4633369850606546, |
|
"learning_rate": 1.2985148110016947e-05, |
|
"loss": 1.1918, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.46578947368421053, |
|
"grad_norm": 1.4284040647353275, |
|
"learning_rate": 1.2897352294565248e-05, |
|
"loss": 1.206, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.46842105263157896, |
|
"grad_norm": 1.557769882746258, |
|
"learning_rate": 1.2809311997735697e-05, |
|
"loss": 1.1774, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.4710526315789474, |
|
"grad_norm": 1.696104251000462, |
|
"learning_rate": 1.2721034648453353e-05, |
|
"loss": 1.2192, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.47368421052631576, |
|
"grad_norm": 1.3992290817350297, |
|
"learning_rate": 1.2632527695645993e-05, |
|
"loss": 1.1794, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.4763157894736842, |
|
"grad_norm": 1.526786732330722, |
|
"learning_rate": 1.2543798607615566e-05, |
|
"loss": 1.1735, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.4789473684210526, |
|
"grad_norm": 1.4303793807251683, |
|
"learning_rate": 1.2454854871407993e-05, |
|
"loss": 1.1869, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.48157894736842105, |
|
"grad_norm": 1.4381387780322723, |
|
"learning_rate": 1.2365703992181425e-05, |
|
"loss": 1.1637, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.4842105263157895, |
|
"grad_norm": 1.347581682014551, |
|
"learning_rate": 1.2276353492572937e-05, |
|
"loss": 1.2333, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.4868421052631579, |
|
"grad_norm": 1.4212086902989138, |
|
"learning_rate": 1.218681091206376e-05, |
|
"loss": 1.2057, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.48947368421052634, |
|
"grad_norm": 1.4767598997567368, |
|
"learning_rate": 1.2097083806343104e-05, |
|
"loss": 1.2078, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.4921052631578947, |
|
"grad_norm": 1.6766286206430314, |
|
"learning_rate": 1.2007179746670592e-05, |
|
"loss": 1.178, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.49473684210526314, |
|
"grad_norm": 1.3676201703331183, |
|
"learning_rate": 1.1917106319237386e-05, |
|
"loss": 1.1578, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.49736842105263157, |
|
"grad_norm": 2.0706388440208663, |
|
"learning_rate": 1.1826871124526072e-05, |
|
"loss": 1.196, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.507077620161342, |
|
"learning_rate": 1.1736481776669307e-05, |
|
"loss": 1.2277, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5026315789473684, |
|
"grad_norm": 1.4969439156513775, |
|
"learning_rate": 1.164594590280734e-05, |
|
"loss": 1.1897, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.5052631578947369, |
|
"grad_norm": 1.413902082668003, |
|
"learning_rate": 1.1555271142444433e-05, |
|
"loss": 1.1878, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5078947368421053, |
|
"grad_norm": 1.404071675870193, |
|
"learning_rate": 1.1464465146804218e-05, |
|
"loss": 1.2204, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5105263157894737, |
|
"grad_norm": 1.420066039178241, |
|
"learning_rate": 1.1373535578184083e-05, |
|
"loss": 1.143, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.5131578947368421, |
|
"grad_norm": 1.8886674026004764, |
|
"learning_rate": 1.1282490109308633e-05, |
|
"loss": 1.2461, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5157894736842106, |
|
"grad_norm": 1.4506368194815937, |
|
"learning_rate": 1.1191336422682237e-05, |
|
"loss": 1.2316, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.5184210526315789, |
|
"grad_norm": 1.4848545614866153, |
|
"learning_rate": 1.1100082209940795e-05, |
|
"loss": 1.2514, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.5210526315789473, |
|
"grad_norm": 1.3996822789360135, |
|
"learning_rate": 1.1008735171202685e-05, |
|
"loss": 1.2019, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5236842105263158, |
|
"grad_norm": 1.4330869323971005, |
|
"learning_rate": 1.0917303014419036e-05, |
|
"loss": 1.2031, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.5263157894736842, |
|
"grad_norm": 1.4178423978580275, |
|
"learning_rate": 1.0825793454723325e-05, |
|
"loss": 1.266, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5289473684210526, |
|
"grad_norm": 1.5463727949510788, |
|
"learning_rate": 1.0734214213780355e-05, |
|
"loss": 1.1943, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.531578947368421, |
|
"grad_norm": 1.6232414845928462, |
|
"learning_rate": 1.0642573019134703e-05, |
|
"loss": 1.1882, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.5342105263157895, |
|
"grad_norm": 1.3956188498038653, |
|
"learning_rate": 1.0550877603558656e-05, |
|
"loss": 1.2117, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.5368421052631579, |
|
"grad_norm": 1.570355181053573, |
|
"learning_rate": 1.045913570439972e-05, |
|
"loss": 1.1844, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5394736842105263, |
|
"grad_norm": 1.4584214592171902, |
|
"learning_rate": 1.0367355062927726e-05, |
|
"loss": 1.1617, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.5421052631578948, |
|
"grad_norm": 1.4365606902754935, |
|
"learning_rate": 1.0275543423681622e-05, |
|
"loss": 1.2129, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5447368421052632, |
|
"grad_norm": 1.5290557760088286, |
|
"learning_rate": 1.0183708533815975e-05, |
|
"loss": 1.2482, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.5473684210526316, |
|
"grad_norm": 1.6278464681436842, |
|
"learning_rate": 1.0091858142447266e-05, |
|
"loss": 1.1489, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.5688826364703785, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2095, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.5526315789473685, |
|
"grad_norm": 1.397745444389566, |
|
"learning_rate": 9.908141857552737e-06, |
|
"loss": 1.2355, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.5552631578947368, |
|
"grad_norm": 1.4583293118359388, |
|
"learning_rate": 9.816291466184025e-06, |
|
"loss": 1.1758, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5578947368421052, |
|
"grad_norm": 1.4603630721390348, |
|
"learning_rate": 9.724456576318383e-06, |
|
"loss": 1.1652, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.5605263157894737, |
|
"grad_norm": 1.4549523975843028, |
|
"learning_rate": 9.632644937072277e-06, |
|
"loss": 1.1867, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5631578947368421, |
|
"grad_norm": 1.3729856367743716, |
|
"learning_rate": 9.540864295600282e-06, |
|
"loss": 1.2406, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.5657894736842105, |
|
"grad_norm": 1.4783414676807372, |
|
"learning_rate": 9.449122396441344e-06, |
|
"loss": 1.1791, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.5684210526315789, |
|
"grad_norm": 1.3872265988119108, |
|
"learning_rate": 9.3574269808653e-06, |
|
"loss": 1.2156, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.5710526315789474, |
|
"grad_norm": 1.5787318597241866, |
|
"learning_rate": 9.265785786219647e-06, |
|
"loss": 1.2132, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.5736842105263158, |
|
"grad_norm": 1.3724758535484003, |
|
"learning_rate": 9.174206545276678e-06, |
|
"loss": 1.1334, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.5763157894736842, |
|
"grad_norm": 1.4868757284973377, |
|
"learning_rate": 9.082696985580964e-06, |
|
"loss": 1.2583, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.5789473684210527, |
|
"grad_norm": 1.4676893755358382, |
|
"learning_rate": 8.991264828797319e-06, |
|
"loss": 1.2082, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5815789473684211, |
|
"grad_norm": 1.4127501220287753, |
|
"learning_rate": 8.899917790059208e-06, |
|
"loss": 1.211, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.5842105263157895, |
|
"grad_norm": 1.332755695247283, |
|
"learning_rate": 8.808663577317765e-06, |
|
"loss": 1.1677, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.5868421052631579, |
|
"grad_norm": 1.395357505892731, |
|
"learning_rate": 8.717509890691369e-06, |
|
"loss": 1.2085, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.5894736842105263, |
|
"grad_norm": 1.6316377622079636, |
|
"learning_rate": 8.626464421815919e-06, |
|
"loss": 1.2065, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.5921052631578947, |
|
"grad_norm": 1.4083070071992012, |
|
"learning_rate": 8.535534853195786e-06, |
|
"loss": 1.1612, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.5947368421052631, |
|
"grad_norm": 1.3425609671566616, |
|
"learning_rate": 8.444728857555572e-06, |
|
"loss": 1.2094, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.5973684210526315, |
|
"grad_norm": 1.5328975791749664, |
|
"learning_rate": 8.35405409719266e-06, |
|
"loss": 1.2155, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.5590632136999736, |
|
"learning_rate": 8.263518223330698e-06, |
|
"loss": 1.1807, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6026315789473684, |
|
"grad_norm": 1.5215433696265592, |
|
"learning_rate": 8.173128875473933e-06, |
|
"loss": 1.183, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.6052631578947368, |
|
"grad_norm": 1.6575473417500939, |
|
"learning_rate": 8.082893680762619e-06, |
|
"loss": 1.1353, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6078947368421053, |
|
"grad_norm": 1.460845514236017, |
|
"learning_rate": 7.99282025332941e-06, |
|
"loss": 1.219, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.6105263157894737, |
|
"grad_norm": 1.4579638437828222, |
|
"learning_rate": 7.902916193656898e-06, |
|
"loss": 1.1703, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6131578947368421, |
|
"grad_norm": 1.4409394463378045, |
|
"learning_rate": 7.813189087936243e-06, |
|
"loss": 1.1735, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6157894736842106, |
|
"grad_norm": 1.5164224939867756, |
|
"learning_rate": 7.72364650742707e-06, |
|
"loss": 1.2259, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.618421052631579, |
|
"grad_norm": 1.4456788889526122, |
|
"learning_rate": 7.634296007818576e-06, |
|
"loss": 1.1755, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.6210526315789474, |
|
"grad_norm": 1.5021862081854962, |
|
"learning_rate": 7.545145128592009e-06, |
|
"loss": 1.1817, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6236842105263158, |
|
"grad_norm": 1.3518521758976183, |
|
"learning_rate": 7.456201392384437e-06, |
|
"loss": 1.1728, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.6263157894736842, |
|
"grad_norm": 1.4319948450891455, |
|
"learning_rate": 7.367472304354011e-06, |
|
"loss": 1.1546, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6289473684210526, |
|
"grad_norm": 1.5047899843401489, |
|
"learning_rate": 7.278965351546648e-06, |
|
"loss": 1.1986, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.631578947368421, |
|
"grad_norm": 1.4636280005741331, |
|
"learning_rate": 7.190688002264308e-06, |
|
"loss": 1.1226, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.6342105263157894, |
|
"grad_norm": 1.4404270288293848, |
|
"learning_rate": 7.102647705434755e-06, |
|
"loss": 1.1784, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.6368421052631579, |
|
"grad_norm": 1.6545110847593845, |
|
"learning_rate": 7.014851889983058e-06, |
|
"loss": 1.1839, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6394736842105263, |
|
"grad_norm": 1.4871827887195601, |
|
"learning_rate": 6.927307964204695e-06, |
|
"loss": 1.1887, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.6421052631578947, |
|
"grad_norm": 1.576746407677905, |
|
"learning_rate": 6.840023315140476e-06, |
|
"loss": 1.1883, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.6447368421052632, |
|
"grad_norm": 1.487715101079606, |
|
"learning_rate": 6.7530053079531664e-06, |
|
"loss": 1.1416, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.6473684210526316, |
|
"grad_norm": 1.3965004840051898, |
|
"learning_rate": 6.666261285306048e-06, |
|
"loss": 1.1879, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.3874007274101692, |
|
"learning_rate": 6.579798566743314e-06, |
|
"loss": 1.145, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.6526315789473685, |
|
"grad_norm": 1.5069537665388066, |
|
"learning_rate": 6.4936244480724575e-06, |
|
"loss": 1.214, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.6552631578947369, |
|
"grad_norm": 1.335555512816458, |
|
"learning_rate": 6.407746200748628e-06, |
|
"loss": 1.2036, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.6578947368421053, |
|
"grad_norm": 1.5356423714475045, |
|
"learning_rate": 6.322171071261071e-06, |
|
"loss": 1.2023, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.6605263157894737, |
|
"grad_norm": 1.366112491986329, |
|
"learning_rate": 6.236906280521646e-06, |
|
"loss": 1.154, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.6631578947368421, |
|
"grad_norm": 1.4961202079264277, |
|
"learning_rate": 6.151959023255545e-06, |
|
"loss": 1.1754, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.6657894736842105, |
|
"grad_norm": 1.4946644203504424, |
|
"learning_rate": 6.067336467394169e-06, |
|
"loss": 1.1957, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.6684210526315789, |
|
"grad_norm": 1.344430318965404, |
|
"learning_rate": 5.983045753470308e-06, |
|
"loss": 1.2162, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.6710526315789473, |
|
"grad_norm": 1.5136161435083522, |
|
"learning_rate": 5.8990939940156e-06, |
|
"loss": 1.1642, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.6736842105263158, |
|
"grad_norm": 1.362924130944701, |
|
"learning_rate": 5.815488272960388e-06, |
|
"loss": 1.1774, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.6763157894736842, |
|
"grad_norm": 1.5146999722224046, |
|
"learning_rate": 5.732235645035964e-06, |
|
"loss": 1.1654, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.6789473684210526, |
|
"grad_norm": 1.4713789753634803, |
|
"learning_rate": 5.649343135179271e-06, |
|
"loss": 1.1692, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.6815789473684211, |
|
"grad_norm": 1.488147057663755, |
|
"learning_rate": 5.566817737940142e-06, |
|
"loss": 1.1849, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.6842105263157895, |
|
"grad_norm": 1.4591392264687968, |
|
"learning_rate": 5.484666416891109e-06, |
|
"loss": 1.1672, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.6868421052631579, |
|
"grad_norm": 1.5186815097126352, |
|
"learning_rate": 5.4028961040397765e-06, |
|
"loss": 1.1856, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.6894736842105263, |
|
"grad_norm": 1.3841513849936795, |
|
"learning_rate": 5.321513699243924e-06, |
|
"loss": 1.1278, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.6921052631578948, |
|
"grad_norm": 1.427452900119818, |
|
"learning_rate": 5.240526069629265e-06, |
|
"loss": 1.1377, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.6947368421052632, |
|
"grad_norm": 1.4548570509065128, |
|
"learning_rate": 5.159940049010015e-06, |
|
"loss": 1.1701, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.6973684210526315, |
|
"grad_norm": 1.4097905499526544, |
|
"learning_rate": 5.079762437312219e-06, |
|
"loss": 1.188, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.4153566806677924, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 1.1467, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7026315789473684, |
|
"grad_norm": 1.4520190387088883, |
|
"learning_rate": 4.9206594675046595e-06, |
|
"loss": 1.1669, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.7052631578947368, |
|
"grad_norm": 1.4607003414092599, |
|
"learning_rate": 4.8417475346567635e-06, |
|
"loss": 1.2034, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7078947368421052, |
|
"grad_norm": 1.5189089646509135, |
|
"learning_rate": 4.763270860121222e-06, |
|
"loss": 1.1853, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.7105263157894737, |
|
"grad_norm": 1.5001504965368353, |
|
"learning_rate": 4.685236065835443e-06, |
|
"loss": 1.1626, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7131578947368421, |
|
"grad_norm": 1.4272443913750283, |
|
"learning_rate": 4.607649736450539e-06, |
|
"loss": 1.1903, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.7157894736842105, |
|
"grad_norm": 1.3923865065357075, |
|
"learning_rate": 4.530518418775734e-06, |
|
"loss": 1.1625, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.718421052631579, |
|
"grad_norm": 1.4677018709010743, |
|
"learning_rate": 4.453848621225913e-06, |
|
"loss": 1.122, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7210526315789474, |
|
"grad_norm": 1.5389664942910415, |
|
"learning_rate": 4.3776468132724605e-06, |
|
"loss": 1.1395, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.7236842105263158, |
|
"grad_norm": 1.4779444930355223, |
|
"learning_rate": 4.301919424897339e-06, |
|
"loss": 1.1765, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.7263157894736842, |
|
"grad_norm": 1.4120873508728784, |
|
"learning_rate": 4.226672846050538e-06, |
|
"loss": 1.1649, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7289473684210527, |
|
"grad_norm": 1.4481405920413495, |
|
"learning_rate": 4.151913426110864e-06, |
|
"loss": 1.1813, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.7315789473684211, |
|
"grad_norm": 1.4110583437714723, |
|
"learning_rate": 4.077647473350201e-06, |
|
"loss": 1.1792, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7342105263157894, |
|
"grad_norm": 1.3998861268309197, |
|
"learning_rate": 4.003881254401183e-06, |
|
"loss": 1.1795, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.7368421052631579, |
|
"grad_norm": 1.5389428254089865, |
|
"learning_rate": 3.930620993728434e-06, |
|
"loss": 1.1761, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.7394736842105263, |
|
"grad_norm": 1.351873475574077, |
|
"learning_rate": 3.857872873103322e-06, |
|
"loss": 1.0988, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.7421052631578947, |
|
"grad_norm": 2.0822849643846864, |
|
"learning_rate": 3.7856430310823546e-06, |
|
"loss": 1.1535, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.7447368421052631, |
|
"grad_norm": 1.4774710448410524, |
|
"learning_rate": 3.7139375624891795e-06, |
|
"loss": 1.1669, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.7473684210526316, |
|
"grad_norm": 1.3660260513616105, |
|
"learning_rate": 3.6427625179003223e-06, |
|
"loss": 1.2248, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.500711580269001, |
|
"learning_rate": 3.5721239031346067e-06, |
|
"loss": 1.177, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.7526315789473684, |
|
"grad_norm": 1.4652678196971247, |
|
"learning_rate": 3.5020276787464058e-06, |
|
"loss": 1.1948, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.7552631578947369, |
|
"grad_norm": 1.4313428064366354, |
|
"learning_rate": 3.4324797595226567e-06, |
|
"loss": 1.1299, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.7578947368421053, |
|
"grad_norm": 1.442031209959671, |
|
"learning_rate": 3.3634860139837877e-06, |
|
"loss": 1.191, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.7605263157894737, |
|
"grad_norm": 1.3495347644283935, |
|
"learning_rate": 3.2950522638885106e-06, |
|
"loss": 1.1744, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.7631578947368421, |
|
"grad_norm": 1.4953661772700806, |
|
"learning_rate": 3.2271842837425917e-06, |
|
"loss": 1.1923, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.7657894736842106, |
|
"grad_norm": 1.379584892765396, |
|
"learning_rate": 3.1598878003115694e-06, |
|
"loss": 1.1936, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.7684210526315789, |
|
"grad_norm": 1.4357267033014562, |
|
"learning_rate": 3.0931684921375572e-06, |
|
"loss": 1.1581, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.7710526315789473, |
|
"grad_norm": 1.4032602137615429, |
|
"learning_rate": 3.0270319890600465e-06, |
|
"loss": 1.195, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.7736842105263158, |
|
"grad_norm": 1.3767404690973988, |
|
"learning_rate": 2.9614838717408866e-06, |
|
"loss": 1.1404, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.7763157894736842, |
|
"grad_norm": 1.345562765951157, |
|
"learning_rate": 2.89652967119336e-06, |
|
"loss": 1.1435, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.7789473684210526, |
|
"grad_norm": 1.3646022188703086, |
|
"learning_rate": 2.8321748683154893e-06, |
|
"loss": 1.1675, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.781578947368421, |
|
"grad_norm": 1.3246631802990927, |
|
"learning_rate": 2.7684248934275327e-06, |
|
"loss": 1.134, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.7842105263157895, |
|
"grad_norm": 1.5259313637971876, |
|
"learning_rate": 2.7052851258137936e-06, |
|
"loss": 1.1572, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.7868421052631579, |
|
"grad_norm": 1.4588725449617848, |
|
"learning_rate": 2.642760893268684e-06, |
|
"loss": 1.0957, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.7894736842105263, |
|
"grad_norm": 1.4085665694169516, |
|
"learning_rate": 2.580857471647186e-06, |
|
"loss": 1.1724, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.7921052631578948, |
|
"grad_norm": 1.4237214859236806, |
|
"learning_rate": 2.519580084419646e-06, |
|
"loss": 1.1569, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.7947368421052632, |
|
"grad_norm": 1.4112079099940082, |
|
"learning_rate": 2.4589339022310386e-06, |
|
"loss": 1.1308, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.7973684210526316, |
|
"grad_norm": 1.4559794115227898, |
|
"learning_rate": 2.3989240424646355e-06, |
|
"loss": 1.1536, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.3655004847316077, |
|
"learning_rate": 2.339555568810221e-06, |
|
"loss": 1.1168, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8026315789473685, |
|
"grad_norm": 1.4070429304898757, |
|
"learning_rate": 2.2808334908367914e-06, |
|
"loss": 1.1085, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.8052631578947368, |
|
"grad_norm": 1.4528346751525203, |
|
"learning_rate": 2.2227627635698624e-06, |
|
"loss": 1.1411, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.8078947368421052, |
|
"grad_norm": 1.4619781742001423, |
|
"learning_rate": 2.165348287073339e-06, |
|
"loss": 1.1945, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.8105263157894737, |
|
"grad_norm": 1.4018656275095926, |
|
"learning_rate": 2.1085949060360654e-06, |
|
"loss": 1.1488, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.8131578947368421, |
|
"grad_norm": 1.4294429101475141, |
|
"learning_rate": 2.052507409363004e-06, |
|
"loss": 1.1323, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.8157894736842105, |
|
"grad_norm": 1.400567639105242, |
|
"learning_rate": 1.9970905297711606e-06, |
|
"loss": 1.1744, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.8184210526315789, |
|
"grad_norm": 1.408857813703171, |
|
"learning_rate": 1.9423489433902186e-06, |
|
"loss": 1.1603, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.8210526315789474, |
|
"grad_norm": 1.40296491225255, |
|
"learning_rate": 1.8882872693679787e-06, |
|
"loss": 1.1483, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8236842105263158, |
|
"grad_norm": 1.3612503327875418, |
|
"learning_rate": 1.8349100694805711e-06, |
|
"loss": 1.1416, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.8263157894736842, |
|
"grad_norm": 1.3364273351895348, |
|
"learning_rate": 1.7822218477475496e-06, |
|
"loss": 1.128, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.8289473684210527, |
|
"grad_norm": 1.3850144107615145, |
|
"learning_rate": 1.7302270500518181e-06, |
|
"loss": 1.1767, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.8315789473684211, |
|
"grad_norm": 1.4137419991959699, |
|
"learning_rate": 1.6789300637645e-06, |
|
"loss": 1.1233, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.8342105263157895, |
|
"grad_norm": 1.4262918205450703, |
|
"learning_rate": 1.6283352173747148e-06, |
|
"loss": 1.1457, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.8368421052631579, |
|
"grad_norm": 1.3477003716799063, |
|
"learning_rate": 1.578446780124344e-06, |
|
"loss": 1.1403, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.8394736842105263, |
|
"grad_norm": 1.622769299529781, |
|
"learning_rate": 1.5292689616477808e-06, |
|
"loss": 1.1348, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.8421052631578947, |
|
"grad_norm": 1.6481690570937264, |
|
"learning_rate": 1.4808059116167306e-06, |
|
"loss": 1.1258, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.8447368421052631, |
|
"grad_norm": 1.5428571262895439, |
|
"learning_rate": 1.4330617193900365e-06, |
|
"loss": 1.1397, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.8473684210526315, |
|
"grad_norm": 1.3637682771315494, |
|
"learning_rate": 1.3860404136686411e-06, |
|
"loss": 1.1371, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.3583297786593427, |
|
"learning_rate": 1.339745962155613e-06, |
|
"loss": 1.1584, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.8526315789473684, |
|
"grad_norm": 1.4145815499277343, |
|
"learning_rate": 1.294182271221377e-06, |
|
"loss": 1.1708, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.8552631578947368, |
|
"grad_norm": 1.3721640621198408, |
|
"learning_rate": 1.2493531855740626e-06, |
|
"loss": 1.1529, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.8578947368421053, |
|
"grad_norm": 1.352658945488651, |
|
"learning_rate": 1.2052624879351105e-06, |
|
"loss": 1.1646, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.8605263157894737, |
|
"grad_norm": 1.3623686161121, |
|
"learning_rate": 1.1619138987200562e-06, |
|
"loss": 1.1597, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.8631578947368421, |
|
"grad_norm": 1.4297195595936056, |
|
"learning_rate": 1.1193110757246251e-06, |
|
"loss": 1.1365, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.8657894736842106, |
|
"grad_norm": 1.3650877614146917, |
|
"learning_rate": 1.0774576138160596e-06, |
|
"loss": 1.1343, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.868421052631579, |
|
"grad_norm": 1.4235475016088133, |
|
"learning_rate": 1.0363570446297999e-06, |
|
"loss": 1.1278, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.8710526315789474, |
|
"grad_norm": 1.4372338077155216, |
|
"learning_rate": 9.960128362714637e-07, |
|
"loss": 1.1742, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.8736842105263158, |
|
"grad_norm": 1.5566455926553144, |
|
"learning_rate": 9.564283930242258e-07, |
|
"loss": 1.1438, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.8763157894736842, |
|
"grad_norm": 1.4182208939245209, |
|
"learning_rate": 9.176070550615379e-07, |
|
"loss": 1.0908, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.8789473684210526, |
|
"grad_norm": 1.3665860472536333, |
|
"learning_rate": 8.79552098165296e-07, |
|
"loss": 1.1923, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.881578947368421, |
|
"grad_norm": 1.4802127975920716, |
|
"learning_rate": 8.42266733449425e-07, |
|
"loss": 1.1677, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.8842105263157894, |
|
"grad_norm": 1.3840701915014118, |
|
"learning_rate": 8.057541070889229e-07, |
|
"loss": 1.1798, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.8868421052631579, |
|
"grad_norm": 1.4442257549760622, |
|
"learning_rate": 7.700173000543742e-07, |
|
"loss": 1.1337, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.8894736842105263, |
|
"grad_norm": 1.4991615103817875, |
|
"learning_rate": 7.350593278519824e-07, |
|
"loss": 1.1288, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.8921052631578947, |
|
"grad_norm": 1.3415829958596235, |
|
"learning_rate": 7.00883140269123e-07, |
|
"loss": 1.1375, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.8947368421052632, |
|
"grad_norm": 1.4310174671778038, |
|
"learning_rate": 6.67491621125429e-07, |
|
"loss": 1.1315, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.8973684210526316, |
|
"grad_norm": 1.4693458756161413, |
|
"learning_rate": 6.348875880294536e-07, |
|
"loss": 1.1743, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.514756438197858, |
|
"learning_rate": 6.030737921409169e-07, |
|
"loss": 1.1522, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9026315789473685, |
|
"grad_norm": 1.3943946153157085, |
|
"learning_rate": 5.720529179385659e-07, |
|
"loss": 1.1513, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.9052631578947369, |
|
"grad_norm": 1.3760568727093738, |
|
"learning_rate": 5.418275829936537e-07, |
|
"loss": 1.133, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9078947368421053, |
|
"grad_norm": 1.4135104685753546, |
|
"learning_rate": 5.124003377490582e-07, |
|
"loss": 1.1218, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.9105263157894737, |
|
"grad_norm": 1.4715025682557128, |
|
"learning_rate": 4.837736653040825e-07, |
|
"loss": 1.1973, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.9131578947368421, |
|
"grad_norm": 1.365927408406846, |
|
"learning_rate": 4.5594998120492505e-07, |
|
"loss": 1.1536, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.9157894736842105, |
|
"grad_norm": 1.395477739011334, |
|
"learning_rate": 4.2893163324085886e-07, |
|
"loss": 1.1615, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.9184210526315789, |
|
"grad_norm": 1.3927780195869306, |
|
"learning_rate": 4.0272090124611086e-07, |
|
"loss": 1.1419, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.9210526315789473, |
|
"grad_norm": 1.3763363664716992, |
|
"learning_rate": 3.773199969074959e-07, |
|
"loss": 1.1469, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.9236842105263158, |
|
"grad_norm": 2.094668256525704, |
|
"learning_rate": 3.5273106357779585e-07, |
|
"loss": 1.147, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.9263157894736842, |
|
"grad_norm": 1.4485811313077628, |
|
"learning_rate": 3.2895617609489337e-07, |
|
"loss": 1.1192, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.9289473684210526, |
|
"grad_norm": 1.3342469890527902, |
|
"learning_rate": 3.059973406066963e-07, |
|
"loss": 1.1634, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.9315789473684211, |
|
"grad_norm": 1.4601464593441522, |
|
"learning_rate": 2.838564944018618e-07, |
|
"loss": 1.1391, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.9342105263157895, |
|
"grad_norm": 1.3496811554754882, |
|
"learning_rate": 2.62535505746323e-07, |
|
"loss": 1.1296, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.9368421052631579, |
|
"grad_norm": 1.325046815616541, |
|
"learning_rate": 2.420361737256438e-07, |
|
"loss": 1.1632, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.9394736842105263, |
|
"grad_norm": 1.3476423015496304, |
|
"learning_rate": 2.22360228093208e-07, |
|
"loss": 1.147, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.9421052631578948, |
|
"grad_norm": 1.7118305115455923, |
|
"learning_rate": 2.035093291242607e-07, |
|
"loss": 1.1268, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.9447368421052632, |
|
"grad_norm": 1.3942000674318626, |
|
"learning_rate": 1.854850674758213e-07, |
|
"loss": 1.1581, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.9473684210526315, |
|
"grad_norm": 1.376220680811591, |
|
"learning_rate": 1.6828896405244988e-07, |
|
"loss": 1.1295, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.3431256769569868, |
|
"learning_rate": 1.519224698779198e-07, |
|
"loss": 1.0882, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.9526315789473684, |
|
"grad_norm": 1.5986582331792596, |
|
"learning_rate": 1.3638696597277678e-07, |
|
"loss": 1.1812, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.9552631578947368, |
|
"grad_norm": 1.4132092269970191, |
|
"learning_rate": 1.2168376323780652e-07, |
|
"loss": 1.1915, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.9578947368421052, |
|
"grad_norm": 1.430700753393397, |
|
"learning_rate": 1.0781410234342093e-07, |
|
"loss": 1.1559, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.9605263157894737, |
|
"grad_norm": 1.3578426630468123, |
|
"learning_rate": 9.47791536249676e-08, |
|
"loss": 1.1253, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.9631578947368421, |
|
"grad_norm": 1.34199016293369, |
|
"learning_rate": 8.258001698397744e-08, |
|
"loss": 1.1275, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.9657894736842105, |
|
"grad_norm": 1.3736910582430197, |
|
"learning_rate": 7.121772179535135e-08, |
|
"loss": 1.1552, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.968421052631579, |
|
"grad_norm": 1.4235287172921616, |
|
"learning_rate": 6.069322682050516e-08, |
|
"loss": 1.1396, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.9710526315789474, |
|
"grad_norm": 1.5091671248608574, |
|
"learning_rate": 5.10074201264632e-08, |
|
"loss": 1.1243, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.9736842105263158, |
|
"grad_norm": 1.3411846884431124, |
|
"learning_rate": 4.216111901092501e-08, |
|
"loss": 1.144, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.9763157894736842, |
|
"grad_norm": 1.4483679456926384, |
|
"learning_rate": 3.4155069933301535e-08, |
|
"loss": 1.1223, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.9789473684210527, |
|
"grad_norm": 1.395711615853488, |
|
"learning_rate": 2.6989948451726643e-08, |
|
"loss": 1.1685, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.9815789473684211, |
|
"grad_norm": 1.3902035335632743, |
|
"learning_rate": 2.066635916605386e-08, |
|
"loss": 1.0879, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.9842105263157894, |
|
"grad_norm": 1.4063494990348104, |
|
"learning_rate": 1.518483566683826e-08, |
|
"loss": 1.1322, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.9868421052631579, |
|
"grad_norm": 1.3644257494193968, |
|
"learning_rate": 1.0545840490313597e-08, |
|
"loss": 1.1111, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.9894736842105263, |
|
"grad_norm": 1.3443180245395077, |
|
"learning_rate": 6.749765079363535e-09, |
|
"loss": 1.1258, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.9921052631578947, |
|
"grad_norm": 1.4222815730212364, |
|
"learning_rate": 3.7969297504858445e-09, |
|
"loss": 1.15, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.9947368421052631, |
|
"grad_norm": 1.4727085543827443, |
|
"learning_rate": 1.6875836667729073e-09, |
|
"loss": 1.1451, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.9973684210526316, |
|
"grad_norm": 1.3787627195454673, |
|
"learning_rate": 4.2190481687631736e-10, |
|
"loss": 1.1622, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.4191400815453412, |
|
"learning_rate": 0.0, |
|
"loss": 1.0871, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1701046228408813, |
|
"eval_runtime": 1144.8239, |
|
"eval_samples_per_second": 23.514, |
|
"eval_steps_per_second": 0.735, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1900, |
|
"total_flos": 99455336448000.0, |
|
"train_loss": 1.1960604431754664, |
|
"train_runtime": 10060.3773, |
|
"train_samples_per_second": 6.043, |
|
"train_steps_per_second": 0.189 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1900, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 99455336448000.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |