{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.21606648199446,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0110803324099723,
      "grad_norm": 1.0439653396606445,
      "learning_rate": 0.0001,
      "loss": 2.5223,
      "step": 1
    },
    {
      "epoch": 0.0221606648199446,
      "grad_norm": 1.072291612625122,
      "learning_rate": 9.949748743718594e-05,
      "loss": 2.465,
      "step": 2
    },
    {
      "epoch": 0.0332409972299169,
      "grad_norm": 0.9897931218147278,
      "learning_rate": 9.899497487437186e-05,
      "loss": 2.4409,
      "step": 3
    },
    {
      "epoch": 0.0443213296398892,
      "grad_norm": 1.0319087505340576,
      "learning_rate": 9.84924623115578e-05,
      "loss": 2.3619,
      "step": 4
    },
    {
      "epoch": 0.055401662049861494,
      "grad_norm": 1.0002905130386353,
      "learning_rate": 9.798994974874372e-05,
      "loss": 2.1627,
      "step": 5
    },
    {
      "epoch": 0.0664819944598338,
      "grad_norm": 1.0927557945251465,
      "learning_rate": 9.748743718592965e-05,
      "loss": 2.0505,
      "step": 6
    },
    {
      "epoch": 0.07756232686980609,
      "grad_norm": 0.9562727808952332,
      "learning_rate": 9.698492462311559e-05,
      "loss": 1.9197,
      "step": 7
    },
    {
      "epoch": 0.0886426592797784,
      "grad_norm": 1.1700633764266968,
      "learning_rate": 9.64824120603015e-05,
      "loss": 1.6867,
      "step": 8
    },
    {
      "epoch": 0.0997229916897507,
      "grad_norm": 0.8975285291671753,
      "learning_rate": 9.597989949748745e-05,
      "loss": 1.6386,
      "step": 9
    },
    {
      "epoch": 0.11080332409972299,
      "grad_norm": 0.8447593450546265,
      "learning_rate": 9.547738693467337e-05,
      "loss": 1.522,
      "step": 10
    },
    {
      "epoch": 0.12188365650969529,
      "grad_norm": 0.8749620318412781,
      "learning_rate": 9.49748743718593e-05,
      "loss": 1.4721,
      "step": 11
    },
    {
      "epoch": 0.1329639889196676,
      "grad_norm": 0.6403835415840149,
      "learning_rate": 9.447236180904523e-05,
      "loss": 1.4496,
      "step": 12
    },
    {
      "epoch": 0.1440443213296399,
      "grad_norm": 0.4993043541908264,
      "learning_rate": 9.396984924623115e-05,
      "loss": 1.4022,
      "step": 13
    },
    {
      "epoch": 0.15512465373961218,
      "grad_norm": 0.4994354844093323,
      "learning_rate": 9.34673366834171e-05,
      "loss": 1.4015,
      "step": 14
    },
    {
      "epoch": 0.16620498614958448,
      "grad_norm": 0.5358327627182007,
      "learning_rate": 9.296482412060302e-05,
      "loss": 1.2465,
      "step": 15
    },
    {
      "epoch": 0.1772853185595568,
      "grad_norm": 0.5239339470863342,
      "learning_rate": 9.246231155778895e-05,
      "loss": 1.3862,
      "step": 16
    },
    {
      "epoch": 0.1883656509695291,
      "grad_norm": 0.5605911612510681,
      "learning_rate": 9.195979899497488e-05,
      "loss": 1.3572,
      "step": 17
    },
    {
      "epoch": 0.1994459833795014,
      "grad_norm": 0.5271956324577332,
      "learning_rate": 9.14572864321608e-05,
      "loss": 1.363,
      "step": 18
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.5072754621505737,
      "learning_rate": 9.095477386934675e-05,
      "loss": 1.3393,
      "step": 19
    },
    {
      "epoch": 0.22160664819944598,
      "grad_norm": 0.5984258055686951,
      "learning_rate": 9.045226130653267e-05,
      "loss": 1.4338,
      "step": 20
    },
    {
      "epoch": 0.23268698060941828,
      "grad_norm": 0.6067867279052734,
      "learning_rate": 8.99497487437186e-05,
      "loss": 1.1891,
      "step": 21
    },
    {
      "epoch": 0.24376731301939059,
      "grad_norm": 0.5885209441184998,
      "learning_rate": 8.944723618090453e-05,
      "loss": 1.3141,
      "step": 22
    },
    {
      "epoch": 0.2548476454293629,
      "grad_norm": 0.5784013271331787,
      "learning_rate": 8.894472361809045e-05,
      "loss": 1.4002,
      "step": 23
    },
    {
      "epoch": 0.2659279778393352,
      "grad_norm": 0.6238617897033691,
      "learning_rate": 8.84422110552764e-05,
      "loss": 1.3739,
      "step": 24
    },
    {
      "epoch": 0.2770083102493075,
      "grad_norm": 0.6549237370491028,
      "learning_rate": 8.793969849246232e-05,
      "loss": 1.4349,
      "step": 25
    },
    {
      "epoch": 0.2880886426592798,
      "grad_norm": 0.6756062507629395,
      "learning_rate": 8.743718592964825e-05,
      "loss": 1.3734,
      "step": 26
    },
    {
      "epoch": 0.29916897506925205,
      "grad_norm": 0.7228646278381348,
      "learning_rate": 8.693467336683418e-05,
      "loss": 1.3946,
      "step": 27
    },
    {
      "epoch": 0.31024930747922436,
      "grad_norm": 0.6804022192955017,
      "learning_rate": 8.64321608040201e-05,
      "loss": 1.3068,
      "step": 28
    },
    {
      "epoch": 0.32132963988919666,
      "grad_norm": 0.7463417053222656,
      "learning_rate": 8.592964824120603e-05,
      "loss": 1.3368,
      "step": 29
    },
    {
      "epoch": 0.33240997229916897,
      "grad_norm": 0.7411599159240723,
      "learning_rate": 8.542713567839196e-05,
      "loss": 1.3373,
      "step": 30
    },
    {
      "epoch": 0.34349030470914127,
      "grad_norm": 0.7613984942436218,
      "learning_rate": 8.49246231155779e-05,
      "loss": 1.2158,
      "step": 31
    },
    {
      "epoch": 0.3545706371191136,
      "grad_norm": 0.8660432696342468,
      "learning_rate": 8.442211055276383e-05,
      "loss": 1.2104,
      "step": 32
    },
    {
      "epoch": 0.3656509695290859,
      "grad_norm": 0.8703951835632324,
      "learning_rate": 8.391959798994975e-05,
      "loss": 1.2992,
      "step": 33
    },
    {
      "epoch": 0.3767313019390582,
      "grad_norm": 0.8799692988395691,
      "learning_rate": 8.341708542713568e-05,
      "loss": 1.2525,
      "step": 34
    },
    {
      "epoch": 0.3878116343490305,
      "grad_norm": 0.8259297609329224,
      "learning_rate": 8.291457286432161e-05,
      "loss": 1.2281,
      "step": 35
    },
    {
      "epoch": 0.3988919667590028,
      "grad_norm": 0.9279691576957703,
      "learning_rate": 8.241206030150754e-05,
      "loss": 1.3589,
      "step": 36
    },
    {
      "epoch": 0.4099722991689751,
      "grad_norm": 0.8066436648368835,
      "learning_rate": 8.190954773869348e-05,
      "loss": 1.2785,
      "step": 37
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.7177894115447998,
      "learning_rate": 8.14070351758794e-05,
      "loss": 1.278,
      "step": 38
    },
    {
      "epoch": 0.43213296398891965,
      "grad_norm": 0.6018456220626831,
      "learning_rate": 8.090452261306533e-05,
      "loss": 1.2151,
      "step": 39
    },
    {
      "epoch": 0.44321329639889195,
      "grad_norm": 0.5777440667152405,
      "learning_rate": 8.040201005025126e-05,
      "loss": 1.235,
      "step": 40
    },
    {
      "epoch": 0.45429362880886426,
      "grad_norm": 0.4722194969654083,
      "learning_rate": 7.989949748743719e-05,
      "loss": 1.256,
      "step": 41
    },
    {
      "epoch": 0.46537396121883656,
      "grad_norm": 0.4909801185131073,
      "learning_rate": 7.939698492462313e-05,
      "loss": 1.2826,
      "step": 42
    },
    {
      "epoch": 0.47645429362880887,
      "grad_norm": 0.5409879684448242,
      "learning_rate": 7.889447236180904e-05,
      "loss": 1.2748,
      "step": 43
    },
    {
      "epoch": 0.48753462603878117,
      "grad_norm": 0.5462285876274109,
      "learning_rate": 7.839195979899498e-05,
      "loss": 1.2736,
      "step": 44
    },
    {
      "epoch": 0.4986149584487535,
      "grad_norm": 0.5091783404350281,
      "learning_rate": 7.788944723618091e-05,
      "loss": 1.1858,
      "step": 45
    },
    {
      "epoch": 0.5096952908587258,
      "grad_norm": 0.4923122227191925,
      "learning_rate": 7.738693467336684e-05,
      "loss": 1.2042,
      "step": 46
    },
    {
      "epoch": 0.5207756232686981,
      "grad_norm": 0.5117873549461365,
      "learning_rate": 7.688442211055277e-05,
      "loss": 1.2803,
      "step": 47
    },
    {
      "epoch": 0.5318559556786704,
      "grad_norm": 0.47855451703071594,
      "learning_rate": 7.638190954773869e-05,
      "loss": 1.2136,
      "step": 48
    },
    {
      "epoch": 0.5429362880886427,
      "grad_norm": 0.4981115460395813,
      "learning_rate": 7.587939698492463e-05,
      "loss": 1.2662,
      "step": 49
    },
    {
      "epoch": 0.554016620498615,
      "grad_norm": 0.4743058681488037,
      "learning_rate": 7.537688442211056e-05,
      "loss": 1.1551,
      "step": 50
    },
    {
      "epoch": 0.5650969529085873,
      "grad_norm": 0.4888276159763336,
      "learning_rate": 7.487437185929649e-05,
      "loss": 1.1727,
      "step": 51
    },
    {
      "epoch": 0.5761772853185596,
      "grad_norm": 0.5036386251449585,
      "learning_rate": 7.437185929648241e-05,
      "loss": 1.2205,
      "step": 52
    },
    {
      "epoch": 0.5872576177285319,
      "grad_norm": 0.5121099352836609,
      "learning_rate": 7.386934673366834e-05,
      "loss": 1.2157,
      "step": 53
    },
    {
      "epoch": 0.5983379501385041,
      "grad_norm": 0.5122838616371155,
      "learning_rate": 7.336683417085427e-05,
      "loss": 1.2199,
      "step": 54
    },
    {
      "epoch": 0.6094182825484764,
      "grad_norm": 0.5164381265640259,
      "learning_rate": 7.28643216080402e-05,
      "loss": 1.2962,
      "step": 55
    },
    {
      "epoch": 0.6204986149584487,
      "grad_norm": 0.5261529684066772,
      "learning_rate": 7.236180904522614e-05,
      "loss": 1.2283,
      "step": 56
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.5181484818458557,
      "learning_rate": 7.185929648241206e-05,
      "loss": 1.2724,
      "step": 57
    },
    {
      "epoch": 0.6426592797783933,
      "grad_norm": 0.48683488368988037,
      "learning_rate": 7.135678391959799e-05,
      "loss": 1.3003,
      "step": 58
    },
    {
      "epoch": 0.6537396121883656,
      "grad_norm": 0.4646837115287781,
      "learning_rate": 7.085427135678392e-05,
      "loss": 1.2103,
      "step": 59
    },
    {
      "epoch": 0.6648199445983379,
      "grad_norm": 0.513752818107605,
      "learning_rate": 7.035175879396985e-05,
      "loss": 1.1334,
      "step": 60
    },
    {
      "epoch": 0.6759002770083102,
      "grad_norm": 0.47100791335105896,
      "learning_rate": 6.984924623115579e-05,
      "loss": 1.2148,
      "step": 61
    },
    {
      "epoch": 0.6869806094182825,
      "grad_norm": 0.5160151124000549,
      "learning_rate": 6.93467336683417e-05,
      "loss": 1.2718,
      "step": 62
    },
    {
      "epoch": 0.6980609418282548,
      "grad_norm": 0.42398396134376526,
      "learning_rate": 6.884422110552764e-05,
      "loss": 1.119,
      "step": 63
    },
    {
      "epoch": 0.7091412742382271,
      "grad_norm": 0.5424822568893433,
      "learning_rate": 6.834170854271357e-05,
      "loss": 1.2911,
      "step": 64
    },
    {
      "epoch": 0.7202216066481995,
      "grad_norm": 0.557855486869812,
      "learning_rate": 6.78391959798995e-05,
      "loss": 1.1695,
      "step": 65
    },
    {
      "epoch": 0.7313019390581718,
      "grad_norm": 0.4864124059677124,
      "learning_rate": 6.733668341708544e-05,
      "loss": 1.1762,
      "step": 66
    },
    {
      "epoch": 0.7423822714681441,
      "grad_norm": 0.5386707186698914,
      "learning_rate": 6.683417085427135e-05,
      "loss": 1.1766,
      "step": 67
    },
    {
      "epoch": 0.7534626038781164,
      "grad_norm": 0.5114085674285889,
      "learning_rate": 6.633165829145729e-05,
      "loss": 1.2149,
      "step": 68
    },
    {
      "epoch": 0.7645429362880887,
      "grad_norm": 0.5140852928161621,
      "learning_rate": 6.582914572864322e-05,
      "loss": 1.2486,
      "step": 69
    },
    {
      "epoch": 0.775623268698061,
      "grad_norm": 0.4860430359840393,
      "learning_rate": 6.532663316582915e-05,
      "loss": 1.2724,
      "step": 70
    },
    {
      "epoch": 0.7867036011080333,
      "grad_norm": 0.5633051991462708,
      "learning_rate": 6.482412060301508e-05,
      "loss": 1.2467,
      "step": 71
    },
    {
      "epoch": 0.7977839335180056,
      "grad_norm": 0.48254477977752686,
      "learning_rate": 6.4321608040201e-05,
      "loss": 1.1976,
      "step": 72
    },
    {
      "epoch": 0.8088642659279779,
      "grad_norm": 0.48218846321105957,
      "learning_rate": 6.381909547738694e-05,
      "loss": 1.1644,
      "step": 73
    },
    {
      "epoch": 0.8199445983379502,
      "grad_norm": 0.4937390387058258,
      "learning_rate": 6.331658291457287e-05,
      "loss": 1.2049,
      "step": 74
    },
    {
      "epoch": 0.8310249307479224,
      "grad_norm": 0.48136794567108154,
      "learning_rate": 6.28140703517588e-05,
      "loss": 1.2073,
      "step": 75
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.5367251038551331,
      "learning_rate": 6.231155778894473e-05,
      "loss": 1.3024,
      "step": 76
    },
    {
      "epoch": 0.853185595567867,
      "grad_norm": 0.5513749718666077,
      "learning_rate": 6.180904522613065e-05,
      "loss": 1.2312,
      "step": 77
    },
    {
      "epoch": 0.8642659279778393,
      "grad_norm": 0.5120927095413208,
      "learning_rate": 6.130653266331658e-05,
      "loss": 1.1814,
      "step": 78
    },
    {
      "epoch": 0.8753462603878116,
      "grad_norm": 0.47654178738594055,
      "learning_rate": 6.080402010050251e-05,
      "loss": 1.1599,
      "step": 79
    },
    {
      "epoch": 0.8864265927977839,
      "grad_norm": 0.5559302568435669,
      "learning_rate": 6.030150753768844e-05,
      "loss": 1.2203,
      "step": 80
    },
    {
      "epoch": 0.8975069252077562,
      "grad_norm": 0.5184886455535889,
      "learning_rate": 5.979899497487438e-05,
      "loss": 1.2238,
      "step": 81
    },
    {
      "epoch": 0.9085872576177285,
      "grad_norm": 0.5314403176307678,
      "learning_rate": 5.929648241206031e-05,
      "loss": 1.2255,
      "step": 82
    },
    {
      "epoch": 0.9196675900277008,
      "grad_norm": 0.4995604455471039,
      "learning_rate": 5.879396984924623e-05,
      "loss": 1.1613,
      "step": 83
    },
    {
      "epoch": 0.9307479224376731,
      "grad_norm": 0.483528733253479,
      "learning_rate": 5.829145728643216e-05,
      "loss": 1.2412,
      "step": 84
    },
    {
      "epoch": 0.9418282548476454,
      "grad_norm": 0.5035815238952637,
      "learning_rate": 5.778894472361809e-05,
      "loss": 1.2189,
      "step": 85
    },
    {
      "epoch": 0.9529085872576177,
      "grad_norm": 0.5537089705467224,
      "learning_rate": 5.728643216080403e-05,
      "loss": 1.3265,
      "step": 86
    },
    {
      "epoch": 0.96398891966759,
      "grad_norm": 0.5601852536201477,
      "learning_rate": 5.6783919597989955e-05,
      "loss": 1.2444,
      "step": 87
    },
    {
      "epoch": 0.9750692520775623,
      "grad_norm": 0.5105345249176025,
      "learning_rate": 5.628140703517588e-05,
      "loss": 1.169,
      "step": 88
    },
    {
      "epoch": 0.9861495844875346,
      "grad_norm": 0.533295750617981,
      "learning_rate": 5.577889447236181e-05,
      "loss": 1.1606,
      "step": 89
    },
    {
      "epoch": 0.997229916897507,
      "grad_norm": 0.5628755688667297,
      "learning_rate": 5.527638190954774e-05,
      "loss": 1.2022,
      "step": 90
    },
    {
      "epoch": 1.0083102493074791,
      "grad_norm": 1.1733123064041138,
      "learning_rate": 5.477386934673368e-05,
      "loss": 2.0244,
      "step": 91
    },
    {
      "epoch": 1.0193905817174516,
      "grad_norm": 0.48274314403533936,
      "learning_rate": 5.4271356783919604e-05,
      "loss": 1.1683,
      "step": 92
    },
    {
      "epoch": 1.0304709141274238,
      "grad_norm": 0.5582519173622131,
      "learning_rate": 5.376884422110553e-05,
      "loss": 1.3466,
      "step": 93
    },
    {
      "epoch": 1.0415512465373962,
      "grad_norm": 0.4637548327445984,
      "learning_rate": 5.3266331658291455e-05,
      "loss": 1.0639,
      "step": 94
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.5016134977340698,
      "learning_rate": 5.276381909547739e-05,
      "loss": 1.2031,
      "step": 95
    },
    {
      "epoch": 1.0637119113573408,
      "grad_norm": 0.5333296060562134,
      "learning_rate": 5.226130653266332e-05,
      "loss": 1.145,
      "step": 96
    },
    {
      "epoch": 1.074792243767313,
      "grad_norm": 0.5505311489105225,
      "learning_rate": 5.175879396984925e-05,
      "loss": 1.2017,
      "step": 97
    },
    {
      "epoch": 1.0858725761772854,
      "grad_norm": 0.49051010608673096,
      "learning_rate": 5.125628140703518e-05,
      "loss": 1.1612,
      "step": 98
    },
    {
      "epoch": 1.0969529085872576,
      "grad_norm": 0.526991605758667,
      "learning_rate": 5.0753768844221104e-05,
      "loss": 1.1927,
      "step": 99
    },
    {
      "epoch": 1.10803324099723,
      "grad_norm": 0.5483986139297485,
      "learning_rate": 5.0251256281407036e-05,
      "loss": 1.2416,
      "step": 100
    },
    {
      "epoch": 1.1191135734072022,
      "grad_norm": 0.5175971984863281,
      "learning_rate": 4.974874371859297e-05,
      "loss": 1.097,
      "step": 101
    },
    {
      "epoch": 1.1301939058171746,
      "grad_norm": 0.5794366002082825,
      "learning_rate": 4.92462311557789e-05,
      "loss": 1.1469,
      "step": 102
    },
    {
      "epoch": 1.1412742382271468,
      "grad_norm": 0.5408708453178406,
      "learning_rate": 4.874371859296483e-05,
      "loss": 1.1193,
      "step": 103
    },
    {
      "epoch": 1.1523545706371192,
      "grad_norm": 0.504085123538971,
      "learning_rate": 4.824120603015075e-05,
      "loss": 0.9672,
      "step": 104
    },
    {
      "epoch": 1.1634349030470914,
      "grad_norm": 0.6266749501228333,
      "learning_rate": 4.7738693467336685e-05,
      "loss": 1.1375,
      "step": 105
    },
    {
      "epoch": 1.1745152354570636,
      "grad_norm": 0.5513699054718018,
      "learning_rate": 4.723618090452262e-05,
      "loss": 1.2081,
      "step": 106
    },
    {
      "epoch": 1.185595567867036,
      "grad_norm": 0.5879850387573242,
      "learning_rate": 4.673366834170855e-05,
      "loss": 1.1576,
      "step": 107
    },
    {
      "epoch": 1.1966759002770084,
      "grad_norm": 0.5555039048194885,
      "learning_rate": 4.6231155778894475e-05,
      "loss": 1.1212,
      "step": 108
    },
    {
      "epoch": 1.2077562326869806,
      "grad_norm": 0.5815752744674683,
      "learning_rate": 4.57286432160804e-05,
      "loss": 1.1984,
      "step": 109
    },
    {
      "epoch": 1.2188365650969528,
      "grad_norm": 0.6069645881652832,
      "learning_rate": 4.522613065326633e-05,
      "loss": 1.1879,
      "step": 110
    },
    {
      "epoch": 1.2299168975069252,
      "grad_norm": 0.6030775308609009,
      "learning_rate": 4.4723618090452266e-05,
      "loss": 1.1959,
      "step": 111
    },
    {
      "epoch": 1.2409972299168974,
      "grad_norm": 0.5615729093551636,
      "learning_rate": 4.42211055276382e-05,
      "loss": 1.1266,
      "step": 112
    },
    {
      "epoch": 1.2520775623268698,
      "grad_norm": 0.5482991337776184,
      "learning_rate": 4.3718592964824124e-05,
      "loss": 1.158,
      "step": 113
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 0.6517236232757568,
      "learning_rate": 4.321608040201005e-05,
      "loss": 1.2302,
      "step": 114
    },
    {
      "epoch": 1.2742382271468145,
      "grad_norm": 0.5991782546043396,
      "learning_rate": 4.271356783919598e-05,
      "loss": 1.1322,
      "step": 115
    },
    {
      "epoch": 1.2853185595567866,
      "grad_norm": 0.6047670841217041,
      "learning_rate": 4.2211055276381914e-05,
      "loss": 1.1633,
      "step": 116
    },
    {
      "epoch": 1.296398891966759,
      "grad_norm": 0.6029966473579407,
      "learning_rate": 4.170854271356784e-05,
      "loss": 1.1269,
      "step": 117
    },
    {
      "epoch": 1.3074792243767313,
      "grad_norm": 0.5472580790519714,
      "learning_rate": 4.120603015075377e-05,
      "loss": 1.0763,
      "step": 118
    },
    {
      "epoch": 1.3185595567867037,
      "grad_norm": 0.5828231573104858,
      "learning_rate": 4.07035175879397e-05,
      "loss": 1.1432,
      "step": 119
    },
    {
      "epoch": 1.3296398891966759,
      "grad_norm": 0.6450657844543457,
      "learning_rate": 4.020100502512563e-05,
      "loss": 1.2425,
      "step": 120
    },
    {
      "epoch": 1.3407202216066483,
      "grad_norm": 0.6098791360855103,
      "learning_rate": 3.969849246231156e-05,
      "loss": 1.0802,
      "step": 121
    },
    {
      "epoch": 1.3518005540166205,
      "grad_norm": 0.5955787897109985,
      "learning_rate": 3.919597989949749e-05,
      "loss": 1.0539,
      "step": 122
    },
    {
      "epoch": 1.3628808864265927,
      "grad_norm": 0.651118814945221,
      "learning_rate": 3.869346733668342e-05,
      "loss": 1.2524,
      "step": 123
    },
    {
      "epoch": 1.373961218836565,
      "grad_norm": 0.6121578216552734,
      "learning_rate": 3.8190954773869346e-05,
      "loss": 1.1569,
      "step": 124
    },
    {
      "epoch": 1.3850415512465375,
      "grad_norm": 0.6717909574508667,
      "learning_rate": 3.768844221105528e-05,
      "loss": 1.2191,
      "step": 125
    },
    {
      "epoch": 1.3961218836565097,
      "grad_norm": 0.6332257390022278,
      "learning_rate": 3.7185929648241204e-05,
      "loss": 1.1298,
      "step": 126
    },
    {
      "epoch": 1.4072022160664819,
      "grad_norm": 0.6334303617477417,
      "learning_rate": 3.668341708542714e-05,
      "loss": 1.0351,
      "step": 127
    },
    {
      "epoch": 1.4182825484764543,
      "grad_norm": 0.655293345451355,
      "learning_rate": 3.618090452261307e-05,
      "loss": 1.177,
      "step": 128
    },
    {
      "epoch": 1.4293628808864267,
      "grad_norm": 0.6217477321624756,
      "learning_rate": 3.5678391959798995e-05,
      "loss": 1.1263,
      "step": 129
    },
    {
      "epoch": 1.440443213296399,
      "grad_norm": 0.6945567727088928,
      "learning_rate": 3.517587939698493e-05,
      "loss": 1.2184,
      "step": 130
    },
    {
      "epoch": 1.451523545706371,
      "grad_norm": 0.6126496195793152,
      "learning_rate": 3.467336683417085e-05,
      "loss": 1.0179,
      "step": 131
    },
    {
      "epoch": 1.4626038781163435,
      "grad_norm": 0.6645523309707642,
      "learning_rate": 3.4170854271356785e-05,
      "loss": 1.1921,
      "step": 132
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 0.65655916929245,
      "learning_rate": 3.366834170854272e-05,
      "loss": 1.1358,
      "step": 133
    },
    {
      "epoch": 1.4847645429362881,
      "grad_norm": 0.6353817582130432,
      "learning_rate": 3.3165829145728643e-05,
      "loss": 1.0577,
      "step": 134
    },
    {
      "epoch": 1.4958448753462603,
      "grad_norm": 0.6068665385246277,
      "learning_rate": 3.2663316582914576e-05,
      "loss": 1.1409,
      "step": 135
    },
    {
      "epoch": 1.5069252077562327,
      "grad_norm": 0.6636937856674194,
      "learning_rate": 3.21608040201005e-05,
      "loss": 1.1746,
      "step": 136
    },
    {
      "epoch": 1.5180055401662051,
      "grad_norm": 0.764776885509491,
      "learning_rate": 3.1658291457286434e-05,
      "loss": 1.2315,
      "step": 137
    },
    {
      "epoch": 1.5290858725761773,
      "grad_norm": 0.6621037125587463,
      "learning_rate": 3.1155778894472366e-05,
      "loss": 1.122,
      "step": 138
    },
    {
      "epoch": 1.5401662049861495,
      "grad_norm": 0.6436090469360352,
      "learning_rate": 3.065326633165829e-05,
      "loss": 1.0881,
      "step": 139
    },
    {
      "epoch": 1.5512465373961217,
      "grad_norm": 0.6181167364120483,
      "learning_rate": 3.015075376884422e-05,
      "loss": 1.1523,
      "step": 140
    },
    {
      "epoch": 1.5623268698060941,
      "grad_norm": 0.6687259674072266,
      "learning_rate": 2.9648241206030153e-05,
      "loss": 1.0796,
      "step": 141
    },
    {
      "epoch": 1.5734072022160666,
      "grad_norm": 0.6394988298416138,
      "learning_rate": 2.914572864321608e-05,
      "loss": 1.1142,
      "step": 142
    },
    {
      "epoch": 1.5844875346260388,
      "grad_norm": 0.6827041506767273,
      "learning_rate": 2.8643216080402015e-05,
      "loss": 1.1727,
      "step": 143
    },
    {
      "epoch": 1.595567867036011,
      "grad_norm": 0.6124918460845947,
      "learning_rate": 2.814070351758794e-05,
      "loss": 1.037,
      "step": 144
    },
    {
      "epoch": 1.6066481994459834,
      "grad_norm": 0.6157639622688293,
      "learning_rate": 2.763819095477387e-05,
      "loss": 1.0598,
      "step": 145
    },
    {
      "epoch": 1.6177285318559558,
      "grad_norm": 0.667032778263092,
      "learning_rate": 2.7135678391959802e-05,
      "loss": 1.1222,
      "step": 146
    },
    {
      "epoch": 1.628808864265928,
      "grad_norm": 0.7053226232528687,
      "learning_rate": 2.6633165829145728e-05,
      "loss": 1.2135,
      "step": 147
    },
    {
      "epoch": 1.6398891966759002,
      "grad_norm": 0.7048087120056152,
      "learning_rate": 2.613065326633166e-05,
      "loss": 1.1588,
      "step": 148
    },
    {
      "epoch": 1.6509695290858726,
      "grad_norm": 0.6474671959877014,
      "learning_rate": 2.562814070351759e-05,
      "loss": 1.0781,
      "step": 149
    },
    {
      "epoch": 1.662049861495845,
      "grad_norm": 0.65389084815979,
      "learning_rate": 2.5125628140703518e-05,
      "loss": 1.0982,
      "step": 150
    },
    {
      "epoch": 1.6731301939058172,
      "grad_norm": 0.6817282438278198,
      "learning_rate": 2.462311557788945e-05,
      "loss": 1.0967,
      "step": 151
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 0.6355194449424744,
      "learning_rate": 2.4120603015075376e-05,
      "loss": 1.0279,
      "step": 152
    },
    {
      "epoch": 1.6952908587257618,
      "grad_norm": 0.683628261089325,
      "learning_rate": 2.361809045226131e-05,
      "loss": 1.1533,
      "step": 153
    },
    {
      "epoch": 1.7063711911357342,
      "grad_norm": 0.710445761680603,
      "learning_rate": 2.3115577889447238e-05,
      "loss": 1.182,
      "step": 154
    },
    {
      "epoch": 1.7174515235457064,
      "grad_norm": 0.6544257998466492,
      "learning_rate": 2.2613065326633167e-05,
      "loss": 1.0936,
      "step": 155
    },
    {
      "epoch": 1.7285318559556786,
      "grad_norm": 0.6538469791412354,
      "learning_rate": 2.21105527638191e-05,
      "loss": 1.0657,
      "step": 156
    },
    {
      "epoch": 1.739612188365651,
      "grad_norm": 0.6852319836616516,
      "learning_rate": 2.1608040201005025e-05,
      "loss": 1.162,
      "step": 157
    },
    {
      "epoch": 1.7506925207756234,
      "grad_norm": 0.6878906488418579,
      "learning_rate": 2.1105527638190957e-05,
      "loss": 1.0954,
      "step": 158
    },
    {
      "epoch": 1.7617728531855956,
      "grad_norm": 0.7377908825874329,
      "learning_rate": 2.0603015075376886e-05,
      "loss": 1.0891,
      "step": 159
    },
    {
      "epoch": 1.7728531855955678,
      "grad_norm": 0.6463127732276917,
      "learning_rate": 2.0100502512562815e-05,
      "loss": 1.0751,
      "step": 160
    },
    {
      "epoch": 1.78393351800554,
      "grad_norm": 0.6408430337905884,
      "learning_rate": 1.9597989949748744e-05,
      "loss": 1.0861,
      "step": 161
    },
    {
      "epoch": 1.7950138504155124,
      "grad_norm": 0.7149915099143982,
      "learning_rate": 1.9095477386934673e-05,
      "loss": 1.1801,
      "step": 162
    },
    {
      "epoch": 1.8060941828254848,
      "grad_norm": 0.6683186888694763,
      "learning_rate": 1.8592964824120602e-05,
      "loss": 1.0473,
      "step": 163
    },
    {
      "epoch": 1.817174515235457,
      "grad_norm": 0.6818321347236633,
      "learning_rate": 1.8090452261306535e-05,
      "loss": 1.0699,
      "step": 164
    },
    {
      "epoch": 1.8282548476454292,
      "grad_norm": 0.7177061438560486,
      "learning_rate": 1.7587939698492464e-05,
      "loss": 1.12,
      "step": 165
    },
    {
      "epoch": 1.8393351800554016,
      "grad_norm": 0.6971613168716431,
      "learning_rate": 1.7085427135678393e-05,
      "loss": 1.0919,
      "step": 166
    },
    {
      "epoch": 1.850415512465374,
      "grad_norm": 0.6916444301605225,
      "learning_rate": 1.6582914572864322e-05,
      "loss": 1.0878,
      "step": 167
    },
    {
      "epoch": 1.8614958448753463,
      "grad_norm": 0.6586862206459045,
      "learning_rate": 1.608040201005025e-05,
      "loss": 0.9554,
      "step": 168
    },
    {
      "epoch": 1.8725761772853184,
      "grad_norm": 0.7246650457382202,
      "learning_rate": 1.5577889447236183e-05,
      "loss": 1.1607,
      "step": 169
    },
    {
      "epoch": 1.8836565096952909,
      "grad_norm": 0.761667013168335,
      "learning_rate": 1.507537688442211e-05,
      "loss": 1.1838,
      "step": 170
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 0.6918529868125916,
      "learning_rate": 1.457286432160804e-05,
      "loss": 1.1148,
      "step": 171
    },
    {
      "epoch": 1.9058171745152355,
      "grad_norm": 0.7272382378578186,
      "learning_rate": 1.407035175879397e-05,
      "loss": 1.1342,
      "step": 172
    },
    {
      "epoch": 1.9168975069252077,
      "grad_norm": 0.6923725008964539,
      "learning_rate": 1.3567839195979901e-05,
      "loss": 1.0149,
      "step": 173
    },
    {
      "epoch": 1.92797783933518,
      "grad_norm": 0.7408672571182251,
      "learning_rate": 1.306532663316583e-05,
      "loss": 1.148,
      "step": 174
    },
    {
      "epoch": 1.9390581717451525,
      "grad_norm": 0.6574199795722961,
      "learning_rate": 1.2562814070351759e-05,
      "loss": 1.0415,
      "step": 175
    },
    {
      "epoch": 1.9501385041551247,
      "grad_norm": 0.721842885017395,
      "learning_rate": 1.2060301507537688e-05,
      "loss": 1.2188,
      "step": 176
    },
    {
      "epoch": 1.9612188365650969,
      "grad_norm": 0.7084245681762695,
      "learning_rate": 1.1557788944723619e-05,
      "loss": 1.1464,
      "step": 177
    },
    {
      "epoch": 1.9722991689750693,
      "grad_norm": 0.6412287354469299,
      "learning_rate": 1.105527638190955e-05,
      "loss": 0.9909,
      "step": 178
    },
    {
      "epoch": 1.9833795013850417,
      "grad_norm": 0.7143056988716125,
      "learning_rate": 1.0552763819095479e-05,
      "loss": 1.1627,
      "step": 179
    },
    {
      "epoch": 1.994459833795014,
      "grad_norm": 0.6697918772697449,
      "learning_rate": 1.0050251256281408e-05,
      "loss": 1.0892,
      "step": 180
    },
    {
      "epoch": 2.005540166204986,
      "grad_norm": 1.8020601272583008,
      "learning_rate": 9.547738693467337e-06,
      "loss": 1.984,
      "step": 181
    },
    {
      "epoch": 2.0166204986149583,
      "grad_norm": 0.7190349102020264,
      "learning_rate": 9.045226130653267e-06,
      "loss": 1.0837,
      "step": 182
    },
    {
      "epoch": 2.027700831024931,
      "grad_norm": 0.6655665040016174,
      "learning_rate": 8.542713567839196e-06,
      "loss": 0.9812,
      "step": 183
    },
    {
      "epoch": 2.038781163434903,
      "grad_norm": 0.7195249795913696,
      "learning_rate": 8.040201005025125e-06,
      "loss": 1.1748,
      "step": 184
    },
    {
      "epoch": 2.0498614958448753,
      "grad_norm": 0.6796366572380066,
      "learning_rate": 7.537688442211055e-06,
      "loss": 1.07,
      "step": 185
    },
    {
      "epoch": 2.0609418282548475,
      "grad_norm": 0.679729700088501,
      "learning_rate": 7.035175879396985e-06,
      "loss": 0.996,
      "step": 186
    },
    {
      "epoch": 2.07202216066482,
      "grad_norm": 0.6805736422538757,
      "learning_rate": 6.532663316582915e-06,
      "loss": 1.0531,
      "step": 187
    },
    {
      "epoch": 2.0831024930747923,
      "grad_norm": 0.698531448841095,
      "learning_rate": 6.030150753768844e-06,
      "loss": 1.1163,
      "step": 188
    },
    {
      "epoch": 2.0941828254847645,
      "grad_norm": 0.6734586358070374,
      "learning_rate": 5.527638190954775e-06,
      "loss": 1.0584,
      "step": 189
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 0.6843752264976501,
      "learning_rate": 5.025125628140704e-06,
      "loss": 1.0854,
      "step": 190
    },
    {
      "epoch": 2.1163434903047094,
      "grad_norm": 0.6570263504981995,
      "learning_rate": 4.522613065326634e-06,
      "loss": 1.016,
      "step": 191
    },
    {
      "epoch": 2.1274238227146816,
      "grad_norm": 0.707304060459137,
      "learning_rate": 4.020100502512563e-06,
      "loss": 1.0311,
      "step": 192
    },
    {
      "epoch": 2.1385041551246537,
      "grad_norm": 0.7010348439216614,
      "learning_rate": 3.5175879396984926e-06,
      "loss": 1.1732,
      "step": 193
    },
    {
      "epoch": 2.149584487534626,
      "grad_norm": 0.6604486703872681,
      "learning_rate": 3.015075376884422e-06,
      "loss": 1.012,
      "step": 194
    },
    {
      "epoch": 2.160664819944598,
      "grad_norm": 0.6502628922462463,
      "learning_rate": 2.512562814070352e-06,
      "loss": 0.9839,
      "step": 195
    },
    {
      "epoch": 2.1717451523545708,
      "grad_norm": 0.6600444912910461,
      "learning_rate": 2.0100502512562813e-06,
      "loss": 1.0509,
      "step": 196
    },
    {
      "epoch": 2.182825484764543,
      "grad_norm": 0.69642573595047,
      "learning_rate": 1.507537688442211e-06,
      "loss": 1.1109,
      "step": 197
    },
    {
      "epoch": 2.193905817174515,
      "grad_norm": 0.691315233707428,
      "learning_rate": 1.0050251256281407e-06,
      "loss": 1.1067,
      "step": 198
    },
    {
      "epoch": 2.2049861495844874,
      "grad_norm": 0.6788798570632935,
      "learning_rate": 5.025125628140703e-07,
      "loss": 1.0564,
      "step": 199
    },
    {
      "epoch": 2.21606648199446,
      "grad_norm": 0.6888399124145508,
      "learning_rate": 0.0,
      "loss": 1.0406,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.18753507500032e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}