{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.944206008583691, |
|
"eval_steps": 500, |
|
"global_step": 232, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.017167381974248927, |
|
"grad_norm": 24.894664359139377, |
|
"learning_rate": 0.0, |
|
"loss": 2.0734, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.034334763948497854, |
|
"grad_norm": 24.00031672844471, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 2.0836, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.05150214592274678, |
|
"grad_norm": 29.716249933336616, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 2.1693, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.06866952789699571, |
|
"grad_norm": 24.37331054435917, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 2.0201, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.08583690987124463, |
|
"grad_norm": 19.789582138660975, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.9014, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.10300429184549356, |
|
"grad_norm": 18.92438805672366, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.0578, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.12017167381974249, |
|
"grad_norm": 13.872256454769323, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.9303, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.13733905579399142, |
|
"grad_norm": 17.41216169856372, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.9427, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.15450643776824036, |
|
"grad_norm": 13.884818846193818, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.7924, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.17167381974248927, |
|
"grad_norm": 10.563149359389286, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 1.8341, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1888412017167382, |
|
"grad_norm": 17.065510513137394, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.8003, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.20600858369098712, |
|
"grad_norm": 13.365031970331449, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.7405, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.22317596566523606, |
|
"grad_norm": 10.184190757772281, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.7369, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.24034334763948498, |
|
"grad_norm": 8.522394959606002, |
|
"learning_rate": 2.6e-06, |
|
"loss": 1.7161, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.2575107296137339, |
|
"grad_norm": 7.4639532795389885, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.7516, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.27467811158798283, |
|
"grad_norm": 6.885531173304724, |
|
"learning_rate": 3e-06, |
|
"loss": 1.7291, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2918454935622318, |
|
"grad_norm": 8.124663030531377, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.8113, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.3090128755364807, |
|
"grad_norm": 7.505534742466626, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.7011, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.3261802575107296, |
|
"grad_norm": 6.546353703058948, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.679, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.34334763948497854, |
|
"grad_norm": 8.084166970057376, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 1.7672, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.3605150214592275, |
|
"grad_norm": 7.39493724533185, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.7273, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.3776824034334764, |
|
"grad_norm": 7.897097334092764, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.723, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.3948497854077253, |
|
"grad_norm": 6.969310652434427, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.6652, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.41201716738197425, |
|
"grad_norm": 6.593881510049774, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 1.6471, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.4291845493562232, |
|
"grad_norm": 7.447075197928159, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.7447, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.44635193133047213, |
|
"grad_norm": 7.593703083280446, |
|
"learning_rate": 5e-06, |
|
"loss": 1.7199, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.463519313304721, |
|
"grad_norm": 7.670358465509374, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.7528, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.48068669527896996, |
|
"grad_norm": 6.816789703879088, |
|
"learning_rate": 5.400000000000001e-06, |
|
"loss": 1.69, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.4978540772532189, |
|
"grad_norm": 6.703285346233008, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.6589, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.5150214592274678, |
|
"grad_norm": 6.319351833531301, |
|
"learning_rate": 5.8e-06, |
|
"loss": 1.6783, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.5321888412017167, |
|
"grad_norm": 7.668634230120905, |
|
"learning_rate": 6e-06, |
|
"loss": 1.7307, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.5493562231759657, |
|
"grad_norm": 7.216605386564187, |
|
"learning_rate": 6.200000000000001e-06, |
|
"loss": 1.6241, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.5665236051502146, |
|
"grad_norm": 7.95986167238638, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 1.7027, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.5836909871244635, |
|
"grad_norm": 7.567869929265614, |
|
"learning_rate": 6.600000000000001e-06, |
|
"loss": 1.6541, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.6008583690987125, |
|
"grad_norm": 7.583204324051831, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 1.6605, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.6180257510729614, |
|
"grad_norm": 7.927092569421214, |
|
"learning_rate": 7e-06, |
|
"loss": 1.6376, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.6351931330472103, |
|
"grad_norm": 6.777169937117983, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 1.6076, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.6523605150214592, |
|
"grad_norm": 7.376746584211704, |
|
"learning_rate": 7.4e-06, |
|
"loss": 1.6631, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.6695278969957081, |
|
"grad_norm": 5.931441717917702, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 1.6105, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.6866952789699571, |
|
"grad_norm": 7.788470294493901, |
|
"learning_rate": 7.800000000000002e-06, |
|
"loss": 1.6777, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.703862660944206, |
|
"grad_norm": 7.201871110419962, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.6622, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.721030042918455, |
|
"grad_norm": 7.351004222204919, |
|
"learning_rate": 8.2e-06, |
|
"loss": 1.6343, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.7381974248927039, |
|
"grad_norm": 7.116370756646885, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.6713, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.7553648068669528, |
|
"grad_norm": 6.804638326754223, |
|
"learning_rate": 8.6e-06, |
|
"loss": 1.5942, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.7725321888412017, |
|
"grad_norm": 7.1784709545283025, |
|
"learning_rate": 8.8e-06, |
|
"loss": 1.6453, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.7896995708154506, |
|
"grad_norm": 6.578248052500373, |
|
"learning_rate": 9e-06, |
|
"loss": 1.6493, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.8068669527896996, |
|
"grad_norm": 5.835458640344457, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 1.5686, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.8240343347639485, |
|
"grad_norm": 6.024182879280123, |
|
"learning_rate": 9.4e-06, |
|
"loss": 1.5686, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.8412017167381974, |
|
"grad_norm": 5.931203630890797, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.5948, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.8583690987124464, |
|
"grad_norm": 5.741506735885901, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 1.6167, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.8755364806866953, |
|
"grad_norm": 6.248428015586314, |
|
"learning_rate": 1e-05, |
|
"loss": 1.6021, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.8927038626609443, |
|
"grad_norm": 5.523348714272916, |
|
"learning_rate": 9.999255120204248e-06, |
|
"loss": 1.6021, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.9098712446351931, |
|
"grad_norm": 5.486958421466518, |
|
"learning_rate": 9.997020702755353e-06, |
|
"loss": 1.5414, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.927038626609442, |
|
"grad_norm": 4.906442579745384, |
|
"learning_rate": 9.993297413402282e-06, |
|
"loss": 1.6355, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.944206008583691, |
|
"grad_norm": 3.4477648478182217, |
|
"learning_rate": 9.98808636150624e-06, |
|
"loss": 1.5204, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.9613733905579399, |
|
"grad_norm": 5.283485667760506, |
|
"learning_rate": 9.981389099710132e-06, |
|
"loss": 1.557, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.9785407725321889, |
|
"grad_norm": 4.864774414469769, |
|
"learning_rate": 9.973207623475964e-06, |
|
"loss": 1.5847, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.9957081545064378, |
|
"grad_norm": 4.99512035052619, |
|
"learning_rate": 9.96354437049027e-06, |
|
"loss": 1.4972, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.99512035052619, |
|
"learning_rate": 9.952402219937817e-06, |
|
"loss": 1.6505, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.425569772720337, |
|
"eval_runtime": 55.3506, |
|
"eval_samples_per_second": 6.504, |
|
"eval_steps_per_second": 0.217, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 1.0171673819742488, |
|
"grad_norm": 3.9912629226494145, |
|
"learning_rate": 9.939784491643734e-06, |
|
"loss": 1.5689, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 1.0343347639484979, |
|
"grad_norm": 3.5037206814091273, |
|
"learning_rate": 9.925694945084369e-06, |
|
"loss": 1.5846, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 1.0515021459227467, |
|
"grad_norm": 3.096134013860231, |
|
"learning_rate": 9.910137778267153e-06, |
|
"loss": 1.561, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 1.0686695278969958, |
|
"grad_norm": 6.745525969869406, |
|
"learning_rate": 9.893117626479778e-06, |
|
"loss": 1.5086, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 1.0858369098712446, |
|
"grad_norm": 3.771654699845576, |
|
"learning_rate": 9.874639560909118e-06, |
|
"loss": 1.4463, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 1.1030042918454936, |
|
"grad_norm": 7.348146928107017, |
|
"learning_rate": 9.854709087130261e-06, |
|
"loss": 1.5732, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 1.1201716738197425, |
|
"grad_norm": 4.285886261726828, |
|
"learning_rate": 9.833332143466099e-06, |
|
"loss": 1.5634, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 1.1373390557939915, |
|
"grad_norm": 9.50651645737036, |
|
"learning_rate": 9.810515099218004e-06, |
|
"loss": 1.5832, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 1.1545064377682404, |
|
"grad_norm": 9.85903247254424, |
|
"learning_rate": 9.78626475276808e-06, |
|
"loss": 1.5158, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 1.1716738197424892, |
|
"grad_norm": 4.500325598034437, |
|
"learning_rate": 9.76058832955357e-06, |
|
"loss": 1.5577, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 1.1888412017167382, |
|
"grad_norm": 6.726627673373393, |
|
"learning_rate": 9.733493479914031e-06, |
|
"loss": 1.5402, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 1.206008583690987, |
|
"grad_norm": 5.992547800498691, |
|
"learning_rate": 9.704988276811883e-06, |
|
"loss": 1.4771, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 1.2231759656652361, |
|
"grad_norm": 3.966804454715501, |
|
"learning_rate": 9.675081213427076e-06, |
|
"loss": 1.4715, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 1.240343347639485, |
|
"grad_norm": 5.018712503166457, |
|
"learning_rate": 9.643781200626512e-06, |
|
"loss": 1.483, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 1.2575107296137338, |
|
"grad_norm": 5.147155410650178, |
|
"learning_rate": 9.611097564309054e-06, |
|
"loss": 1.529, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 1.2746781115879828, |
|
"grad_norm": 4.430249361377804, |
|
"learning_rate": 9.577040042626832e-06, |
|
"loss": 1.5055, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 1.2918454935622319, |
|
"grad_norm": 3.3331885191043824, |
|
"learning_rate": 9.54161878308377e-06, |
|
"loss": 1.5686, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.3090128755364807, |
|
"grad_norm": 3.3765624318373684, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 1.4806, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 1.3261802575107295, |
|
"grad_norm": 3.0122848173838146, |
|
"learning_rate": 9.466727668927817e-06, |
|
"loss": 1.483, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.3433476394849786, |
|
"grad_norm": 2.8374889256024325, |
|
"learning_rate": 9.427280128266049e-06, |
|
"loss": 1.5371, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.3605150214592274, |
|
"grad_norm": 3.8591143057415067, |
|
"learning_rate": 9.38651347099721e-06, |
|
"loss": 1.515, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.3776824034334765, |
|
"grad_norm": 2.702387816445196, |
|
"learning_rate": 9.344439843625034e-06, |
|
"loss": 1.5065, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.3948497854077253, |
|
"grad_norm": 3.231309074741847, |
|
"learning_rate": 9.301071782067504e-06, |
|
"loss": 1.4691, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.4120171673819741, |
|
"grad_norm": 2.569345186210359, |
|
"learning_rate": 9.256422207921757e-06, |
|
"loss": 1.4551, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.4291845493562232, |
|
"grad_norm": 2.6936841928298763, |
|
"learning_rate": 9.21050442461406e-06, |
|
"loss": 1.5342, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.4463519313304722, |
|
"grad_norm": 3.1643389084628923, |
|
"learning_rate": 9.163332113436031e-06, |
|
"loss": 1.5134, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.463519313304721, |
|
"grad_norm": 2.0547003373714317, |
|
"learning_rate": 9.114919329468283e-06, |
|
"loss": 1.543, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.48068669527897, |
|
"grad_norm": 3.4765227820159628, |
|
"learning_rate": 9.065280497392663e-06, |
|
"loss": 1.4952, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.497854077253219, |
|
"grad_norm": 2.3605853021000245, |
|
"learning_rate": 9.014430407194413e-06, |
|
"loss": 1.4706, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.5150214592274678, |
|
"grad_norm": 2.79040585245284, |
|
"learning_rate": 8.962384209755453e-06, |
|
"loss": 1.5038, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.5321888412017168, |
|
"grad_norm": 3.130637987619235, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 1.5331, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.5493562231759657, |
|
"grad_norm": 2.1702749562184036, |
|
"learning_rate": 8.854765873974898e-06, |
|
"loss": 1.4362, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.5665236051502145, |
|
"grad_norm": 3.529361540533676, |
|
"learning_rate": 8.799225800722895e-06, |
|
"loss": 1.5103, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.5836909871244635, |
|
"grad_norm": 2.1288633137671362, |
|
"learning_rate": 8.742553740855507e-06, |
|
"loss": 1.4735, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.6008583690987126, |
|
"grad_norm": 4.653595880593553, |
|
"learning_rate": 8.684766579921684e-06, |
|
"loss": 1.4837, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.6180257510729614, |
|
"grad_norm": 4.183036771208689, |
|
"learning_rate": 8.625881535716883e-06, |
|
"loss": 1.4616, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.6351931330472103, |
|
"grad_norm": 3.583624228870984, |
|
"learning_rate": 8.565916153152982e-06, |
|
"loss": 1.452, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.652360515021459, |
|
"grad_norm": 2.9353226938465538, |
|
"learning_rate": 8.504888299030748e-06, |
|
"loss": 1.5003, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.6695278969957081, |
|
"grad_norm": 3.9811908362996387, |
|
"learning_rate": 8.442816156716386e-06, |
|
"loss": 1.4712, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.6866952789699572, |
|
"grad_norm": 4.507840289564422, |
|
"learning_rate": 8.379718220723772e-06, |
|
"loss": 1.5154, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.703862660944206, |
|
"grad_norm": 1.7351061691281462, |
|
"learning_rate": 8.315613291203977e-06, |
|
"loss": 1.5071, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.7210300429184548, |
|
"grad_norm": 2.1548506371633476, |
|
"learning_rate": 8.250520468343722e-06, |
|
"loss": 1.4847, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.738197424892704, |
|
"grad_norm": 2.915800387932283, |
|
"learning_rate": 8.184459146674447e-06, |
|
"loss": 1.5255, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.755364806866953, |
|
"grad_norm": 1.8127751226762168, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 1.4585, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.7725321888412018, |
|
"grad_norm": 3.2104447847604884, |
|
"learning_rate": 8.049510022000365e-06, |
|
"loss": 1.5145, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.7896995708154506, |
|
"grad_norm": 1.7409586719583194, |
|
"learning_rate": 7.980662427346127e-06, |
|
"loss": 1.5268, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.8068669527896994, |
|
"grad_norm": 4.142279628383546, |
|
"learning_rate": 7.910926738603855e-06, |
|
"loss": 1.4615, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.8240343347639485, |
|
"grad_norm": 3.877668467895349, |
|
"learning_rate": 7.84032373365578e-06, |
|
"loss": 1.465, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.8412017167381975, |
|
"grad_norm": 2.656628633387025, |
|
"learning_rate": 7.768874448802665e-06, |
|
"loss": 1.4922, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.8583690987124464, |
|
"grad_norm": 2.584883160911178, |
|
"learning_rate": 7.696600172495997e-06, |
|
"loss": 1.5179, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.8755364806866952, |
|
"grad_norm": 2.587010532927385, |
|
"learning_rate": 7.62352243899504e-06, |
|
"loss": 1.5014, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.8927038626609443, |
|
"grad_norm": 2.5107207102352915, |
|
"learning_rate": 7.5496630219506805e-06, |
|
"loss": 1.5064, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.909871244635193, |
|
"grad_norm": 2.0882468780951915, |
|
"learning_rate": 7.475043927917908e-06, |
|
"loss": 1.4503, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.9270386266094421, |
|
"grad_norm": 2.1486369526322924, |
|
"learning_rate": 7.399687389798933e-06, |
|
"loss": 1.5461, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.944206008583691, |
|
"grad_norm": 1.8242630296650162, |
|
"learning_rate": 7.323615860218844e-06, |
|
"loss": 1.4363, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.9613733905579398, |
|
"grad_norm": 2.145842755842824, |
|
"learning_rate": 7.246852004835807e-06, |
|
"loss": 1.4766, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.9785407725321889, |
|
"grad_norm": 1.6691578334566337, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 1.499, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.995708154506438, |
|
"grad_norm": 2.2108584637006863, |
|
"learning_rate": 7.091339003877826e-06, |
|
"loss": 1.4225, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.2108584637006863, |
|
"learning_rate": 7.012636193699838e-06, |
|
"loss": 1.5231, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.3721916675567627, |
|
"eval_runtime": 55.5761, |
|
"eval_samples_per_second": 6.478, |
|
"eval_steps_per_second": 0.216, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 2.017167381974249, |
|
"grad_norm": 3.7388997063310248, |
|
"learning_rate": 6.933333714707094e-06, |
|
"loss": 1.4963, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 2.0343347639484977, |
|
"grad_norm": 1.9374219671850537, |
|
"learning_rate": 6.8534551952253395e-06, |
|
"loss": 1.511, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 2.051502145922747, |
|
"grad_norm": 2.137490994962777, |
|
"learning_rate": 6.773024435212678e-06, |
|
"loss": 1.4785, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 2.0686695278969958, |
|
"grad_norm": 1.5840707110043721, |
|
"learning_rate": 6.692065399168352e-06, |
|
"loss": 1.4386, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 2.0858369098712446, |
|
"grad_norm": 2.0353319612062175, |
|
"learning_rate": 6.6106022089924535e-06, |
|
"loss": 1.3749, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 2.1030042918454934, |
|
"grad_norm": 1.8137709307149847, |
|
"learning_rate": 6.5286591367987655e-06, |
|
"loss": 1.4849, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 2.1201716738197427, |
|
"grad_norm": 2.7161222343926212, |
|
"learning_rate": 6.4462605976828395e-06, |
|
"loss": 1.4845, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 2.1373390557939915, |
|
"grad_norm": 1.3620483576209157, |
|
"learning_rate": 6.363431142447469e-06, |
|
"loss": 1.4977, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 2.1545064377682404, |
|
"grad_norm": 1.8781921403392814, |
|
"learning_rate": 6.280195450287736e-06, |
|
"loss": 1.4339, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 2.171673819742489, |
|
"grad_norm": 1.8249006642987649, |
|
"learning_rate": 6.1965783214377895e-06, |
|
"loss": 1.4837, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 2.188841201716738, |
|
"grad_norm": 1.4576334521913883, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 1.4642, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 2.2060085836909873, |
|
"grad_norm": 1.8699628551915364, |
|
"learning_rate": 6.028299515429683e-06, |
|
"loss": 1.3993, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 2.223175965665236, |
|
"grad_norm": 1.1893820742085242, |
|
"learning_rate": 5.943687977264584e-06, |
|
"loss": 1.3989, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 2.240343347639485, |
|
"grad_norm": 1.7116247132028521, |
|
"learning_rate": 5.858795265456382e-06, |
|
"loss": 1.4183, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 2.257510729613734, |
|
"grad_norm": 1.5102064250019567, |
|
"learning_rate": 5.773646673951406e-06, |
|
"loss": 1.457, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 2.274678111587983, |
|
"grad_norm": 1.2667706935607246, |
|
"learning_rate": 5.688267572935843e-06, |
|
"loss": 1.436, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 2.291845493562232, |
|
"grad_norm": 1.6963247346173285, |
|
"learning_rate": 5.6026834012766155e-06, |
|
"loss": 1.4965, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 2.3090128755364807, |
|
"grad_norm": 1.2569488026568798, |
|
"learning_rate": 5.51691965894185e-06, |
|
"loss": 1.4098, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 2.3261802575107295, |
|
"grad_norm": 1.1272036077401364, |
|
"learning_rate": 5.4310018994030974e-06, |
|
"loss": 1.4119, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 2.3433476394849784, |
|
"grad_norm": 1.673271865613411, |
|
"learning_rate": 5.3449557220216245e-06, |
|
"loss": 1.4661, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 2.3605150214592276, |
|
"grad_norm": 1.2078563404488671, |
|
"learning_rate": 5.258806764421048e-06, |
|
"loss": 1.4497, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 2.3776824034334765, |
|
"grad_norm": 1.308716503746094, |
|
"learning_rate": 5.172580694848541e-06, |
|
"loss": 1.4351, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 2.3948497854077253, |
|
"grad_norm": 1.6067743882863361, |
|
"learning_rate": 5.0863032045269435e-06, |
|
"loss": 1.4017, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 2.412017167381974, |
|
"grad_norm": 1.15329126746228, |
|
"learning_rate": 5e-06, |
|
"loss": 1.3885, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 2.429184549356223, |
|
"grad_norm": 1.3029425990628287, |
|
"learning_rate": 4.913696795473058e-06, |
|
"loss": 1.4686, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 2.4463519313304722, |
|
"grad_norm": 1.3782312780671255, |
|
"learning_rate": 4.827419305151461e-06, |
|
"loss": 1.4472, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 2.463519313304721, |
|
"grad_norm": 1.1387922784921702, |
|
"learning_rate": 4.741193235578953e-06, |
|
"loss": 1.4717, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 2.48068669527897, |
|
"grad_norm": 1.4489271450805266, |
|
"learning_rate": 4.6550442779783755e-06, |
|
"loss": 1.4325, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 2.4978540772532187, |
|
"grad_norm": 1.3782527667091915, |
|
"learning_rate": 4.568998100596903e-06, |
|
"loss": 1.405, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 2.5150214592274676, |
|
"grad_norm": 1.2668976356990511, |
|
"learning_rate": 4.4830803410581506e-06, |
|
"loss": 1.4427, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 2.532188841201717, |
|
"grad_norm": 1.4950710057650962, |
|
"learning_rate": 4.397316598723385e-06, |
|
"loss": 1.4714, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 2.5493562231759657, |
|
"grad_norm": 1.0576970824111924, |
|
"learning_rate": 4.31173242706416e-06, |
|
"loss": 1.3698, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 2.5665236051502145, |
|
"grad_norm": 1.6518504203265545, |
|
"learning_rate": 4.226353326048594e-06, |
|
"loss": 1.4462, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 2.5836909871244638, |
|
"grad_norm": 1.0690977885341415, |
|
"learning_rate": 4.14120473454362e-06, |
|
"loss": 1.4094, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 2.6008583690987126, |
|
"grad_norm": 1.2319353745161556, |
|
"learning_rate": 4.056312022735417e-06, |
|
"loss": 1.4279, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 2.6180257510729614, |
|
"grad_norm": 1.4981453391586554, |
|
"learning_rate": 3.9717004845703175e-06, |
|
"loss": 1.404, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.6351931330472103, |
|
"grad_norm": 1.1718194614512722, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 1.3936, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 2.652360515021459, |
|
"grad_norm": 1.382020649765366, |
|
"learning_rate": 3.803421678562213e-06, |
|
"loss": 1.4432, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.6695278969957084, |
|
"grad_norm": 1.0295860006980109, |
|
"learning_rate": 3.7198045497122647e-06, |
|
"loss": 1.4189, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.686695278969957, |
|
"grad_norm": 1.6145284029231795, |
|
"learning_rate": 3.6365688575525315e-06, |
|
"loss": 1.4579, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.703862660944206, |
|
"grad_norm": 1.1519727791811123, |
|
"learning_rate": 3.553739402317162e-06, |
|
"loss": 1.4404, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.721030042918455, |
|
"grad_norm": 1.6702535379739811, |
|
"learning_rate": 3.471340863201237e-06, |
|
"loss": 1.4247, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.7381974248927037, |
|
"grad_norm": 1.4413775372344653, |
|
"learning_rate": 3.389397791007548e-06, |
|
"loss": 1.4669, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.755364806866953, |
|
"grad_norm": 1.6769364420250672, |
|
"learning_rate": 3.307934600831648e-06, |
|
"loss": 1.3987, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.772532188841202, |
|
"grad_norm": 1.4127696099779041, |
|
"learning_rate": 3.226975564787322e-06, |
|
"loss": 1.4577, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.7896995708154506, |
|
"grad_norm": 1.3981396160008468, |
|
"learning_rate": 3.1465448047746626e-06, |
|
"loss": 1.4643, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.8068669527896994, |
|
"grad_norm": 1.1640956539171732, |
|
"learning_rate": 3.0666662852929063e-06, |
|
"loss": 1.4127, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.8240343347639483, |
|
"grad_norm": 1.524450627139972, |
|
"learning_rate": 2.9873638063001633e-06, |
|
"loss": 1.4114, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.8412017167381975, |
|
"grad_norm": 1.3456136782749846, |
|
"learning_rate": 2.9086609961221758e-06, |
|
"loss": 1.435, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.8583690987124464, |
|
"grad_norm": 1.2127544846215854, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 1.464, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.875536480686695, |
|
"grad_norm": 1.1389112761416087, |
|
"learning_rate": 2.7531479951641928e-06, |
|
"loss": 1.4468, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.8927038626609445, |
|
"grad_norm": 1.2715694866770404, |
|
"learning_rate": 2.6763841397811576e-06, |
|
"loss": 1.451, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.909871244635193, |
|
"grad_norm": 1.2899452182832583, |
|
"learning_rate": 2.6003126102010696e-06, |
|
"loss": 1.3969, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.927038626609442, |
|
"grad_norm": 1.1876736667702164, |
|
"learning_rate": 2.524956072082093e-06, |
|
"loss": 1.4894, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.944206008583691, |
|
"grad_norm": 1.0044043551342703, |
|
"learning_rate": 2.450336978049322e-06, |
|
"loss": 1.3833, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.96137339055794, |
|
"grad_norm": 1.1006453119175361, |
|
"learning_rate": 2.37647756100496e-06, |
|
"loss": 1.422, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.978540772532189, |
|
"grad_norm": 1.1339844183954066, |
|
"learning_rate": 2.3033998275040047e-06, |
|
"loss": 1.4416, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.995708154506438, |
|
"grad_norm": 1.0443241596589774, |
|
"learning_rate": 2.2311255511973347e-06, |
|
"loss": 1.3739, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 1.0443241596589774, |
|
"learning_rate": 2.159676266344222e-06, |
|
"loss": 1.4492, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.3466758728027344, |
|
"eval_runtime": 55.4567, |
|
"eval_samples_per_second": 6.492, |
|
"eval_steps_per_second": 0.216, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 3.017167381974249, |
|
"grad_norm": 2.208608681250419, |
|
"learning_rate": 2.089073261396148e-06, |
|
"loss": 1.4496, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 3.0343347639484977, |
|
"grad_norm": 2.068961082241978, |
|
"learning_rate": 2.0193375726538737e-06, |
|
"loss": 1.4599, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 3.051502145922747, |
|
"grad_norm": 1.2686715786339267, |
|
"learning_rate": 1.9504899779996354e-06, |
|
"loss": 1.4241, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 3.0686695278969958, |
|
"grad_norm": 0.9766943956912364, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 1.3856, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 3.0858369098712446, |
|
"grad_norm": 0.9570891019240637, |
|
"learning_rate": 1.8155408533255553e-06, |
|
"loss": 1.3267, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 3.1030042918454934, |
|
"grad_norm": 0.9858819209987062, |
|
"learning_rate": 1.7494795316562791e-06, |
|
"loss": 1.4371, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 3.1201716738197427, |
|
"grad_norm": 0.9400585321740289, |
|
"learning_rate": 1.6843867087960252e-06, |
|
"loss": 1.4347, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 3.1373390557939915, |
|
"grad_norm": 1.0563349262899193, |
|
"learning_rate": 1.6202817792762283e-06, |
|
"loss": 1.4431, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 3.1545064377682404, |
|
"grad_norm": 0.9217174697986299, |
|
"learning_rate": 1.557183843283614e-06, |
|
"loss": 1.3872, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 3.171673819742489, |
|
"grad_norm": 0.9537020658805889, |
|
"learning_rate": 1.4951117009692528e-06, |
|
"loss": 1.4342, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 3.188841201716738, |
|
"grad_norm": 0.931415129363885, |
|
"learning_rate": 1.4340838468470198e-06, |
|
"loss": 1.4167, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 3.2060085836909873, |
|
"grad_norm": 0.8848653838809811, |
|
"learning_rate": 1.374118464283119e-06, |
|
"loss": 1.3483, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 3.223175965665236, |
|
"grad_norm": 0.8999049162268763, |
|
"learning_rate": 1.3152334200783167e-06, |
|
"loss": 1.3486, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 3.240343347639485, |
|
"grad_norm": 0.913466824589116, |
|
"learning_rate": 1.257446259144494e-06, |
|
"loss": 1.372, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 3.257510729613734, |
|
"grad_norm": 0.9910112277396748, |
|
"learning_rate": 1.2007741992771065e-06, |
|
"loss": 1.4064, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 3.274678111587983, |
|
"grad_norm": 0.883357238281907, |
|
"learning_rate": 1.145234126025102e-06, |
|
"loss": 1.3869, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 3.291845493562232, |
|
"grad_norm": 0.9234833713444595, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 1.4469, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 3.3090128755364807, |
|
"grad_norm": 0.859382750295263, |
|
"learning_rate": 1.037615790244549e-06, |
|
"loss": 1.3571, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 3.3261802575107295, |
|
"grad_norm": 0.815275008698639, |
|
"learning_rate": 9.85569592805588e-07, |
|
"loss": 1.3635, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 3.3433476394849784, |
|
"grad_norm": 0.8740357211724161, |
|
"learning_rate": 9.347195026073369e-07, |
|
"loss": 1.4195, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 3.3605150214592276, |
|
"grad_norm": 0.8549841652198367, |
|
"learning_rate": 8.850806705317183e-07, |
|
"loss": 1.4011, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 3.3776824034334765, |
|
"grad_norm": 0.8255973582968239, |
|
"learning_rate": 8.366678865639688e-07, |
|
"loss": 1.3907, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 3.3948497854077253, |
|
"grad_norm": 0.8469777726661422, |
|
"learning_rate": 7.894955753859412e-07, |
|
"loss": 1.3586, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 3.412017167381974, |
|
"grad_norm": 0.84555567071041, |
|
"learning_rate": 7.435777920782444e-07, |
|
"loss": 1.344, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 3.429184549356223, |
|
"grad_norm": 0.8100696398102393, |
|
"learning_rate": 6.989282179324963e-07, |
|
"loss": 1.4253, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 3.4463519313304722, |
|
"grad_norm": 0.853525501890134, |
|
"learning_rate": 6.555601563749675e-07, |
|
"loss": 1.4028, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 3.463519313304721, |
|
"grad_norm": 2.5389247756358455, |
|
"learning_rate": 6.134865290027903e-07, |
|
"loss": 1.4282, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 3.48068669527897, |
|
"grad_norm": 1.2453666685589984, |
|
"learning_rate": 5.727198717339511e-07, |
|
"loss": 1.3921, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 3.4978540772532187, |
|
"grad_norm": 0.7642367373812298, |
|
"learning_rate": 5.332723310721855e-07, |
|
"loss": 1.365, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 3.5150214592274676, |
|
"grad_norm": 0.9270502698949785, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 1.4041, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 3.532188841201717, |
|
"grad_norm": 0.8098170144667726, |
|
"learning_rate": 4.5838121691622995e-07, |
|
"loss": 1.4315, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 3.5493562231759657, |
|
"grad_norm": 0.7857989969064568, |
|
"learning_rate": 4.2295995737316854e-07, |
|
"loss": 1.332, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 3.5665236051502145, |
|
"grad_norm": 0.8648677468818124, |
|
"learning_rate": 3.8890243569094874e-07, |
|
"loss": 1.4088, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 3.5836909871244638, |
|
"grad_norm": 0.8148105107754193, |
|
"learning_rate": 3.5621879937348836e-07, |
|
"loss": 1.3708, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 3.6008583690987126, |
|
"grad_norm": 0.8784091247812043, |
|
"learning_rate": 3.2491878657292643e-07, |
|
"loss": 1.3931, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 3.6180257510729614, |
|
"grad_norm": 0.8314788876032824, |
|
"learning_rate": 2.9501172318811834e-07, |
|
"loss": 1.3706, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 3.6351931330472103, |
|
"grad_norm": 0.8122583055213561, |
|
"learning_rate": 2.6650652008597067e-07, |
|
"loss": 1.3595, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 3.652360515021459, |
|
"grad_norm": 0.8988415508956524, |
|
"learning_rate": 2.394116704464294e-07, |
|
"loss": 1.4083, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 3.6695278969957084, |
|
"grad_norm": 0.7489347389916948, |
|
"learning_rate": 2.137352472319215e-07, |
|
"loss": 1.3862, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 3.686695278969957, |
|
"grad_norm": 0.8026525023223839, |
|
"learning_rate": 1.8948490078199767e-07, |
|
"loss": 1.4263, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 3.703862660944206, |
|
"grad_norm": 0.8691178194849434, |
|
"learning_rate": 1.666678565339025e-07, |
|
"loss": 1.4107, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 3.721030042918455, |
|
"grad_norm": 0.8295342705741752, |
|
"learning_rate": 1.4529091286973994e-07, |
|
"loss": 1.3977, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 3.7381974248927037, |
|
"grad_norm": 0.9952946458735904, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 1.44, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 3.755364806866953, |
|
"grad_norm": 0.8929004736914973, |
|
"learning_rate": 1.0688237352022346e-07, |
|
"loss": 1.3744, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 3.772532188841202, |
|
"grad_norm": 0.7817159407340549, |
|
"learning_rate": 8.986222173284876e-08, |
|
"loss": 1.4323, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 3.7896995708154506, |
|
"grad_norm": 0.7269772041119471, |
|
"learning_rate": 7.430505491563101e-08, |
|
"loss": 1.4378, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 3.8068669527896994, |
|
"grad_norm": 0.846076015787421, |
|
"learning_rate": 6.021550835626777e-08, |
|
"loss": 1.3876, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 3.8240343347639483, |
|
"grad_norm": 0.7622844494333099, |
|
"learning_rate": 4.759778006218407e-08, |
|
"loss": 1.3879, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 3.8412017167381975, |
|
"grad_norm": 0.7517386279788537, |
|
"learning_rate": 3.645562950973014e-08, |
|
"loss": 1.4094, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 3.8583690987124464, |
|
"grad_norm": 0.7555615969516848, |
|
"learning_rate": 2.6792376524036878e-08, |
|
"loss": 1.4385, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 3.875536480686695, |
|
"grad_norm": 0.759032041862736, |
|
"learning_rate": 1.8610900289867673e-08, |
|
"loss": 1.4219, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 3.8927038626609445, |
|
"grad_norm": 0.8896057625292756, |
|
"learning_rate": 1.1913638493762369e-08, |
|
"loss": 1.4286, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 3.909871244635193, |
|
"grad_norm": 0.8049879121289703, |
|
"learning_rate": 6.702586597719385e-09, |
|
"loss": 1.3756, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 3.927038626609442, |
|
"grad_norm": 0.8081459120471564, |
|
"learning_rate": 2.9792972446479605e-09, |
|
"loss": 1.4649, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 3.944206008583691, |
|
"grad_norm": 0.7889695020980092, |
|
"learning_rate": 7.448797957526621e-10, |
|
"loss": 1.3612, |
|
"step": 232 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 232, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 29, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.9152650971882455e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |