|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.992, |
|
"eval_steps": 500, |
|
"global_step": 390, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 6.505747529855917, |
|
"learning_rate": 1.0256410256410257e-06, |
|
"loss": 1.037, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 6.560852269618801, |
|
"learning_rate": 2.0512820512820513e-06, |
|
"loss": 1.0615, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0384, |
|
"grad_norm": 6.297724789047534, |
|
"learning_rate": 3.0769230769230774e-06, |
|
"loss": 1.0368, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0512, |
|
"grad_norm": 5.873504662538556, |
|
"learning_rate": 4.102564102564103e-06, |
|
"loss": 1.0204, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.064, |
|
"grad_norm": 4.260219296200858, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 0.9878, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0768, |
|
"grad_norm": 2.8226498262289894, |
|
"learning_rate": 6.153846153846155e-06, |
|
"loss": 0.9503, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0896, |
|
"grad_norm": 2.4814856283348576, |
|
"learning_rate": 7.17948717948718e-06, |
|
"loss": 0.9426, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1024, |
|
"grad_norm": 4.4472339963511995, |
|
"learning_rate": 8.205128205128205e-06, |
|
"loss": 0.9631, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.1152, |
|
"grad_norm": 4.2452868354337046, |
|
"learning_rate": 9.230769230769232e-06, |
|
"loss": 0.9487, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.128, |
|
"grad_norm": 4.073394526067116, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 0.926, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1408, |
|
"grad_norm": 3.5593666985232364, |
|
"learning_rate": 1.1282051282051283e-05, |
|
"loss": 0.8998, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1536, |
|
"grad_norm": 2.503183770041401, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 0.8677, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1664, |
|
"grad_norm": 1.6581259245716637, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8532, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1792, |
|
"grad_norm": 1.6247963160720575, |
|
"learning_rate": 1.435897435897436e-05, |
|
"loss": 0.8214, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.192, |
|
"grad_norm": 1.3657729627592703, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 0.8154, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2048, |
|
"grad_norm": 1.345374794880054, |
|
"learning_rate": 1.641025641025641e-05, |
|
"loss": 0.8029, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.2176, |
|
"grad_norm": 1.1040494988871448, |
|
"learning_rate": 1.7435897435897438e-05, |
|
"loss": 0.784, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.2304, |
|
"grad_norm": 1.1197287715806619, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.7778, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.2432, |
|
"grad_norm": 1.1386731281098734, |
|
"learning_rate": 1.9487179487179488e-05, |
|
"loss": 0.7733, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.256, |
|
"grad_norm": 1.2028279856088686, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.7695, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2688, |
|
"grad_norm": 0.9650970506021546, |
|
"learning_rate": 2.153846153846154e-05, |
|
"loss": 0.7588, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2816, |
|
"grad_norm": 1.2234719505580478, |
|
"learning_rate": 2.2564102564102566e-05, |
|
"loss": 0.742, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2944, |
|
"grad_norm": 1.0259658673857637, |
|
"learning_rate": 2.3589743589743593e-05, |
|
"loss": 0.7614, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3072, |
|
"grad_norm": 1.10643945152428, |
|
"learning_rate": 2.461538461538462e-05, |
|
"loss": 0.7286, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1095769010970096, |
|
"learning_rate": 2.5641025641025646e-05, |
|
"loss": 0.737, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.3328, |
|
"grad_norm": 0.9761591577884013, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7132, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.3456, |
|
"grad_norm": 0.9290069048161637, |
|
"learning_rate": 2.7692307692307694e-05, |
|
"loss": 0.7003, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.3584, |
|
"grad_norm": 0.8281825117654518, |
|
"learning_rate": 2.871794871794872e-05, |
|
"loss": 0.7189, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3712, |
|
"grad_norm": 0.9108454848461486, |
|
"learning_rate": 2.9743589743589747e-05, |
|
"loss": 0.7383, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.384, |
|
"grad_norm": 0.874417940004658, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.707, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3968, |
|
"grad_norm": 1.1312159124204537, |
|
"learning_rate": 3.1794871794871795e-05, |
|
"loss": 0.704, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4096, |
|
"grad_norm": 1.0176318141391008, |
|
"learning_rate": 3.282051282051282e-05, |
|
"loss": 0.7043, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.4224, |
|
"grad_norm": 0.839050900261088, |
|
"learning_rate": 3.384615384615385e-05, |
|
"loss": 0.6938, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.4352, |
|
"grad_norm": 0.9910989495874221, |
|
"learning_rate": 3.4871794871794875e-05, |
|
"loss": 0.7013, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.448, |
|
"grad_norm": 1.053641939312259, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.7013, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4608, |
|
"grad_norm": 1.0830842904943496, |
|
"learning_rate": 3.692307692307693e-05, |
|
"loss": 0.6812, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4736, |
|
"grad_norm": 1.2965657706346354, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.6924, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4864, |
|
"grad_norm": 0.7067387238358549, |
|
"learning_rate": 3.8974358974358976e-05, |
|
"loss": 0.6998, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.4992, |
|
"grad_norm": 0.8299809452023478, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6843, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.512, |
|
"grad_norm": 1.0303843817695786, |
|
"learning_rate": 3.9999198907597046e-05, |
|
"loss": 0.683, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5248, |
|
"grad_norm": 1.336041158895854, |
|
"learning_rate": 3.9996795694563096e-05, |
|
"loss": 0.7029, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5376, |
|
"grad_norm": 1.2263901719907044, |
|
"learning_rate": 3.999279055341771e-05, |
|
"loss": 0.7072, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5504, |
|
"grad_norm": 1.0529146573986454, |
|
"learning_rate": 3.998718380500971e-05, |
|
"loss": 0.6608, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5632, |
|
"grad_norm": 1.586204252591105, |
|
"learning_rate": 3.997997589849145e-05, |
|
"loss": 0.6938, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.576, |
|
"grad_norm": 1.0880221516969555, |
|
"learning_rate": 3.9971167411282835e-05, |
|
"loss": 0.67, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5888, |
|
"grad_norm": 1.7002247914736648, |
|
"learning_rate": 3.99607590490251e-05, |
|
"loss": 0.6744, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6016, |
|
"grad_norm": 1.4497178148540242, |
|
"learning_rate": 3.9948751645524235e-05, |
|
"loss": 0.6817, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6144, |
|
"grad_norm": 1.2280463384232854, |
|
"learning_rate": 3.9935146162684206e-05, |
|
"loss": 0.6685, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6272, |
|
"grad_norm": 1.4957243167941556, |
|
"learning_rate": 3.9919943690429906e-05, |
|
"loss": 0.6629, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.3135523978551822, |
|
"learning_rate": 3.9903145446619837e-05, |
|
"loss": 0.6803, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6528, |
|
"grad_norm": 1.6125339909399647, |
|
"learning_rate": 3.9884752776948564e-05, |
|
"loss": 0.6683, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6656, |
|
"grad_norm": 1.5305881479764225, |
|
"learning_rate": 3.9864767154838864e-05, |
|
"loss": 0.682, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6784, |
|
"grad_norm": 1.1001238047348845, |
|
"learning_rate": 3.9843190181323744e-05, |
|
"loss": 0.6949, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.6912, |
|
"grad_norm": 1.5798483939223087, |
|
"learning_rate": 3.982002358491817e-05, |
|
"loss": 0.6673, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.704, |
|
"grad_norm": 1.0082766354607982, |
|
"learning_rate": 3.979526922148058e-05, |
|
"loss": 0.6703, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7168, |
|
"grad_norm": 1.3422788359537228, |
|
"learning_rate": 3.9768929074064206e-05, |
|
"loss": 0.693, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7296, |
|
"grad_norm": 1.059295829997347, |
|
"learning_rate": 3.9741005252758255e-05, |
|
"loss": 0.683, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7424, |
|
"grad_norm": 0.8788638110720364, |
|
"learning_rate": 3.971149999451886e-05, |
|
"loss": 0.656, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7552, |
|
"grad_norm": 1.0686729053217592, |
|
"learning_rate": 3.9680415662989806e-05, |
|
"loss": 0.6692, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.768, |
|
"grad_norm": 1.1201463636193725, |
|
"learning_rate": 3.9647754748313294e-05, |
|
"loss": 0.6862, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7808, |
|
"grad_norm": 0.8559069553016561, |
|
"learning_rate": 3.96135198669304e-05, |
|
"loss": 0.67, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.7936, |
|
"grad_norm": 0.9594052865179052, |
|
"learning_rate": 3.957771376137144e-05, |
|
"loss": 0.6715, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8064, |
|
"grad_norm": 1.1211901210353805, |
|
"learning_rate": 3.954033930003634e-05, |
|
"loss": 0.6512, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8192, |
|
"grad_norm": 0.7307537653308905, |
|
"learning_rate": 3.9501399476964806e-05, |
|
"loss": 0.6396, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.832, |
|
"grad_norm": 1.1601355647681502, |
|
"learning_rate": 3.946089741159648e-05, |
|
"loss": 0.6751, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8448, |
|
"grad_norm": 0.8086148186887785, |
|
"learning_rate": 3.9418836348521045e-05, |
|
"loss": 0.6772, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8576, |
|
"grad_norm": 0.6390400829517844, |
|
"learning_rate": 3.937521965721831e-05, |
|
"loss": 0.6463, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8704, |
|
"grad_norm": 0.8517679495121439, |
|
"learning_rate": 3.933005083178828e-05, |
|
"loss": 0.6379, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8832, |
|
"grad_norm": 0.6404387302507338, |
|
"learning_rate": 3.928333349067125e-05, |
|
"loss": 0.6485, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.896, |
|
"grad_norm": 1.094204843701778, |
|
"learning_rate": 3.923507137635792e-05, |
|
"loss": 0.6492, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9088, |
|
"grad_norm": 0.9895790429658228, |
|
"learning_rate": 3.9185268355089606e-05, |
|
"loss": 0.6575, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.9216, |
|
"grad_norm": 0.5129909217790354, |
|
"learning_rate": 3.913392841654851e-05, |
|
"loss": 0.6607, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9344, |
|
"grad_norm": 0.6517715838303199, |
|
"learning_rate": 3.9081055673538093e-05, |
|
"loss": 0.6561, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.9472, |
|
"grad_norm": 0.6016007228527741, |
|
"learning_rate": 3.902665436165364e-05, |
|
"loss": 0.6328, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.6075697725308501, |
|
"learning_rate": 3.897072883894291e-05, |
|
"loss": 0.6736, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.9728, |
|
"grad_norm": 0.5801981604462901, |
|
"learning_rate": 3.8913283585557054e-05, |
|
"loss": 0.6588, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.9856, |
|
"grad_norm": 0.5203710553237849, |
|
"learning_rate": 3.885432320339167e-05, |
|
"loss": 0.632, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.9984, |
|
"grad_norm": 0.6212761462486293, |
|
"learning_rate": 3.879385241571817e-05, |
|
"loss": 0.6394, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.0112, |
|
"grad_norm": 0.6278311348239496, |
|
"learning_rate": 3.873187606680543e-05, |
|
"loss": 0.6234, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.024, |
|
"grad_norm": 0.5673895435016599, |
|
"learning_rate": 3.866839912153168e-05, |
|
"loss": 0.5986, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0368, |
|
"grad_norm": 0.9310910409868183, |
|
"learning_rate": 3.860342666498677e-05, |
|
"loss": 0.6006, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0496, |
|
"grad_norm": 1.5466451511298254, |
|
"learning_rate": 3.853696390206484e-05, |
|
"loss": 0.607, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0624, |
|
"grad_norm": 0.7332847552442167, |
|
"learning_rate": 3.846901615704734e-05, |
|
"loss": 0.6087, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0752, |
|
"grad_norm": 1.01742608188952, |
|
"learning_rate": 3.839958887317649e-05, |
|
"loss": 0.6032, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.088, |
|
"grad_norm": 1.06572274158088, |
|
"learning_rate": 3.832868761221926e-05, |
|
"loss": 0.6019, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1008, |
|
"grad_norm": 0.7436152339326781, |
|
"learning_rate": 3.825631805402182e-05, |
|
"loss": 0.5743, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.1136, |
|
"grad_norm": 0.758140015099645, |
|
"learning_rate": 3.818248599605448e-05, |
|
"loss": 0.5779, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1264, |
|
"grad_norm": 0.7580395238395826, |
|
"learning_rate": 3.810719735294731e-05, |
|
"loss": 0.6161, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1392, |
|
"grad_norm": 0.6451558758941932, |
|
"learning_rate": 3.8030458156016326e-05, |
|
"loss": 0.5793, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.152, |
|
"grad_norm": 0.8956814143718566, |
|
"learning_rate": 3.795227455278029e-05, |
|
"loss": 0.5856, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1648, |
|
"grad_norm": 0.8324646477305917, |
|
"learning_rate": 3.787265280646825e-05, |
|
"loss": 0.5738, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1776, |
|
"grad_norm": 0.8324848310252281, |
|
"learning_rate": 3.7791599295517825e-05, |
|
"loss": 0.5907, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.1904, |
|
"grad_norm": 0.7151899660755547, |
|
"learning_rate": 3.7709120513064196e-05, |
|
"loss": 0.5741, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2032, |
|
"grad_norm": 0.6955181797117225, |
|
"learning_rate": 3.762522306641998e-05, |
|
"loss": 0.5837, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.216, |
|
"grad_norm": 0.6526987338049262, |
|
"learning_rate": 3.7539913676545874e-05, |
|
"loss": 0.587, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2288000000000001, |
|
"grad_norm": 0.8216393160611759, |
|
"learning_rate": 3.745319917751229e-05, |
|
"loss": 0.5854, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2416, |
|
"grad_norm": 0.6978494971788809, |
|
"learning_rate": 3.736508651595188e-05, |
|
"loss": 0.5832, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2544, |
|
"grad_norm": 0.6245459269391584, |
|
"learning_rate": 3.727558275050301e-05, |
|
"loss": 0.5752, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2671999999999999, |
|
"grad_norm": 0.705216437162525, |
|
"learning_rate": 3.718469505124434e-05, |
|
"loss": 0.5873, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.7065243017186413, |
|
"learning_rate": 3.709243069912041e-05, |
|
"loss": 0.5821, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.2928, |
|
"grad_norm": 0.7005390865524744, |
|
"learning_rate": 3.699879708535838e-05, |
|
"loss": 0.5779, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3056, |
|
"grad_norm": 0.7017453290655963, |
|
"learning_rate": 3.69038017108759e-05, |
|
"loss": 0.5736, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3184, |
|
"grad_norm": 0.665455906086348, |
|
"learning_rate": 3.680745218568026e-05, |
|
"loss": 0.5627, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3312, |
|
"grad_norm": 0.6040822448749334, |
|
"learning_rate": 3.6709756228258735e-05, |
|
"loss": 0.5879, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3439999999999999, |
|
"grad_norm": 0.594803911451487, |
|
"learning_rate": 3.6610721664960236e-05, |
|
"loss": 0.5819, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3568, |
|
"grad_norm": 0.485398515306605, |
|
"learning_rate": 3.65103564293684e-05, |
|
"loss": 0.5795, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3696, |
|
"grad_norm": 0.5218022624908898, |
|
"learning_rate": 3.640866856166601e-05, |
|
"loss": 0.5602, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.3824, |
|
"grad_norm": 0.5054385639391414, |
|
"learning_rate": 3.6305666207990886e-05, |
|
"loss": 0.5861, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.3952, |
|
"grad_norm": 0.5199852072862294, |
|
"learning_rate": 3.6201357619783336e-05, |
|
"loss": 0.5827, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.408, |
|
"grad_norm": 0.4637707990598707, |
|
"learning_rate": 3.609575115312511e-05, |
|
"loss": 0.5847, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4208, |
|
"grad_norm": 0.5291293842644805, |
|
"learning_rate": 3.598885526807003e-05, |
|
"loss": 0.5643, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4336, |
|
"grad_norm": 0.5043363436493806, |
|
"learning_rate": 3.5880678527966224e-05, |
|
"loss": 0.5764, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4464000000000001, |
|
"grad_norm": 0.47242650603067554, |
|
"learning_rate": 3.577122959877017e-05, |
|
"loss": 0.5755, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4592, |
|
"grad_norm": 0.5518150964455962, |
|
"learning_rate": 3.566051724835245e-05, |
|
"loss": 0.568, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.472, |
|
"grad_norm": 0.5224071175894263, |
|
"learning_rate": 3.554855034579532e-05, |
|
"loss": 0.5897, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.4848, |
|
"grad_norm": 0.5344847267477552, |
|
"learning_rate": 3.5435337860682304e-05, |
|
"loss": 0.5757, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.4976, |
|
"grad_norm": 0.5219457944856948, |
|
"learning_rate": 3.532088886237956e-05, |
|
"loss": 0.5792, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5104, |
|
"grad_norm": 0.5259599717929431, |
|
"learning_rate": 3.520521251930941e-05, |
|
"loss": 0.5787, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5232, |
|
"grad_norm": 0.5765311889493613, |
|
"learning_rate": 3.5088318098215805e-05, |
|
"loss": 0.5843, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.536, |
|
"grad_norm": 0.5327677035476945, |
|
"learning_rate": 3.497021496342203e-05, |
|
"loss": 0.574, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5488, |
|
"grad_norm": 0.5930645571466301, |
|
"learning_rate": 3.485091257608047e-05, |
|
"loss": 0.5751, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5615999999999999, |
|
"grad_norm": 0.6342759940845253, |
|
"learning_rate": 3.473042049341474e-05, |
|
"loss": 0.5813, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5744, |
|
"grad_norm": 0.48893214339925417, |
|
"learning_rate": 3.4608748367954064e-05, |
|
"loss": 0.5831, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.5872000000000002, |
|
"grad_norm": 0.5647678112481018, |
|
"learning_rate": 3.4485905946759965e-05, |
|
"loss": 0.5629, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.449133967218433, |
|
"learning_rate": 3.4361903070645484e-05, |
|
"loss": 0.5683, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6128, |
|
"grad_norm": 0.5063749980986962, |
|
"learning_rate": 3.423674967338681e-05, |
|
"loss": 0.5823, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6256, |
|
"grad_norm": 0.5384760726674587, |
|
"learning_rate": 3.411045578092754e-05, |
|
"loss": 0.5768, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6383999999999999, |
|
"grad_norm": 0.49653353728746846, |
|
"learning_rate": 3.398303151057543e-05, |
|
"loss": 0.5755, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6512, |
|
"grad_norm": 0.533339384381669, |
|
"learning_rate": 3.385448707019199e-05, |
|
"loss": 0.5857, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6640000000000001, |
|
"grad_norm": 0.5120960901069407, |
|
"learning_rate": 3.372483275737468e-05, |
|
"loss": 0.5737, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6768, |
|
"grad_norm": 0.543183043341828, |
|
"learning_rate": 3.359407895863199e-05, |
|
"loss": 0.5869, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.6896, |
|
"grad_norm": 0.5169503919418712, |
|
"learning_rate": 3.34622361485514e-05, |
|
"loss": 0.5902, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7024, |
|
"grad_norm": 0.5039442241500608, |
|
"learning_rate": 3.332931488896029e-05, |
|
"loss": 0.5958, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7151999999999998, |
|
"grad_norm": 0.45321280216649795, |
|
"learning_rate": 3.319532582807977e-05, |
|
"loss": 0.588, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.728, |
|
"grad_norm": 0.5556043673374238, |
|
"learning_rate": 3.30602796996717e-05, |
|
"loss": 0.5655, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7408000000000001, |
|
"grad_norm": 0.5973369067030252, |
|
"learning_rate": 3.2924187322178865e-05, |
|
"loss": 0.5672, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7536, |
|
"grad_norm": 0.49639523554292275, |
|
"learning_rate": 3.278705959785821e-05, |
|
"loss": 0.5619, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7664, |
|
"grad_norm": 0.5340197021946086, |
|
"learning_rate": 3.2648907511907544e-05, |
|
"loss": 0.5494, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.7792, |
|
"grad_norm": 0.6491880061183375, |
|
"learning_rate": 3.250974213158555e-05, |
|
"loss": 0.5813, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.792, |
|
"grad_norm": 0.546412539608057, |
|
"learning_rate": 3.23695746053251e-05, |
|
"loss": 0.5727, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8048, |
|
"grad_norm": 0.4885545484226667, |
|
"learning_rate": 3.222841616184025e-05, |
|
"loss": 0.5766, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.8176, |
|
"grad_norm": 0.46543308886146867, |
|
"learning_rate": 3.208627810922665e-05, |
|
"loss": 0.5784, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8304, |
|
"grad_norm": 0.4769303758780081, |
|
"learning_rate": 3.194317183405573e-05, |
|
"loss": 0.5633, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.8432, |
|
"grad_norm": 0.4622281712035914, |
|
"learning_rate": 3.1799108800462466e-05, |
|
"loss": 0.5903, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8559999999999999, |
|
"grad_norm": 0.4080778019883451, |
|
"learning_rate": 3.1654100549227024e-05, |
|
"loss": 0.5732, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.8688, |
|
"grad_norm": 0.4902378466504111, |
|
"learning_rate": 3.1508158696850275e-05, |
|
"loss": 0.5958, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.8816000000000002, |
|
"grad_norm": 0.5328465121163308, |
|
"learning_rate": 3.136129493462312e-05, |
|
"loss": 0.5791, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.8944, |
|
"grad_norm": 0.41319230509186344, |
|
"learning_rate": 3.121352102768998e-05, |
|
"loss": 0.5796, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.9072, |
|
"grad_norm": 0.5124687104015371, |
|
"learning_rate": 3.106484881410628e-05, |
|
"loss": 0.5869, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.3990570138333518, |
|
"learning_rate": 3.091529020389009e-05, |
|
"loss": 0.5798, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9327999999999999, |
|
"grad_norm": 0.5701054658943702, |
|
"learning_rate": 3.076485717806808e-05, |
|
"loss": 0.57, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.9456, |
|
"grad_norm": 0.49262188689391084, |
|
"learning_rate": 3.061356178771564e-05, |
|
"loss": 0.588, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9584000000000001, |
|
"grad_norm": 0.4493359276428374, |
|
"learning_rate": 3.0461416152991555e-05, |
|
"loss": 0.6051, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.9712, |
|
"grad_norm": 0.4376021711271567, |
|
"learning_rate": 3.0308432462167045e-05, |
|
"loss": 0.5903, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.984, |
|
"grad_norm": 0.45186829891460095, |
|
"learning_rate": 3.015462297064936e-05, |
|
"loss": 0.5681, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.9968, |
|
"grad_norm": 0.39621802994504, |
|
"learning_rate": 3.0000000000000004e-05, |
|
"loss": 0.5686, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0096, |
|
"grad_norm": 0.4455687685905931, |
|
"learning_rate": 2.98445759369477e-05, |
|
"loss": 0.5128, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.0224, |
|
"grad_norm": 0.4950940160018366, |
|
"learning_rate": 2.9688363232396056e-05, |
|
"loss": 0.4992, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.0352, |
|
"grad_norm": 0.5962708491988533, |
|
"learning_rate": 2.9531374400426158e-05, |
|
"loss": 0.5034, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.048, |
|
"grad_norm": 0.5705221556476063, |
|
"learning_rate": 2.9373622017294075e-05, |
|
"loss": 0.4955, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0608, |
|
"grad_norm": 0.5266122887889625, |
|
"learning_rate": 2.9215118720423375e-05, |
|
"loss": 0.4864, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.0736, |
|
"grad_norm": 0.4828539198647265, |
|
"learning_rate": 2.9055877207392752e-05, |
|
"loss": 0.4974, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.0864, |
|
"grad_norm": 0.664332411609878, |
|
"learning_rate": 2.8895910234918828e-05, |
|
"loss": 0.4929, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.0992, |
|
"grad_norm": 0.49684891767453887, |
|
"learning_rate": 2.873523061783426e-05, |
|
"loss": 0.4886, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.112, |
|
"grad_norm": 0.6173844319794962, |
|
"learning_rate": 2.8573851228061084e-05, |
|
"loss": 0.5059, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.1248, |
|
"grad_norm": 0.6185899744976351, |
|
"learning_rate": 2.8411784993579633e-05, |
|
"loss": 0.4931, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.1376, |
|
"grad_norm": 0.6306079550908135, |
|
"learning_rate": 2.8249044897392814e-05, |
|
"loss": 0.4846, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1504, |
|
"grad_norm": 0.5029994064199304, |
|
"learning_rate": 2.80856439764861e-05, |
|
"loss": 0.4995, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1632, |
|
"grad_norm": 0.48247444707199016, |
|
"learning_rate": 2.792159532078314e-05, |
|
"loss": 0.4937, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.176, |
|
"grad_norm": 0.44870669524781803, |
|
"learning_rate": 2.77569120720971e-05, |
|
"loss": 0.4924, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.1888, |
|
"grad_norm": 0.44339227471416787, |
|
"learning_rate": 2.7591607423077932e-05, |
|
"loss": 0.4973, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2016, |
|
"grad_norm": 0.40066357035072003, |
|
"learning_rate": 2.7425694616155474e-05, |
|
"loss": 0.4877, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2144, |
|
"grad_norm": 0.39776641795055945, |
|
"learning_rate": 2.7259186942478656e-05, |
|
"loss": 0.4853, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.2272, |
|
"grad_norm": 0.49162584289757133, |
|
"learning_rate": 2.7092097740850712e-05, |
|
"loss": 0.4988, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.39954986716181123, |
|
"learning_rate": 2.692444039666066e-05, |
|
"loss": 0.4783, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2528, |
|
"grad_norm": 0.4526262538802144, |
|
"learning_rate": 2.6756228340810946e-05, |
|
"loss": 0.4917, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2656, |
|
"grad_norm": 0.3576299330547625, |
|
"learning_rate": 2.6587475048641596e-05, |
|
"loss": 0.4812, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.2784, |
|
"grad_norm": 0.42376036267752043, |
|
"learning_rate": 2.6418194038850634e-05, |
|
"loss": 0.4942, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.2912, |
|
"grad_norm": 0.3358173028900977, |
|
"learning_rate": 2.624839887241115e-05, |
|
"loss": 0.4973, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.304, |
|
"grad_norm": 0.38778341099392927, |
|
"learning_rate": 2.607810315148494e-05, |
|
"loss": 0.4944, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3168, |
|
"grad_norm": 0.46211039098549717, |
|
"learning_rate": 2.5907320518332827e-05, |
|
"loss": 0.4896, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3296, |
|
"grad_norm": 0.36513811455550704, |
|
"learning_rate": 2.5736064654221808e-05, |
|
"loss": 0.4968, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3424, |
|
"grad_norm": 0.4777844900010711, |
|
"learning_rate": 2.5564349278329056e-05, |
|
"loss": 0.4989, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3552, |
|
"grad_norm": 0.33354829623315135, |
|
"learning_rate": 2.539218814664288e-05, |
|
"loss": 0.4983, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.368, |
|
"grad_norm": 0.42129371314935, |
|
"learning_rate": 2.521959505086075e-05, |
|
"loss": 0.485, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.3808, |
|
"grad_norm": 0.34749568893092425, |
|
"learning_rate": 2.5046583817284437e-05, |
|
"loss": 0.4963, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.3936, |
|
"grad_norm": 0.34239103504819346, |
|
"learning_rate": 2.487316830571244e-05, |
|
"loss": 0.4903, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4064, |
|
"grad_norm": 0.38153414780695555, |
|
"learning_rate": 2.4699362408329646e-05, |
|
"loss": 0.4935, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4192, |
|
"grad_norm": 0.42448009768341244, |
|
"learning_rate": 2.4525180048594452e-05, |
|
"loss": 0.4953, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.432, |
|
"grad_norm": 0.35999734178814113, |
|
"learning_rate": 2.435063518012335e-05, |
|
"loss": 0.494, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4448, |
|
"grad_norm": 0.4177534542860526, |
|
"learning_rate": 2.4175741785573177e-05, |
|
"loss": 0.4971, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4576000000000002, |
|
"grad_norm": 0.36614733367029884, |
|
"learning_rate": 2.4000513875520892e-05, |
|
"loss": 0.49, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.4704, |
|
"grad_norm": 0.3629898887228354, |
|
"learning_rate": 2.3824965487341247e-05, |
|
"loss": 0.4852, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.4832, |
|
"grad_norm": 0.42164087991155624, |
|
"learning_rate": 2.3649110684082258e-05, |
|
"loss": 0.501, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.496, |
|
"grad_norm": 0.3656740313988037, |
|
"learning_rate": 2.3472963553338614e-05, |
|
"loss": 0.5068, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5088, |
|
"grad_norm": 0.3412529225542247, |
|
"learning_rate": 2.3296538206123134e-05, |
|
"loss": 0.4909, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5216, |
|
"grad_norm": 0.36394592915518625, |
|
"learning_rate": 2.311984877573636e-05, |
|
"loss": 0.5004, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.5343999999999998, |
|
"grad_norm": 0.3977520392517829, |
|
"learning_rate": 2.2942909416634326e-05, |
|
"loss": 0.5027, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5472, |
|
"grad_norm": 0.39864770864687243, |
|
"learning_rate": 2.2765734303294666e-05, |
|
"loss": 0.491, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.45988025280273587, |
|
"learning_rate": 2.2588337629081107e-05, |
|
"loss": 0.4932, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5728, |
|
"grad_norm": 0.3546135394052879, |
|
"learning_rate": 2.2410733605106462e-05, |
|
"loss": 0.5188, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.5856, |
|
"grad_norm": 0.4618601705659484, |
|
"learning_rate": 2.2232936459094158e-05, |
|
"loss": 0.4905, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.5984, |
|
"grad_norm": 0.3585916516119542, |
|
"learning_rate": 2.205496043423849e-05, |
|
"loss": 0.4887, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6112, |
|
"grad_norm": 0.38124066737860346, |
|
"learning_rate": 2.1876819788063586e-05, |
|
"loss": 0.4743, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.624, |
|
"grad_norm": 0.3621227727378548, |
|
"learning_rate": 2.16985287912813e-05, |
|
"loss": 0.4848, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.6368, |
|
"grad_norm": 0.35992970989896667, |
|
"learning_rate": 2.1520101726647922e-05, |
|
"loss": 0.4851, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6496, |
|
"grad_norm": 0.42114634962477715, |
|
"learning_rate": 2.1341552887820048e-05, |
|
"loss": 0.4946, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.6624, |
|
"grad_norm": 0.34299656901356307, |
|
"learning_rate": 2.1162896578209517e-05, |
|
"loss": 0.4854, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.6752000000000002, |
|
"grad_norm": 0.3613742984045961, |
|
"learning_rate": 2.0984147109837564e-05, |
|
"loss": 0.4919, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.6879999999999997, |
|
"grad_norm": 0.36699563374978, |
|
"learning_rate": 2.0805318802188307e-05, |
|
"loss": 0.4818, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.7008, |
|
"grad_norm": 0.38569411272389953, |
|
"learning_rate": 2.0626425981061608e-05, |
|
"loss": 0.4832, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7136, |
|
"grad_norm": 0.351362373737474, |
|
"learning_rate": 2.0447482977425465e-05, |
|
"loss": 0.4851, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7264, |
|
"grad_norm": 0.32965590050667626, |
|
"learning_rate": 2.0268504126267952e-05, |
|
"loss": 0.4968, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.7392, |
|
"grad_norm": 0.3588053691920234, |
|
"learning_rate": 2.008950376544887e-05, |
|
"loss": 0.5014, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.752, |
|
"grad_norm": 0.32474815666912576, |
|
"learning_rate": 1.9910496234551132e-05, |
|
"loss": 0.486, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.7648, |
|
"grad_norm": 0.3547954675543647, |
|
"learning_rate": 1.9731495873732055e-05, |
|
"loss": 0.477, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.7776, |
|
"grad_norm": 0.33714252571554143, |
|
"learning_rate": 1.9552517022574542e-05, |
|
"loss": 0.4823, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.7904, |
|
"grad_norm": 0.3334975346532029, |
|
"learning_rate": 1.93735740189384e-05, |
|
"loss": 0.5008, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.8032, |
|
"grad_norm": 0.34222937404032383, |
|
"learning_rate": 1.9194681197811703e-05, |
|
"loss": 0.5082, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.816, |
|
"grad_norm": 0.38345392178288507, |
|
"learning_rate": 1.901585289016244e-05, |
|
"loss": 0.483, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.8288, |
|
"grad_norm": 0.34990266060655323, |
|
"learning_rate": 1.8837103421790486e-05, |
|
"loss": 0.5169, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.8416, |
|
"grad_norm": 0.3807577512849688, |
|
"learning_rate": 1.8658447112179952e-05, |
|
"loss": 0.4929, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8544, |
|
"grad_norm": 0.373071861585422, |
|
"learning_rate": 1.8479898273352084e-05, |
|
"loss": 0.4848, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.8672, |
|
"grad_norm": 0.37889759627756314, |
|
"learning_rate": 1.83014712087187e-05, |
|
"loss": 0.4909, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.40100078003762757, |
|
"learning_rate": 1.8123180211936417e-05, |
|
"loss": 0.4946, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.8928000000000003, |
|
"grad_norm": 0.30764547855271984, |
|
"learning_rate": 1.794503956576152e-05, |
|
"loss": 0.4689, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9055999999999997, |
|
"grad_norm": 0.3698265310731593, |
|
"learning_rate": 1.776706354090585e-05, |
|
"loss": 0.4988, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.9184, |
|
"grad_norm": 0.3267836189939952, |
|
"learning_rate": 1.758926639489354e-05, |
|
"loss": 0.4934, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.9312, |
|
"grad_norm": 0.3371861051483228, |
|
"learning_rate": 1.7411662370918893e-05, |
|
"loss": 0.5008, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.944, |
|
"grad_norm": 0.36401939606068423, |
|
"learning_rate": 1.7234265696705344e-05, |
|
"loss": 0.4884, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9568, |
|
"grad_norm": 0.31703438302188913, |
|
"learning_rate": 1.7057090583365678e-05, |
|
"loss": 0.4929, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.9696, |
|
"grad_norm": 0.3845426890198222, |
|
"learning_rate": 1.6880151224263646e-05, |
|
"loss": 0.4798, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.9824, |
|
"grad_norm": 0.3091458428078054, |
|
"learning_rate": 1.6703461793876876e-05, |
|
"loss": 0.472, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.9952, |
|
"grad_norm": 0.3772945785914719, |
|
"learning_rate": 1.6527036446661396e-05, |
|
"loss": 0.4759, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.008, |
|
"grad_norm": 0.4571467635241678, |
|
"learning_rate": 1.635088931591775e-05, |
|
"loss": 0.4717, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.0208, |
|
"grad_norm": 0.4028002846921973, |
|
"learning_rate": 1.6175034512658753e-05, |
|
"loss": 0.4433, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0336, |
|
"grad_norm": 0.6820582355550069, |
|
"learning_rate": 1.5999486124479115e-05, |
|
"loss": 0.4285, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.0464, |
|
"grad_norm": 0.4056991997120835, |
|
"learning_rate": 1.5824258214426833e-05, |
|
"loss": 0.4167, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.0592, |
|
"grad_norm": 0.5211806687138262, |
|
"learning_rate": 1.5649364819876655e-05, |
|
"loss": 0.424, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.072, |
|
"grad_norm": 0.36529791684800383, |
|
"learning_rate": 1.547481995140556e-05, |
|
"loss": 0.4265, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.0848, |
|
"grad_norm": 0.42697884443951184, |
|
"learning_rate": 1.5300637591670357e-05, |
|
"loss": 0.4242, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.0976, |
|
"grad_norm": 0.38598208321475, |
|
"learning_rate": 1.5126831694287564e-05, |
|
"loss": 0.4156, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1104, |
|
"grad_norm": 0.40166934667311494, |
|
"learning_rate": 1.4953416182715566e-05, |
|
"loss": 0.4285, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.1232, |
|
"grad_norm": 0.3453676456399864, |
|
"learning_rate": 1.478040494913926e-05, |
|
"loss": 0.4332, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.136, |
|
"grad_norm": 0.43244023029113954, |
|
"learning_rate": 1.460781185335713e-05, |
|
"loss": 0.4, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1488, |
|
"grad_norm": 0.31845478103294494, |
|
"learning_rate": 1.443565072167095e-05, |
|
"loss": 0.4118, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.1616, |
|
"grad_norm": 0.36264818372252705, |
|
"learning_rate": 1.4263935345778202e-05, |
|
"loss": 0.4361, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.1744, |
|
"grad_norm": 0.37162158467221174, |
|
"learning_rate": 1.409267948166718e-05, |
|
"loss": 0.4207, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.1872, |
|
"grad_norm": 0.3621435380200018, |
|
"learning_rate": 1.3921896848515064e-05, |
|
"loss": 0.417, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.39785127385776103, |
|
"learning_rate": 1.3751601127588849e-05, |
|
"loss": 0.4047, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2128, |
|
"grad_norm": 0.3320536307525037, |
|
"learning_rate": 1.3581805961149371e-05, |
|
"loss": 0.4174, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2256, |
|
"grad_norm": 0.3487901808850689, |
|
"learning_rate": 1.341252495135841e-05, |
|
"loss": 0.4065, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2384, |
|
"grad_norm": 0.3253802566432799, |
|
"learning_rate": 1.324377165918906e-05, |
|
"loss": 0.4119, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2512, |
|
"grad_norm": 0.3542620704801422, |
|
"learning_rate": 1.3075559603339354e-05, |
|
"loss": 0.4154, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.2640000000000002, |
|
"grad_norm": 0.32690488879784907, |
|
"learning_rate": 1.2907902259149287e-05, |
|
"loss": 0.4086, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.2768, |
|
"grad_norm": 0.3371738833265028, |
|
"learning_rate": 1.274081305752135e-05, |
|
"loss": 0.4421, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.2896, |
|
"grad_norm": 0.2973807352876402, |
|
"learning_rate": 1.2574305383844528e-05, |
|
"loss": 0.4132, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3024, |
|
"grad_norm": 0.3268544820181663, |
|
"learning_rate": 1.2408392576922075e-05, |
|
"loss": 0.4045, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3152, |
|
"grad_norm": 0.3122600328277054, |
|
"learning_rate": 1.2243087927902905e-05, |
|
"loss": 0.4023, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.328, |
|
"grad_norm": 0.3119871226916449, |
|
"learning_rate": 1.2078404679216864e-05, |
|
"loss": 0.4251, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3407999999999998, |
|
"grad_norm": 0.3046890124826961, |
|
"learning_rate": 1.1914356023513904e-05, |
|
"loss": 0.4099, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.3536, |
|
"grad_norm": 0.29856493534107525, |
|
"learning_rate": 1.1750955102607193e-05, |
|
"loss": 0.4136, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.3664, |
|
"grad_norm": 0.34381708669262084, |
|
"learning_rate": 1.1588215006420374e-05, |
|
"loss": 0.4009, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.3792, |
|
"grad_norm": 0.2859674479950849, |
|
"learning_rate": 1.1426148771938915e-05, |
|
"loss": 0.4257, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.392, |
|
"grad_norm": 0.38519333602943867, |
|
"learning_rate": 1.1264769382165748e-05, |
|
"loss": 0.4088, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4048, |
|
"grad_norm": 0.27666917731180446, |
|
"learning_rate": 1.110408976508118e-05, |
|
"loss": 0.4253, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4176, |
|
"grad_norm": 0.31045511439888457, |
|
"learning_rate": 1.094412279260726e-05, |
|
"loss": 0.4137, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4304, |
|
"grad_norm": 0.3064610638812842, |
|
"learning_rate": 1.0784881279576635e-05, |
|
"loss": 0.4063, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4432, |
|
"grad_norm": 0.3141935716400805, |
|
"learning_rate": 1.0626377982705929e-05, |
|
"loss": 0.4287, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.456, |
|
"grad_norm": 0.29111101358636754, |
|
"learning_rate": 1.0468625599573842e-05, |
|
"loss": 0.4192, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.4688, |
|
"grad_norm": 0.2956183665834541, |
|
"learning_rate": 1.0311636767603952e-05, |
|
"loss": 0.3997, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.4816, |
|
"grad_norm": 0.29005540179618783, |
|
"learning_rate": 1.0155424063052306e-05, |
|
"loss": 0.4152, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.4944, |
|
"grad_norm": 0.28290805212326664, |
|
"learning_rate": 1.0000000000000006e-05, |
|
"loss": 0.4304, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5072, |
|
"grad_norm": 0.30310967758971785, |
|
"learning_rate": 9.84537702935065e-06, |
|
"loss": 0.4106, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.30287471467342203, |
|
"learning_rate": 9.691567537832964e-06, |
|
"loss": 0.4175, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5328, |
|
"grad_norm": 0.2719696548648095, |
|
"learning_rate": 9.538583847008452e-06, |
|
"loss": 0.4037, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5456, |
|
"grad_norm": 0.2899661973448743, |
|
"learning_rate": 9.386438212284372e-06, |
|
"loss": 0.4181, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.5584, |
|
"grad_norm": 0.2588170019747225, |
|
"learning_rate": 9.235142821931928e-06, |
|
"loss": 0.4093, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.5712, |
|
"grad_norm": 0.2888704451594072, |
|
"learning_rate": 9.084709796109907e-06, |
|
"loss": 0.4264, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.584, |
|
"grad_norm": 0.2837738692602651, |
|
"learning_rate": 8.93515118589373e-06, |
|
"loss": 0.4143, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.5968, |
|
"grad_norm": 0.2513312536013756, |
|
"learning_rate": 8.786478972310023e-06, |
|
"loss": 0.4283, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6096, |
|
"grad_norm": 0.2829288918717478, |
|
"learning_rate": 8.638705065376887e-06, |
|
"loss": 0.4098, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.6224, |
|
"grad_norm": 0.2943511931701685, |
|
"learning_rate": 8.491841303149728e-06, |
|
"loss": 0.421, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.6352, |
|
"grad_norm": 0.2504237668330089, |
|
"learning_rate": 8.345899450772975e-06, |
|
"loss": 0.4345, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.648, |
|
"grad_norm": 0.2722020719297627, |
|
"learning_rate": 8.200891199537549e-06, |
|
"loss": 0.4112, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.6608, |
|
"grad_norm": 0.2613503479910776, |
|
"learning_rate": 8.056828165944282e-06, |
|
"loss": 0.4255, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.6736, |
|
"grad_norm": 0.24873238194791902, |
|
"learning_rate": 7.913721890773354e-06, |
|
"loss": 0.4302, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.6864, |
|
"grad_norm": 0.27468461483172774, |
|
"learning_rate": 7.771583838159756e-06, |
|
"loss": 0.4116, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.6992000000000003, |
|
"grad_norm": 0.26279435284454206, |
|
"learning_rate": 7.630425394674903e-06, |
|
"loss": 0.4123, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7119999999999997, |
|
"grad_norm": 0.25842123842807174, |
|
"learning_rate": 7.49025786841445e-06, |
|
"loss": 0.4137, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.7248, |
|
"grad_norm": 0.2542193875851777, |
|
"learning_rate": 7.3510924880924575e-06, |
|
"loss": 0.4205, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.7376, |
|
"grad_norm": 0.28371884420881716, |
|
"learning_rate": 7.212940402141808e-06, |
|
"loss": 0.4064, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.7504, |
|
"grad_norm": 0.2736582886132325, |
|
"learning_rate": 7.075812677821145e-06, |
|
"loss": 0.4258, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.7632, |
|
"grad_norm": 0.25498388539426536, |
|
"learning_rate": 6.939720300328303e-06, |
|
"loss": 0.4234, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.776, |
|
"grad_norm": 0.24724201903664575, |
|
"learning_rate": 6.8046741719202385e-06, |
|
"loss": 0.4162, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.7888, |
|
"grad_norm": 0.2502405252119946, |
|
"learning_rate": 6.67068511103971e-06, |
|
"loss": 0.4279, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8016, |
|
"grad_norm": 0.2690069016659926, |
|
"learning_rate": 6.537763851448593e-06, |
|
"loss": 0.3935, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.8144, |
|
"grad_norm": 0.25959229005991064, |
|
"learning_rate": 6.4059210413680175e-06, |
|
"loss": 0.4107, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.8272, |
|
"grad_norm": 0.2403156005885191, |
|
"learning_rate": 6.275167242625331e-06, |
|
"loss": 0.4438, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.27570234448588693, |
|
"learning_rate": 6.145512929808013e-06, |
|
"loss": 0.4184, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.8528000000000002, |
|
"grad_norm": 0.265119969859078, |
|
"learning_rate": 6.016968489424572e-06, |
|
"loss": 0.4104, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.8656, |
|
"grad_norm": 0.27513305634954666, |
|
"learning_rate": 5.889544219072465e-06, |
|
"loss": 0.408, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.8784, |
|
"grad_norm": 0.28848517807921864, |
|
"learning_rate": 5.7632503266131925e-06, |
|
"loss": 0.4199, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.8912, |
|
"grad_norm": 0.2591173579414495, |
|
"learning_rate": 5.638096929354522e-06, |
|
"loss": 0.4175, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.904, |
|
"grad_norm": 0.2770146299256752, |
|
"learning_rate": 5.514094053240035e-06, |
|
"loss": 0.4311, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.9168, |
|
"grad_norm": 0.27644111801793986, |
|
"learning_rate": 5.39125163204594e-06, |
|
"loss": 0.421, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9295999999999998, |
|
"grad_norm": 0.24589692673140315, |
|
"learning_rate": 5.269579506585259e-06, |
|
"loss": 0.4067, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.9424, |
|
"grad_norm": 0.2605714132372804, |
|
"learning_rate": 5.149087423919541e-06, |
|
"loss": 0.403, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9552, |
|
"grad_norm": 0.2569195095458623, |
|
"learning_rate": 5.029785036577976e-06, |
|
"loss": 0.4306, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.968, |
|
"grad_norm": 0.25435460637415475, |
|
"learning_rate": 4.911681901784198e-06, |
|
"loss": 0.431, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.9808, |
|
"grad_norm": 0.25434555401757375, |
|
"learning_rate": 4.794787480690597e-06, |
|
"loss": 0.42, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.9936, |
|
"grad_norm": 0.25847082059352217, |
|
"learning_rate": 4.679111137620442e-06, |
|
"loss": 0.4262, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.0064, |
|
"grad_norm": 0.3078836967914429, |
|
"learning_rate": 4.5646621393177e-06, |
|
"loss": 0.4081, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.0192, |
|
"grad_norm": 0.3897456159789277, |
|
"learning_rate": 4.451449654204685e-06, |
|
"loss": 0.3757, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.032, |
|
"grad_norm": 0.27818616231947, |
|
"learning_rate": 4.339482751647557e-06, |
|
"loss": 0.3797, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.0448, |
|
"grad_norm": 0.3086382203339711, |
|
"learning_rate": 4.228770401229824e-06, |
|
"loss": 0.3774, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.0576, |
|
"grad_norm": 0.4222041747862514, |
|
"learning_rate": 4.119321472033779e-06, |
|
"loss": 0.3639, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.0704, |
|
"grad_norm": 0.33504576758682486, |
|
"learning_rate": 4.011144731929981e-06, |
|
"loss": 0.3674, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.0832, |
|
"grad_norm": 0.2908160967324533, |
|
"learning_rate": 3.904248846874894e-06, |
|
"loss": 0.3581, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.096, |
|
"grad_norm": 0.3170950976592653, |
|
"learning_rate": 3.7986423802166705e-06, |
|
"loss": 0.3835, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1088, |
|
"grad_norm": 0.3184290229549622, |
|
"learning_rate": 3.694333792009115e-06, |
|
"loss": 0.3694, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.1216, |
|
"grad_norm": 0.31114641978405194, |
|
"learning_rate": 3.5913314383339937e-06, |
|
"loss": 0.3842, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.1344, |
|
"grad_norm": 0.28491189296455693, |
|
"learning_rate": 3.4896435706316e-06, |
|
"loss": 0.371, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.1472, |
|
"grad_norm": 0.27387529528555665, |
|
"learning_rate": 3.3892783350397675e-06, |
|
"loss": 0.3853, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.26309335177219156, |
|
"learning_rate": 3.290243771741275e-06, |
|
"loss": 0.3786, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.1728, |
|
"grad_norm": 0.30160287904440136, |
|
"learning_rate": 3.1925478143197418e-06, |
|
"loss": 0.382, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.1856, |
|
"grad_norm": 0.29927424315024315, |
|
"learning_rate": 3.0961982891241083e-06, |
|
"loss": 0.366, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.1984, |
|
"grad_norm": 0.272326049814458, |
|
"learning_rate": 3.001202914641628e-06, |
|
"loss": 0.3998, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2112, |
|
"grad_norm": 0.2502193014427093, |
|
"learning_rate": 2.907569300879596e-06, |
|
"loss": 0.3791, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.224, |
|
"grad_norm": 0.2728654969279862, |
|
"learning_rate": 2.815304948755664e-06, |
|
"loss": 0.3701, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.2368, |
|
"grad_norm": 0.2714056788321398, |
|
"learning_rate": 2.7244172494969978e-06, |
|
"loss": 0.3723, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.2496, |
|
"grad_norm": 0.2751739558506907, |
|
"learning_rate": 2.6349134840481294e-06, |
|
"loss": 0.3863, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.2624, |
|
"grad_norm": 0.24742383567080395, |
|
"learning_rate": 2.546800822487714e-06, |
|
"loss": 0.3694, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.2752, |
|
"grad_norm": 0.24259595387693322, |
|
"learning_rate": 2.4600863234541338e-06, |
|
"loss": 0.3627, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.288, |
|
"grad_norm": 0.24217750702784588, |
|
"learning_rate": 2.374776933580025e-06, |
|
"loss": 0.3805, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.3008, |
|
"grad_norm": 0.25913091700415253, |
|
"learning_rate": 2.2908794869358044e-06, |
|
"loss": 0.3663, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.3136, |
|
"grad_norm": 0.2586239046376112, |
|
"learning_rate": 2.2084007044821764e-06, |
|
"loss": 0.3563, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3264, |
|
"grad_norm": 0.23579942630271755, |
|
"learning_rate": 2.127347193531757e-06, |
|
"loss": 0.3543, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.3392, |
|
"grad_norm": 0.23373961445761668, |
|
"learning_rate": 2.0477254472197237e-06, |
|
"loss": 0.3837, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.352, |
|
"grad_norm": 0.23591825589341625, |
|
"learning_rate": 1.96954184398368e-06, |
|
"loss": 0.3829, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.3648, |
|
"grad_norm": 0.2353869613244477, |
|
"learning_rate": 1.8928026470526917e-06, |
|
"loss": 0.367, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.3776, |
|
"grad_norm": 0.2357626051562776, |
|
"learning_rate": 1.817514003945524e-06, |
|
"loss": 0.3859, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.3904, |
|
"grad_norm": 0.24024177768466073, |
|
"learning_rate": 1.743681945978184e-06, |
|
"loss": 0.3762, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.4032, |
|
"grad_norm": 0.21474250757431293, |
|
"learning_rate": 1.6713123877807413e-06, |
|
"loss": 0.3791, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.416, |
|
"grad_norm": 0.2230994305247215, |
|
"learning_rate": 1.6004111268235156e-06, |
|
"loss": 0.3881, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4288, |
|
"grad_norm": 0.21437251622971168, |
|
"learning_rate": 1.5309838429526714e-06, |
|
"loss": 0.4006, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.4416, |
|
"grad_norm": 0.23472458900465512, |
|
"learning_rate": 1.4630360979351644e-06, |
|
"loss": 0.3714, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.4544, |
|
"grad_norm": 0.22834685454037576, |
|
"learning_rate": 1.396573335013236e-06, |
|
"loss": 0.3719, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.4672, |
|
"grad_norm": 0.2321116838852033, |
|
"learning_rate": 1.3316008784683265e-06, |
|
"loss": 0.3726, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.25799653704355013, |
|
"learning_rate": 1.2681239331945695e-06, |
|
"loss": 0.3611, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.4928, |
|
"grad_norm": 0.20430724621101382, |
|
"learning_rate": 1.2061475842818337e-06, |
|
"loss": 0.3721, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.5056, |
|
"grad_norm": 0.22113708085646827, |
|
"learning_rate": 1.1456767966083393e-06, |
|
"loss": 0.3723, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.5184, |
|
"grad_norm": 0.22012548360875808, |
|
"learning_rate": 1.086716414442952e-06, |
|
"loss": 0.3732, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.5312, |
|
"grad_norm": 0.22222847245134936, |
|
"learning_rate": 1.0292711610570904e-06, |
|
"loss": 0.3768, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.5440000000000005, |
|
"grad_norm": 0.21450792013557687, |
|
"learning_rate": 9.733456383463658e-07, |
|
"loss": 0.3631, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.5568, |
|
"grad_norm": 0.21395918035222308, |
|
"learning_rate": 9.189443264619102e-07, |
|
"loss": 0.3745, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.5696, |
|
"grad_norm": 0.2295027175541579, |
|
"learning_rate": 8.660715834514977e-07, |
|
"loss": 0.3735, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.5824, |
|
"grad_norm": 0.2074700163824343, |
|
"learning_rate": 8.147316449103959e-07, |
|
"loss": 0.3808, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.5952, |
|
"grad_norm": 0.20627809231126568, |
|
"learning_rate": 7.649286236420806e-07, |
|
"loss": 0.3881, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.608, |
|
"grad_norm": 0.2042926461719478, |
|
"learning_rate": 7.166665093287539e-07, |
|
"loss": 0.3829, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.6208, |
|
"grad_norm": 0.22097127858331148, |
|
"learning_rate": 6.69949168211721e-07, |
|
"loss": 0.3726, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.6336, |
|
"grad_norm": 0.2132949161183552, |
|
"learning_rate": 6.247803427816945e-07, |
|
"loss": 0.3746, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.6464, |
|
"grad_norm": 0.21350811403401795, |
|
"learning_rate": 5.811636514789598e-07, |
|
"loss": 0.3648, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.6592, |
|
"grad_norm": 0.21619081913763516, |
|
"learning_rate": 5.391025884035239e-07, |
|
"loss": 0.3658, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.672, |
|
"grad_norm": 0.22040882740180187, |
|
"learning_rate": 4.986005230351954e-07, |
|
"loss": 0.3544, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.6848, |
|
"grad_norm": 0.20686894830760133, |
|
"learning_rate": 4.5966069996365993e-07, |
|
"loss": 0.3632, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.6975999999999996, |
|
"grad_norm": 0.24523317979996895, |
|
"learning_rate": 4.22286238628562e-07, |
|
"loss": 0.3614, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.7104, |
|
"grad_norm": 0.2094299975341201, |
|
"learning_rate": 3.8648013306960664e-07, |
|
"loss": 0.3592, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 4.7232, |
|
"grad_norm": 0.20902438069966647, |
|
"learning_rate": 3.522452516867048e-07, |
|
"loss": 0.3762, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 4.736, |
|
"grad_norm": 0.20521596935129183, |
|
"learning_rate": 3.1958433701019697e-07, |
|
"loss": 0.3515, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.7488, |
|
"grad_norm": 0.2153152039476982, |
|
"learning_rate": 2.8850000548115155e-07, |
|
"loss": 0.3874, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 4.7616, |
|
"grad_norm": 0.21742854143936244, |
|
"learning_rate": 2.5899474724174313e-07, |
|
"loss": 0.3981, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 4.7744, |
|
"grad_norm": 0.20306555223195863, |
|
"learning_rate": 2.3107092593579905e-07, |
|
"loss": 0.3915, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 4.7872, |
|
"grad_norm": 0.21261935023625392, |
|
"learning_rate": 2.0473077851942858e-07, |
|
"loss": 0.3624, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.21265692761867103, |
|
"learning_rate": 1.799764150818306e-07, |
|
"loss": 0.3762, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 4.8128, |
|
"grad_norm": 0.2092693170717507, |
|
"learning_rate": 1.5680981867625566e-07, |
|
"loss": 0.3573, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 4.8256, |
|
"grad_norm": 0.21381173241770798, |
|
"learning_rate": 1.3523284516113955e-07, |
|
"loss": 0.3757, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 4.8384, |
|
"grad_norm": 0.20722467428358596, |
|
"learning_rate": 1.1524722305144231e-07, |
|
"loss": 0.3699, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 4.8512, |
|
"grad_norm": 0.21363168538840632, |
|
"learning_rate": 9.685455338016347e-08, |
|
"loss": 0.3723, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 4.864, |
|
"grad_norm": 0.21187736559106565, |
|
"learning_rate": 8.005630957010014e-08, |
|
"loss": 0.3733, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.8768, |
|
"grad_norm": 0.21096163827566547, |
|
"learning_rate": 6.485383731580142e-08, |
|
"loss": 0.3638, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 4.8896, |
|
"grad_norm": 0.21188626449673448, |
|
"learning_rate": 5.1248354475768034e-08, |
|
"loss": 0.3594, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 4.9024, |
|
"grad_norm": 0.20552467769646116, |
|
"learning_rate": 3.924095097489922e-08, |
|
"loss": 0.3754, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 4.9152000000000005, |
|
"grad_norm": 0.21318494767306526, |
|
"learning_rate": 2.8832588717164766e-08, |
|
"loss": 0.3672, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.928, |
|
"grad_norm": 0.2090068621882043, |
|
"learning_rate": 2.0024101508555604e-08, |
|
"loss": 0.363, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.9408, |
|
"grad_norm": 0.20763808048762358, |
|
"learning_rate": 1.281619499029274e-08, |
|
"loss": 0.3819, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 4.9536, |
|
"grad_norm": 0.20795948050505664, |
|
"learning_rate": 7.209446582292501e-09, |
|
"loss": 0.3558, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 4.9664, |
|
"grad_norm": 0.2054478121680009, |
|
"learning_rate": 3.2043054369057523e-09, |
|
"loss": 0.3678, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 4.9792, |
|
"grad_norm": 0.21770520240117117, |
|
"learning_rate": 8.010924029533406e-10, |
|
"loss": 0.3493, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"grad_norm": 0.20845976699164964, |
|
"learning_rate": 0.0, |
|
"loss": 0.3647, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.992, |
|
"step": 390, |
|
"total_flos": 1.9327518915700982e+18, |
|
"train_loss": 0.5209115049013725, |
|
"train_runtime": 62363.4021, |
|
"train_samples_per_second": 0.802, |
|
"train_steps_per_second": 0.006 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.9327518915700982e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|