{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.998837479655894,
  "eval_steps": 500,
  "global_step": 1432,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013950244129272262,
      "grad_norm": 9.077165796122205,
      "learning_rate": 0.0,
      "loss": 1.244,
      "step": 1
    },
    {
      "epoch": 0.0027900488258544524,
      "grad_norm": 7.807074173265791,
      "learning_rate": 6.944444444444444e-08,
      "loss": 1.1223,
      "step": 2
    },
    {
      "epoch": 0.0041850732387816785,
      "grad_norm": 6.324174799146362,
      "learning_rate": 1.3888888888888888e-07,
      "loss": 0.9673,
      "step": 3
    },
    {
      "epoch": 0.005580097651708905,
      "grad_norm": 8.27030483810195,
      "learning_rate": 2.0833333333333333e-07,
      "loss": 1.1685,
      "step": 4
    },
    {
      "epoch": 0.006975122064636131,
      "grad_norm": 7.60016514591344,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 1.0968,
      "step": 5
    },
    {
      "epoch": 0.008370146477563357,
      "grad_norm": 8.060333500293979,
      "learning_rate": 3.472222222222223e-07,
      "loss": 1.1373,
      "step": 6
    },
    {
      "epoch": 0.009765170890490584,
      "grad_norm": 8.048732320406836,
      "learning_rate": 4.1666666666666667e-07,
      "loss": 1.2183,
      "step": 7
    },
    {
      "epoch": 0.01116019530341781,
      "grad_norm": 8.367676393723716,
      "learning_rate": 4.861111111111112e-07,
      "loss": 1.2232,
      "step": 8
    },
    {
      "epoch": 0.012555219716345037,
      "grad_norm": 6.936859591765737,
      "learning_rate": 5.555555555555555e-07,
      "loss": 1.0223,
      "step": 9
    },
    {
      "epoch": 0.013950244129272262,
      "grad_norm": 7.474340880350743,
      "learning_rate": 6.25e-07,
      "loss": 1.0451,
      "step": 10
    },
    {
      "epoch": 0.015345268542199489,
      "grad_norm": 7.924791845739248,
      "learning_rate": 6.944444444444446e-07,
      "loss": 1.0904,
      "step": 11
    },
    {
      "epoch": 0.016740292955126714,
      "grad_norm": 6.700238239267023,
      "learning_rate": 7.63888888888889e-07,
      "loss": 1.0017,
      "step": 12
    },
    {
      "epoch": 0.01813531736805394,
      "grad_norm": 7.121912596616418,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.1195,
      "step": 13
    },
    {
      "epoch": 0.01953034178098117,
      "grad_norm": 7.593714162127442,
      "learning_rate": 9.027777777777779e-07,
      "loss": 1.1526,
      "step": 14
    },
    {
      "epoch": 0.020925366193908394,
      "grad_norm": 5.598325711309053,
      "learning_rate": 9.722222222222224e-07,
      "loss": 0.9568,
      "step": 15
    },
    {
      "epoch": 0.02232039060683562,
      "grad_norm": 6.502502784546973,
      "learning_rate": 1.0416666666666667e-06,
      "loss": 1.0338,
      "step": 16
    },
    {
      "epoch": 0.023715415019762844,
      "grad_norm": 4.2688358482531585,
      "learning_rate": 1.111111111111111e-06,
      "loss": 0.8272,
      "step": 17
    },
    {
      "epoch": 0.025110439432690073,
      "grad_norm": 6.141756294060148,
      "learning_rate": 1.1805555555555556e-06,
      "loss": 1.1201,
      "step": 18
    },
    {
      "epoch": 0.0265054638456173,
      "grad_norm": 3.803027333138855,
      "learning_rate": 1.25e-06,
      "loss": 0.873,
      "step": 19
    },
    {
      "epoch": 0.027900488258544524,
      "grad_norm": 5.014334478810172,
      "learning_rate": 1.3194444444444446e-06,
      "loss": 0.9899,
      "step": 20
    },
    {
      "epoch": 0.029295512671471752,
      "grad_norm": 4.551325462075463,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 0.942,
      "step": 21
    },
    {
      "epoch": 0.030690537084398978,
      "grad_norm": 4.812375868902633,
      "learning_rate": 1.4583333333333335e-06,
      "loss": 0.9865,
      "step": 22
    },
    {
      "epoch": 0.03208556149732621,
      "grad_norm": 4.357883881047429,
      "learning_rate": 1.527777777777778e-06,
      "loss": 0.9614,
      "step": 23
    },
    {
      "epoch": 0.03348058591025343,
      "grad_norm": 3.6268224514828318,
      "learning_rate": 1.5972222222222221e-06,
      "loss": 0.8257,
      "step": 24
    },
    {
      "epoch": 0.03487561032318066,
      "grad_norm": 3.4656554743615047,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.9127,
      "step": 25
    },
    {
      "epoch": 0.03627063473610788,
      "grad_norm": 3.876455217129128,
      "learning_rate": 1.7361111111111112e-06,
      "loss": 0.9278,
      "step": 26
    },
    {
      "epoch": 0.03766565914903511,
      "grad_norm": 4.09517147857438,
      "learning_rate": 1.8055555555555557e-06,
      "loss": 0.9556,
      "step": 27
    },
    {
      "epoch": 0.03906068356196234,
      "grad_norm": 3.5545716555392737,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.8878,
      "step": 28
    },
    {
      "epoch": 0.04045570797488956,
      "grad_norm": 3.835208660111137,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.885,
      "step": 29
    },
    {
      "epoch": 0.04185073238781679,
      "grad_norm": 3.531205403980982,
      "learning_rate": 2.0138888888888893e-06,
      "loss": 0.886,
      "step": 30
    },
    {
      "epoch": 0.043245756800744016,
      "grad_norm": 4.231398251952698,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 0.8223,
      "step": 31
    },
    {
      "epoch": 0.04464078121367124,
      "grad_norm": 3.3069565195805386,
      "learning_rate": 2.152777777777778e-06,
      "loss": 0.8934,
      "step": 32
    },
    {
      "epoch": 0.04603580562659847,
      "grad_norm": 3.3388892924336737,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.9336,
      "step": 33
    },
    {
      "epoch": 0.04743083003952569,
      "grad_norm": 3.0257164444225895,
      "learning_rate": 2.2916666666666666e-06,
      "loss": 0.8879,
      "step": 34
    },
    {
      "epoch": 0.04882585445245292,
      "grad_norm": 3.228639273179126,
      "learning_rate": 2.361111111111111e-06,
      "loss": 0.8922,
      "step": 35
    },
    {
      "epoch": 0.050220878865380146,
      "grad_norm": 3.620966182021972,
      "learning_rate": 2.4305555555555557e-06,
      "loss": 0.9343,
      "step": 36
    },
    {
      "epoch": 0.05161590327830737,
      "grad_norm": 3.182907050050726,
      "learning_rate": 2.5e-06,
      "loss": 0.8324,
      "step": 37
    },
    {
      "epoch": 0.0530109276912346,
      "grad_norm": 3.232818032867581,
      "learning_rate": 2.5694444444444443e-06,
      "loss": 0.7873,
      "step": 38
    },
    {
      "epoch": 0.054405952104161825,
      "grad_norm": 3.2312814843076434,
      "learning_rate": 2.6388888888888893e-06,
      "loss": 0.8887,
      "step": 39
    },
    {
      "epoch": 0.05580097651708905,
      "grad_norm": 3.356427492997714,
      "learning_rate": 2.7083333333333334e-06,
      "loss": 0.8288,
      "step": 40
    },
    {
      "epoch": 0.057196000930016276,
      "grad_norm": 2.8004715481014286,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.8025,
      "step": 41
    },
    {
      "epoch": 0.058591025342943505,
      "grad_norm": 2.872064290513942,
      "learning_rate": 2.8472222222222224e-06,
      "loss": 0.7969,
      "step": 42
    },
    {
      "epoch": 0.05998604975587073,
      "grad_norm": 2.604055696429484,
      "learning_rate": 2.916666666666667e-06,
      "loss": 0.7796,
      "step": 43
    },
    {
      "epoch": 0.061381074168797956,
      "grad_norm": 2.588383255024964,
      "learning_rate": 2.986111111111111e-06,
      "loss": 0.8146,
      "step": 44
    },
    {
      "epoch": 0.06277609858172518,
      "grad_norm": 2.957391150131729,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.8706,
      "step": 45
    },
    {
      "epoch": 0.06417112299465241,
      "grad_norm": 2.7716857456581208,
      "learning_rate": 3.125e-06,
      "loss": 0.7553,
      "step": 46
    },
    {
      "epoch": 0.06556614740757963,
      "grad_norm": 2.8704927650550305,
      "learning_rate": 3.1944444444444443e-06,
      "loss": 0.835,
      "step": 47
    },
    {
      "epoch": 0.06696117182050686,
      "grad_norm": 2.926448629599372,
      "learning_rate": 3.2638888888888892e-06,
      "loss": 0.8358,
      "step": 48
    },
    {
      "epoch": 0.06835619623343409,
      "grad_norm": 2.653881772418661,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.8573,
      "step": 49
    },
    {
      "epoch": 0.06975122064636131,
      "grad_norm": 2.7680654495558796,
      "learning_rate": 3.4027777777777783e-06,
      "loss": 0.8155,
      "step": 50
    },
    {
      "epoch": 0.07114624505928854,
      "grad_norm": 2.690527707536472,
      "learning_rate": 3.4722222222222224e-06,
      "loss": 0.8718,
      "step": 51
    },
    {
      "epoch": 0.07254126947221576,
      "grad_norm": 2.539274614289565,
      "learning_rate": 3.5416666666666673e-06,
      "loss": 0.8143,
      "step": 52
    },
    {
      "epoch": 0.07393629388514299,
      "grad_norm": 2.6150798045040564,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.8129,
      "step": 53
    },
    {
      "epoch": 0.07533131829807022,
      "grad_norm": 2.77487818448589,
      "learning_rate": 3.680555555555556e-06,
      "loss": 0.8431,
      "step": 54
    },
    {
      "epoch": 0.07672634271099744,
      "grad_norm": 2.4190012994996404,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.7949,
      "step": 55
    },
    {
      "epoch": 0.07812136712392467,
      "grad_norm": 2.5658926847545143,
      "learning_rate": 3.819444444444444e-06,
      "loss": 0.8349,
      "step": 56
    },
    {
      "epoch": 0.07951639153685189,
      "grad_norm": 2.465553073665557,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.8001,
      "step": 57
    },
    {
      "epoch": 0.08091141594977912,
      "grad_norm": 2.5769268931786113,
      "learning_rate": 3.958333333333333e-06,
      "loss": 0.7853,
      "step": 58
    },
    {
      "epoch": 0.08230644036270635,
      "grad_norm": 2.762238617657559,
      "learning_rate": 4.027777777777779e-06,
      "loss": 0.8858,
      "step": 59
    },
    {
      "epoch": 0.08370146477563357,
      "grad_norm": 2.459676043622874,
      "learning_rate": 4.097222222222222e-06,
      "loss": 0.7484,
      "step": 60
    },
    {
      "epoch": 0.0850964891885608,
      "grad_norm": 2.281452061978854,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.7395,
      "step": 61
    },
    {
      "epoch": 0.08649151360148803,
      "grad_norm": 2.5945093959933923,
      "learning_rate": 4.236111111111111e-06,
      "loss": 0.8383,
      "step": 62
    },
    {
      "epoch": 0.08788653801441525,
      "grad_norm": 2.4907580232536337,
      "learning_rate": 4.305555555555556e-06,
      "loss": 0.7387,
      "step": 63
    },
    {
      "epoch": 0.08928156242734248,
      "grad_norm": 2.5357156725837844,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.8378,
      "step": 64
    },
    {
      "epoch": 0.0906765868402697,
      "grad_norm": 2.3775000896329814,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.7606,
      "step": 65
    },
    {
      "epoch": 0.09207161125319693,
      "grad_norm": 2.6462676515975367,
      "learning_rate": 4.5138888888888895e-06,
      "loss": 0.8067,
      "step": 66
    },
    {
      "epoch": 0.09346663566612416,
      "grad_norm": 2.484589313471558,
      "learning_rate": 4.583333333333333e-06,
      "loss": 0.8517,
      "step": 67
    },
    {
      "epoch": 0.09486166007905138,
      "grad_norm": 2.5630782397634135,
      "learning_rate": 4.652777777777779e-06,
      "loss": 0.8104,
      "step": 68
    },
    {
      "epoch": 0.0962566844919786,
      "grad_norm": 2.603099792013807,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.7562,
      "step": 69
    },
    {
      "epoch": 0.09765170890490583,
      "grad_norm": 2.458040997496572,
      "learning_rate": 4.791666666666668e-06,
      "loss": 0.7245,
      "step": 70
    },
    {
      "epoch": 0.09904673331783306,
      "grad_norm": 2.6548832786314387,
      "learning_rate": 4.861111111111111e-06,
      "loss": 0.7604,
      "step": 71
    },
    {
      "epoch": 0.10044175773076029,
      "grad_norm": 2.5407350711188137,
      "learning_rate": 4.930555555555556e-06,
      "loss": 0.7614,
      "step": 72
    },
    {
      "epoch": 0.10183678214368752,
      "grad_norm": 2.466098001195273,
      "learning_rate": 5e-06,
      "loss": 0.7708,
      "step": 73
    },
    {
      "epoch": 0.10323180655661474,
      "grad_norm": 2.4710838556933634,
      "learning_rate": 5.069444444444445e-06,
      "loss": 0.781,
      "step": 74
    },
    {
      "epoch": 0.10462683096954196,
      "grad_norm": 2.5745603388615788,
      "learning_rate": 5.138888888888889e-06,
      "loss": 0.7617,
      "step": 75
    },
    {
      "epoch": 0.1060218553824692,
      "grad_norm": 2.4760378463602617,
      "learning_rate": 5.208333333333334e-06,
      "loss": 0.6586,
      "step": 76
    },
    {
      "epoch": 0.10741687979539642,
      "grad_norm": 2.1046185393371637,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.6515,
      "step": 77
    },
    {
      "epoch": 0.10881190420832365,
      "grad_norm": 2.57807326774727,
      "learning_rate": 5.347222222222222e-06,
      "loss": 0.7987,
      "step": 78
    },
    {
      "epoch": 0.11020692862125087,
      "grad_norm": 2.4541665474964884,
      "learning_rate": 5.416666666666667e-06,
      "loss": 0.7797,
      "step": 79
    },
    {
      "epoch": 0.1116019530341781,
      "grad_norm": 2.7192596934225883,
      "learning_rate": 5.486111111111112e-06,
      "loss": 0.8144,
      "step": 80
    },
    {
      "epoch": 0.11299697744710532,
      "grad_norm": 2.587955726978526,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.7718,
      "step": 81
    },
    {
      "epoch": 0.11439200186003255,
      "grad_norm": 2.8624670000905508,
      "learning_rate": 5.625e-06,
      "loss": 0.8642,
      "step": 82
    },
    {
      "epoch": 0.11578702627295978,
      "grad_norm": 2.3985540385123785,
      "learning_rate": 5.694444444444445e-06,
      "loss": 0.7585,
      "step": 83
    },
    {
      "epoch": 0.11718205068588701,
      "grad_norm": 2.4347959569209743,
      "learning_rate": 5.7638888888888886e-06,
      "loss": 0.7652,
      "step": 84
    },
    {
      "epoch": 0.11857707509881422,
      "grad_norm": 2.472216781380437,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.7593,
      "step": 85
    },
    {
      "epoch": 0.11997209951174145,
      "grad_norm": 2.3683241185527684,
      "learning_rate": 5.9027777777777785e-06,
      "loss": 0.6797,
      "step": 86
    },
    {
      "epoch": 0.12136712392466868,
      "grad_norm": 2.585048523140377,
      "learning_rate": 5.972222222222222e-06,
      "loss": 0.8135,
      "step": 87
    },
    {
      "epoch": 0.12276214833759591,
      "grad_norm": 2.583706669085246,
      "learning_rate": 6.041666666666667e-06,
      "loss": 0.8272,
      "step": 88
    },
    {
      "epoch": 0.12415717275052314,
      "grad_norm": 2.4831147364219253,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.8588,
      "step": 89
    },
    {
      "epoch": 0.12555219716345037,
      "grad_norm": 2.372052290928504,
      "learning_rate": 6.180555555555557e-06,
      "loss": 0.7559,
      "step": 90
    },
    {
      "epoch": 0.12694722157637758,
      "grad_norm": 2.3693742709173997,
      "learning_rate": 6.25e-06,
      "loss": 0.7523,
      "step": 91
    },
    {
      "epoch": 0.12834224598930483,
      "grad_norm": 2.56593824677806,
      "learning_rate": 6.319444444444445e-06,
      "loss": 0.7976,
      "step": 92
    },
    {
      "epoch": 0.12973727040223204,
      "grad_norm": 2.6643468086221485,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.7603,
      "step": 93
    },
    {
      "epoch": 0.13113229481515926,
      "grad_norm": 2.431324445820708,
      "learning_rate": 6.458333333333334e-06,
      "loss": 0.7177,
      "step": 94
    },
    {
      "epoch": 0.1325273192280865,
      "grad_norm": 2.0286167424640786,
      "learning_rate": 6.5277777777777784e-06,
      "loss": 0.6637,
      "step": 95
    },
    {
      "epoch": 0.1339223436410137,
      "grad_norm": 2.381250278728561,
      "learning_rate": 6.597222222222223e-06,
      "loss": 0.7543,
      "step": 96
    },
    {
      "epoch": 0.13531736805394096,
      "grad_norm": 2.5224874846398304,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.748,
      "step": 97
    },
    {
      "epoch": 0.13671239246686817,
      "grad_norm": 2.367382093641696,
      "learning_rate": 6.736111111111112e-06,
      "loss": 0.7078,
      "step": 98
    },
    {
      "epoch": 0.13810741687979539,
      "grad_norm": 2.2891454288500785,
      "learning_rate": 6.8055555555555566e-06,
      "loss": 0.7778,
      "step": 99
    },
    {
      "epoch": 0.13950244129272263,
      "grad_norm": 2.326427675547096,
      "learning_rate": 6.875e-06,
      "loss": 0.7,
      "step": 100
    },
    {
      "epoch": 0.14089746570564984,
      "grad_norm": 2.43784471948663,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.7845,
      "step": 101
    },
    {
      "epoch": 0.1422924901185771,
      "grad_norm": 2.434928494698733,
      "learning_rate": 7.013888888888889e-06,
      "loss": 0.7741,
      "step": 102
    },
    {
      "epoch": 0.1436875145315043,
      "grad_norm": 2.544677188997524,
      "learning_rate": 7.083333333333335e-06,
      "loss": 0.7321,
      "step": 103
    },
    {
      "epoch": 0.14508253894443152,
      "grad_norm": 2.4317471320917314,
      "learning_rate": 7.152777777777778e-06,
      "loss": 0.7301,
      "step": 104
    },
    {
      "epoch": 0.14647756335735876,
      "grad_norm": 2.3758040864865535,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.7168,
      "step": 105
    },
    {
      "epoch": 0.14787258777028597,
      "grad_norm": 2.427323974558636,
      "learning_rate": 7.291666666666667e-06,
      "loss": 0.7122,
      "step": 106
    },
    {
      "epoch": 0.14926761218321322,
      "grad_norm": 2.5718099527792515,
      "learning_rate": 7.361111111111112e-06,
      "loss": 0.7777,
      "step": 107
    },
    {
      "epoch": 0.15066263659614043,
      "grad_norm": 2.3984442193810303,
      "learning_rate": 7.4305555555555565e-06,
      "loss": 0.76,
      "step": 108
    },
    {
      "epoch": 0.15205766100906765,
      "grad_norm": 2.809886900720292,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9409,
      "step": 109
    },
    {
      "epoch": 0.1534526854219949,
      "grad_norm": 2.1909629178912735,
      "learning_rate": 7.569444444444445e-06,
      "loss": 0.6928,
      "step": 110
    },
    {
      "epoch": 0.1548477098349221,
      "grad_norm": 2.299999050133801,
      "learning_rate": 7.638888888888888e-06,
      "loss": 0.7331,
      "step": 111
    },
    {
      "epoch": 0.15624273424784935,
      "grad_norm": 2.4657027253176658,
      "learning_rate": 7.708333333333334e-06,
      "loss": 0.7734,
      "step": 112
    },
    {
      "epoch": 0.15763775866077656,
      "grad_norm": 2.499003482696436,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.7568,
      "step": 113
    },
    {
      "epoch": 0.15903278307370378,
      "grad_norm": 2.556353786561783,
      "learning_rate": 7.847222222222223e-06,
      "loss": 0.7813,
      "step": 114
    },
    {
      "epoch": 0.16042780748663102,
      "grad_norm": 2.398942685028162,
      "learning_rate": 7.916666666666667e-06,
      "loss": 0.773,
      "step": 115
    },
    {
      "epoch": 0.16182283189955823,
      "grad_norm": 2.437022751579472,
      "learning_rate": 7.986111111111112e-06,
      "loss": 0.7423,
      "step": 116
    },
    {
      "epoch": 0.16321785631248548,
      "grad_norm": 2.5361969141214753,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.8085,
      "step": 117
    },
    {
      "epoch": 0.1646128807254127,
      "grad_norm": 2.5686473875754148,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.7602,
      "step": 118
    },
    {
      "epoch": 0.16600790513833993,
      "grad_norm": 2.6355454558922786,
      "learning_rate": 8.194444444444445e-06,
      "loss": 0.7348,
      "step": 119
    },
    {
      "epoch": 0.16740292955126715,
      "grad_norm": 2.5297512188295457,
      "learning_rate": 8.263888888888888e-06,
      "loss": 0.7398,
      "step": 120
    },
    {
      "epoch": 0.16879795396419436,
      "grad_norm": 2.4649882854746354,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.7636,
      "step": 121
    },
    {
      "epoch": 0.1701929783771216,
      "grad_norm": 2.4209538379807443,
      "learning_rate": 8.402777777777779e-06,
      "loss": 0.7799,
      "step": 122
    },
    {
      "epoch": 0.17158800279004882,
      "grad_norm": 2.7076603164565665,
      "learning_rate": 8.472222222222223e-06,
      "loss": 0.8096,
      "step": 123
    },
    {
      "epoch": 0.17298302720297606,
      "grad_norm": 2.5198462673809643,
      "learning_rate": 8.541666666666666e-06,
      "loss": 0.7566,
      "step": 124
    },
    {
      "epoch": 0.17437805161590328,
      "grad_norm": 2.3301540316911935,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.7885,
      "step": 125
    },
    {
      "epoch": 0.1757730760288305,
      "grad_norm": 2.528981320323845,
      "learning_rate": 8.680555555555557e-06,
      "loss": 0.7747,
      "step": 126
    },
    {
      "epoch": 0.17716810044175774,
      "grad_norm": 2.6228409614962938,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.7827,
      "step": 127
    },
    {
      "epoch": 0.17856312485468495,
      "grad_norm": 2.3530092565699805,
      "learning_rate": 8.819444444444445e-06,
      "loss": 0.775,
      "step": 128
    },
    {
      "epoch": 0.1799581492676122,
      "grad_norm": 2.4030846577696834,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.8468,
      "step": 129
    },
    {
      "epoch": 0.1813531736805394,
      "grad_norm": 2.397665315329076,
      "learning_rate": 8.958333333333334e-06,
      "loss": 0.7555,
      "step": 130
    },
    {
      "epoch": 0.18274819809346662,
      "grad_norm": 2.4944187750790165,
      "learning_rate": 9.027777777777779e-06,
      "loss": 0.6983,
      "step": 131
    },
    {
      "epoch": 0.18414322250639387,
      "grad_norm": 2.4633848637523266,
      "learning_rate": 9.097222222222223e-06,
      "loss": 0.8095,
      "step": 132
    },
    {
      "epoch": 0.18553824691932108,
      "grad_norm": 2.4514234526609755,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.7669,
      "step": 133
    },
    {
      "epoch": 0.18693327133224832,
      "grad_norm": 2.2348180975167002,
      "learning_rate": 9.236111111111112e-06,
      "loss": 0.7445,
      "step": 134
    },
    {
      "epoch": 0.18832829574517554,
      "grad_norm": 2.7654592189288127,
      "learning_rate": 9.305555555555557e-06,
      "loss": 0.8222,
      "step": 135
    },
    {
      "epoch": 0.18972332015810275,
      "grad_norm": 2.657505427715525,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.7993,
      "step": 136
    },
    {
      "epoch": 0.19111834457103,
      "grad_norm": 2.4757416453669037,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.7553,
      "step": 137
    },
    {
      "epoch": 0.1925133689839572,
      "grad_norm": 2.4710024870459546,
      "learning_rate": 9.51388888888889e-06,
      "loss": 0.7893,
      "step": 138
    },
    {
      "epoch": 0.19390839339688445,
      "grad_norm": 2.484340628816311,
      "learning_rate": 9.583333333333335e-06,
      "loss": 0.7435,
      "step": 139
    },
    {
      "epoch": 0.19530341780981167,
      "grad_norm": 2.3567750021925864,
      "learning_rate": 9.652777777777779e-06,
      "loss": 0.7991,
      "step": 140
    },
    {
      "epoch": 0.1966984422227389,
      "grad_norm": 2.6403706679259376,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.8204,
      "step": 141
    },
    {
      "epoch": 0.19809346663566613,
      "grad_norm": 2.7772690056581406,
      "learning_rate": 9.791666666666666e-06,
      "loss": 0.764,
      "step": 142
    },
    {
      "epoch": 0.19948849104859334,
      "grad_norm": 2.38948338269402,
      "learning_rate": 9.861111111111112e-06,
      "loss": 0.7588,
      "step": 143
    },
    {
      "epoch": 0.20088351546152058,
      "grad_norm": 2.364591011786826,
      "learning_rate": 9.930555555555557e-06,
      "loss": 0.7382,
      "step": 144
    },
    {
      "epoch": 0.2022785398744478,
      "grad_norm": 2.39460109071618,
      "learning_rate": 1e-05,
      "loss": 0.8297,
      "step": 145
    },
    {
      "epoch": 0.20367356428737504,
      "grad_norm": 2.385277772966096,
      "learning_rate": 9.99998512668374e-06,
      "loss": 0.762,
      "step": 146
    },
    {
      "epoch": 0.20506858870030226,
      "grad_norm": 2.550269196061235,
      "learning_rate": 9.999940506823446e-06,
      "loss": 0.8577,
      "step": 147
    },
    {
      "epoch": 0.20646361311322947,
      "grad_norm": 2.3828419813394768,
      "learning_rate": 9.999866140684576e-06,
      "loss": 0.7526,
      "step": 148
    },
    {
      "epoch": 0.20785863752615671,
      "grad_norm": 2.674659581827359,
      "learning_rate": 9.999762028709558e-06,
      "loss": 0.8444,
      "step": 149
    },
    {
      "epoch": 0.20925366193908393,
      "grad_norm": 2.438042726374861,
      "learning_rate": 9.999628171517788e-06,
      "loss": 0.7834,
      "step": 150
    },
    {
      "epoch": 0.21064868635201117,
      "grad_norm": 2.527684334680155,
      "learning_rate": 9.999464569905628e-06,
      "loss": 0.778,
      "step": 151
    },
    {
      "epoch": 0.2120437107649384,
      "grad_norm": 2.164235216275893,
      "learning_rate": 9.999271224846397e-06,
      "loss": 0.7357,
      "step": 152
    },
    {
      "epoch": 0.2134387351778656,
      "grad_norm": 2.568663637972533,
      "learning_rate": 9.999048137490364e-06,
      "loss": 0.7864,
      "step": 153
    },
    {
      "epoch": 0.21483375959079284,
      "grad_norm": 2.328376334217868,
      "learning_rate": 9.998795309164754e-06,
      "loss": 0.741,
      "step": 154
    },
    {
      "epoch": 0.21622878400372006,
      "grad_norm": 2.3440798093116815,
      "learning_rate": 9.99851274137372e-06,
      "loss": 0.752,
      "step": 155
    },
    {
      "epoch": 0.2176238084166473,
      "grad_norm": 2.3252703261044587,
      "learning_rate": 9.998200435798355e-06,
      "loss": 0.7822,
      "step": 156
    },
    {
      "epoch": 0.21901883282957452,
      "grad_norm": 2.1066008364671944,
      "learning_rate": 9.997858394296666e-06,
      "loss": 0.6879,
      "step": 157
    },
    {
      "epoch": 0.22041385724250173,
      "grad_norm": 2.4582290754964067,
      "learning_rate": 9.997486618903566e-06,
      "loss": 0.7594,
      "step": 158
    },
    {
      "epoch": 0.22180888165542897,
      "grad_norm": 2.202899892319989,
      "learning_rate": 9.99708511183087e-06,
      "loss": 0.7461,
      "step": 159
    },
    {
      "epoch": 0.2232039060683562,
      "grad_norm": 2.367202675165834,
      "learning_rate": 9.996653875467278e-06,
      "loss": 0.7755,
      "step": 160
    },
    {
      "epoch": 0.22459893048128343,
      "grad_norm": 2.4283257637167743,
      "learning_rate": 9.99619291237835e-06,
      "loss": 0.7694,
      "step": 161
    },
    {
      "epoch": 0.22599395489421065,
      "grad_norm": 2.393310544522537,
      "learning_rate": 9.99570222530651e-06,
      "loss": 0.7624,
      "step": 162
    },
    {
      "epoch": 0.22738897930713786,
      "grad_norm": 2.456620826384989,
      "learning_rate": 9.995181817171016e-06,
      "loss": 0.7943,
      "step": 163
    },
    {
      "epoch": 0.2287840037200651,
      "grad_norm": 2.33795920230864,
      "learning_rate": 9.994631691067941e-06,
      "loss": 0.7064,
      "step": 164
    },
    {
      "epoch": 0.23017902813299232,
      "grad_norm": 2.380769585125721,
      "learning_rate": 9.994051850270172e-06,
      "loss": 0.7785,
      "step": 165
    },
    {
      "epoch": 0.23157405254591956,
      "grad_norm": 2.0565750954623954,
      "learning_rate": 9.993442298227365e-06,
      "loss": 0.6899,
      "step": 166
    },
    {
      "epoch": 0.23296907695884678,
      "grad_norm": 2.4629114081369154,
      "learning_rate": 9.992803038565947e-06,
      "loss": 0.7318,
      "step": 167
    },
    {
      "epoch": 0.23436410137177402,
      "grad_norm": 2.5260371511690227,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.7492,
      "step": 168
    },
    {
      "epoch": 0.23575912578470123,
      "grad_norm": 2.2870342101598573,
      "learning_rate": 9.991435411776654e-06,
      "loss": 0.7546,
      "step": 169
    },
    {
      "epoch": 0.23715415019762845,
      "grad_norm": 2.2176561807734463,
      "learning_rate": 9.990707052785236e-06,
      "loss": 0.6861,
      "step": 170
    },
    {
      "epoch": 0.2385491746105557,
      "grad_norm": 2.1930980336038015,
      "learning_rate": 9.989949002448076e-06,
      "loss": 0.7527,
      "step": 171
    },
    {
      "epoch": 0.2399441990234829,
      "grad_norm": 2.3770728495629654,
      "learning_rate": 9.98916126527506e-06,
      "loss": 0.7358,
      "step": 172
    },
    {
      "epoch": 0.24133922343641015,
      "grad_norm": 2.503959470995855,
      "learning_rate": 9.988343845952697e-06,
      "loss": 0.7925,
      "step": 173
    },
    {
      "epoch": 0.24273424784933736,
      "grad_norm": 2.295115004392427,
      "learning_rate": 9.98749674934408e-06,
      "loss": 0.7493,
      "step": 174
    },
    {
      "epoch": 0.24412927226226458,
      "grad_norm": 2.3785544817722912,
      "learning_rate": 9.98661998048886e-06,
      "loss": 0.784,
      "step": 175
    },
    {
      "epoch": 0.24552429667519182,
      "grad_norm": 2.460048362058096,
      "learning_rate": 9.985713544603229e-06,
      "loss": 0.7982,
      "step": 176
    },
    {
      "epoch": 0.24691932108811904,
      "grad_norm": 2.5187516427864503,
      "learning_rate": 9.984777447079861e-06,
      "loss": 0.7899,
      "step": 177
    },
    {
      "epoch": 0.24831434550104628,
      "grad_norm": 2.4375799853969387,
      "learning_rate": 9.983811693487913e-06,
      "loss": 0.8014,
      "step": 178
    },
    {
      "epoch": 0.2497093699139735,
      "grad_norm": 2.262358785483013,
      "learning_rate": 9.982816289572966e-06,
      "loss": 0.7308,
      "step": 179
    },
    {
      "epoch": 0.25110439432690074,
      "grad_norm": 2.4027476974294504,
      "learning_rate": 9.981791241257001e-06,
      "loss": 0.7808,
      "step": 180
    },
    {
      "epoch": 0.25249941873982795,
      "grad_norm": 2.3785122102380387,
      "learning_rate": 9.980736554638367e-06,
      "loss": 0.7762,
      "step": 181
    },
    {
      "epoch": 0.25389444315275517,
      "grad_norm": 2.2101376692736086,
      "learning_rate": 9.979652235991738e-06,
      "loss": 0.7199,
      "step": 182
    },
    {
      "epoch": 0.2552894675656824,
      "grad_norm": 2.3853547321892594,
      "learning_rate": 9.978538291768083e-06,
      "loss": 0.7662,
      "step": 183
    },
    {
      "epoch": 0.25668449197860965,
      "grad_norm": 2.4629393864421103,
      "learning_rate": 9.977394728594614e-06,
      "loss": 0.7372,
      "step": 184
    },
    {
      "epoch": 0.25807951639153687,
      "grad_norm": 2.3117069679784077,
      "learning_rate": 9.976221553274767e-06,
      "loss": 0.7159,
      "step": 185
    },
    {
      "epoch": 0.2594745408044641,
      "grad_norm": 2.061689977353346,
      "learning_rate": 9.975018772788144e-06,
      "loss": 0.6528,
      "step": 186
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 2.276599208142271,
      "learning_rate": 9.973786394290475e-06,
      "loss": 0.8088,
      "step": 187
    },
    {
      "epoch": 0.2622645896303185,
      "grad_norm": 2.3657045018285308,
      "learning_rate": 9.972524425113585e-06,
      "loss": 0.7563,
      "step": 188
    },
    {
      "epoch": 0.2636596140432458,
      "grad_norm": 2.1739723205164907,
      "learning_rate": 9.971232872765344e-06,
      "loss": 0.717,
      "step": 189
    },
    {
      "epoch": 0.265054638456173,
      "grad_norm": 2.2224227720319907,
      "learning_rate": 9.969911744929613e-06,
      "loss": 0.7291,
      "step": 190
    },
    {
      "epoch": 0.2664496628691002,
      "grad_norm": 2.312701932028756,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.7788,
      "step": 191
    },
    {
      "epoch": 0.2678446872820274,
      "grad_norm": 2.2886619359808362,
      "learning_rate": 9.967180794410878e-06,
      "loss": 0.7303,
      "step": 192
    },
    {
      "epoch": 0.26923971169495464,
      "grad_norm": 2.2800673609129887,
      "learning_rate": 9.96577098797519e-06,
      "loss": 0.7756,
      "step": 193
    },
    {
      "epoch": 0.2706347361078819,
      "grad_norm": 2.3599405553425474,
      "learning_rate": 9.96433163854655e-06,
      "loss": 0.7285,
      "step": 194
    },
    {
      "epoch": 0.2720297605208091,
      "grad_norm": 2.3035254395464335,
      "learning_rate": 9.962862754688118e-06,
      "loss": 0.7317,
      "step": 195
    },
    {
      "epoch": 0.27342478493373634,
      "grad_norm": 2.1983487142998337,
      "learning_rate": 9.961364345138764e-06,
      "loss": 0.8082,
      "step": 196
    },
    {
      "epoch": 0.27481980934666356,
      "grad_norm": 2.1891940591475256,
      "learning_rate": 9.959836418813016e-06,
      "loss": 0.7296,
      "step": 197
    },
    {
      "epoch": 0.27621483375959077,
      "grad_norm": 2.3364824298619107,
      "learning_rate": 9.958278984801003e-06,
      "loss": 0.735,
      "step": 198
    },
    {
      "epoch": 0.27760985817251804,
      "grad_norm": 2.2975941856825917,
      "learning_rate": 9.956692052368416e-06,
      "loss": 0.7233,
      "step": 199
    },
    {
      "epoch": 0.27900488258544526,
      "grad_norm": 2.3372101943251726,
      "learning_rate": 9.955075630956425e-06,
      "loss": 0.7404,
      "step": 200
    },
    {
      "epoch": 0.2803999069983725,
      "grad_norm": 2.1853548847254145,
      "learning_rate": 9.953429730181653e-06,
      "loss": 0.7436,
      "step": 201
    },
    {
      "epoch": 0.2817949314112997,
      "grad_norm": 2.224468630946785,
      "learning_rate": 9.951754359836104e-06,
      "loss": 0.7623,
      "step": 202
    },
    {
      "epoch": 0.2831899558242269,
      "grad_norm": 2.2750380642685286,
      "learning_rate": 9.950049529887098e-06,
      "loss": 0.7888,
      "step": 203
    },
    {
      "epoch": 0.2845849802371542,
      "grad_norm": 2.297526158131741,
      "learning_rate": 9.94831525047723e-06,
      "loss": 0.7366,
      "step": 204
    },
    {
      "epoch": 0.2859800046500814,
      "grad_norm": 2.3677703079300203,
      "learning_rate": 9.94655153192429e-06,
      "loss": 0.7912,
      "step": 205
    },
    {
      "epoch": 0.2873750290630086,
      "grad_norm": 2.539938055319808,
      "learning_rate": 9.944758384721218e-06,
      "loss": 0.7649,
      "step": 206
    },
    {
      "epoch": 0.2887700534759358,
      "grad_norm": 2.1358951407571625,
      "learning_rate": 9.94293581953603e-06,
      "loss": 0.7197,
      "step": 207
    },
    {
      "epoch": 0.29016507788886303,
      "grad_norm": 2.3752369254146948,
      "learning_rate": 9.941083847211765e-06,
      "loss": 0.7811,
      "step": 208
    },
    {
      "epoch": 0.2915601023017903,
      "grad_norm": 2.2366003263543974,
      "learning_rate": 9.939202478766408e-06,
      "loss": 0.7838,
      "step": 209
    },
    {
      "epoch": 0.2929551267147175,
      "grad_norm": 2.353138945767122,
      "learning_rate": 9.937291725392835e-06,
      "loss": 0.791,
      "step": 210
    },
    {
      "epoch": 0.29435015112764473,
      "grad_norm": 2.26497443267959,
      "learning_rate": 9.935351598458743e-06,
      "loss": 0.7473,
      "step": 211
    },
    {
      "epoch": 0.29574517554057195,
      "grad_norm": 2.517368832160316,
      "learning_rate": 9.933382109506577e-06,
      "loss": 0.7834,
      "step": 212
    },
    {
      "epoch": 0.29714019995349916,
      "grad_norm": 2.3571681636390194,
      "learning_rate": 9.931383270253475e-06,
      "loss": 0.759,
      "step": 213
    },
    {
      "epoch": 0.29853522436642643,
      "grad_norm": 2.2330737534866283,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.7235,
      "step": 214
    },
    {
      "epoch": 0.29993024877935365,
      "grad_norm": 2.348646725591813,
      "learning_rate": 9.927297588585984e-06,
      "loss": 0.7873,
      "step": 215
    },
    {
      "epoch": 0.30132527319228086,
      "grad_norm": 2.2883655282601665,
      "learning_rate": 9.925210770478653e-06,
      "loss": 0.7567,
      "step": 216
    },
    {
      "epoch": 0.3027202976052081,
      "grad_norm": 2.365988328655639,
      "learning_rate": 9.923094650684346e-06,
      "loss": 0.8062,
      "step": 217
    },
    {
      "epoch": 0.3041153220181353,
      "grad_norm": 2.1815309873800106,
      "learning_rate": 9.92094924179255e-06,
      "loss": 0.6704,
      "step": 218
    },
    {
      "epoch": 0.30551034643106256,
      "grad_norm": 2.2684610053286165,
      "learning_rate": 9.918774556567007e-06,
      "loss": 0.7228,
      "step": 219
    },
    {
      "epoch": 0.3069053708439898,
      "grad_norm": 2.1987712868445985,
      "learning_rate": 9.916570607945627e-06,
      "loss": 0.7303,
      "step": 220
    },
    {
      "epoch": 0.308300395256917,
      "grad_norm": 2.1467480973519466,
      "learning_rate": 9.914337409040418e-06,
      "loss": 0.7773,
      "step": 221
    },
    {
      "epoch": 0.3096954196698442,
      "grad_norm": 2.1981875970164015,
      "learning_rate": 9.912074973137413e-06,
      "loss": 0.7558,
      "step": 222
    },
    {
      "epoch": 0.3110904440827714,
      "grad_norm": 2.370866601678729,
      "learning_rate": 9.90978331369658e-06,
      "loss": 0.8031,
      "step": 223
    },
    {
      "epoch": 0.3124854684956987,
      "grad_norm": 2.0958671373974656,
      "learning_rate": 9.90746244435175e-06,
      "loss": 0.7119,
      "step": 224
    },
    {
      "epoch": 0.3138804929086259,
      "grad_norm": 2.276250356937496,
      "learning_rate": 9.905112378910532e-06,
      "loss": 0.7132,
      "step": 225
    },
    {
      "epoch": 0.3152755173215531,
      "grad_norm": 2.349183832747072,
      "learning_rate": 9.902733131354232e-06,
      "loss": 0.7279,
      "step": 226
    },
    {
      "epoch": 0.31667054173448034,
      "grad_norm": 2.2645947401920865,
      "learning_rate": 9.900324715837772e-06,
      "loss": 0.772,
      "step": 227
    },
    {
      "epoch": 0.31806556614740755,
      "grad_norm": 2.420057647782548,
      "learning_rate": 9.897887146689603e-06,
      "loss": 0.7967,
      "step": 228
    },
    {
      "epoch": 0.3194605905603348,
      "grad_norm": 2.09035311023506,
      "learning_rate": 9.895420438411616e-06,
      "loss": 0.7409,
      "step": 229
    },
    {
      "epoch": 0.32085561497326204,
      "grad_norm": 2.128576564240619,
      "learning_rate": 9.892924605679066e-06,
      "loss": 0.7108,
      "step": 230
    },
    {
      "epoch": 0.32225063938618925,
      "grad_norm": 2.3557840243006303,
      "learning_rate": 9.890399663340478e-06,
      "loss": 0.7989,
      "step": 231
    },
    {
      "epoch": 0.32364566379911647,
      "grad_norm": 2.262147642657486,
      "learning_rate": 9.887845626417556e-06,
      "loss": 0.7881,
      "step": 232
    },
    {
      "epoch": 0.32504068821204374,
      "grad_norm": 2.308747699785618,
      "learning_rate": 9.885262510105102e-06,
      "loss": 0.7044,
      "step": 233
    },
    {
      "epoch": 0.32643571262497095,
      "grad_norm": 2.1776401707207924,
      "learning_rate": 9.882650329770919e-06,
      "loss": 0.789,
      "step": 234
    },
    {
      "epoch": 0.32783073703789817,
      "grad_norm": 2.1837014708981872,
      "learning_rate": 9.880009100955716e-06,
      "loss": 0.7991,
      "step": 235
    },
    {
      "epoch": 0.3292257614508254,
      "grad_norm": 2.344957858708995,
      "learning_rate": 9.877338839373032e-06,
      "loss": 0.8075,
      "step": 236
    },
    {
      "epoch": 0.3306207858637526,
      "grad_norm": 2.3077392669789565,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.7442,
      "step": 237
    },
    {
      "epoch": 0.33201581027667987,
      "grad_norm": 2.231581492055234,
      "learning_rate": 9.871911281622869e-06,
      "loss": 0.7426,
      "step": 238
    },
    {
      "epoch": 0.3334108346896071,
      "grad_norm": 2.326813603280547,
      "learning_rate": 9.869154017745706e-06,
      "loss": 0.7747,
      "step": 239
    },
    {
      "epoch": 0.3348058591025343,
      "grad_norm": 2.086609390067158,
      "learning_rate": 9.866367785681493e-06,
      "loss": 0.7518,
      "step": 240
    },
    {
      "epoch": 0.3362008835154615,
      "grad_norm": 2.363099222001944,
      "learning_rate": 9.863552602006435e-06,
      "loss": 0.7724,
      "step": 241
    },
    {
      "epoch": 0.3375959079283887,
      "grad_norm": 2.077033177420949,
      "learning_rate": 9.860708483468975e-06,
      "loss": 0.7175,
      "step": 242
    },
    {
      "epoch": 0.338990932341316,
      "grad_norm": 2.246936114717258,
      "learning_rate": 9.857835446989708e-06,
      "loss": 0.7709,
      "step": 243
    },
    {
      "epoch": 0.3403859567542432,
      "grad_norm": 2.1831810988074145,
      "learning_rate": 9.854933509661264e-06,
      "loss": 0.7632,
      "step": 244
    },
    {
      "epoch": 0.34178098116717043,
      "grad_norm": 2.452152625128349,
      "learning_rate": 9.852002688748214e-06,
      "loss": 0.7713,
      "step": 245
    },
    {
      "epoch": 0.34317600558009764,
      "grad_norm": 2.101403660363672,
      "learning_rate": 9.84904300168697e-06,
      "loss": 0.7556,
      "step": 246
    },
    {
      "epoch": 0.34457102999302486,
      "grad_norm": 2.196554440781837,
      "learning_rate": 9.84605446608568e-06,
      "loss": 0.7352,
      "step": 247
    },
    {
      "epoch": 0.34596605440595213,
      "grad_norm": 2.1581190947183906,
      "learning_rate": 9.843037099724111e-06,
      "loss": 0.7505,
      "step": 248
    },
    {
      "epoch": 0.34736107881887934,
      "grad_norm": 2.4174065292937503,
      "learning_rate": 9.839990920553566e-06,
      "loss": 0.7461,
      "step": 249
    },
    {
      "epoch": 0.34875610323180656,
      "grad_norm": 2.2765578992139663,
      "learning_rate": 9.83691594669676e-06,
      "loss": 0.7607,
      "step": 250
    },
    {
      "epoch": 0.3501511276447338,
      "grad_norm": 2.2025411404466735,
      "learning_rate": 9.833812196447712e-06,
      "loss": 0.7696,
      "step": 251
    },
    {
      "epoch": 0.351546152057661,
      "grad_norm": 2.316141072305499,
      "learning_rate": 9.830679688271647e-06,
      "loss": 0.7743,
      "step": 252
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 2.2989681090795266,
      "learning_rate": 9.827518440804882e-06,
      "loss": 0.8095,
      "step": 253
    },
    {
      "epoch": 0.3543362008835155,
      "grad_norm": 2.4056158179853884,
      "learning_rate": 9.824328472854706e-06,
      "loss": 0.7212,
      "step": 254
    },
    {
      "epoch": 0.3557312252964427,
      "grad_norm": 2.2264588040031716,
      "learning_rate": 9.821109803399283e-06,
      "loss": 0.7479,
      "step": 255
    },
    {
      "epoch": 0.3571262497093699,
      "grad_norm": 2.1513171263920223,
      "learning_rate": 9.817862451587527e-06,
      "loss": 0.6563,
      "step": 256
    },
    {
      "epoch": 0.3585212741222971,
      "grad_norm": 2.231437998260098,
      "learning_rate": 9.814586436738998e-06,
      "loss": 0.6965,
      "step": 257
    },
    {
      "epoch": 0.3599162985352244,
      "grad_norm": 2.164610078417342,
      "learning_rate": 9.811281778343773e-06,
      "loss": 0.7445,
      "step": 258
    },
    {
      "epoch": 0.3613113229481516,
      "grad_norm": 2.532376368559481,
      "learning_rate": 9.807948496062344e-06,
      "loss": 0.7725,
      "step": 259
    },
    {
      "epoch": 0.3627063473610788,
      "grad_norm": 2.339159697690894,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.7535,
      "step": 260
    },
    {
      "epoch": 0.36410137177400603,
      "grad_norm": 2.166756913084875,
      "learning_rate": 9.801196139334195e-06,
      "loss": 0.7147,
      "step": 261
    },
    {
      "epoch": 0.36549639618693325,
      "grad_norm": 2.139276056908412,
      "learning_rate": 9.797777105059448e-06,
      "loss": 0.7404,
      "step": 262
    },
    {
      "epoch": 0.3668914205998605,
      "grad_norm": 2.4733715470175284,
      "learning_rate": 9.794329527242209e-06,
      "loss": 0.8087,
      "step": 263
    },
    {
      "epoch": 0.36828644501278773,
      "grad_norm": 2.0018490521631866,
      "learning_rate": 9.790853426393246e-06,
      "loss": 0.6991,
      "step": 264
    },
    {
      "epoch": 0.36968146942571495,
      "grad_norm": 2.1908532559835048,
      "learning_rate": 9.787348823193013e-06,
      "loss": 0.7728,
      "step": 265
    },
    {
      "epoch": 0.37107649383864216,
      "grad_norm": 2.5151311136851264,
      "learning_rate": 9.783815738491544e-06,
      "loss": 0.7751,
      "step": 266
    },
    {
      "epoch": 0.3724715182515694,
      "grad_norm": 2.2480235747495327,
      "learning_rate": 9.780254193308312e-06,
      "loss": 0.6931,
      "step": 267
    },
    {
      "epoch": 0.37386654266449665,
      "grad_norm": 2.4308792768188194,
      "learning_rate": 9.776664208832111e-06,
      "loss": 0.729,
      "step": 268
    },
    {
      "epoch": 0.37526156707742386,
      "grad_norm": 2.148993722755254,
      "learning_rate": 9.77304580642093e-06,
      "loss": 0.7566,
      "step": 269
    },
    {
      "epoch": 0.3766565914903511,
      "grad_norm": 2.2131611670886113,
      "learning_rate": 9.76939900760183e-06,
      "loss": 0.692,
      "step": 270
    },
    {
      "epoch": 0.3780516159032783,
      "grad_norm": 2.284885600630375,
      "learning_rate": 9.765723834070805e-06,
      "loss": 0.7334,
      "step": 271
    },
    {
      "epoch": 0.3794466403162055,
      "grad_norm": 2.1803327517414637,
      "learning_rate": 9.762020307692662e-06,
      "loss": 0.7022,
      "step": 272
    },
    {
      "epoch": 0.3808416647291328,
      "grad_norm": 2.0737093304589997,
      "learning_rate": 9.75828845050089e-06,
      "loss": 0.7178,
      "step": 273
    },
    {
      "epoch": 0.38223668914206,
      "grad_norm": 2.1969087943561023,
      "learning_rate": 9.754528284697526e-06,
      "loss": 0.8056,
      "step": 274
    },
    {
      "epoch": 0.3836317135549872,
      "grad_norm": 2.0470925124450723,
      "learning_rate": 9.750739832653023e-06,
      "loss": 0.7027,
      "step": 275
    },
    {
      "epoch": 0.3850267379679144,
      "grad_norm": 2.206936141731334,
      "learning_rate": 9.74692311690612e-06,
      "loss": 0.7628,
      "step": 276
    },
    {
      "epoch": 0.38642176238084164,
      "grad_norm": 2.259665231259676,
      "learning_rate": 9.743078160163703e-06,
      "loss": 0.8171,
      "step": 277
    },
    {
      "epoch": 0.3878167867937689,
      "grad_norm": 2.069519176675941,
      "learning_rate": 9.73920498530068e-06,
      "loss": 0.7066,
      "step": 278
    },
    {
      "epoch": 0.3892118112066961,
      "grad_norm": 2.18198802420733,
      "learning_rate": 9.735303615359827e-06,
      "loss": 0.7137,
      "step": 279
    },
    {
      "epoch": 0.39060683561962334,
      "grad_norm": 2.065628530712166,
      "learning_rate": 9.731374073551671e-06,
      "loss": 0.7051,
      "step": 280
    },
    {
      "epoch": 0.39200186003255055,
      "grad_norm": 2.011089691307504,
      "learning_rate": 9.72741638325434e-06,
      "loss": 0.7244,
      "step": 281
    },
    {
      "epoch": 0.3933968844454778,
      "grad_norm": 2.1660029235899727,
      "learning_rate": 9.723430568013423e-06,
      "loss": 0.7099,
      "step": 282
    },
    {
      "epoch": 0.39479190885840504,
      "grad_norm": 2.3105910990971426,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.793,
      "step": 283
    },
    {
      "epoch": 0.39618693327133225,
      "grad_norm": 2.2318565333793923,
      "learning_rate": 9.715374657719685e-06,
      "loss": 0.7452,
      "step": 284
    },
    {
      "epoch": 0.39758195768425947,
      "grad_norm": 2.2160005494950563,
      "learning_rate": 9.711304610594104e-06,
      "loss": 0.7714,
      "step": 285
    },
    {
      "epoch": 0.3989769820971867,
      "grad_norm": 2.1886833487875528,
      "learning_rate": 9.707206534379133e-06,
      "loss": 0.7291,
      "step": 286
    },
    {
      "epoch": 0.40037200651011395,
      "grad_norm": 2.243344953149145,
      "learning_rate": 9.703080453455569e-06,
      "loss": 0.7543,
      "step": 287
    },
    {
      "epoch": 0.40176703092304117,
      "grad_norm": 2.0905937282876588,
      "learning_rate": 9.69892639237081e-06,
      "loss": 0.6654,
      "step": 288
    },
    {
      "epoch": 0.4031620553359684,
      "grad_norm": 1.8899757251612446,
      "learning_rate": 9.694744375838725e-06,
      "loss": 0.6204,
      "step": 289
    },
    {
      "epoch": 0.4045570797488956,
      "grad_norm": 2.2344840253872045,
      "learning_rate": 9.690534428739497e-06,
      "loss": 0.7321,
      "step": 290
    },
    {
      "epoch": 0.4059521041618228,
      "grad_norm": 2.212703648608523,
      "learning_rate": 9.68629657611947e-06,
      "loss": 0.7184,
      "step": 291
    },
    {
      "epoch": 0.4073471285747501,
      "grad_norm": 2.24263720075315,
      "learning_rate": 9.682030843191021e-06,
      "loss": 0.7453,
      "step": 292
    },
    {
      "epoch": 0.4087421529876773,
      "grad_norm": 2.37707226734299,
      "learning_rate": 9.677737255332381e-06,
      "loss": 0.7764,
      "step": 293
    },
    {
      "epoch": 0.4101371774006045,
      "grad_norm": 2.4391824660001884,
      "learning_rate": 9.67341583808751e-06,
      "loss": 0.8483,
      "step": 294
    },
    {
      "epoch": 0.41153220181353173,
      "grad_norm": 2.149031048916406,
      "learning_rate": 9.66906661716593e-06,
      "loss": 0.6946,
      "step": 295
    },
    {
      "epoch": 0.41292722622645894,
      "grad_norm": 2.181280105557748,
      "learning_rate": 9.664689618442572e-06,
      "loss": 0.726,
      "step": 296
    },
    {
      "epoch": 0.4143222506393862,
      "grad_norm": 2.438181466583836,
      "learning_rate": 9.660284867957637e-06,
      "loss": 0.7853,
      "step": 297
    },
    {
      "epoch": 0.41571727505231343,
      "grad_norm": 2.0436662273501764,
      "learning_rate": 9.655852391916418e-06,
      "loss": 0.6586,
      "step": 298
    },
    {
      "epoch": 0.41711229946524064,
      "grad_norm": 2.242275872115051,
      "learning_rate": 9.651392216689167e-06,
      "loss": 0.73,
      "step": 299
    },
    {
      "epoch": 0.41850732387816786,
      "grad_norm": 2.1669552119535487,
      "learning_rate": 9.646904368810919e-06,
      "loss": 0.7404,
      "step": 300
    },
    {
      "epoch": 0.4199023482910951,
      "grad_norm": 2.110720652569428,
      "learning_rate": 9.642388874981348e-06,
      "loss": 0.7876,
      "step": 301
    },
    {
      "epoch": 0.42129737270402234,
      "grad_norm": 2.1970190123405713,
      "learning_rate": 9.637845762064602e-06,
      "loss": 0.8062,
      "step": 302
    },
    {
      "epoch": 0.42269239711694956,
      "grad_norm": 2.1979388264310264,
      "learning_rate": 9.63327505708914e-06,
      "loss": 0.7249,
      "step": 303
    },
    {
      "epoch": 0.4240874215298768,
      "grad_norm": 2.125775928234541,
      "learning_rate": 9.628676787247582e-06,
      "loss": 0.7102,
      "step": 304
    },
    {
      "epoch": 0.425482445942804,
      "grad_norm": 2.172104374274606,
      "learning_rate": 9.624050979896533e-06,
      "loss": 0.7133,
      "step": 305
    },
    {
      "epoch": 0.4268774703557312,
      "grad_norm": 2.2758064319488254,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.7644,
      "step": 306
    },
    {
      "epoch": 0.4282724947686585,
      "grad_norm": 2.2834156413794187,
      "learning_rate": 9.614716862911388e-06,
      "loss": 0.7374,
      "step": 307
    },
    {
      "epoch": 0.4296675191815857,
      "grad_norm": 2.1152816433907766,
      "learning_rate": 9.610008608809003e-06,
      "loss": 0.7275,
      "step": 308
    },
    {
      "epoch": 0.4310625435945129,
      "grad_norm": 2.3679870496314277,
      "learning_rate": 9.605272928260215e-06,
      "loss": 0.7888,
      "step": 309
    },
    {
      "epoch": 0.4324575680074401,
      "grad_norm": 2.1664140472506226,
      "learning_rate": 9.600509849439136e-06,
      "loss": 0.7353,
      "step": 310
    },
    {
      "epoch": 0.43385259242036733,
      "grad_norm": 2.0854217082250397,
      "learning_rate": 9.595719400682882e-06,
      "loss": 0.7598,
      "step": 311
    },
    {
      "epoch": 0.4352476168332946,
      "grad_norm": 2.2708571552407752,
      "learning_rate": 9.590901610491388e-06,
      "loss": 0.7916,
      "step": 312
    },
    {
      "epoch": 0.4366426412462218,
      "grad_norm": 2.3552880421408693,
      "learning_rate": 9.586056507527266e-06,
      "loss": 0.7688,
      "step": 313
    },
    {
      "epoch": 0.43803766565914903,
      "grad_norm": 2.3856621240655715,
      "learning_rate": 9.581184120615615e-06,
      "loss": 0.7934,
      "step": 314
    },
    {
      "epoch": 0.43943269007207625,
      "grad_norm": 2.2590188978724246,
      "learning_rate": 9.576284478743855e-06,
      "loss": 0.7218,
      "step": 315
    },
    {
      "epoch": 0.44082771448500346,
      "grad_norm": 2.158495761665584,
      "learning_rate": 9.571357611061556e-06,
      "loss": 0.7889,
      "step": 316
    },
    {
      "epoch": 0.44222273889793073,
      "grad_norm": 2.196219848841173,
      "learning_rate": 9.566403546880262e-06,
      "loss": 0.7683,
      "step": 317
    },
    {
      "epoch": 0.44361776331085795,
      "grad_norm": 2.189233757126287,
      "learning_rate": 9.561422315673316e-06,
      "loss": 0.7502,
      "step": 318
    },
    {
      "epoch": 0.44501278772378516,
      "grad_norm": 2.0349711390348153,
      "learning_rate": 9.556413947075693e-06,
      "loss": 0.705,
      "step": 319
    },
    {
      "epoch": 0.4464078121367124,
      "grad_norm": 2.201388276226635,
      "learning_rate": 9.551378470883813e-06,
      "loss": 0.7401,
      "step": 320
    },
    {
      "epoch": 0.4478028365496396,
      "grad_norm": 2.244300272417924,
      "learning_rate": 9.546315917055362e-06,
      "loss": 0.7801,
      "step": 321
    },
    {
      "epoch": 0.44919786096256686,
      "grad_norm": 2.1314497471199405,
      "learning_rate": 9.541226315709133e-06,
      "loss": 0.6883,
      "step": 322
    },
    {
      "epoch": 0.4505928853754941,
      "grad_norm": 2.3554603493032817,
      "learning_rate": 9.536109697124822e-06,
      "loss": 0.7873,
      "step": 323
    },
    {
      "epoch": 0.4519879097884213,
      "grad_norm": 2.200548520780313,
      "learning_rate": 9.530966091742863e-06,
      "loss": 0.7228,
      "step": 324
    },
    {
      "epoch": 0.4533829342013485,
      "grad_norm": 2.267412327006732,
      "learning_rate": 9.525795530164248e-06,
      "loss": 0.7721,
      "step": 325
    },
    {
      "epoch": 0.4547779586142757,
      "grad_norm": 2.0314685715107688,
      "learning_rate": 9.520598043150333e-06,
      "loss": 0.7193,
      "step": 326
    },
    {
      "epoch": 0.456172983027203,
      "grad_norm": 2.1973000004466123,
      "learning_rate": 9.515373661622665e-06,
      "loss": 0.72,
      "step": 327
    },
    {
      "epoch": 0.4575680074401302,
      "grad_norm": 2.1656009071337,
      "learning_rate": 9.510122416662795e-06,
      "loss": 0.7795,
      "step": 328
    },
    {
      "epoch": 0.4589630318530574,
      "grad_norm": 2.00595088768482,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.7025,
      "step": 329
    },
    {
      "epoch": 0.46035805626598464,
      "grad_norm": 2.1820250634406984,
      "learning_rate": 9.499539461571571e-06,
      "loss": 0.7006,
      "step": 330
    },
    {
      "epoch": 0.46175308067891185,
      "grad_norm": 2.000907366581318,
      "learning_rate": 9.494207814401673e-06,
      "loss": 0.666,
      "step": 331
    },
    {
      "epoch": 0.4631481050918391,
      "grad_norm": 2.2593656128744932,
      "learning_rate": 9.488849429722108e-06,
      "loss": 0.7533,
      "step": 332
    },
    {
      "epoch": 0.46454312950476634,
      "grad_norm": 2.1682782980420883,
      "learning_rate": 9.483464339411658e-06,
      "loss": 0.7769,
      "step": 333
    },
    {
      "epoch": 0.46593815391769355,
      "grad_norm": 1.9957981770291782,
      "learning_rate": 9.478052575507983e-06,
      "loss": 0.7196,
      "step": 334
    },
    {
      "epoch": 0.46733317833062077,
      "grad_norm": 2.120678305069012,
      "learning_rate": 9.472614170207435e-06,
      "loss": 0.7199,
      "step": 335
    },
    {
      "epoch": 0.46872820274354804,
      "grad_norm": 2.1087923774164095,
      "learning_rate": 9.467149155864864e-06,
      "loss": 0.7437,
      "step": 336
    },
    {
      "epoch": 0.47012322715647525,
      "grad_norm": 2.040923833208006,
      "learning_rate": 9.461657564993419e-06,
      "loss": 0.7162,
      "step": 337
    },
    {
      "epoch": 0.47151825156940247,
      "grad_norm": 2.2596566935512534,
      "learning_rate": 9.456139430264371e-06,
      "loss": 0.7497,
      "step": 338
    },
    {
      "epoch": 0.4729132759823297,
      "grad_norm": 2.1167532185301496,
      "learning_rate": 9.450594784506906e-06,
      "loss": 0.6863,
      "step": 339
    },
    {
      "epoch": 0.4743083003952569,
      "grad_norm": 2.287756664833289,
      "learning_rate": 9.445023660707928e-06,
      "loss": 0.7678,
      "step": 340
    },
    {
      "epoch": 0.47570332480818417,
      "grad_norm": 2.141914143165075,
      "learning_rate": 9.439426092011877e-06,
      "loss": 0.73,
      "step": 341
    },
    {
      "epoch": 0.4770983492211114,
      "grad_norm": 2.2372702217770155,
      "learning_rate": 9.433802111720513e-06,
      "loss": 0.7579,
      "step": 342
    },
    {
      "epoch": 0.4784933736340386,
      "grad_norm": 2.22351509996517,
      "learning_rate": 9.42815175329273e-06,
      "loss": 0.7907,
      "step": 343
    },
    {
      "epoch": 0.4798883980469658,
      "grad_norm": 1.9645495905633132,
      "learning_rate": 9.42247505034436e-06,
      "loss": 0.7163,
      "step": 344
    },
    {
      "epoch": 0.48128342245989303,
      "grad_norm": 2.159416398374179,
      "learning_rate": 9.416772036647959e-06,
      "loss": 0.7403,
      "step": 345
    },
    {
      "epoch": 0.4826784468728203,
      "grad_norm": 2.2212715006212234,
      "learning_rate": 9.411042746132617e-06,
      "loss": 0.7626,
      "step": 346
    },
    {
      "epoch": 0.4840734712857475,
      "grad_norm": 2.205058581650926,
      "learning_rate": 9.405287212883756e-06,
      "loss": 0.7893,
      "step": 347
    },
    {
      "epoch": 0.48546849569867473,
      "grad_norm": 2.2372122036997073,
      "learning_rate": 9.39950547114292e-06,
      "loss": 0.7057,
      "step": 348
    },
    {
      "epoch": 0.48686352011160194,
      "grad_norm": 2.1030876662201283,
      "learning_rate": 9.393697555307581e-06,
      "loss": 0.7356,
      "step": 349
    },
    {
      "epoch": 0.48825854452452916,
      "grad_norm": 2.0752820389734326,
      "learning_rate": 9.387863499930925e-06,
      "loss": 0.7499,
      "step": 350
    },
    {
      "epoch": 0.48965356893745643,
      "grad_norm": 2.117336157253129,
      "learning_rate": 9.382003339721653e-06,
      "loss": 0.7291,
      "step": 351
    },
    {
      "epoch": 0.49104859335038364,
      "grad_norm": 2.165090408987389,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.7798,
      "step": 352
    },
    {
      "epoch": 0.49244361776331086,
      "grad_norm": 2.105593870513407,
      "learning_rate": 9.370204844416381e-06,
      "loss": 0.728,
      "step": 353
    },
    {
      "epoch": 0.4938386421762381,
      "grad_norm": 2.1682735744184796,
      "learning_rate": 9.364266579513486e-06,
      "loss": 0.7556,
      "step": 354
    },
    {
      "epoch": 0.4952336665891653,
      "grad_norm": 1.9607721258164204,
      "learning_rate": 9.358302350163758e-06,
      "loss": 0.7675,
      "step": 355
    },
    {
      "epoch": 0.49662869100209256,
      "grad_norm": 2.1649840762675714,
      "learning_rate": 9.352312191850343e-06,
      "loss": 0.7306,
      "step": 356
    },
    {
      "epoch": 0.4980237154150198,
      "grad_norm": 2.1522032544678864,
      "learning_rate": 9.346296140210653e-06,
      "loss": 0.7229,
      "step": 357
    },
    {
      "epoch": 0.499418739827947,
      "grad_norm": 2.200548786429278,
      "learning_rate": 9.340254231036141e-06,
      "loss": 0.733,
      "step": 358
    },
    {
      "epoch": 0.5008137642408742,
      "grad_norm": 2.2849970958246364,
      "learning_rate": 9.334186500272098e-06,
      "loss": 0.7652,
      "step": 359
    },
    {
      "epoch": 0.5022087886538015,
      "grad_norm": 2.124552667084255,
      "learning_rate": 9.328092984017436e-06,
      "loss": 0.7165,
      "step": 360
    },
|
{ |
|
"epoch": 0.5036038130667286, |
|
"grad_norm": 2.16869861129638, |
|
"learning_rate": 9.321973718524472e-06, |
|
"loss": 0.7885, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.5049988374796559, |
|
"grad_norm": 2.4095767757689814, |
|
"learning_rate": 9.315828740198714e-06, |
|
"loss": 0.8033, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.5063938618925832, |
|
"grad_norm": 2.309285351320161, |
|
"learning_rate": 9.309658085598646e-06, |
|
"loss": 0.7282, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.5077888863055103, |
|
"grad_norm": 2.2278160246933556, |
|
"learning_rate": 9.303461791435504e-06, |
|
"loss": 0.8171, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.5091839107184376, |
|
"grad_norm": 2.1439102159782117, |
|
"learning_rate": 9.29723989457307e-06, |
|
"loss": 0.6746, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.5105789351313648, |
|
"grad_norm": 1.9144337516608447, |
|
"learning_rate": 9.290992432027433e-06, |
|
"loss": 0.6326, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.511973959544292, |
|
"grad_norm": 2.2661637453850565, |
|
"learning_rate": 9.284719440966794e-06, |
|
"loss": 0.7294, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.5133689839572193, |
|
"grad_norm": 2.094539074711194, |
|
"learning_rate": 9.278420958711221e-06, |
|
"loss": 0.786, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.5147640083701465, |
|
"grad_norm": 2.2536289273732932, |
|
"learning_rate": 9.272097022732444e-06, |
|
"loss": 0.7645, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5161590327830737, |
|
"grad_norm": 2.069331048688941, |
|
"learning_rate": 9.26574767065362e-06, |
|
"loss": 0.7502, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5175540571960009, |
|
"grad_norm": 2.1134396544547873, |
|
"learning_rate": 9.259372940249121e-06, |
|
"loss": 0.7404, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.5189490816089282, |
|
"grad_norm": 2.353506566585747, |
|
"learning_rate": 9.252972869444297e-06, |
|
"loss": 0.7531, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.5203441060218554, |
|
"grad_norm": 2.216924375444058, |
|
"learning_rate": 9.24654749631526e-06, |
|
"loss": 0.7246, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.5217391304347826, |
|
"grad_norm": 2.021849159288162, |
|
"learning_rate": 9.240096859088653e-06, |
|
"loss": 0.6641, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.5231341548477099, |
|
"grad_norm": 2.138973326205776, |
|
"learning_rate": 9.233620996141421e-06, |
|
"loss": 0.7311, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.524529179260637, |
|
"grad_norm": 2.2741045274827645, |
|
"learning_rate": 9.22711994600059e-06, |
|
"loss": 0.713, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.5259242036735643, |
|
"grad_norm": 2.0689344331701425, |
|
"learning_rate": 9.220593747343028e-06, |
|
"loss": 0.6859, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.5273192280864916, |
|
"grad_norm": 2.0106894075204296, |
|
"learning_rate": 9.214042438995222e-06, |
|
"loss": 0.6851, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.5287142524994187, |
|
"grad_norm": 2.199245862276838, |
|
"learning_rate": 9.207466059933044e-06, |
|
"loss": 0.7493, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.530109276912346, |
|
"grad_norm": 2.012825000382447, |
|
"learning_rate": 9.20086464928152e-06, |
|
"loss": 0.6697, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.5315043013252732, |
|
"grad_norm": 2.2127655874082937, |
|
"learning_rate": 9.194238246314599e-06, |
|
"loss": 0.7136, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.5328993257382004, |
|
"grad_norm": 2.2061111696963147, |
|
"learning_rate": 9.187586890454916e-06, |
|
"loss": 0.7519, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.5342943501511277, |
|
"grad_norm": 2.1376385903011332, |
|
"learning_rate": 9.180910621273555e-06, |
|
"loss": 0.7361, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5356893745640549, |
|
"grad_norm": 2.073331825074511, |
|
"learning_rate": 9.174209478489826e-06, |
|
"loss": 0.7517, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5370843989769821, |
|
"grad_norm": 2.2158601789564325, |
|
"learning_rate": 9.16748350197101e-06, |
|
"loss": 0.7798, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5384794233899093, |
|
"grad_norm": 2.022881887128006, |
|
"learning_rate": 9.160732731732144e-06, |
|
"loss": 0.7126, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5398744478028366, |
|
"grad_norm": 2.2358497538691697, |
|
"learning_rate": 9.153957207935758e-06, |
|
"loss": 0.73, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.5412694722157638, |
|
"grad_norm": 2.1270789504102225, |
|
"learning_rate": 9.14715697089166e-06, |
|
"loss": 0.7004, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.542664496628691, |
|
"grad_norm": 2.1028850233874983, |
|
"learning_rate": 9.140332061056678e-06, |
|
"loss": 0.7159, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5440595210416183, |
|
"grad_norm": 2.1974538578891276, |
|
"learning_rate": 9.133482519034428e-06, |
|
"loss": 0.7684, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 2.177428704099495, |
|
"learning_rate": 9.126608385575076e-06, |
|
"loss": 0.772, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5468495698674727, |
|
"grad_norm": 2.0803276673189663, |
|
"learning_rate": 9.119709701575084e-06, |
|
"loss": 0.7218, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5482445942804, |
|
"grad_norm": 2.1394984209728527, |
|
"learning_rate": 9.112786508076972e-06, |
|
"loss": 0.8417, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5496396186933271, |
|
"grad_norm": 2.0166430885945767, |
|
"learning_rate": 9.105838846269085e-06, |
|
"loss": 0.6464, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5510346431062544, |
|
"grad_norm": 2.2423610305000876, |
|
"learning_rate": 9.098866757485328e-06, |
|
"loss": 0.7855, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5524296675191815, |
|
"grad_norm": 2.1328865598737368, |
|
"learning_rate": 9.091870283204934e-06, |
|
"loss": 0.679, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5538246919321088, |
|
"grad_norm": 2.172832655132092, |
|
"learning_rate": 9.08484946505221e-06, |
|
"loss": 0.7611, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5552197163450361, |
|
"grad_norm": 2.0620732675389704, |
|
"learning_rate": 9.077804344796302e-06, |
|
"loss": 0.6987, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5566147407579632, |
|
"grad_norm": 1.9565268483779634, |
|
"learning_rate": 9.070734964350926e-06, |
|
"loss": 0.7735, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5580097651708905, |
|
"grad_norm": 2.121111544028029, |
|
"learning_rate": 9.063641365774136e-06, |
|
"loss": 0.7207, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5594047895838177, |
|
"grad_norm": 1.9141105047869074, |
|
"learning_rate": 9.056523591268064e-06, |
|
"loss": 0.6825, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.560799813996745, |
|
"grad_norm": 2.065193794568569, |
|
"learning_rate": 9.049381683178677e-06, |
|
"loss": 0.7655, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5621948384096722, |
|
"grad_norm": 2.19597171898501, |
|
"learning_rate": 9.042215683995516e-06, |
|
"loss": 0.7141, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.5635898628225994, |
|
"grad_norm": 2.2486319147493288, |
|
"learning_rate": 9.035025636351453e-06, |
|
"loss": 0.7203, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5649848872355266, |
|
"grad_norm": 1.9797285784818799, |
|
"learning_rate": 9.027811583022427e-06, |
|
"loss": 0.7036, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.5663799116484538, |
|
"grad_norm": 2.083913898592003, |
|
"learning_rate": 9.020573566927196e-06, |
|
"loss": 0.6862, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5677749360613811, |
|
"grad_norm": 2.2827371402829337, |
|
"learning_rate": 9.013311631127082e-06, |
|
"loss": 0.7569, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.5691699604743083, |
|
"grad_norm": 2.3112222529668123, |
|
"learning_rate": 9.006025818825712e-06, |
|
"loss": 0.764, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5705649848872355, |
|
"grad_norm": 2.3484053864302865, |
|
"learning_rate": 8.998716173368762e-06, |
|
"loss": 0.7465, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5719600093001628, |
|
"grad_norm": 2.1126751658131178, |
|
"learning_rate": 8.991382738243699e-06, |
|
"loss": 0.7121, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5733550337130899, |
|
"grad_norm": 2.1372677481421256, |
|
"learning_rate": 8.984025557079523e-06, |
|
"loss": 0.7225, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5747500581260172, |
|
"grad_norm": 2.00580823797283, |
|
"learning_rate": 8.97664467364651e-06, |
|
"loss": 0.7518, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5761450825389445, |
|
"grad_norm": 2.1727210121609866, |
|
"learning_rate": 8.96924013185594e-06, |
|
"loss": 0.713, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5775401069518716, |
|
"grad_norm": 1.9451673522406057, |
|
"learning_rate": 8.961811975759854e-06, |
|
"loss": 0.762, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5789351313647989, |
|
"grad_norm": 2.032556221899384, |
|
"learning_rate": 8.954360249550776e-06, |
|
"loss": 0.7239, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5803301557777261, |
|
"grad_norm": 2.2282200468500073, |
|
"learning_rate": 8.946884997561458e-06, |
|
"loss": 0.7406, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5817251801906533, |
|
"grad_norm": 2.131925151265735, |
|
"learning_rate": 8.939386264264616e-06, |
|
"loss": 0.6724, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5831202046035806, |
|
"grad_norm": 2.201382629326267, |
|
"learning_rate": 8.931864094272663e-06, |
|
"loss": 0.7676, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5845152290165078, |
|
"grad_norm": 2.3134699271246624, |
|
"learning_rate": 8.924318532337444e-06, |
|
"loss": 0.834, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.585910253429435, |
|
"grad_norm": 2.058607617038394, |
|
"learning_rate": 8.91674962334997e-06, |
|
"loss": 0.7565, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5873052778423622, |
|
"grad_norm": 1.994362364952429, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 0.7187, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5887003022552895, |
|
"grad_norm": 2.003476914786332, |
|
"learning_rate": 8.901541944476529e-06, |
|
"loss": 0.6792, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5900953266682167, |
|
"grad_norm": 1.9930163818445414, |
|
"learning_rate": 8.893903265066011e-06, |
|
"loss": 0.6705, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5914903510811439, |
|
"grad_norm": 2.1185382041837255, |
|
"learning_rate": 8.886241419553595e-06, |
|
"loss": 0.7642, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5928853754940712, |
|
"grad_norm": 2.1802562933202574, |
|
"learning_rate": 8.8785564535221e-06, |
|
"loss": 0.7147, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5942803999069983, |
|
"grad_norm": 1.8272633305809787, |
|
"learning_rate": 8.870848412691899e-06, |
|
"loss": 0.6621, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5956754243199256, |
|
"grad_norm": 2.1501438887935413, |
|
"learning_rate": 8.863117342920641e-06, |
|
"loss": 0.7255, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5970704487328529, |
|
"grad_norm": 2.1126997186333107, |
|
"learning_rate": 8.855363290202988e-06, |
|
"loss": 0.7365, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.59846547314578, |
|
"grad_norm": 2.0459113355918452, |
|
"learning_rate": 8.84758630067033e-06, |
|
"loss": 0.6821, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5998604975587073, |
|
"grad_norm": 1.9951460974089266, |
|
"learning_rate": 8.839786420590517e-06, |
|
"loss": 0.6705, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6012555219716345, |
|
"grad_norm": 1.9291383549477397, |
|
"learning_rate": 8.83196369636758e-06, |
|
"loss": 0.6743, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6026505463845617, |
|
"grad_norm": 2.2262281937242014, |
|
"learning_rate": 8.824118174541464e-06, |
|
"loss": 0.7751, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.604045570797489, |
|
"grad_norm": 1.9776840814793826, |
|
"learning_rate": 8.816249901787736e-06, |
|
"loss": 0.689, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.6054405952104162, |
|
"grad_norm": 2.172712749490356, |
|
"learning_rate": 8.808358924917322e-06, |
|
"loss": 0.729, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.6068356196233434, |
|
"grad_norm": 2.1471896348433215, |
|
"learning_rate": 8.800445290876218e-06, |
|
"loss": 0.6904, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.6082306440362706, |
|
"grad_norm": 2.1350919843471514, |
|
"learning_rate": 8.79250904674522e-06, |
|
"loss": 0.7687, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.6096256684491979, |
|
"grad_norm": 2.143036898650528, |
|
"learning_rate": 8.78455023973963e-06, |
|
"loss": 0.7017, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.6110206928621251, |
|
"grad_norm": 2.173109662595841, |
|
"learning_rate": 8.776568917208996e-06, |
|
"loss": 0.7589, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.6124157172750523, |
|
"grad_norm": 2.083376789780831, |
|
"learning_rate": 8.768565126636806e-06, |
|
"loss": 0.7373, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.6138107416879796, |
|
"grad_norm": 2.189932954692665, |
|
"learning_rate": 8.760538915640227e-06, |
|
"loss": 0.8117, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.6152057661009067, |
|
"grad_norm": 2.1260961375700913, |
|
"learning_rate": 8.752490331969807e-06, |
|
"loss": 0.7294, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.616600790513834, |
|
"grad_norm": 2.084366245285451, |
|
"learning_rate": 8.744419423509198e-06, |
|
"loss": 0.7356, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.6179958149267613, |
|
"grad_norm": 1.929973613210403, |
|
"learning_rate": 8.736326238274873e-06, |
|
"loss": 0.7399, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.6193908393396884, |
|
"grad_norm": 2.191233147651038, |
|
"learning_rate": 8.728210824415829e-06, |
|
"loss": 0.7315, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.6207858637526157, |
|
"grad_norm": 1.9136719871153927, |
|
"learning_rate": 8.720073230213315e-06, |
|
"loss": 0.6889, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.6221808881655428, |
|
"grad_norm": 2.162274660157476, |
|
"learning_rate": 8.711913504080534e-06, |
|
"loss": 0.7939, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.6235759125784701, |
|
"grad_norm": 2.141540357979124, |
|
"learning_rate": 8.703731694562364e-06, |
|
"loss": 0.7708, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.6249709369913974, |
|
"grad_norm": 2.0717267261918715, |
|
"learning_rate": 8.69552785033506e-06, |
|
"loss": 0.6995, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.6263659614043245, |
|
"grad_norm": 2.0354790587901985, |
|
"learning_rate": 8.687302020205967e-06, |
|
"loss": 0.7891, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.6277609858172518, |
|
"grad_norm": 2.054315554224394, |
|
"learning_rate": 8.679054253113239e-06, |
|
"loss": 0.7138, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.629156010230179, |
|
"grad_norm": 1.97142066127702, |
|
"learning_rate": 8.670784598125532e-06, |
|
"loss": 0.76, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.6305510346431062, |
|
"grad_norm": 2.180322598168826, |
|
"learning_rate": 8.662493104441725e-06, |
|
"loss": 0.7577, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.6319460590560335, |
|
"grad_norm": 2.182404687855443, |
|
"learning_rate": 8.65417982139062e-06, |
|
"loss": 0.7551, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.6333410834689607, |
|
"grad_norm": 2.2330302295789832, |
|
"learning_rate": 8.645844798430656e-06, |
|
"loss": 0.7855, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.634736107881888, |
|
"grad_norm": 2.1948799561608903, |
|
"learning_rate": 8.637488085149598e-06, |
|
"loss": 0.767, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.6361311322948151, |
|
"grad_norm": 2.1454216259212524, |
|
"learning_rate": 8.629109731264272e-06, |
|
"loss": 0.7497, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.6375261567077424, |
|
"grad_norm": 2.0658556646093693, |
|
"learning_rate": 8.620709786620231e-06, |
|
"loss": 0.7532, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.6389211811206696, |
|
"grad_norm": 2.097430113130354, |
|
"learning_rate": 8.612288301191497e-06, |
|
"loss": 0.7004, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.6403162055335968, |
|
"grad_norm": 2.035049464077464, |
|
"learning_rate": 8.60384532508023e-06, |
|
"loss": 0.7773, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.6417112299465241, |
|
"grad_norm": 2.0899400724224413, |
|
"learning_rate": 8.595380908516454e-06, |
|
"loss": 0.7223, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.6431062543594513, |
|
"grad_norm": 2.0042098558669723, |
|
"learning_rate": 8.586895101857747e-06, |
|
"loss": 0.704, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.6445012787723785, |
|
"grad_norm": 2.2055901826008073, |
|
"learning_rate": 8.578387955588944e-06, |
|
"loss": 0.7232, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.6458963031853058, |
|
"grad_norm": 2.0614289481289902, |
|
"learning_rate": 8.569859520321835e-06, |
|
"loss": 0.7337, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.6472913275982329, |
|
"grad_norm": 2.125733860253932, |
|
"learning_rate": 8.561309846794865e-06, |
|
"loss": 0.7331, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.6486863520111602, |
|
"grad_norm": 1.9885070040021375, |
|
"learning_rate": 8.552738985872834e-06, |
|
"loss": 0.7158, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.6500813764240875, |
|
"grad_norm": 1.9749465667331927, |
|
"learning_rate": 8.544146988546594e-06, |
|
"loss": 0.7078, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.6514764008370146, |
|
"grad_norm": 2.232698100517746, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.7403, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.6528714252499419, |
|
"grad_norm": 2.0420723444019786, |
|
"learning_rate": 8.526899789273312e-06, |
|
"loss": 0.6924, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.6542664496628691, |
|
"grad_norm": 2.110810966782399, |
|
"learning_rate": 8.518244689935491e-06, |
|
"loss": 0.7136, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.6556614740757963, |
|
"grad_norm": 2.205912657877506, |
|
"learning_rate": 8.509568659411288e-06, |
|
"loss": 0.7277, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6570564984887236, |
|
"grad_norm": 1.932306625936653, |
|
"learning_rate": 8.500871749317244e-06, |
|
"loss": 0.6695, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.6584515229016508, |
|
"grad_norm": 1.9804217745012933, |
|
"learning_rate": 8.492154011394113e-06, |
|
"loss": 0.6991, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.659846547314578, |
|
"grad_norm": 1.9943703141722595, |
|
"learning_rate": 8.483415497506567e-06, |
|
"loss": 0.7411, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6612415717275052, |
|
"grad_norm": 1.9105485588983695, |
|
"learning_rate": 8.474656259642874e-06, |
|
"loss": 0.6874, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6626365961404325, |
|
"grad_norm": 1.981598209924136, |
|
"learning_rate": 8.465876349914605e-06, |
|
"loss": 0.7693, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6640316205533597, |
|
"grad_norm": 2.0468485449192526, |
|
"learning_rate": 8.457075820556306e-06, |
|
"loss": 0.7369, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6654266449662869, |
|
"grad_norm": 1.9731837624024704, |
|
"learning_rate": 8.448254723925205e-06, |
|
"loss": 0.7595, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.6668216693792142, |
|
"grad_norm": 1.997881570988928, |
|
"learning_rate": 8.439413112500878e-06, |
|
"loss": 0.6957, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.6682166937921413, |
|
"grad_norm": 2.159991859034791, |
|
"learning_rate": 8.430551038884965e-06, |
|
"loss": 0.7373, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.6696117182050686, |
|
"grad_norm": 2.1318311718821294, |
|
"learning_rate": 8.421668555800831e-06, |
|
"loss": 0.7677, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.6710067426179959, |
|
"grad_norm": 1.8971083557397548, |
|
"learning_rate": 8.412765716093273e-06, |
|
"loss": 0.6238, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.672401767030923, |
|
"grad_norm": 2.0724177873895413, |
|
"learning_rate": 8.403842572728186e-06, |
|
"loss": 0.7503, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.6737967914438503, |
|
"grad_norm": 2.2067404032033977, |
|
"learning_rate": 8.394899178792262e-06, |
|
"loss": 0.8363, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.6751918158567775, |
|
"grad_norm": 2.165214652267942, |
|
"learning_rate": 8.38593558749268e-06, |
|
"loss": 0.7771, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.6765868402697047, |
|
"grad_norm": 1.9259138178524096, |
|
"learning_rate": 8.376951852156764e-06, |
|
"loss": 0.696, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.677981864682632, |
|
"grad_norm": 2.026340364479184, |
|
"learning_rate": 8.367948026231692e-06, |
|
"loss": 0.6948, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.6793768890955592, |
|
"grad_norm": 2.0838205387408153, |
|
"learning_rate": 8.358924163284164e-06, |
|
"loss": 0.7341, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.6807719135084864, |
|
"grad_norm": 1.8433343030714116, |
|
"learning_rate": 8.349880317000083e-06, |
|
"loss": 0.6905, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.6821669379214136, |
|
"grad_norm": 2.2138420393341622, |
|
"learning_rate": 8.34081654118425e-06, |
|
"loss": 0.7597, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.6835619623343409, |
|
"grad_norm": 2.0648043221670282, |
|
"learning_rate": 8.331732889760021e-06, |
|
"loss": 0.7127, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6849569867472681, |
|
"grad_norm": 1.8122602084109327, |
|
"learning_rate": 8.322629416769007e-06, |
|
"loss": 0.6499, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.6863520111601953, |
|
"grad_norm": 2.049450313295537, |
|
"learning_rate": 8.313506176370741e-06, |
|
"loss": 0.7513, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.6877470355731226, |
|
"grad_norm": 2.102764954451964, |
|
"learning_rate": 8.304363222842358e-06, |
|
"loss": 0.7296, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6891420599860497, |
|
"grad_norm": 2.1307554461817833, |
|
"learning_rate": 8.295200610578275e-06, |
|
"loss": 0.7337, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.690537084398977, |
|
"grad_norm": 2.025713936131322, |
|
"learning_rate": 8.286018394089864e-06, |
|
"loss": 0.7054, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6919321088119043, |
|
"grad_norm": 2.0302102772230946, |
|
"learning_rate": 8.276816628005127e-06, |
|
"loss": 0.7164, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6933271332248314, |
|
"grad_norm": 2.154855250037536, |
|
"learning_rate": 8.267595367068375e-06, |
|
"loss": 0.8253, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.6947221576377587, |
|
"grad_norm": 2.096982928844388, |
|
"learning_rate": 8.258354666139903e-06, |
|
"loss": 0.734, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.6961171820506858, |
|
"grad_norm": 1.9902700249029772, |
|
"learning_rate": 8.249094580195655e-06, |
|
"loss": 0.6467, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.6975122064636131, |
|
"grad_norm": 2.0374289309216738, |
|
"learning_rate": 8.239815164326907e-06, |
|
"loss": 0.7467, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6989072308765404, |
|
"grad_norm": 2.048360160329615, |
|
"learning_rate": 8.230516473739934e-06, |
|
"loss": 0.7237, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.7003022552894675, |
|
"grad_norm": 2.0626472692261024, |
|
"learning_rate": 8.221198563755683e-06, |
|
"loss": 0.702, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.7016972797023948, |
|
"grad_norm": 2.34156638675061, |
|
"learning_rate": 8.211861489809441e-06, |
|
"loss": 0.812, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.703092304115322, |
|
"grad_norm": 2.007044034307796, |
|
"learning_rate": 8.202505307450509e-06, |
|
"loss": 0.6949, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.7044873285282492, |
|
"grad_norm": 2.0150876342231814, |
|
"learning_rate": 8.193130072341872e-06, |
|
"loss": 0.723, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.7058823529411765, |
|
"grad_norm": 1.9508899560375437, |
|
"learning_rate": 8.183735840259867e-06, |
|
"loss": 0.7058, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.7072773773541037, |
|
"grad_norm": 2.2595995756080933, |
|
"learning_rate": 8.174322667093843e-06, |
|
"loss": 0.7212, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.708672401767031, |
|
"grad_norm": 1.952292379501302, |
|
"learning_rate": 8.164890608845846e-06, |
|
"loss": 0.6669, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.7100674261799581, |
|
"grad_norm": 2.198574056436635, |
|
"learning_rate": 8.155439721630265e-06, |
|
"loss": 0.7418, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.7114624505928854, |
|
"grad_norm": 1.938496311102572, |
|
"learning_rate": 8.145970061673517e-06, |
|
"loss": 0.7191, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.7128574750058126, |
|
"grad_norm": 2.088136098266112, |
|
"learning_rate": 8.136481685313699e-06, |
|
"loss": 0.7175, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.7142524994187398, |
|
"grad_norm": 2.1117510039721554, |
|
"learning_rate": 8.126974649000264e-06, |
|
"loss": 0.7126, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.7156475238316671, |
|
"grad_norm": 2.126192424349008, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.8132, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.7170425482445942, |
|
"grad_norm": 2.296212315016324, |
|
"learning_rate": 8.107904822865058e-06, |
|
"loss": 0.7323, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.7184375726575215, |
|
"grad_norm": 2.0080964113695607, |
|
"learning_rate": 8.098342146495913e-06, |
|
"loss": 0.7047, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.7198325970704488, |
|
"grad_norm": 2.138945439775357, |
|
"learning_rate": 8.088761037077718e-06, |
|
"loss": 0.7555, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.7212276214833759, |
|
"grad_norm": 2.042853021947169, |
|
"learning_rate": 8.07916155161162e-06, |
|
"loss": 0.7318, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.7226226458963032, |
|
"grad_norm": 2.0036651770223535, |
|
"learning_rate": 8.069543747208092e-06, |
|
"loss": 0.7412, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.7240176703092304, |
|
"grad_norm": 2.005019684601913, |
|
"learning_rate": 8.059907681086594e-06, |
|
"loss": 0.6975, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.7254126947221576, |
|
"grad_norm": 2.035919816731715, |
|
"learning_rate": 8.050253410575228e-06, |
|
"loss": 0.7043, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.7268077191350849, |
|
"grad_norm": 2.039348786785761, |
|
"learning_rate": 8.040580993110404e-06, |
|
"loss": 0.734, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.7282027435480121, |
|
"grad_norm": 2.0316240809553943, |
|
"learning_rate": 8.030890486236489e-06, |
|
"loss": 0.7328, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.7295977679609393, |
|
"grad_norm": 1.9987332546618835, |
|
"learning_rate": 8.021181947605474e-06, |
|
"loss": 0.7094, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.7309927923738665, |
|
"grad_norm": 2.065991018116272, |
|
"learning_rate": 8.011455434976622e-06, |
|
"loss": 0.7567, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.7323878167867938, |
|
"grad_norm": 2.086968123475238, |
|
"learning_rate": 8.001711006216138e-06, |
|
"loss": 0.7156, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.733782841199721, |
|
"grad_norm": 1.9571829765680209, |
|
"learning_rate": 7.991948719296806e-06, |
|
"loss": 0.7218, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.7351778656126482, |
|
"grad_norm": 2.026547784479228, |
|
"learning_rate": 7.98216863229766e-06, |
|
"loss": 0.6966, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.7365728900255755, |
|
"grad_norm": 2.028697912817288, |
|
"learning_rate": 7.972370803403628e-06, |
|
"loss": 0.7385, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.7379679144385026, |
|
"grad_norm": 1.9258071371157235, |
|
"learning_rate": 7.962555290905198e-06, |
|
"loss": 0.6961, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.7393629388514299, |
|
"grad_norm": 1.961806972062881, |
|
"learning_rate": 7.952722153198054e-06, |
|
"loss": 0.6566, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.7407579632643572, |
|
"grad_norm": 2.118250501493784, |
|
"learning_rate": 7.942871448782747e-06, |
|
"loss": 0.7124, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.7421529876772843, |
|
"grad_norm": 2.032119437291435, |
|
"learning_rate": 7.93300323626433e-06, |
|
"loss": 0.6809, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.7435480120902116, |
|
"grad_norm": 1.9186122345582644, |
|
"learning_rate": 7.923117574352024e-06, |
|
"loss": 0.6946, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.7449430365031388, |
|
"grad_norm": 2.0031416130525295, |
|
"learning_rate": 7.913214521858858e-06, |
|
"loss": 0.7326, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.746338060916066, |
|
"grad_norm": 2.03670901008655, |
|
"learning_rate": 7.903294137701324e-06, |
|
"loss": 0.7095, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.7477330853289933, |
|
"grad_norm": 2.178152666110026, |
|
"learning_rate": 7.89335648089903e-06, |
|
"loss": 0.7478, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.7491281097419205, |
|
"grad_norm": 2.190710149751755, |
|
"learning_rate": 7.883401610574338e-06, |
|
"loss": 0.7086, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.7505231341548477, |
|
"grad_norm": 1.8616953148508328, |
|
"learning_rate": 7.87342958595202e-06, |
|
"loss": 0.6846, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.7519181585677749, |
|
"grad_norm": 2.118255012504836, |
|
"learning_rate": 7.86344046635891e-06, |
|
"loss": 0.7116, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.7533131829807022, |
|
"grad_norm": 2.165584888520477, |
|
"learning_rate": 7.853434311223542e-06, |
|
"loss": 0.7058, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.7547082073936294, |
|
"grad_norm": 2.097090278458252, |
|
"learning_rate": 7.843411180075795e-06, |
|
"loss": 0.7621, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.7561032318065566, |
|
"grad_norm": 1.9372803982312123, |
|
"learning_rate": 7.833371132546556e-06, |
|
"loss": 0.7135, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.7574982562194839, |
|
"grad_norm": 2.0536548633310683, |
|
"learning_rate": 7.82331422836734e-06, |
|
"loss": 0.7382, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.758893280632411, |
|
"grad_norm": 2.1267027897799076, |
|
"learning_rate": 7.813240527369958e-06, |
|
"loss": 0.7244, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.7602883050453383, |
|
"grad_norm": 2.001368846615703, |
|
"learning_rate": 7.803150089486144e-06, |
|
"loss": 0.6422, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.7616833294582656, |
|
"grad_norm": 2.256318701230448, |
|
"learning_rate": 7.793042974747204e-06, |
|
"loss": 0.73, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.7630783538711927, |
|
"grad_norm": 2.135892386175284, |
|
"learning_rate": 7.782919243283671e-06, |
|
"loss": 0.7395, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.76447337828412, |
|
"grad_norm": 1.9809438615716088, |
|
"learning_rate": 7.772778955324925e-06, |
|
"loss": 0.7646, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.7658684026970471, |
|
"grad_norm": 1.8728289473933306, |
|
"learning_rate": 7.76262217119885e-06, |
|
"loss": 0.6357, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.7672634271099744, |
|
"grad_norm": 2.0652897245599724, |
|
"learning_rate": 7.75244895133147e-06, |
|
"loss": 0.7535, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7686584515229017, |
|
"grad_norm": 2.0152707634156055, |
|
"learning_rate": 7.742259356246594e-06, |
|
"loss": 0.7321, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.7700534759358288, |
|
"grad_norm": 2.061246460614199, |
|
"learning_rate": 7.732053446565448e-06, |
|
"loss": 0.7311, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7714485003487561, |
|
"grad_norm": 1.996910814435142, |
|
"learning_rate": 7.721831283006323e-06, |
|
"loss": 0.7189, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7728435247616833, |
|
"grad_norm": 2.0814067515772026, |
|
"learning_rate": 7.711592926384206e-06, |
|
"loss": 0.7267, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7742385491746105, |
|
"grad_norm": 2.0543479840235706, |
|
"learning_rate": 7.701338437610423e-06, |
|
"loss": 0.6839, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.7756335735875378, |
|
"grad_norm": 2.0281629381403277, |
|
"learning_rate": 7.691067877692277e-06, |
|
"loss": 0.7246, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.777028598000465, |
|
"grad_norm": 2.106561110156005, |
|
"learning_rate": 7.680781307732683e-06, |
|
"loss": 0.6899, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.7784236224133922, |
|
"grad_norm": 2.061215326909577, |
|
"learning_rate": 7.670478788929803e-06, |
|
"loss": 0.719, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.7798186468263194, |
|
"grad_norm": 2.135026397893648, |
|
"learning_rate": 7.660160382576683e-06, |
|
"loss": 0.7804, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.7812136712392467, |
|
"grad_norm": 2.0080564958576543, |
|
"learning_rate": 7.649826150060896e-06, |
|
"loss": 0.7281, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.782608695652174, |
|
"grad_norm": 2.0202687058975037, |
|
"learning_rate": 7.639476152864163e-06, |
|
"loss": 0.7449, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.7840037200651011, |
|
"grad_norm": 1.846271729332286, |
|
"learning_rate": 7.6291104525619985e-06, |
|
"loss": 0.6683, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.7853987444780284, |
|
"grad_norm": 1.9959064783243314, |
|
"learning_rate": 7.618729110823334e-06, |
|
"loss": 0.7126, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.7867937688909556, |
|
"grad_norm": 2.2503784043031634, |
|
"learning_rate": 7.608332189410163e-06, |
|
"loss": 0.7813, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.7881887933038828, |
|
"grad_norm": 2.0759417611731794, |
|
"learning_rate": 7.597919750177168e-06, |
|
"loss": 0.7075, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.7895838177168101, |
|
"grad_norm": 2.0949390177519582, |
|
"learning_rate": 7.587491855071347e-06, |
|
"loss": 0.7353, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.7909788421297372, |
|
"grad_norm": 2.061841753787729, |
|
"learning_rate": 7.577048566131655e-06, |
|
"loss": 0.7618, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.7923738665426645, |
|
"grad_norm": 2.0082580457792356, |
|
"learning_rate": 7.566589945488625e-06, |
|
"loss": 0.7141, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.7937688909555918, |
|
"grad_norm": 1.9947755511109906, |
|
"learning_rate": 7.556116055364008e-06, |
|
"loss": 0.6938, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.7951639153685189, |
|
"grad_norm": 1.9368746356723086, |
|
"learning_rate": 7.545626958070394e-06, |
|
"loss": 0.6693, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7965589397814462, |
|
"grad_norm": 1.9558568434157733, |
|
"learning_rate": 7.5351227160108495e-06, |
|
"loss": 0.6476, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.7979539641943734, |
|
"grad_norm": 2.173108067680643, |
|
"learning_rate": 7.524603391678541e-06, |
|
"loss": 0.7895, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.7993489886073006, |
|
"grad_norm": 2.0777332617849766, |
|
"learning_rate": 7.51406904765636e-06, |
|
"loss": 0.7105, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.8007440130202279, |
|
"grad_norm": 2.087386445741957, |
|
"learning_rate": 7.503519746616562e-06, |
|
"loss": 0.6785, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.8021390374331551, |
|
"grad_norm": 2.271079421275984, |
|
"learning_rate": 7.492955551320381e-06, |
|
"loss": 0.7688, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.8035340618460823, |
|
"grad_norm": 1.973674786364812, |
|
"learning_rate": 7.482376524617666e-06, |
|
"loss": 0.6533, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.8049290862590095, |
|
"grad_norm": 2.032036898755386, |
|
"learning_rate": 7.4717827294464996e-06, |
|
"loss": 0.6638, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.8063241106719368, |
|
"grad_norm": 2.1959112192923587, |
|
"learning_rate": 7.461174228832828e-06, |
|
"loss": 0.7083, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.807719135084864, |
|
"grad_norm": 2.075700755518968, |
|
"learning_rate": 7.450551085890087e-06, |
|
"loss": 0.655, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.8091141594977912, |
|
"grad_norm": 2.0051021855991324, |
|
"learning_rate": 7.43991336381882e-06, |
|
"loss": 0.6845, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.8105091839107185, |
|
"grad_norm": 1.9459356134656116, |
|
"learning_rate": 7.4292611259063105e-06, |
|
"loss": 0.6395, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.8119042083236456, |
|
"grad_norm": 1.9796968662023986, |
|
"learning_rate": 7.4185944355261996e-06, |
|
"loss": 0.7124, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.8132992327365729, |
|
"grad_norm": 2.1426664382476877, |
|
"learning_rate": 7.40791335613811e-06, |
|
"loss": 0.7238, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.8146942571495002, |
|
"grad_norm": 2.0380264945859135, |
|
"learning_rate": 7.397217951287272e-06, |
|
"loss": 0.7544, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.8160892815624273, |
|
"grad_norm": 2.088811897297003, |
|
"learning_rate": 7.3865082846041415e-06, |
|
"loss": 0.7219, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.8174843059753546, |
|
"grad_norm": 2.0232132320288274, |
|
"learning_rate": 7.375784419804018e-06, |
|
"loss": 0.723, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.8188793303882818, |
|
"grad_norm": 1.9817499587673113, |
|
"learning_rate": 7.365046420686681e-06, |
|
"loss": 0.6869, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.820274354801209, |
|
"grad_norm": 1.9568149151810101, |
|
"learning_rate": 7.354294351135989e-06, |
|
"loss": 0.7194, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.8216693792141363, |
|
"grad_norm": 2.0418687608379, |
|
"learning_rate": 7.343528275119515e-06, |
|
"loss": 0.668, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.8230644036270635, |
|
"grad_norm": 2.0729841048913014, |
|
"learning_rate": 7.33274825668816e-06, |
|
"loss": 0.7662, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8244594280399907, |
|
"grad_norm": 2.1493379475849257, |
|
"learning_rate": 7.321954359975776e-06, |
|
"loss": 0.7593, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.8258544524529179, |
|
"grad_norm": 1.9637752702289315, |
|
"learning_rate": 7.311146649198777e-06, |
|
"loss": 0.6883, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.8272494768658452, |
|
"grad_norm": 1.965138108111032, |
|
"learning_rate": 7.300325188655762e-06, |
|
"loss": 0.6966, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.8286445012787724, |
|
"grad_norm": 2.0032671568946263, |
|
"learning_rate": 7.289490042727134e-06, |
|
"loss": 0.7509, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.8300395256916996, |
|
"grad_norm": 2.1322117610392737, |
|
"learning_rate": 7.278641275874714e-06, |
|
"loss": 0.707, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.8314345501046269, |
|
"grad_norm": 2.005735182999333, |
|
"learning_rate": 7.267778952641358e-06, |
|
"loss": 0.7319, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.832829574517554, |
|
"grad_norm": 2.089837838389634, |
|
"learning_rate": 7.256903137650575e-06, |
|
"loss": 0.6796, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.8342245989304813, |
|
"grad_norm": 1.9987825880429384, |
|
"learning_rate": 7.2460138956061375e-06, |
|
"loss": 0.709, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.8356196233434086, |
|
"grad_norm": 2.181112130425503, |
|
"learning_rate": 7.235111291291702e-06, |
|
"loss": 0.7567, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.8370146477563357, |
|
"grad_norm": 2.1538149633529864, |
|
"learning_rate": 7.224195389570422e-06, |
|
"loss": 0.7677, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.838409672169263, |
|
"grad_norm": 2.0301628640511162, |
|
"learning_rate": 7.21326625538456e-06, |
|
"loss": 0.7271, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.8398046965821901, |
|
"grad_norm": 2.0783608757927925, |
|
"learning_rate": 7.202323953755106e-06, |
|
"loss": 0.7095, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.8411997209951174, |
|
"grad_norm": 2.047512467999536, |
|
"learning_rate": 7.191368549781382e-06, |
|
"loss": 0.7095, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.8425947454080447, |
|
"grad_norm": 2.2284280535892127, |
|
"learning_rate": 7.180400108640665e-06, |
|
"loss": 0.7147, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.8439897698209718, |
|
"grad_norm": 2.14488402612481, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 0.6823, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.8453847942338991, |
|
"grad_norm": 1.9834391297217207, |
|
"learning_rate": 7.158424375954775e-06, |
|
"loss": 0.6668, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.8467798186468263, |
|
"grad_norm": 2.001260189158431, |
|
"learning_rate": 7.147417215150411e-06, |
|
"loss": 0.6704, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.8481748430597535, |
|
"grad_norm": 2.0048038596898503, |
|
"learning_rate": 7.1363972786598925e-06, |
|
"loss": 0.7168, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.8495698674726808, |
|
"grad_norm": 2.0033946556880142, |
|
"learning_rate": 7.125364632044423e-06, |
|
"loss": 0.7098, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.850964891885608, |
|
"grad_norm": 2.0693558697499923, |
|
"learning_rate": 7.1143193409408165e-06, |
|
"loss": 0.748, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.8523599162985352, |
|
"grad_norm": 1.9079762698449945, |
|
"learning_rate": 7.103261471061116e-06, |
|
"loss": 0.6782, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.8537549407114624, |
|
"grad_norm": 2.173084797654854, |
|
"learning_rate": 7.092191088192201e-06, |
|
"loss": 0.7465, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.8551499651243897, |
|
"grad_norm": 1.999853696266806, |
|
"learning_rate": 7.0811082581953935e-06, |
|
"loss": 0.7148, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.856544989537317, |
|
"grad_norm": 1.9796425973325376, |
|
"learning_rate": 7.070013047006068e-06, |
|
"loss": 0.7086, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.8579400139502441, |
|
"grad_norm": 2.004513750636165, |
|
"learning_rate": 7.058905520633259e-06, |
|
"loss": 0.7046, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.8593350383631714, |
|
"grad_norm": 1.9307586260927132, |
|
"learning_rate": 7.047785745159266e-06, |
|
"loss": 0.634, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.8607300627760985, |
|
"grad_norm": 2.2131652759757032, |
|
"learning_rate": 7.036653786739264e-06, |
|
"loss": 0.6867, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.8621250871890258, |
|
"grad_norm": 2.11762373803619, |
|
"learning_rate": 7.02550971160091e-06, |
|
"loss": 0.7141, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.8635201116019531, |
|
"grad_norm": 2.062189920448756, |
|
"learning_rate": 7.0143535860439425e-06, |
|
"loss": 0.6678, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.8649151360148802, |
|
"grad_norm": 2.0311504435657266, |
|
"learning_rate": 7.0031854764398e-06, |
|
"loss": 0.7157, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.8663101604278075, |
|
"grad_norm": 2.0490888006871346, |
|
"learning_rate": 6.9920054492312086e-06, |
|
"loss": 0.726, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.8677051848407347, |
|
"grad_norm": 2.1515674827759677, |
|
"learning_rate": 6.9808135709318015e-06, |
|
"loss": 0.7118, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.8691002092536619, |
|
"grad_norm": 1.929335451435658, |
|
"learning_rate": 6.9696099081257176e-06, |
|
"loss": 0.6872, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.8704952336665892, |
|
"grad_norm": 2.0572983608669855, |
|
"learning_rate": 6.958394527467204e-06, |
|
"loss": 0.6748, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.8718902580795164, |
|
"grad_norm": 1.9837573813226999, |
|
"learning_rate": 6.947167495680224e-06, |
|
"loss": 0.707, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.8732852824924436, |
|
"grad_norm": 2.04771350119922, |
|
"learning_rate": 6.935928879558052e-06, |
|
"loss": 0.6617, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.8746803069053708, |
|
"grad_norm": 2.1485933426462758, |
|
"learning_rate": 6.9246787459628895e-06, |
|
"loss": 0.6927, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.8760753313182981, |
|
"grad_norm": 1.9152886720507631, |
|
"learning_rate": 6.913417161825449e-06, |
|
"loss": 0.6327, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.8774703557312253, |
|
"grad_norm": 2.045004872767665, |
|
"learning_rate": 6.902144194144576e-06, |
|
"loss": 0.6979, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.8788653801441525, |
|
"grad_norm": 2.0896741032979214, |
|
"learning_rate": 6.890859909986835e-06, |
|
"loss": 0.68, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8802604045570798, |
|
"grad_norm": 1.9208151556554776, |
|
"learning_rate": 6.8795643764861144e-06, |
|
"loss": 0.6964, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8816554289700069, |
|
"grad_norm": 1.9767473419286516, |
|
"learning_rate": 6.868257660843234e-06, |
|
"loss": 0.6924, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8830504533829342, |
|
"grad_norm": 2.0314146752614706, |
|
"learning_rate": 6.8569398303255345e-06, |
|
"loss": 0.802, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.8844454777958615, |
|
"grad_norm": 2.0170897943284958, |
|
"learning_rate": 6.845610952266486e-06, |
|
"loss": 0.6554, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8858405022087886, |
|
"grad_norm": 1.9814807999478814, |
|
"learning_rate": 6.834271094065284e-06, |
|
"loss": 0.6943, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8872355266217159, |
|
"grad_norm": 1.9714093373330037, |
|
"learning_rate": 6.822920323186445e-06, |
|
"loss": 0.7085, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.8886305510346431, |
|
"grad_norm": 2.086916641932359, |
|
"learning_rate": 6.811558707159414e-06, |
|
"loss": 0.7342, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.8900255754475703, |
|
"grad_norm": 1.870451490838753, |
|
"learning_rate": 6.800186313578152e-06, |
|
"loss": 0.6899, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.8914205998604976, |
|
"grad_norm": 2.3721855893316186, |
|
"learning_rate": 6.7888032101007416e-06, |
|
"loss": 0.778, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.8928156242734248, |
|
"grad_norm": 1.9186274740516478, |
|
"learning_rate": 6.777409464448983e-06, |
|
"loss": 0.6948, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.894210648686352, |
|
"grad_norm": 2.058282213173254, |
|
"learning_rate": 6.76600514440799e-06, |
|
"loss": 0.7115, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.8956056730992792, |
|
"grad_norm": 2.006384543727099, |
|
"learning_rate": 6.754590317825785e-06, |
|
"loss": 0.6537, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.8970006975122065, |
|
"grad_norm": 1.8915235746908028, |
|
"learning_rate": 6.743165052612899e-06, |
|
"loss": 0.729, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.8983957219251337, |
|
"grad_norm": 1.9836308406433447, |
|
"learning_rate": 6.731729416741961e-06, |
|
"loss": 0.7091, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.8997907463380609, |
|
"grad_norm": 1.91315435206782, |
|
"learning_rate": 6.72028347824731e-06, |
|
"loss": 0.6782, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.9011857707509882, |
|
"grad_norm": 2.164605036051746, |
|
"learning_rate": 6.708827305224566e-06, |
|
"loss": 0.7599, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.9025807951639153, |
|
"grad_norm": 2.1307619231031705, |
|
"learning_rate": 6.697360965830244e-06, |
|
"loss": 0.7252, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.9039758195768426, |
|
"grad_norm": 1.9739462078301375, |
|
"learning_rate": 6.6858845282813416e-06, |
|
"loss": 0.6881, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.9053708439897699, |
|
"grad_norm": 2.305594622925065, |
|
"learning_rate": 6.674398060854931e-06, |
|
"loss": 0.7853, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.906765868402697, |
|
"grad_norm": 2.040722369484868, |
|
"learning_rate": 6.662901631887761e-06, |
|
"loss": 0.6823, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.9081608928156243, |
|
"grad_norm": 1.9646186422608911, |
|
"learning_rate": 6.651395309775837e-06, |
|
"loss": 0.6535, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.9095559172285514, |
|
"grad_norm": 1.8857798039813913, |
|
"learning_rate": 6.639879162974027e-06, |
|
"loss": 0.658, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.9109509416414787, |
|
"grad_norm": 1.8401539668513296, |
|
"learning_rate": 6.62835325999565e-06, |
|
"loss": 0.6965, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.912345966054406, |
|
"grad_norm": 2.0348228305130043, |
|
"learning_rate": 6.616817669412066e-06, |
|
"loss": 0.7916, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.9137409904673331, |
|
"grad_norm": 2.0027751723218272, |
|
"learning_rate": 6.6052724598522696e-06, |
|
"loss": 0.6848, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.9151360148802604, |
|
"grad_norm": 1.9205553455437767, |
|
"learning_rate": 6.59371770000248e-06, |
|
"loss": 0.6595, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.9165310392931876, |
|
"grad_norm": 1.988113920112164, |
|
"learning_rate": 6.582153458605738e-06, |
|
"loss": 0.7295, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.9179260637061148, |
|
"grad_norm": 1.969446729465624, |
|
"learning_rate": 6.570579804461492e-06, |
|
"loss": 0.7094, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.9193210881190421, |
|
"grad_norm": 1.8660933701134175, |
|
"learning_rate": 6.558996806425188e-06, |
|
"loss": 0.7219, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.9207161125319693, |
|
"grad_norm": 1.898278443506339, |
|
"learning_rate": 6.5474045334078654e-06, |
|
"loss": 0.6961, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.9221111369448965, |
|
"grad_norm": 2.2155411196979053, |
|
"learning_rate": 6.5358030543757375e-06, |
|
"loss": 0.7759, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.9235061613578237, |
|
"grad_norm": 2.094799591086162, |
|
"learning_rate": 6.524192438349794e-06, |
|
"loss": 0.6898, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.924901185770751, |
|
"grad_norm": 1.924740068996983, |
|
"learning_rate": 6.51257275440538e-06, |
|
"loss": 0.6931, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.9262962101836782, |
|
"grad_norm": 2.011632926786668, |
|
"learning_rate": 6.500944071671789e-06, |
|
"loss": 0.7092, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.9276912345966054, |
|
"grad_norm": 2.0725376084974596, |
|
"learning_rate": 6.489306459331851e-06, |
|
"loss": 0.6927, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.9290862590095327, |
|
"grad_norm": 2.1472076797554918, |
|
"learning_rate": 6.477659986621522e-06, |
|
"loss": 0.7601, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.93048128342246, |
|
"grad_norm": 2.108439533312848, |
|
"learning_rate": 6.46600472282947e-06, |
|
"loss": 0.7315, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.9318763078353871, |
|
"grad_norm": 2.0157312555896874, |
|
"learning_rate": 6.454340737296665e-06, |
|
"loss": 0.6428, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.9332713322483144, |
|
"grad_norm": 1.955747483057349, |
|
"learning_rate": 6.442668099415967e-06, |
|
"loss": 0.7426, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.9346663566612415, |
|
"grad_norm": 2.1284318764319545, |
|
"learning_rate": 6.430986878631708e-06, |
|
"loss": 0.695, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.9360613810741688, |
|
"grad_norm": 1.9889109021997742, |
|
"learning_rate": 6.4192971444392835e-06, |
|
"loss": 0.711, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.9374564054870961, |
|
"grad_norm": 2.103606798692871, |
|
"learning_rate": 6.407598966384743e-06, |
|
"loss": 0.7219, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.9388514299000232, |
|
"grad_norm": 1.876811159303784, |
|
"learning_rate": 6.395892414064363e-06, |
|
"loss": 0.6762, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.9402464543129505, |
|
"grad_norm": 1.7987895850167992, |
|
"learning_rate": 6.384177557124247e-06, |
|
"loss": 0.6521, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.9416414787258777, |
|
"grad_norm": 2.282079516291976, |
|
"learning_rate": 6.372454465259905e-06, |
|
"loss": 0.7343, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.9430365031388049, |
|
"grad_norm": 1.939959854059506, |
|
"learning_rate": 6.360723208215838e-06, |
|
"loss": 0.6972, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.9444315275517322, |
|
"grad_norm": 2.137165196217349, |
|
"learning_rate": 6.348983855785122e-06, |
|
"loss": 0.7757, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.9458265519646594, |
|
"grad_norm": 1.869415519058021, |
|
"learning_rate": 6.3372364778090005e-06, |
|
"loss": 0.6613, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.9472215763775866, |
|
"grad_norm": 1.8737093174455668, |
|
"learning_rate": 6.325481144176458e-06, |
|
"loss": 0.6658, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.9486166007905138, |
|
"grad_norm": 1.9207669698589307, |
|
"learning_rate": 6.3137179248238145e-06, |
|
"loss": 0.6655, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.9500116252034411, |
|
"grad_norm": 2.0566860383052386, |
|
"learning_rate": 6.301946889734302e-06, |
|
"loss": 0.6926, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.9514066496163683, |
|
"grad_norm": 2.0041349151703836, |
|
"learning_rate": 6.290168108937651e-06, |
|
"loss": 0.7247, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.9528016740292955, |
|
"grad_norm": 1.9546701865263272, |
|
"learning_rate": 6.2783816525096765e-06, |
|
"loss": 0.6723, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.9541966984422228, |
|
"grad_norm": 2.111279121287525, |
|
"learning_rate": 6.266587590571852e-06, |
|
"loss": 0.7451, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.9555917228551499, |
|
"grad_norm": 1.8926865719781227, |
|
"learning_rate": 6.254785993290907e-06, |
|
"loss": 0.7097, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.9569867472680772, |
|
"grad_norm": 1.9235814882501219, |
|
"learning_rate": 6.242976930878395e-06, |
|
"loss": 0.6519, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.9583817716810045, |
|
"grad_norm": 2.0782433443787425, |
|
"learning_rate": 6.231160473590283e-06, |
|
"loss": 0.7351, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.9597767960939316, |
|
"grad_norm": 1.7659331972757786, |
|
"learning_rate": 6.219336691726537e-06, |
|
"loss": 0.6201, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.9611718205068589, |
|
"grad_norm": 2.0092199359038108, |
|
"learning_rate": 6.20750565563069e-06, |
|
"loss": 0.6711, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.9625668449197861, |
|
"grad_norm": 2.0503621225664115, |
|
"learning_rate": 6.195667435689445e-06, |
|
"loss": 0.7095, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.9639618693327133, |
|
"grad_norm": 2.084108781725168, |
|
"learning_rate": 6.183822102332234e-06, |
|
"loss": 0.6899, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.9653568937456406, |
|
"grad_norm": 1.9166997244216846, |
|
"learning_rate": 6.171969726030814e-06, |
|
"loss": 0.6558, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.9667519181585678, |
|
"grad_norm": 2.001051202379286, |
|
"learning_rate": 6.16011037729884e-06, |
|
"loss": 0.7175, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.968146942571495, |
|
"grad_norm": 1.9123567824105474, |
|
"learning_rate": 6.148244126691451e-06, |
|
"loss": 0.6761, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.9695419669844222, |
|
"grad_norm": 2.057429814174208, |
|
"learning_rate": 6.136371044804847e-06, |
|
"loss": 0.7319, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.9709369913973495, |
|
"grad_norm": 1.962229379260547, |
|
"learning_rate": 6.124491202275866e-06, |
|
"loss": 0.6579, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.9723320158102767, |
|
"grad_norm": 2.0201399268724347, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.7438, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.9737270402232039, |
|
"grad_norm": 2.1063858179693766, |
|
"learning_rate": 6.100711518038828e-06, |
|
"loss": 0.7623, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.9751220646361312, |
|
"grad_norm": 1.898175284952164, |
|
"learning_rate": 6.088811817803877e-06, |
|
"loss": 0.6738, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.9765170890490583, |
|
"grad_norm": 1.9084924564930574, |
|
"learning_rate": 6.0769056398719205e-06, |
|
"loss": 0.6559, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.9779121134619856, |
|
"grad_norm": 2.034255025855498, |
|
"learning_rate": 6.064993055076697e-06, |
|
"loss": 0.7053, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.9793071378749129, |
|
"grad_norm": 1.9943164564563032, |
|
"learning_rate": 6.053074134290065e-06, |
|
"loss": 0.6502, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.98070216228784, |
|
"grad_norm": 1.9082522000979645, |
|
"learning_rate": 6.041148948421575e-06, |
|
"loss": 0.6971, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.9820971867007673, |
|
"grad_norm": 1.9423848476086434, |
|
"learning_rate": 6.0292175684180506e-06, |
|
"loss": 0.6925, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.9834922111136944, |
|
"grad_norm": 1.9350501223395191, |
|
"learning_rate": 6.0172800652631706e-06, |
|
"loss": 0.6574, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.9848872355266217, |
|
"grad_norm": 1.948394189706791, |
|
"learning_rate": 6.005336509977035e-06, |
|
"loss": 0.7232, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.986282259939549, |
|
"grad_norm": 1.9718390783155533, |
|
"learning_rate": 5.9933869736157524e-06, |
|
"loss": 0.671, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.9876772843524761, |
|
"grad_norm": 1.7860631184750806, |
|
"learning_rate": 5.981431527271022e-06, |
|
"loss": 0.6619, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.9890723087654034, |
|
"grad_norm": 2.190091158506914, |
|
"learning_rate": 5.9694702420696935e-06, |
|
"loss": 0.6881, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.9904673331783306, |
|
"grad_norm": 2.111270110174928, |
|
"learning_rate": 5.9575031891733594e-06, |
|
"loss": 0.7766, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9918623575912578, |
|
"grad_norm": 2.0590626970008383, |
|
"learning_rate": 5.945530439777924e-06, |
|
"loss": 0.6849, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9932573820041851, |
|
"grad_norm": 2.058768534188736, |
|
"learning_rate": 5.933552065113182e-06, |
|
"loss": 0.6918, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9946524064171123, |
|
"grad_norm": 1.8988454706123046, |
|
"learning_rate": 5.9215681364423975e-06, |
|
"loss": 0.6379, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9960474308300395, |
|
"grad_norm": 1.8925107930593552, |
|
"learning_rate": 5.909578725061872e-06, |
|
"loss": 0.6666, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9974424552429667, |
|
"grad_norm": 1.9105216289220566, |
|
"learning_rate": 5.897583902300531e-06, |
|
"loss": 0.6739, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.998837479655894, |
|
"grad_norm": 2.030203006310582, |
|
"learning_rate": 5.885583739519489e-06, |
|
"loss": 0.6758, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.0013950244129273, |
|
"grad_norm": 2.7447928004495568, |
|
"learning_rate": 5.873578308111636e-06, |
|
"loss": 1.1652, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.0027900488258545, |
|
"grad_norm": 1.7697258758954115, |
|
"learning_rate": 5.861567679501201e-06, |
|
"loss": 0.4832, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.0041850732387816, |
|
"grad_norm": 1.8831799173238042, |
|
"learning_rate": 5.849551925143334e-06, |
|
"loss": 0.4387, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.0055800976517089, |
|
"grad_norm": 1.8886920606969104, |
|
"learning_rate": 5.837531116523683e-06, |
|
"loss": 0.4728, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.0069751220646361, |
|
"grad_norm": 1.6199177638252187, |
|
"learning_rate": 5.825505325157962e-06, |
|
"loss": 0.4952, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.0083701464775634, |
|
"grad_norm": 1.8465149809489871, |
|
"learning_rate": 5.8134746225915305e-06, |
|
"loss": 0.4316, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.0097651708904907, |
|
"grad_norm": 1.7609710454949228, |
|
"learning_rate": 5.801439080398968e-06, |
|
"loss": 0.5242, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.0111601953034177, |
|
"grad_norm": 1.915812031265783, |
|
"learning_rate": 5.789398770183642e-06, |
|
"loss": 0.4718, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.012555219716345, |
|
"grad_norm": 2.061689014174379, |
|
"learning_rate": 5.77735376357729e-06, |
|
"loss": 0.5115, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.0139502441292723, |
|
"grad_norm": 2.0246963373463465, |
|
"learning_rate": 5.76530413223959e-06, |
|
"loss": 0.4765, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.0153452685421995, |
|
"grad_norm": 2.6269844127481217, |
|
"learning_rate": 5.753249947857731e-06, |
|
"loss": 0.5015, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.0167402929551268, |
|
"grad_norm": 2.2152920615212595, |
|
"learning_rate": 5.741191282145995e-06, |
|
"loss": 0.5109, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.0181353173680538, |
|
"grad_norm": 2.3867329877687196, |
|
"learning_rate": 5.729128206845317e-06, |
|
"loss": 0.4681, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.0195303417809811, |
|
"grad_norm": 2.026990800833781, |
|
"learning_rate": 5.717060793722876e-06, |
|
"loss": 0.4662, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.0209253661939084, |
|
"grad_norm": 2.2793548911246875, |
|
"learning_rate": 5.704989114571649e-06, |
|
"loss": 0.5151, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.0223203906068357, |
|
"grad_norm": 2.0202960449035343, |
|
"learning_rate": 5.692913241209997e-06, |
|
"loss": 0.4442, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.023715415019763, |
|
"grad_norm": 1.95038181870786, |
|
"learning_rate": 5.680833245481234e-06, |
|
"loss": 0.4744, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.02511043943269, |
|
"grad_norm": 2.04344748510266, |
|
"learning_rate": 5.668749199253198e-06, |
|
"loss": 0.4951, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.0265054638456172, |
|
"grad_norm": 1.8910556764395299, |
|
"learning_rate": 5.656661174417829e-06, |
|
"loss": 0.4809, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.0279004882585445, |
|
"grad_norm": 1.9255139030660995, |
|
"learning_rate": 5.644569242890728e-06, |
|
"loss": 0.4393, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.0292955126714718, |
|
"grad_norm": 2.2064737359647353, |
|
"learning_rate": 5.632473476610748e-06, |
|
"loss": 0.5139, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.030690537084399, |
|
"grad_norm": 1.826337759870788, |
|
"learning_rate": 5.62037394753955e-06, |
|
"loss": 0.4482, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.032085561497326, |
|
"grad_norm": 1.9833849963275132, |
|
"learning_rate": 5.608270727661183e-06, |
|
"loss": 0.4806, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.0334805859102534, |
|
"grad_norm": 1.9740005987225675, |
|
"learning_rate": 5.596163888981656e-06, |
|
"loss": 0.496, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.0348756103231807, |
|
"grad_norm": 1.9411807491663124, |
|
"learning_rate": 5.584053503528503e-06, |
|
"loss": 0.4537, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.036270634736108, |
|
"grad_norm": 1.8861912900200872, |
|
"learning_rate": 5.5719396433503615e-06, |
|
"loss": 0.5442, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.0376656591490352, |
|
"grad_norm": 2.073505442126394, |
|
"learning_rate": 5.559822380516539e-06, |
|
"loss": 0.4968, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.0390606835619622, |
|
"grad_norm": 2.049207981076892, |
|
"learning_rate": 5.5477017871165925e-06, |
|
"loss": 0.4514, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.0404557079748895, |
|
"grad_norm": 1.9198267966469686, |
|
"learning_rate": 5.5355779352598884e-06, |
|
"loss": 0.5163, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.0418507323878168, |
|
"grad_norm": 2.037912592040736, |
|
"learning_rate": 5.523450897075179e-06, |
|
"loss": 0.4664, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.043245756800744, |
|
"grad_norm": 2.1410052063539076, |
|
"learning_rate": 5.511320744710171e-06, |
|
"loss": 0.4419, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.0446407812136713, |
|
"grad_norm": 2.041057827262413, |
|
"learning_rate": 5.4991875503311075e-06, |
|
"loss": 0.5258, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.0460358056265984, |
|
"grad_norm": 2.1483535732137566, |
|
"learning_rate": 5.487051386122319e-06, |
|
"loss": 0.4334, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.0474308300395256, |
|
"grad_norm": 1.987139519131828, |
|
"learning_rate": 5.47491232428581e-06, |
|
"loss": 0.4383, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.048825854452453, |
|
"grad_norm": 2.1635983832379044, |
|
"learning_rate": 5.462770437040824e-06, |
|
"loss": 0.5084, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.0502208788653802, |
|
"grad_norm": 2.1342642535994174, |
|
"learning_rate": 5.450625796623411e-06, |
|
"loss": 0.4721, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.0516159032783075, |
|
"grad_norm": 2.148708587244624, |
|
"learning_rate": 5.438478475286003e-06, |
|
"loss": 0.4217, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.0530109276912345, |
|
"grad_norm": 1.9486786460770569, |
|
"learning_rate": 5.4263285452969805e-06, |
|
"loss": 0.4737, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.0544059521041618, |
|
"grad_norm": 2.112176600834154, |
|
"learning_rate": 5.414176078940244e-06, |
|
"loss": 0.4689, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.055800976517089, |
|
"grad_norm": 2.0011944624765525, |
|
"learning_rate": 5.402021148514784e-06, |
|
"loss": 0.4545, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.0571960009300163, |
|
"grad_norm": 2.2068689969149244, |
|
"learning_rate": 5.389863826334248e-06, |
|
"loss": 0.4579, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.0585910253429436, |
|
"grad_norm": 2.0433936765493588, |
|
"learning_rate": 5.37770418472652e-06, |
|
"loss": 0.4162, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.0599860497558706, |
|
"grad_norm": 1.9980953151822662, |
|
"learning_rate": 5.365542296033274e-06, |
|
"loss": 0.4761, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.061381074168798, |
|
"grad_norm": 1.9795377015612743, |
|
"learning_rate": 5.353378232609557e-06, |
|
"loss": 0.4848, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.0627760985817252, |
|
"grad_norm": 2.1199669786852393, |
|
"learning_rate": 5.341212066823356e-06, |
|
"loss": 0.4862, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.0641711229946524, |
|
"grad_norm": 2.0944681341647713, |
|
"learning_rate": 5.329043871055161e-06, |
|
"loss": 0.4954, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.0655661474075797, |
|
"grad_norm": 2.145189792268484, |
|
"learning_rate": 5.316873717697545e-06, |
|
"loss": 0.4738, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.0669611718205068, |
|
"grad_norm": 2.0264734767440116, |
|
"learning_rate": 5.30470167915472e-06, |
|
"loss": 0.4087, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.068356196233434, |
|
"grad_norm": 1.9348073866593454, |
|
"learning_rate": 5.29252782784212e-06, |
|
"loss": 0.5049, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.0697512206463613, |
|
"grad_norm": 2.204583768085505, |
|
"learning_rate": 5.2803522361859596e-06, |
|
"loss": 0.4287, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.0711462450592886, |
|
"grad_norm": 1.9937257246231703, |
|
"learning_rate": 5.268174976622811e-06, |
|
"loss": 0.4685, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.0725412694722158, |
|
"grad_norm": 2.013514450168237, |
|
"learning_rate": 5.255996121599167e-06, |
|
"loss": 0.4948, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.073936293885143, |
|
"grad_norm": 2.0772460175921665, |
|
"learning_rate": 5.243815743571009e-06, |
|
"loss": 0.4554, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.0753313182980702, |
|
"grad_norm": 2.014215366177637, |
|
"learning_rate": 5.231633915003389e-06, |
|
"loss": 0.481, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.0767263427109974, |
|
"grad_norm": 1.9613576871714251, |
|
"learning_rate": 5.219450708369977e-06, |
|
"loss": 0.4583, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.0781213671239247, |
|
"grad_norm": 2.0232554045863655, |
|
"learning_rate": 5.2072661961526505e-06, |
|
"loss": 0.4802, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.079516391536852, |
|
"grad_norm": 2.1785526324306392, |
|
"learning_rate": 5.19508045084105e-06, |
|
"loss": 0.4634, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.080911415949779, |
|
"grad_norm": 2.083047280181435, |
|
"learning_rate": 5.1828935449321515e-06, |
|
"loss": 0.4648, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.0823064403627063, |
|
"grad_norm": 1.939128120577853, |
|
"learning_rate": 5.17070555092984e-06, |
|
"loss": 0.4546, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.0837014647756336, |
|
"grad_norm": 2.0490314541899015, |
|
"learning_rate": 5.15851654134447e-06, |
|
"loss": 0.4667, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.0850964891885608, |
|
"grad_norm": 2.018626496282391, |
|
"learning_rate": 5.146326588692439e-06, |
|
"loss": 0.4185, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.086491513601488, |
|
"grad_norm": 2.2258491166908354, |
|
"learning_rate": 5.1341357654957546e-06, |
|
"loss": 0.4433, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.0878865380144151, |
|
"grad_norm": 2.0556634088378294, |
|
"learning_rate": 5.121944144281606e-06, |
|
"loss": 0.4977, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.0892815624273424, |
|
"grad_norm": 2.278773856505871, |
|
"learning_rate": 5.109751797581929e-06, |
|
"loss": 0.4549, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.0906765868402697, |
|
"grad_norm": 2.032042799453305, |
|
"learning_rate": 5.097558797932973e-06, |
|
"loss": 0.4469, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.092071611253197, |
|
"grad_norm": 2.1595220009518754, |
|
"learning_rate": 5.085365217874875e-06, |
|
"loss": 0.4554, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.0934666356661242, |
|
"grad_norm": 2.0764422719191664, |
|
"learning_rate": 5.073171129951223e-06, |
|
"loss": 0.513, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.0948616600790513, |
|
"grad_norm": 2.195567440808966, |
|
"learning_rate": 5.060976606708628e-06, |
|
"loss": 0.4653, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.0962566844919786, |
|
"grad_norm": 2.021246542496036, |
|
"learning_rate": 5.048781720696291e-06, |
|
"loss": 0.4598, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.0976517089049058, |
|
"grad_norm": 2.0641574251199426, |
|
"learning_rate": 5.036586544465571e-06, |
|
"loss": 0.4547, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.099046733317833, |
|
"grad_norm": 2.0272284013054804, |
|
"learning_rate": 5.024391150569551e-06, |
|
"loss": 0.4475, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.1004417577307604, |
|
"grad_norm": 2.723082585619733, |
|
"learning_rate": 5.012195611562613e-06, |
|
"loss": 0.4427, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.1018367821436876, |
|
"grad_norm": 1.7395086950703968, |
|
"learning_rate": 5e-06, |
|
"loss": 0.4505, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.1032318065566147, |
|
"grad_norm": 2.0298920165502947, |
|
"learning_rate": 4.987804388437388e-06, |
|
"loss": 0.4817, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.104626830969542, |
|
"grad_norm": 1.9418324231053923, |
|
"learning_rate": 4.975608849430451e-06, |
|
"loss": 0.426, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.1060218553824692, |
|
"grad_norm": 2.0165571549580994, |
|
"learning_rate": 4.963413455534431e-06, |
|
"loss": 0.4341, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.1074168797953965, |
|
"grad_norm": 1.9357623746058135, |
|
"learning_rate": 4.95121827930371e-06, |
|
"loss": 0.5005, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.1088119042083235, |
|
"grad_norm": 2.21037173558054, |
|
"learning_rate": 4.939023393291373e-06, |
|
"loss": 0.4889, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.1102069286212508, |
|
"grad_norm": 2.0805765975077612, |
|
"learning_rate": 4.926828870048779e-06, |
|
"loss": 0.468, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.111601953034178, |
|
"grad_norm": 2.2572931093297357, |
|
"learning_rate": 4.914634782125127e-06, |
|
"loss": 0.4568, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.1129969774471054, |
|
"grad_norm": 1.962863301703952, |
|
"learning_rate": 4.902441202067028e-06, |
|
"loss": 0.494, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.1143920018600326, |
|
"grad_norm": 2.2253259846733235, |
|
"learning_rate": 4.890248202418073e-06, |
|
"loss": 0.5017, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.11578702627296, |
|
"grad_norm": 1.9807905202198288, |
|
"learning_rate": 4.878055855718395e-06, |
|
"loss": 0.4461, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.117182050685887, |
|
"grad_norm": 2.0776479254780997, |
|
"learning_rate": 4.865864234504246e-06, |
|
"loss": 0.424, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.1185770750988142, |
|
"grad_norm": 2.080677062759276, |
|
"learning_rate": 4.853673411307564e-06, |
|
"loss": 0.453, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.1199720995117415, |
|
"grad_norm": 2.0226685971439684, |
|
"learning_rate": 4.841483458655532e-06, |
|
"loss": 0.4705, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.1213671239246688, |
|
"grad_norm": 1.9781164700065137, |
|
"learning_rate": 4.829294449070161e-06, |
|
"loss": 0.48, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.1227621483375958, |
|
"grad_norm": 2.1702790078032694, |
|
"learning_rate": 4.817106455067848e-06, |
|
"loss": 0.4832, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.124157172750523, |
|
"grad_norm": 1.975642624026135, |
|
"learning_rate": 4.804919549158951e-06, |
|
"loss": 0.4227, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.1255521971634503, |
|
"grad_norm": 2.011695144907138, |
|
"learning_rate": 4.792733803847351e-06, |
|
"loss": 0.4225, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.1269472215763776, |
|
"grad_norm": 1.9998826305913646, |
|
"learning_rate": 4.7805492916300235e-06, |
|
"loss": 0.5039, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.1283422459893049, |
|
"grad_norm": 1.9496545539372845, |
|
"learning_rate": 4.768366084996612e-06, |
|
"loss": 0.4694, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.1297372704022322, |
|
"grad_norm": 2.067866668037809, |
|
"learning_rate": 4.756184256428992e-06, |
|
"loss": 0.4609, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.1311322948151592, |
|
"grad_norm": 2.237107223204764, |
|
"learning_rate": 4.744003878400836e-06, |
|
"loss": 0.4674, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.1325273192280865, |
|
"grad_norm": 1.9242050736003506, |
|
"learning_rate": 4.731825023377191e-06, |
|
"loss": 0.4946, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.1339223436410137, |
|
"grad_norm": 2.167480296385883, |
|
"learning_rate": 4.719647763814041e-06, |
|
"loss": 0.4624, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.135317368053941, |
|
"grad_norm": 1.8494731683182122, |
|
"learning_rate": 4.707472172157882e-06, |
|
"loss": 0.4364, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.136712392466868, |
|
"grad_norm": 2.0120379641996835, |
|
"learning_rate": 4.695298320845282e-06, |
|
"loss": 0.4794, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.1381074168797953, |
|
"grad_norm": 2.0857359999555722, |
|
"learning_rate": 4.683126282302457e-06, |
|
"loss": 0.4472, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.1395024412927226, |
|
"grad_norm": 2.052049295035602, |
|
"learning_rate": 4.67095612894484e-06, |
|
"loss": 0.4346, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.1408974657056499, |
|
"grad_norm": 1.973283052063979, |
|
"learning_rate": 4.6587879331766465e-06, |
|
"loss": 0.4249, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.1422924901185771, |
|
"grad_norm": 2.024286771149601, |
|
"learning_rate": 4.646621767390444e-06, |
|
"loss": 0.4544, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.1436875145315044, |
|
"grad_norm": 2.1509974878229516, |
|
"learning_rate": 4.634457703966729e-06, |
|
"loss": 0.4891, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.1450825389444315, |
|
"grad_norm": 2.2337578962453235, |
|
"learning_rate": 4.622295815273482e-06, |
|
"loss": 0.4643, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.1464775633573587, |
|
"grad_norm": 2.0861095386115505, |
|
"learning_rate": 4.610136173665751e-06, |
|
"loss": 0.4723, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.147872587770286, |
|
"grad_norm": 2.1235228114243347, |
|
"learning_rate": 4.597978851485217e-06, |
|
"loss": 0.4415, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.1492676121832133, |
|
"grad_norm": 2.1021074133377953, |
|
"learning_rate": 4.585823921059757e-06, |
|
"loss": 0.4446, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.1506626365961403, |
|
"grad_norm": 1.8410024802817688, |
|
"learning_rate": 4.57367145470302e-06, |
|
"loss": 0.3897, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.1520576610090676, |
|
"grad_norm": 2.0473061305669313, |
|
"learning_rate": 4.561521524713998e-06, |
|
"loss": 0.4547, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.1534526854219949, |
|
"grad_norm": 2.069770337790142, |
|
"learning_rate": 4.5493742033765906e-06, |
|
"loss": 0.524, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.1548477098349221, |
|
"grad_norm": 2.251908136507743, |
|
"learning_rate": 4.537229562959178e-06, |
|
"loss": 0.4554, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.1562427342478494, |
|
"grad_norm": 2.0846493954276784, |
|
"learning_rate": 4.525087675714191e-06, |
|
"loss": 0.4485, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.1576377586607767, |
|
"grad_norm": 1.9221877313185243, |
|
"learning_rate": 4.512948613877683e-06, |
|
"loss": 0.5125, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.1590327830737037, |
|
"grad_norm": 2.1932622962593773, |
|
"learning_rate": 4.5008124496688925e-06, |
|
"loss": 0.4417, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.160427807486631, |
|
"grad_norm": 2.0453310749751012, |
|
"learning_rate": 4.488679255289829e-06, |
|
"loss": 0.4261, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.1618228318995583, |
|
"grad_norm": 2.035542185164377, |
|
"learning_rate": 4.4765491029248235e-06, |
|
"loss": 0.4481, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.1632178563124855, |
|
"grad_norm": 2.1304838227271463, |
|
"learning_rate": 4.464422064740114e-06, |
|
"loss": 0.4402, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.1646128807254126, |
|
"grad_norm": 1.8356581488535975, |
|
"learning_rate": 4.452298212883408e-06, |
|
"loss": 0.4494, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.1660079051383399, |
|
"grad_norm": 2.125785922134183, |
|
"learning_rate": 4.4401776194834615e-06, |
|
"loss": 0.5029, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.1674029295512671, |
|
"grad_norm": 2.128802200938034, |
|
"learning_rate": 4.428060356649642e-06, |
|
"loss": 0.4615, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.1687979539641944, |
|
"grad_norm": 2.133593843482753, |
|
"learning_rate": 4.4159464964715e-06, |
|
"loss": 0.4953, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.1701929783771217, |
|
"grad_norm": 2.161743883645626, |
|
"learning_rate": 4.403836111018346e-06, |
|
"loss": 0.4484, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.171588002790049, |
|
"grad_norm": 2.0148537845975785, |
|
"learning_rate": 4.391729272338817e-06, |
|
"loss": 0.4866, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.172983027202976, |
|
"grad_norm": 2.1364974416474007, |
|
"learning_rate": 4.37962605246045e-06, |
|
"loss": 0.4636, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.1743780516159033, |
|
"grad_norm": 2.1431437523336956, |
|
"learning_rate": 4.367526523389253e-06, |
|
"loss": 0.4947, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.1757730760288305, |
|
"grad_norm": 2.077327008730648, |
|
"learning_rate": 4.355430757109273e-06, |
|
"loss": 0.4128, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.1771681004417578, |
|
"grad_norm": 1.921415744960682, |
|
"learning_rate": 4.343338825582173e-06, |
|
"loss": 0.4509, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.1785631248546848, |
|
"grad_norm": 2.0628543732690607, |
|
"learning_rate": 4.331250800746803e-06, |
|
"loss": 0.4101, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.179958149267612, |
|
"grad_norm": 1.933884426694881, |
|
"learning_rate": 4.319166754518768e-06, |
|
"loss": 0.4424, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.1813531736805394, |
|
"grad_norm": 2.038887875653402, |
|
"learning_rate": 4.307086758790005e-06, |
|
"loss": 0.4501, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.1827481980934667, |
|
"grad_norm": 2.0481874903667907, |
|
"learning_rate": 4.295010885428354e-06, |
|
"loss": 0.4788, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.184143222506394, |
|
"grad_norm": 2.2983450175618603, |
|
"learning_rate": 4.282939206277126e-06, |
|
"loss": 0.4884, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.1855382469193212, |
|
"grad_norm": 2.08973082875802, |
|
"learning_rate": 4.270871793154683e-06, |
|
"loss": 0.4314, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.1869332713322482, |
|
"grad_norm": 2.1033493341055727, |
|
"learning_rate": 4.258808717854006e-06, |
|
"loss": 0.4819, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.1883282957451755, |
|
"grad_norm": 1.8745501503992958, |
|
"learning_rate": 4.2467500521422696e-06, |
|
"loss": 0.4172, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.1897233201581028, |
|
"grad_norm": 2.1806886241389623, |
|
"learning_rate": 4.234695867760412e-06, |
|
"loss": 0.4628, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.19111834457103, |
|
"grad_norm": 2.0959774571958887, |
|
"learning_rate": 4.222646236422711e-06, |
|
"loss": 0.4887, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.192513368983957, |
|
"grad_norm": 2.3011996953254923, |
|
"learning_rate": 4.21060122981636e-06, |
|
"loss": 0.4285, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.1939083933968844, |
|
"grad_norm": 1.894090908268334, |
|
"learning_rate": 4.198560919601034e-06, |
|
"loss": 0.478, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.1953034178098116, |
|
"grad_norm": 2.083934832143099, |
|
"learning_rate": 4.186525377408471e-06, |
|
"loss": 0.4366, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.196698442222739, |
|
"grad_norm": 2.1053278377478977, |
|
"learning_rate": 4.174494674842038e-06, |
|
"loss": 0.4752, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.1980934666356662, |
|
"grad_norm": 2.2411830266708765, |
|
"learning_rate": 4.162468883476319e-06, |
|
"loss": 0.5062, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.1994884910485935, |
|
"grad_norm": 1.9572462330367733, |
|
"learning_rate": 4.150448074856667e-06, |
|
"loss": 0.4577, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.2008835154615205, |
|
"grad_norm": 2.3922197259695515, |
|
"learning_rate": 4.138432320498801e-06, |
|
"loss": 0.4888, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.2022785398744478, |
|
"grad_norm": 2.1857450886509295, |
|
"learning_rate": 4.126421691888366e-06, |
|
"loss": 0.4311, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.203673564287375, |
|
"grad_norm": 2.19620003939438, |
|
"learning_rate": 4.114416260480512e-06, |
|
"loss": 0.4957, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.2050685887003023, |
|
"grad_norm": 2.065478261064621, |
|
"learning_rate": 4.102416097699471e-06, |
|
"loss": 0.4433, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.2064636131132294, |
|
"grad_norm": 2.014509401367039, |
|
"learning_rate": 4.09042127493813e-06, |
|
"loss": 0.4428, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.2078586375261566, |
|
"grad_norm": 2.119056066587081, |
|
"learning_rate": 4.078431863557605e-06, |
|
"loss": 0.4463, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.209253661939084, |
|
"grad_norm": 1.86625900949575, |
|
"learning_rate": 4.066447934886819e-06, |
|
"loss": 0.4728, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.2106486863520112, |
|
"grad_norm": 2.1210628992342255, |
|
"learning_rate": 4.054469560222077e-06, |
|
"loss": 0.3916, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.2120437107649384, |
|
"grad_norm": 1.9877563081010783, |
|
"learning_rate": 4.042496810826641e-06, |
|
"loss": 0.4787, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.2134387351778657, |
|
"grad_norm": 2.2079993472035637, |
|
"learning_rate": 4.030529757930308e-06, |
|
"loss": 0.501, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.2148337595907928, |
|
"grad_norm": 2.06793106880383, |
|
"learning_rate": 4.018568472728979e-06, |
|
"loss": 0.4476, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.21622878400372, |
|
"grad_norm": 2.0422389638464127, |
|
"learning_rate": 4.006613026384249e-06, |
|
"loss": 0.4518, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.2176238084166473, |
|
"grad_norm": 2.1199886116818027, |
|
"learning_rate": 3.9946634900229685e-06, |
|
"loss": 0.4741, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.2190188328295746, |
|
"grad_norm": 2.240846596784438, |
|
"learning_rate": 3.982719934736832e-06, |
|
"loss": 0.4792, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.2204138572425016, |
|
"grad_norm": 2.130464201224374, |
|
"learning_rate": 3.970782431581949e-06, |
|
"loss": 0.4438, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.221808881655429, |
|
"grad_norm": 2.068106505450987, |
|
"learning_rate": 3.958851051578425e-06, |
|
"loss": 0.4474, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.2232039060683562, |
|
"grad_norm": 2.0406851390179894, |
|
"learning_rate": 3.946925865709936e-06, |
|
"loss": 0.4367, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.2245989304812834, |
|
"grad_norm": 2.0437012220228805, |
|
"learning_rate": 3.935006944923304e-06, |
|
"loss": 0.4459, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.2259939548942107, |
|
"grad_norm": 2.1068418581233264, |
|
"learning_rate": 3.923094360128081e-06, |
|
"loss": 0.4254, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.227388979307138, |
|
"grad_norm": 1.9577971488960282, |
|
"learning_rate": 3.911188182196124e-06, |
|
"loss": 0.4433, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.228784003720065, |
|
"grad_norm": 2.0584334663525476, |
|
"learning_rate": 3.899288481961173e-06, |
|
"loss": 0.4425, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.2301790281329923, |
|
"grad_norm": 2.0639800376154396, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.4802, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.2315740525459196, |
|
"grad_norm": 2.2113590719733445, |
|
"learning_rate": 3.875508797724135e-06, |
|
"loss": 0.4898, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.2329690769588468, |
|
"grad_norm": 1.65424993527057, |
|
"learning_rate": 3.8636289551951545e-06, |
|
"loss": 0.4105, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.234364101371774, |
|
"grad_norm": 2.0237983089059357, |
|
"learning_rate": 3.851755873308549e-06, |
|
"loss": 0.437, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.2357591257847012, |
|
"grad_norm": 2.004193875938639, |
|
"learning_rate": 3.8398896227011604e-06, |
|
"loss": 0.4875, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.2371541501976284, |
|
"grad_norm": 2.217103897736817, |
|
"learning_rate": 3.8280302739691874e-06, |
|
"loss": 0.4717, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.2385491746105557, |
|
"grad_norm": 1.9857313758078003, |
|
"learning_rate": 3.816177897667767e-06, |
|
"loss": 0.4129, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.239944199023483, |
|
"grad_norm": 2.1759441467608824, |
|
"learning_rate": 3.8043325643105554e-06, |
|
"loss": 0.52, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.2413392234364102, |
|
"grad_norm": 2.1329442000650327, |
|
"learning_rate": 3.792494344369311e-06, |
|
"loss": 0.4097, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.2427342478493373, |
|
"grad_norm": 1.965987019278061, |
|
"learning_rate": 3.780663308273466e-06, |
|
"loss": 0.4527, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.2441292722622646, |
|
"grad_norm": 2.1499821644131503, |
|
"learning_rate": 3.7688395264097177e-06, |
|
"loss": 0.4778, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.2455242966751918, |
|
"grad_norm": 2.008829269853213, |
|
"learning_rate": 3.7570230691216055e-06, |
|
"loss": 0.4103, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.246919321088119, |
|
"grad_norm": 2.2628732069215958, |
|
"learning_rate": 3.7452140067090936e-06, |
|
"loss": 0.4697, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.2483143455010464, |
|
"grad_norm": 2.144442294578986, |
|
"learning_rate": 3.733412409428148e-06, |
|
"loss": 0.453, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.2497093699139734, |
|
"grad_norm": 1.919582985300559, |
|
"learning_rate": 3.7216183474903243e-06, |
|
"loss": 0.4742, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.2511043943269007, |
|
"grad_norm": 2.2316553028401658, |
|
"learning_rate": 3.70983189106235e-06, |
|
"loss": 0.4898, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.252499418739828, |
|
"grad_norm": 2.071360003336638, |
|
"learning_rate": 3.698053110265699e-06, |
|
"loss": 0.4953, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.2538944431527552, |
|
"grad_norm": 2.156296749381877, |
|
"learning_rate": 3.6862820751761863e-06, |
|
"loss": 0.4176, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.2552894675656825, |
|
"grad_norm": 2.0123119204925417, |
|
"learning_rate": 3.6745188558235443e-06, |
|
"loss": 0.4788, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.2566844919786098, |
|
"grad_norm": 2.1269095029712024, |
|
"learning_rate": 3.662763522191002e-06, |
|
"loss": 0.4729, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.2580795163915368, |
|
"grad_norm": 2.0416038709842934, |
|
"learning_rate": 3.6510161442148783e-06, |
|
"loss": 0.4471, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.259474540804464, |
|
"grad_norm": 1.976365217621231, |
|
"learning_rate": 3.639276791784163e-06, |
|
"loss": 0.48, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.2608695652173914, |
|
"grad_norm": 2.162833974202662, |
|
"learning_rate": 3.6275455347400952e-06, |
|
"loss": 0.4395, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.2622645896303184, |
|
"grad_norm": 1.9359111623466696, |
|
"learning_rate": 3.6158224428757538e-06, |
|
"loss": 0.4507, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.2636596140432457, |
|
"grad_norm": 2.07327168371495, |
|
"learning_rate": 3.6041075859356383e-06, |
|
"loss": 0.4428, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.265054638456173, |
|
"grad_norm": 2.03931713471931, |
|
"learning_rate": 3.592401033615259e-06, |
|
"loss": 0.4597, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.2664496628691002, |
|
"grad_norm": 2.23742738842125, |
|
"learning_rate": 3.580702855560718e-06, |
|
"loss": 0.4982, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.2678446872820275, |
|
"grad_norm": 2.080685333486399, |
|
"learning_rate": 3.5690131213682943e-06, |
|
"loss": 0.4595, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.2692397116949548, |
|
"grad_norm": 2.0899692520393414, |
|
"learning_rate": 3.5573319005840363e-06, |
|
"loss": 0.4602, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.270634736107882, |
|
"grad_norm": 2.223490524510679, |
|
"learning_rate": 3.5456592627033355e-06, |
|
"loss": 0.4424, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.272029760520809, |
|
"grad_norm": 1.8639269617638456, |
|
"learning_rate": 3.533995277170532e-06, |
|
"loss": 0.4628, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.2734247849337363, |
|
"grad_norm": 2.093793164050054, |
|
"learning_rate": 3.52234001337848e-06, |
|
"loss": 0.4527, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.2748198093466636, |
|
"grad_norm": 2.1299607415176567, |
|
"learning_rate": 3.510693540668151e-06, |
|
"loss": 0.4349, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.2762148337595907, |
|
"grad_norm": 1.9228204200175336, |
|
"learning_rate": 3.4990559283282133e-06, |
|
"loss": 0.4353, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.277609858172518, |
|
"grad_norm": 2.0259963322884396, |
|
"learning_rate": 3.4874272455946217e-06, |
|
"loss": 0.4902, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.2790048825854452, |
|
"grad_norm": 2.0655847930615767, |
|
"learning_rate": 3.4758075616502075e-06, |
|
"loss": 0.4263, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.2803999069983725, |
|
"grad_norm": 2.0685690620076094, |
|
"learning_rate": 3.4641969456242646e-06, |
|
"loss": 0.4662, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.2817949314112997, |
|
"grad_norm": 2.1387591609101873, |
|
"learning_rate": 3.4525954665921375e-06, |
|
"loss": 0.444, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.283189955824227, |
|
"grad_norm": 2.0161975874514617, |
|
"learning_rate": 3.4410031935748124e-06, |
|
"loss": 0.4242, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.2845849802371543, |
|
"grad_norm": 2.084336680851645, |
|
"learning_rate": 3.4294201955385086e-06, |
|
"loss": 0.4702, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.2859800046500813, |
|
"grad_norm": 2.1036740916228442, |
|
"learning_rate": 3.4178465413942628e-06, |
|
"loss": 0.4808, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.2873750290630086, |
|
"grad_norm": 2.086226145543498, |
|
"learning_rate": 3.406282299997521e-06, |
|
"loss": 0.3895, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.2887700534759359, |
|
"grad_norm": 1.9685735915085314, |
|
"learning_rate": 3.394727540147732e-06, |
|
"loss": 0.4099, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.290165077888863, |
|
"grad_norm": 2.0192812446036035, |
|
"learning_rate": 3.383182330587936e-06, |
|
"loss": 0.4561, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.2915601023017902, |
|
"grad_norm": 2.0644595686003804, |
|
"learning_rate": 3.3716467400043514e-06, |
|
"loss": 0.4478, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.2929551267147175, |
|
"grad_norm": 2.1694861941362245, |
|
"learning_rate": 3.360120837025974e-06, |
|
"loss": 0.4327, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.2943501511276447, |
|
"grad_norm": 1.9412400050349086, |
|
"learning_rate": 3.3486046902241663e-06, |
|
"loss": 0.458, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.295745175540572, |
|
"grad_norm": 2.1349425480132815, |
|
"learning_rate": 3.3370983681122405e-06, |
|
"loss": 0.4278, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.2971401999534993, |
|
"grad_norm": 1.9769480906565091, |
|
"learning_rate": 3.3256019391450696e-06, |
|
"loss": 0.4708, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.2985352243664265, |
|
"grad_norm": 2.1617930261944416, |
|
"learning_rate": 3.3141154717186597e-06, |
|
"loss": 0.4791, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.2999302487793536, |
|
"grad_norm": 2.233031327126289, |
|
"learning_rate": 3.302639034169758e-06, |
|
"loss": 0.5107, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.3013252731922809, |
|
"grad_norm": 2.2333091138236982, |
|
"learning_rate": 3.291172694775435e-06, |
|
"loss": 0.4696, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.3027202976052081, |
|
"grad_norm": 1.9897849895374509, |
|
"learning_rate": 3.2797165217526915e-06, |
|
"loss": 0.4319, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.3041153220181352, |
|
"grad_norm": 2.1259453438639264, |
|
"learning_rate": 3.2682705832580395e-06, |
|
"loss": 0.4821, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.3055103464310625, |
|
"grad_norm": 2.1982207926862127, |
|
"learning_rate": 3.2568349473871044e-06, |
|
"loss": 0.4613, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.3069053708439897, |
|
"grad_norm": 1.943157323921308, |
|
"learning_rate": 3.245409682174217e-06, |
|
"loss": 0.4727, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.308300395256917, |
|
"grad_norm": 2.1694502623538816, |
|
"learning_rate": 3.2339948555920103e-06, |
|
"loss": 0.4462, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.3096954196698443, |
|
"grad_norm": 2.02441875061226, |
|
"learning_rate": 3.222590535551017e-06, |
|
"loss": 0.3991, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.3110904440827715, |
|
"grad_norm": 2.009032349306495, |
|
"learning_rate": 3.2111967898992597e-06, |
|
"loss": 0.4724, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.3124854684956988, |
|
"grad_norm": 1.9843180264245779, |
|
"learning_rate": 3.1998136864218497e-06, |
|
"loss": 0.4452, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.3138804929086259, |
|
"grad_norm": 2.0533825746187806, |
|
"learning_rate": 3.188441292840587e-06, |
|
"loss": 0.4482, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.3152755173215531, |
|
"grad_norm": 2.0235013309402468, |
|
"learning_rate": 3.177079676813557e-06, |
|
"loss": 0.4561, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.3166705417344804, |
|
"grad_norm": 2.1137423321444317, |
|
"learning_rate": 3.1657289059347184e-06, |
|
"loss": 0.4492, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.3180655661474074, |
|
"grad_norm": 1.7486020284932557, |
|
"learning_rate": 3.1543890477335153e-06, |
|
"loss": 0.5007, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.3194605905603347, |
|
"grad_norm": 2.156564229441565, |
|
"learning_rate": 3.1430601696744676e-06, |
|
"loss": 0.48, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.320855614973262, |
|
"grad_norm": 2.098327227462188, |
|
"learning_rate": 3.131742339156768e-06, |
|
"loss": 0.4515, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.3222506393861893, |
|
"grad_norm": 2.107981352881033, |
|
"learning_rate": 3.120435623513887e-06, |
|
"loss": 0.5015, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.3236456637991165, |
|
"grad_norm": 2.3095062879021766, |
|
"learning_rate": 3.1091400900131662e-06, |
|
"loss": 0.5035, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.3250406882120438, |
|
"grad_norm": 2.161655104957988, |
|
"learning_rate": 3.0978558058554255e-06, |
|
"loss": 0.4702, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.326435712624971, |
|
"grad_norm": 2.1231878542949265, |
|
"learning_rate": 3.0865828381745515e-06, |
|
"loss": 0.441, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.3278307370378981, |
|
"grad_norm": 2.0409247579695995, |
|
"learning_rate": 3.0753212540371126e-06, |
|
"loss": 0.4371, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.3292257614508254, |
|
"grad_norm": 1.972768288020109, |
|
"learning_rate": 3.0640711204419495e-06, |
|
"loss": 0.4775, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.3306207858637527, |
|
"grad_norm": 2.0235557868912215, |
|
"learning_rate": 3.0528325043197786e-06, |
|
"loss": 0.424, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.33201581027668, |
|
"grad_norm": 2.1375250223189672, |
|
"learning_rate": 3.0416054725327975e-06, |
|
"loss": 0.4883, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.333410834689607, |
|
"grad_norm": 2.2291824467539985, |
|
"learning_rate": 3.0303900918742832e-06, |
|
"loss": 0.4629, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.3348058591025342, |
|
"grad_norm": 2.113413333177263, |
|
"learning_rate": 3.0191864290682006e-06, |
|
"loss": 0.4602, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.3362008835154615, |
|
"grad_norm": 2.1716284589357, |
|
"learning_rate": 3.007994550768793e-06, |
|
"loss": 0.4649, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.3375959079283888, |
|
"grad_norm": 1.931534866293287, |
|
"learning_rate": 2.9968145235602014e-06, |
|
"loss": 0.4473, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.338990932341316, |
|
"grad_norm": 2.0364674719890785, |
|
"learning_rate": 2.9856464139560583e-06, |
|
"loss": 0.4246, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.3403859567542433, |
|
"grad_norm": 2.108866570837351, |
|
"learning_rate": 2.9744902883990923e-06, |
|
"loss": 0.4365, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.3417809811671704, |
|
"grad_norm": 2.127000885263901, |
|
"learning_rate": 2.963346213260737e-06, |
|
"loss": 0.4443, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.3431760055800976, |
|
"grad_norm": 2.0526296607029324, |
|
"learning_rate": 2.9522142548407366e-06, |
|
"loss": 0.4588, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.344571029993025, |
|
"grad_norm": 2.241123744052401, |
|
"learning_rate": 2.941094479366744e-06, |
|
"loss": 0.4697, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.3459660544059522, |
|
"grad_norm": 2.132358400432893, |
|
"learning_rate": 2.929986952993933e-06, |
|
"loss": 0.4187, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.3473610788188792, |
|
"grad_norm": 1.8629316651965908, |
|
"learning_rate": 2.918891741804607e-06, |
|
"loss": 0.5213, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.3487561032318065, |
|
"grad_norm": 2.1685409605601227, |
|
"learning_rate": 2.9078089118077994e-06, |
|
"loss": 0.4231, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.3501511276447338, |
|
"grad_norm": 2.209136940528215, |
|
"learning_rate": 2.896738528938885e-06, |
|
"loss": 0.4455, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.351546152057661, |
|
"grad_norm": 2.139570622351985, |
|
"learning_rate": 2.885680659059187e-06, |
|
"loss": 0.4334, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.3529411764705883, |
|
"grad_norm": 2.033270980494118, |
|
"learning_rate": 2.8746353679555794e-06, |
|
"loss": 0.4568, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.3543362008835156, |
|
"grad_norm": 2.0919421978668056, |
|
"learning_rate": 2.863602721340109e-06, |
|
"loss": 0.4561, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.3557312252964426, |
|
"grad_norm": 1.9545589493519657, |
|
"learning_rate": 2.8525827848495912e-06, |
|
"loss": 0.4344, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.35712624970937, |
|
"grad_norm": 2.023560734048173, |
|
"learning_rate": 2.8415756240452274e-06, |
|
"loss": 0.4436, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.3585212741222972, |
|
"grad_norm": 2.0340775030640708, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 0.4297, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.3599162985352244, |
|
"grad_norm": 2.173615317885776, |
|
"learning_rate": 2.819599891359337e-06, |
|
"loss": 0.4252, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.3613113229481515, |
|
"grad_norm": 1.9284825554902276, |
|
"learning_rate": 2.8086314502186197e-06, |
|
"loss": 0.4155, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.3627063473610788, |
|
"grad_norm": 2.141689581871054, |
|
"learning_rate": 2.7976760462448955e-06, |
|
"loss": 0.465, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.364101371774006, |
|
"grad_norm": 2.114769495652363, |
|
"learning_rate": 2.7867337446154397e-06, |
|
"loss": 0.5002, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.3654963961869333, |
|
"grad_norm": 2.2678110505319933, |
|
"learning_rate": 2.77580461042958e-06, |
|
"loss": 0.4671, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.3668914205998606, |
|
"grad_norm": 1.987667346423969, |
|
"learning_rate": 2.7648887087082996e-06, |
|
"loss": 0.4685, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.3682864450127878, |
|
"grad_norm": 2.24277483709307, |
|
"learning_rate": 2.753986104393864e-06, |
|
"loss": 0.4481, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.369681469425715, |
|
"grad_norm": 2.2378577673040048, |
|
"learning_rate": 2.743096862349427e-06, |
|
"loss": 0.441, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.3710764938386422, |
|
"grad_norm": 2.021640316592768, |
|
"learning_rate": 2.7322210473586406e-06, |
|
"loss": 0.437, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.3724715182515694, |
|
"grad_norm": 1.8635324388592955, |
|
"learning_rate": 2.721358724125287e-06, |
|
"loss": 0.4605, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.3738665426644967, |
|
"grad_norm": 3.7251213547992688, |
|
"learning_rate": 2.7105099572728676e-06, |
|
"loss": 0.4779, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.3752615670774238, |
|
"grad_norm": 2.084169605706041, |
|
"learning_rate": 2.6996748113442397e-06, |
|
"loss": 0.4533, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.376656591490351, |
|
"grad_norm": 2.2104579294138604, |
|
"learning_rate": 2.6888533508012247e-06, |
|
"loss": 0.4565, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.3780516159032783, |
|
"grad_norm": 2.1567174351782614, |
|
"learning_rate": 2.678045640024225e-06, |
|
"loss": 0.49, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.3794466403162056, |
|
"grad_norm": 2.0531595116227557, |
|
"learning_rate": 2.66725174331184e-06, |
|
"loss": 0.403, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.3808416647291328, |
|
"grad_norm": 2.080232972878775, |
|
"learning_rate": 2.6564717248804876e-06, |
|
"loss": 0.4926, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.38223668914206, |
|
"grad_norm": 2.0993530101540094, |
|
"learning_rate": 2.6457056488640135e-06, |
|
"loss": 0.4605, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.3836317135549872, |
|
"grad_norm": 2.230646736451978, |
|
"learning_rate": 2.6349535793133196e-06, |
|
"loss": 0.4612, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.3850267379679144, |
|
"grad_norm": 2.08748461860507, |
|
"learning_rate": 2.6242155801959814e-06, |
|
"loss": 0.4747, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.3864217623808417, |
|
"grad_norm": 2.169585558390855, |
|
"learning_rate": 2.613491715395861e-06, |
|
"loss": 0.4435, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.387816786793769, |
|
"grad_norm": 2.0019666409817574, |
|
"learning_rate": 2.602782048712729e-06, |
|
"loss": 0.4115, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.389211811206696, |
|
"grad_norm": 1.9749791762887468, |
|
"learning_rate": 2.592086643861891e-06, |
|
"loss": 0.4732, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.3906068356196233, |
|
"grad_norm": 2.174738415153292, |
|
"learning_rate": 2.5814055644738013e-06, |
|
"loss": 0.4541, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.3920018600325506, |
|
"grad_norm": 2.139559543105941, |
|
"learning_rate": 2.57073887409369e-06, |
|
"loss": 0.4509, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.3933968844454778, |
|
"grad_norm": 2.045345170723802, |
|
"learning_rate": 2.5600866361811804e-06, |
|
"loss": 0.4342, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.394791908858405, |
|
"grad_norm": 2.1774650094819257, |
|
"learning_rate": 2.5494489141099155e-06, |
|
"loss": 0.4762, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.3961869332713324, |
|
"grad_norm": 2.188733000241817, |
|
"learning_rate": 2.5388257711671723e-06, |
|
"loss": 0.4656, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.3975819576842594, |
|
"grad_norm": 1.9431028809211057, |
|
"learning_rate": 2.5282172705535013e-06, |
|
"loss": 0.4199, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.3989769820971867, |
|
"grad_norm": 1.9956348364331262, |
|
"learning_rate": 2.517623475382335e-06, |
|
"loss": 0.4673, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.400372006510114, |
|
"grad_norm": 2.1848282860521824, |
|
"learning_rate": 2.507044448679621e-06, |
|
"loss": 0.4533, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.4017670309230412, |
|
"grad_norm": 2.0497329348777837, |
|
"learning_rate": 2.4964802533834404e-06, |
|
"loss": 0.4676, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.4031620553359683, |
|
"grad_norm": 2.1022873543390106, |
|
"learning_rate": 2.4859309523436415e-06, |
|
"loss": 0.4498, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.4045570797488955, |
|
"grad_norm": 2.1374560724246114, |
|
"learning_rate": 2.4753966083214613e-06, |
|
"loss": 0.4202, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.4059521041618228, |
|
"grad_norm": 1.994795529357376, |
|
"learning_rate": 2.4648772839891517e-06, |
|
"loss": 0.405, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.40734712857475, |
|
"grad_norm": 1.889213983531933, |
|
"learning_rate": 2.454373041929607e-06, |
|
"loss": 0.4043, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.4087421529876774, |
|
"grad_norm": 1.888123546815583, |
|
"learning_rate": 2.4438839446359936e-06, |
|
"loss": 0.4349, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.4101371774006046, |
|
"grad_norm": 2.1442846126168105, |
|
"learning_rate": 2.433410054511376e-06, |
|
"loss": 0.4897, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.4115322018135317, |
|
"grad_norm": 2.098646009752216, |
|
"learning_rate": 2.422951433868346e-06, |
|
"loss": 0.4814, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.412927226226459, |
|
"grad_norm": 2.0907240577366433, |
|
"learning_rate": 2.4125081449286523e-06, |
|
"loss": 0.4949, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.4143222506393862, |
|
"grad_norm": 2.1264766644102595, |
|
"learning_rate": 2.4020802498228333e-06, |
|
"loss": 0.4435, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.4157172750523135, |
|
"grad_norm": 2.161970375783907, |
|
"learning_rate": 2.3916678105898376e-06, |
|
"loss": 0.4446, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.4171122994652405, |
|
"grad_norm": 2.0357769161075954, |
|
"learning_rate": 2.3812708891766683e-06, |
|
"loss": 0.4516, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.4185073238781678, |
|
"grad_norm": 2.2463213912233613, |
|
"learning_rate": 2.3708895474380045e-06, |
|
"loss": 0.4481, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.419902348291095, |
|
"grad_norm": 2.23141097622481, |
|
"learning_rate": 2.360523847135838e-06, |
|
"loss": 0.4469, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.4212973727040223, |
|
"grad_norm": 2.019771445051018, |
|
"learning_rate": 2.3501738499391053e-06, |
|
"loss": 0.4235, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.4226923971169496, |
|
"grad_norm": 2.0632278153556776, |
|
"learning_rate": 2.339839617423318e-06, |
|
"loss": 0.465, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.4240874215298769, |
|
"grad_norm": 2.0613929308269867, |
|
"learning_rate": 2.3295212110701994e-06, |
|
"loss": 0.3962, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.425482445942804, |
|
"grad_norm": 1.9328040306708631, |
|
"learning_rate": 2.3192186922673187e-06, |
|
"loss": 0.434, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.4268774703557312, |
|
"grad_norm": 2.1639316185817496, |
|
"learning_rate": 2.308932122307724e-06, |
|
"loss": 0.5227, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.4282724947686585, |
|
"grad_norm": 2.0899973628027007, |
|
"learning_rate": 2.2986615623895774e-06, |
|
"loss": 0.4453, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.4296675191815857, |
|
"grad_norm": 2.051643921559896, |
|
"learning_rate": 2.288407073615797e-06, |
|
"loss": 0.4385, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.4310625435945128, |
|
"grad_norm": 2.134955583289714, |
|
"learning_rate": 2.2781687169936794e-06, |
|
"loss": 0.3805, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.43245756800744, |
|
"grad_norm": 1.957462775697652, |
|
"learning_rate": 2.2679465534345534e-06, |
|
"loss": 0.4605, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.4338525924203673, |
|
"grad_norm": 2.0642360916463924, |
|
"learning_rate": 2.2577406437534055e-06, |
|
"loss": 0.4511, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.4352476168332946, |
|
"grad_norm": 2.007231951364962, |
|
"learning_rate": 2.247551048668531e-06, |
|
"loss": 0.4527, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.4366426412462219, |
|
"grad_norm": 2.138332772618615, |
|
"learning_rate": 2.2373778288011517e-06, |
|
"loss": 0.4552, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.4380376656591491, |
|
"grad_norm": 2.124748566211008, |
|
"learning_rate": 2.2272210446750757e-06, |
|
"loss": 0.4635, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.4394326900720762, |
|
"grad_norm": 2.170811329651528, |
|
"learning_rate": 2.2170807567163294e-06, |
|
"loss": 0.4623, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.4408277144850035, |
|
"grad_norm": 2.090856244557684, |
|
"learning_rate": 2.2069570252527954e-06, |
|
"loss": 0.4357, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.4422227388979307, |
|
"grad_norm": 2.3690024054005154, |
|
"learning_rate": 2.196849910513858e-06, |
|
"loss": 0.4715, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.443617763310858, |
|
"grad_norm": 2.059481032162157, |
|
"learning_rate": 2.186759472630045e-06, |
|
"loss": 0.469, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.445012787723785, |
|
"grad_norm": 2.2361040736935465, |
|
"learning_rate": 2.17668577163266e-06, |
|
"loss": 0.46, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.4464078121367123, |
|
"grad_norm": 2.164364979964796, |
|
"learning_rate": 2.1666288674534446e-06, |
|
"loss": 0.446, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.4478028365496396, |
|
"grad_norm": 1.8634866672332282, |
|
"learning_rate": 2.156588819924204e-06, |
|
"loss": 0.4549, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.4491978609625669, |
|
"grad_norm": 2.132439121162979, |
|
"learning_rate": 2.1465656887764615e-06, |
|
"loss": 0.4222, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.4505928853754941, |
|
"grad_norm": 2.163017453224282, |
|
"learning_rate": 2.136559533641092e-06, |
|
"loss": 0.4677, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.4519879097884214, |
|
"grad_norm": 2.1316582396669386, |
|
"learning_rate": 2.126570414047982e-06, |
|
"loss": 0.4784, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.4533829342013485, |
|
"grad_norm": 2.1775898180475215, |
|
"learning_rate": 2.1165983894256647e-06, |
|
"loss": 0.47, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.4547779586142757, |
|
"grad_norm": 2.204489717101063, |
|
"learning_rate": 2.1066435191009717e-06, |
|
"loss": 0.4415, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.456172983027203, |
|
"grad_norm": 2.0391115202528023, |
|
"learning_rate": 2.096705862298676e-06, |
|
"loss": 0.4437, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.4575680074401303, |
|
"grad_norm": 2.150122752289656, |
|
"learning_rate": 2.086785478141143e-06, |
|
"loss": 0.4328, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.4589630318530573, |
|
"grad_norm": 2.1245697032921833, |
|
"learning_rate": 2.076882425647977e-06, |
|
"loss": 0.3992, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.4603580562659846, |
|
"grad_norm": 2.0584854403080874, |
|
"learning_rate": 2.0669967637356702e-06, |
|
"loss": 0.4427, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.4617530806789119, |
|
"grad_norm": 1.9826831338383193, |
|
"learning_rate": 2.057128551217254e-06, |
|
"loss": 0.4774, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.4631481050918391, |
|
"grad_norm": 2.1289243452321847, |
|
"learning_rate": 2.0472778468019456e-06, |
|
"loss": 0.497, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.4645431295047664, |
|
"grad_norm": 2.1286358432747376, |
|
"learning_rate": 2.037444709094804e-06, |
|
"loss": 0.4321, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.4659381539176937, |
|
"grad_norm": 2.054601532030644, |
|
"learning_rate": 2.027629196596373e-06, |
|
"loss": 0.4763, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.4673331783306207, |
|
"grad_norm": 2.1232951836859106, |
|
"learning_rate": 2.0178313677023427e-06, |
|
"loss": 0.483, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.468728202743548, |
|
"grad_norm": 2.110872679736881, |
|
"learning_rate": 2.008051280703196e-06, |
|
"loss": 0.4284, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.4701232271564753, |
|
"grad_norm": 2.039901992775588, |
|
"learning_rate": 1.9982889937838635e-06, |
|
"loss": 0.4295, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.4715182515694025, |
|
"grad_norm": 1.9424993858352442, |
|
"learning_rate": 1.988544565023378e-06, |
|
"loss": 0.4417, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.4729132759823296, |
|
"grad_norm": 2.2251949175021157, |
|
"learning_rate": 1.978818052394528e-06, |
|
"loss": 0.4641, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.4743083003952568, |
|
"grad_norm": 1.9524088974321652, |
|
"learning_rate": 1.9691095137635117e-06, |
|
"loss": 0.4909, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.4757033248081841, |
|
"grad_norm": 2.320431539339629, |
|
"learning_rate": 1.9594190068895967e-06, |
|
"loss": 0.4686, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.4770983492211114, |
|
"grad_norm": 2.1811531311597174, |
|
"learning_rate": 1.949746589424772e-06, |
|
"loss": 0.4381, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.4784933736340387, |
|
"grad_norm": 2.0675702062294703, |
|
"learning_rate": 1.940092318913408e-06, |
|
"loss": 0.4801, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.479888398046966, |
|
"grad_norm": 2.140807400840788, |
|
"learning_rate": 1.9304562527919094e-06, |
|
"loss": 0.4569, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.481283422459893, |
|
"grad_norm": 2.1799124450514173, |
|
"learning_rate": 1.920838448388382e-06, |
|
"loss": 0.432, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.4826784468728202, |
|
"grad_norm": 1.893690087500473, |
|
"learning_rate": 1.911238962922282e-06, |
|
"loss": 0.427, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.4840734712857475, |
|
"grad_norm": 2.1124227125467336, |
|
"learning_rate": 1.901657853504088e-06, |
|
"loss": 0.4683, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.4854684956986748, |
|
"grad_norm": 2.116545768275609, |
|
"learning_rate": 1.8920951771349428e-06, |
|
"loss": 0.4389, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.4868635201116018, |
|
"grad_norm": 2.0960807190104473, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.4192, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.488258544524529, |
|
"grad_norm": 2.099536710624629, |
|
"learning_rate": 1.8730253509997388e-06, |
|
"loss": 0.4088, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.4896535689374564, |
|
"grad_norm": 1.9858631220209877, |
|
"learning_rate": 1.8635183146863016e-06, |
|
"loss": 0.4277, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.4910485933503836, |
|
"grad_norm": 2.170988774620348, |
|
"learning_rate": 1.8540299383264843e-06, |
|
"loss": 0.4559, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.492443617763311, |
|
"grad_norm": 2.1787387202817015, |
|
"learning_rate": 1.8445602783697375e-06, |
|
"loss": 0.455, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.4938386421762382, |
|
"grad_norm": 2.1599938341615985, |
|
"learning_rate": 1.8351093911541573e-06, |
|
"loss": 0.5288, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.4952336665891652, |
|
"grad_norm": 2.32780588654961, |
|
"learning_rate": 1.8256773329061566e-06, |
|
"loss": 0.4552, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.4966286910020925, |
|
"grad_norm": 2.149694858376219, |
|
"learning_rate": 1.8162641597401338e-06, |
|
"loss": 0.4689, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.4980237154150198, |
|
"grad_norm": 2.0819138266881225, |
|
"learning_rate": 1.8068699276581286e-06, |
|
"loss": 0.4683, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.499418739827947, |
|
"grad_norm": 2.2817200224362235, |
|
"learning_rate": 1.7974946925494925e-06, |
|
"loss": 0.5152, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.500813764240874, |
|
"grad_norm": 2.203558953898251, |
|
"learning_rate": 1.7881385101905613e-06, |
|
"loss": 0.4737, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.5022087886538014, |
|
"grad_norm": 2.0908558844435756, |
|
"learning_rate": 1.778801436244319e-06, |
|
"loss": 0.4283, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.5036038130667286, |
|
"grad_norm": 2.1315489899020963, |
|
"learning_rate": 1.7694835262600668e-06, |
|
"loss": 0.4837, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.504998837479656, |
|
"grad_norm": 2.1634782853778884, |
|
"learning_rate": 1.7601848356730933e-06, |
|
"loss": 0.4467, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.5063938618925832, |
|
"grad_norm": 2.154791342432924, |
|
"learning_rate": 1.7509054198043473e-06, |
|
"loss": 0.417, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.5077888863055104, |
|
"grad_norm": 2.0140475517719167, |
|
"learning_rate": 1.741645333860098e-06, |
|
"loss": 0.464, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.5091839107184377, |
|
"grad_norm": 2.1573584679806372, |
|
"learning_rate": 1.7324046329316253e-06, |
|
"loss": 0.4372, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.5105789351313648, |
|
"grad_norm": 1.9559737789586726, |
|
"learning_rate": 1.7231833719948743e-06, |
|
"loss": 0.4413, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.511973959544292, |
|
"grad_norm": 2.1157743977756405, |
|
"learning_rate": 1.7139816059101372e-06, |
|
"loss": 0.4098, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.5133689839572193, |
|
"grad_norm": 1.9673036282895517, |
|
"learning_rate": 1.7047993894217269e-06, |
|
"loss": 0.4533, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.5147640083701464, |
|
"grad_norm": 2.196912391461416, |
|
"learning_rate": 1.6956367771576432e-06, |
|
"loss": 0.4704, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.5161590327830736, |
|
"grad_norm": 2.0199973612735387, |
|
"learning_rate": 1.6864938236292606e-06, |
|
"loss": 0.4383, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.517554057196001, |
|
"grad_norm": 2.300672182405597, |
|
"learning_rate": 1.6773705832309945e-06, |
|
"loss": 0.4511, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.5189490816089282, |
|
"grad_norm": 2.14852844655263, |
|
"learning_rate": 1.6682671102399806e-06, |
|
"loss": 0.4466, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.5203441060218554, |
|
"grad_norm": 2.1361355389334378, |
|
"learning_rate": 1.6591834588157523e-06, |
|
"loss": 0.4548, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.5217391304347827, |
|
"grad_norm": 2.0851465686341086, |
|
"learning_rate": 1.6501196829999179e-06, |
|
"loss": 0.501, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.52313415484771, |
|
"grad_norm": 2.229088020773415, |
|
"learning_rate": 1.6410758367158386e-06, |
|
"loss": 0.4214, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.524529179260637, |
|
"grad_norm": 1.9656859079499656, |
|
"learning_rate": 1.6320519737683095e-06, |
|
"loss": 0.394, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.5259242036735643, |
|
"grad_norm": 1.8733906967388423, |
|
"learning_rate": 1.6230481478432364e-06, |
|
"loss": 0.4564, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.5273192280864916, |
|
"grad_norm": 2.0441924506755664, |
|
"learning_rate": 1.6140644125073223e-06, |
|
"loss": 0.449, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.5287142524994186, |
|
"grad_norm": 2.0385854309420317, |
|
"learning_rate": 1.6051008212077384e-06, |
|
"loss": 0.4797, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.5301092769123459, |
|
"grad_norm": 2.3564266457176855, |
|
"learning_rate": 1.5961574272718179e-06, |
|
"loss": 0.4768, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.5315043013252732, |
|
"grad_norm": 1.9115979450281055, |
|
"learning_rate": 1.5872342839067305e-06, |
|
"loss": 0.458, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.5328993257382004, |
|
"grad_norm": 2.1808017439500027, |
|
"learning_rate": 1.5783314441991698e-06, |
|
"loss": 0.4233, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.5342943501511277, |
|
"grad_norm": 2.076982481552849, |
|
"learning_rate": 1.569448961115037e-06, |
|
"loss": 0.4696, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.535689374564055, |
|
"grad_norm": 2.2277775247007585, |
|
"learning_rate": 1.5605868874991225e-06, |
|
"loss": 0.4672, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.5370843989769822, |
|
"grad_norm": 2.029428608776388, |
|
"learning_rate": 1.5517452760747975e-06, |
|
"loss": 0.4391, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.5384794233899093, |
|
"grad_norm": 2.163702572810171, |
|
"learning_rate": 1.5429241794436939e-06, |
|
"loss": 0.4615, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.5398744478028366, |
|
"grad_norm": 2.0576551741631546, |
|
"learning_rate": 1.5341236500853963e-06, |
|
"loss": 0.3963, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.5412694722157638, |
|
"grad_norm": 1.948897872545703, |
|
"learning_rate": 1.525343740357128e-06, |
|
"loss": 0.4433, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.5426644966286909, |
|
"grad_norm": 2.194109971817455, |
|
"learning_rate": 1.5165845024934366e-06, |
|
"loss": 0.4713, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.5440595210416181, |
|
"grad_norm": 2.1942802527235767, |
|
"learning_rate": 1.5078459886058894e-06, |
|
"loss": 0.3669, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 1.931807338090762, |
|
"learning_rate": 1.499128250682757e-06, |
|
"loss": 0.4165, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.5468495698674727, |
|
"grad_norm": 1.8873433846497065, |
|
"learning_rate": 1.4904313405887111e-06, |
|
"loss": 0.4119, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.5482445942804, |
|
"grad_norm": 2.1562216951827913, |
|
"learning_rate": 1.4817553100645105e-06, |
|
"loss": 0.4601, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.5496396186933272, |
|
"grad_norm": 2.1807467873254116, |
|
"learning_rate": 1.4731002107266902e-06, |
|
"loss": 0.4231, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.5510346431062545, |
|
"grad_norm": 1.981889187995434, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.494, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.5524296675191815, |
|
"grad_norm": 2.5915146018778614, |
|
"learning_rate": 1.455853011453408e-06, |
|
"loss": 0.4647, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.5538246919321088, |
|
"grad_norm": 2.2136885014943677, |
|
"learning_rate": 1.447261014127167e-06, |
|
"loss": 0.4374, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.555219716345036, |
|
"grad_norm": 2.107779035217476, |
|
"learning_rate": 1.4386901532051379e-06, |
|
"loss": 0.4575, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.5566147407579631, |
|
"grad_norm": 2.0020086351509088, |
|
"learning_rate": 1.430140479678168e-06, |
|
"loss": 0.4751, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.5580097651708904, |
|
"grad_norm": 2.2610539554238613, |
|
"learning_rate": 1.4216120444110566e-06, |
|
"loss": 0.4146, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.5594047895838177, |
|
"grad_norm": 2.127211740430317, |
|
"learning_rate": 1.4131048981422529e-06, |
|
"loss": 0.413, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.560799813996745, |
|
"grad_norm": 2.0574224448728016, |
|
"learning_rate": 1.404619091483546e-06, |
|
"loss": 0.4128, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.5621948384096722, |
|
"grad_norm": 2.0228379456275216, |
|
"learning_rate": 1.3961546749197719e-06, |
|
"loss": 0.4266, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.5635898628225995, |
|
"grad_norm": 2.079227910587265, |
|
"learning_rate": 1.387711698808505e-06, |
|
"loss": 0.428, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.5649848872355268, |
|
"grad_norm": 1.8923261271608611, |
|
"learning_rate": 1.3792902133797692e-06, |
|
"loss": 0.4121, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.5663799116484538, |
|
"grad_norm": 1.9647313130229616, |
|
"learning_rate": 1.37089026873573e-06, |
|
"loss": 0.4266, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.567774936061381, |
|
"grad_norm": 2.1049543470618515, |
|
"learning_rate": 1.3625119148504013e-06, |
|
"loss": 0.4255, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.5691699604743083, |
|
"grad_norm": 2.018946253171041, |
|
"learning_rate": 1.354155201569346e-06, |
|
"loss": 0.4374, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.5705649848872354, |
|
"grad_norm": 2.0421990909718595, |
|
"learning_rate": 1.3458201786093795e-06, |
|
"loss": 0.4513, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.5719600093001627, |
|
"grad_norm": 2.0403374058085433, |
|
"learning_rate": 1.3375068955582754e-06, |
|
"loss": 0.4068, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.57335503371309, |
|
"grad_norm": 1.8850939356587402, |
|
"learning_rate": 1.329215401874468e-06, |
|
"loss": 0.4581, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.5747500581260172, |
|
"grad_norm": 2.0113778332021313, |
|
"learning_rate": 1.3209457468867614e-06, |
|
"loss": 0.4111, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.5761450825389445, |
|
"grad_norm": 1.961215646510735, |
|
"learning_rate": 1.3126979797940336e-06, |
|
"loss": 0.4016, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.5775401069518717, |
|
"grad_norm": 1.946752479107875, |
|
"learning_rate": 1.3044721496649427e-06, |
|
"loss": 0.4439, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.578935131364799, |
|
"grad_norm": 1.959475188059055, |
|
"learning_rate": 1.2962683054376375e-06, |
|
"loss": 0.4077, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.580330155777726, |
|
"grad_norm": 2.030187772323639, |
|
"learning_rate": 1.2880864959194666e-06, |
|
"loss": 0.3907, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.5817251801906533, |
|
"grad_norm": 1.9957922442663245, |
|
"learning_rate": 1.2799267697866868e-06, |
|
"loss": 0.463, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.5831202046035806, |
|
"grad_norm": 2.0602462684084624, |
|
"learning_rate": 1.2717891755841722e-06, |
|
"loss": 0.4058, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.5845152290165077, |
|
"grad_norm": 2.1830055682068195, |
|
"learning_rate": 1.2636737617251283e-06, |
|
"loss": 0.4462, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.585910253429435, |
|
"grad_norm": 2.0532749033801814, |
|
"learning_rate": 1.255580576490802e-06, |
|
"loss": 0.4569, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.5873052778423622, |
|
"grad_norm": 2.1155124216443473, |
|
"learning_rate": 1.2475096680301934e-06, |
|
"loss": 0.4173, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.5887003022552895, |
|
"grad_norm": 2.1690763886161015, |
|
"learning_rate": 1.2394610843597737e-06, |
|
"loss": 0.4598, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.5900953266682167, |
|
"grad_norm": 2.073486351346297, |
|
"learning_rate": 1.2314348733631958e-06, |
|
"loss": 0.4099, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.591490351081144, |
|
"grad_norm": 2.009538340855588, |
|
"learning_rate": 1.2234310827910063e-06, |
|
"loss": 0.4634, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.5928853754940713, |
|
"grad_norm": 2.086735738787941, |
|
"learning_rate": 1.2154497602603704e-06, |
|
"loss": 0.4501, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.5942803999069983, |
|
"grad_norm": 2.124804280878044, |
|
"learning_rate": 1.2074909532547824e-06, |
|
"loss": 0.4075, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.5956754243199256, |
|
"grad_norm": 2.1372643069283517, |
|
"learning_rate": 1.1995547091237814e-06, |
|
"loss": 0.4584, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.5970704487328529, |
|
"grad_norm": 2.115705750764563, |
|
"learning_rate": 1.191641075082679e-06, |
|
"loss": 0.3771, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.59846547314578, |
|
"grad_norm": 2.1054204705622426, |
|
"learning_rate": 1.1837500982122646e-06, |
|
"loss": 0.4701, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.5998604975587072, |
|
"grad_norm": 2.1677272810674224, |
|
"learning_rate": 1.175881825458537e-06, |
|
"loss": 0.4798, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.6012555219716345, |
|
"grad_norm": 2.103897443365682, |
|
"learning_rate": 1.16803630363242e-06, |
|
"loss": 0.4168, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.6026505463845617, |
|
"grad_norm": 2.21888945610227, |
|
"learning_rate": 1.1602135794094843e-06, |
|
"loss": 0.4548, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.604045570797489, |
|
"grad_norm": 2.157370730462611, |
|
"learning_rate": 1.152413699329671e-06, |
|
"loss": 0.458, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.6054405952104163, |
|
"grad_norm": 2.1179984402535528, |
|
"learning_rate": 1.1446367097970134e-06, |
|
"loss": 0.4581, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.6068356196233435, |
|
"grad_norm": 2.286665408259628, |
|
"learning_rate": 1.1368826570793602e-06, |
|
"loss": 0.4452, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.6082306440362706, |
|
"grad_norm": 2.211199331004034, |
|
"learning_rate": 1.129151587308102e-06, |
|
"loss": 0.4097, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.6096256684491979, |
|
"grad_norm": 1.8853710993731516, |
|
"learning_rate": 1.1214435464779006e-06, |
|
"loss": 0.4815, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.6110206928621251, |
|
"grad_norm": 2.162910909135421, |
|
"learning_rate": 1.1137585804464062e-06, |
|
"loss": 0.4402, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.6124157172750522, |
|
"grad_norm": 2.119846894032719, |
|
"learning_rate": 1.1060967349339897e-06, |
|
"loss": 0.4085, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.6138107416879794, |
|
"grad_norm": 1.870879315183828, |
|
"learning_rate": 1.098458055523472e-06, |
|
"loss": 0.4787, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.6152057661009067, |
|
"grad_norm": 2.12494088689906, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 0.4754, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.616600790513834, |
|
"grad_norm": 2.063935560635135, |
|
"learning_rate": 1.0832503766500324e-06, |
|
"loss": 0.4139, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.6179958149267613, |
|
"grad_norm": 2.0787433691392745, |
|
"learning_rate": 1.0756814676625576e-06, |
|
"loss": 0.4386, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.6193908393396885, |
|
"grad_norm": 1.9939683928369583, |
|
"learning_rate": 1.0681359057273388e-06, |
|
"loss": 0.4471, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.6207858637526158, |
|
"grad_norm": 2.133799960540424, |
|
"learning_rate": 1.060613735735384e-06, |
|
"loss": 0.4595, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.6221808881655428, |
|
"grad_norm": 2.024216295186773, |
|
"learning_rate": 1.0531150024385423e-06, |
|
"loss": 0.4525, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.6235759125784701, |
|
"grad_norm": 2.0095052935864315, |
|
"learning_rate": 1.045639750449225e-06, |
|
"loss": 0.474, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.6249709369913974, |
|
"grad_norm": 2.1117515259793893, |
|
"learning_rate": 1.0381880242401483e-06, |
|
"loss": 0.3937, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.6263659614043244, |
|
"grad_norm": 1.9720718120560756, |
|
"learning_rate": 1.0307598681440617e-06, |
|
"loss": 0.4304, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.6277609858172517, |
|
"grad_norm": 2.0114538495387606, |
|
"learning_rate": 1.0233553263534924e-06, |
|
"loss": 0.4348, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.629156010230179, |
|
"grad_norm": 2.0352256936639663, |
|
"learning_rate": 1.0159744429204776e-06, |
|
"loss": 0.4358, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.6305510346431062, |
|
"grad_norm": 2.203732854508469, |
|
"learning_rate": 1.0086172617563022e-06, |
|
"loss": 0.4501, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.6319460590560335, |
|
"grad_norm": 2.0446799273020564, |
|
"learning_rate": 1.0012838266312397e-06, |
|
"loss": 0.4431, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.6333410834689608, |
|
"grad_norm": 2.2327287396202173, |
|
"learning_rate": 9.939741811742893e-07, |
|
"loss": 0.4403, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.634736107881888, |
|
"grad_norm": 2.2165342815886233, |
|
"learning_rate": 9.866883688729189e-07, |
|
"loss": 0.4378, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.636131132294815, |
|
"grad_norm": 2.0549633529337936, |
|
"learning_rate": 9.794264330728043e-07, |
|
"loss": 0.4093, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.6375261567077424, |
|
"grad_norm": 2.0537102551655257, |
|
"learning_rate": 9.721884169775735e-07, |
|
"loss": 0.4272, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.6389211811206696, |
|
"grad_norm": 2.200656487770776, |
|
"learning_rate": 9.64974363648548e-07, |
|
"loss": 0.4869, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.6403162055335967, |
|
"grad_norm": 2.016301063721608, |
|
"learning_rate": 9.577843160044842e-07, |
|
"loss": 0.4439, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.641711229946524, |
|
"grad_norm": 2.1709862159822215, |
|
"learning_rate": 9.506183168213245e-07, |
|
"loss": 0.4464, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.6431062543594512, |
|
"grad_norm": 2.0994595042624296, |
|
"learning_rate": 9.434764087319376e-07, |
|
"loss": 0.4349, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.6445012787723785, |
|
"grad_norm": 2.157447833178887, |
|
"learning_rate": 9.363586342258662e-07, |
|
"loss": 0.4461, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.6458963031853058, |
|
"grad_norm": 2.267561948229981, |
|
"learning_rate": 9.292650356490757e-07, |
|
"loss": 0.4379, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.647291327598233, |
|
"grad_norm": 2.0427902212170435, |
|
"learning_rate": 9.221956552036992e-07, |
|
"loss": 0.4314, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.6486863520111603, |
|
"grad_norm": 2.1552102034976603, |
|
"learning_rate": 9.151505349477901e-07, |
|
"loss": 0.4602, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.6500813764240876, |
|
"grad_norm": 2.185995467962314, |
|
"learning_rate": 9.081297167950682e-07, |
|
"loss": 0.4774, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.6514764008370146, |
|
"grad_norm": 2.1492175403491203, |
|
"learning_rate": 9.011332425146734e-07, |
|
"loss": 0.4464, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.652871425249942, |
|
"grad_norm": 2.009528313205518, |
|
"learning_rate": 8.941611537309158e-07, |
|
"loss": 0.4675, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.654266449662869, |
|
"grad_norm": 2.215590896357922, |
|
"learning_rate": 8.87213491923029e-07, |
|
"loss": 0.4243, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.6556614740757962, |
|
"grad_norm": 2.165672694359002, |
|
"learning_rate": 8.80290298424919e-07, |
|
"loss": 0.4333, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.6570564984887235, |
|
"grad_norm": 2.044698721900824, |
|
"learning_rate": 8.733916144249255e-07, |
|
"loss": 0.4859, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.6584515229016508, |
|
"grad_norm": 2.390370448435757, |
|
"learning_rate": 8.665174809655707e-07, |
|
"loss": 0.4776, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.659846547314578, |
|
"grad_norm": 2.0991424672781753, |
|
"learning_rate": 8.596679389433232e-07, |
|
"loss": 0.3906, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.6612415717275053, |
|
"grad_norm": 1.9493079845533017, |
|
"learning_rate": 8.528430291083412e-07, |
|
"loss": 0.4128, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.6626365961404326, |
|
"grad_norm": 1.9928338170064632, |
|
"learning_rate": 8.460427920642422e-07, |
|
"loss": 0.4332, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 1.6640316205533598, |
|
"grad_norm": 2.206404643941403, |
|
"learning_rate": 8.392672682678577e-07, |
|
"loss": 0.4433, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.665426644966287, |
|
"grad_norm": 2.056639857598042, |
|
"learning_rate": 8.325164980289896e-07, |
|
"loss": 0.4962, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 1.6668216693792142, |
|
"grad_norm": 2.2133368692574584, |
|
"learning_rate": 8.257905215101758e-07, |
|
"loss": 0.4062, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.6682166937921412, |
|
"grad_norm": 1.9230071533267274, |
|
"learning_rate": 8.19089378726447e-07, |
|
"loss": 0.4495, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.6696117182050685, |
|
"grad_norm": 2.1334286794624453, |
|
"learning_rate": 8.12413109545086e-07, |
|
"loss": 0.4735, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.6710067426179958, |
|
"grad_norm": 2.1036187662888644, |
|
"learning_rate": 8.057617536854012e-07, |
|
"loss": 0.4017, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 1.672401767030923, |
|
"grad_norm": 2.006107617620194, |
|
"learning_rate": 7.991353507184801e-07, |
|
"loss": 0.4349, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.6737967914438503, |
|
"grad_norm": 2.051201227177932, |
|
"learning_rate": 7.92533940066958e-07, |
|
"loss": 0.469, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 1.6751918158567776, |
|
"grad_norm": 2.1517754801345506, |
|
"learning_rate": 7.859575610047798e-07, |
|
"loss": 0.4495, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.6765868402697048, |
|
"grad_norm": 2.1279072064457942, |
|
"learning_rate": 7.794062526569735e-07, |
|
"loss": 0.4779, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 1.677981864682632, |
|
"grad_norm": 2.4080169749454297, |
|
"learning_rate": 7.728800539994113e-07, |
|
"loss": 0.4934, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.6793768890955592, |
|
"grad_norm": 2.250925372186841, |
|
"learning_rate": 7.663790038585794e-07, |
|
"loss": 0.4334, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 1.6807719135084864, |
|
"grad_norm": 2.1745930093080013, |
|
"learning_rate": 7.599031409113483e-07, |
|
"loss": 0.4884, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.6821669379214135, |
|
"grad_norm": 2.0042102092337033, |
|
"learning_rate": 7.534525036847406e-07, |
|
"loss": 0.4345, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.6835619623343407, |
|
"grad_norm": 2.157617727203113, |
|
"learning_rate": 7.470271305557036e-07, |
|
"loss": 0.4684, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.684956986747268, |
|
"grad_norm": 2.228687769508677, |
|
"learning_rate": 7.406270597508796e-07, |
|
"loss": 0.4522, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 1.6863520111601953, |
|
"grad_norm": 1.9744508349204999, |
|
"learning_rate": 7.342523293463799e-07, |
|
"loss": 0.4471, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.6877470355731226, |
|
"grad_norm": 2.0654991204899287, |
|
"learning_rate": 7.279029772675572e-07, |
|
"loss": 0.4699, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 1.6891420599860498, |
|
"grad_norm": 2.3077221294043673, |
|
"learning_rate": 7.215790412887802e-07, |
|
"loss": 0.4537, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.690537084398977, |
|
"grad_norm": 2.0916281069067812, |
|
"learning_rate": 7.152805590332079e-07, |
|
"loss": 0.4089, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 1.6919321088119044, |
|
"grad_norm": 1.9806178247028858, |
|
"learning_rate": 7.090075679725683e-07, |
|
"loss": 0.4576, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.6933271332248314, |
|
"grad_norm": 2.2405052747072753, |
|
"learning_rate": 7.027601054269329e-07, |
|
"loss": 0.4222, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 1.6947221576377587, |
|
"grad_norm": 2.175268576390091, |
|
"learning_rate": 6.965382085644968e-07, |
|
"loss": 0.4908, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.6961171820506857, |
|
"grad_norm": 2.2521989456802243, |
|
"learning_rate": 6.903419144013556e-07, |
|
"loss": 0.4417, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.697512206463613, |
|
"grad_norm": 2.0189939596511572, |
|
"learning_rate": 6.841712598012867e-07, |
|
"loss": 0.408, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.6989072308765403, |
|
"grad_norm": 2.046600993493337, |
|
"learning_rate": 6.780262814755284e-07, |
|
"loss": 0.4148, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 1.7003022552894675, |
|
"grad_norm": 2.125874995200457, |
|
"learning_rate": 6.719070159825642e-07, |
|
"loss": 0.4612, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.7016972797023948, |
|
"grad_norm": 2.231403630718439, |
|
"learning_rate": 6.65813499727902e-07, |
|
"loss": 0.5169, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 1.703092304115322, |
|
"grad_norm": 2.3095426463200734, |
|
"learning_rate": 6.597457689638598e-07, |
|
"loss": 0.4311, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.7044873285282494, |
|
"grad_norm": 1.8931375132713402, |
|
"learning_rate": 6.53703859789348e-07, |
|
"loss": 0.4401, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 1.7058823529411766, |
|
"grad_norm": 2.1226595938221156, |
|
"learning_rate": 6.476878081496579e-07, |
|
"loss": 0.4084, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.7072773773541037, |
|
"grad_norm": 1.963273731879721, |
|
"learning_rate": 6.416976498362432e-07, |
|
"loss": 0.4444, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 1.708672401767031, |
|
"grad_norm": 1.9577598278955202, |
|
"learning_rate": 6.357334204865156e-07, |
|
"loss": 0.4296, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.710067426179958, |
|
"grad_norm": 2.094472599509459, |
|
"learning_rate": 6.29795155583619e-07, |
|
"loss": 0.4681, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.7114624505928853, |
|
"grad_norm": 2.1531471300188723, |
|
"learning_rate": 6.238828904562316e-07, |
|
"loss": 0.4148, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.7128574750058125, |
|
"grad_norm": 2.155551596673569, |
|
"learning_rate": 6.179966602783493e-07, |
|
"loss": 0.4311, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 1.7142524994187398, |
|
"grad_norm": 2.0177115106660124, |
|
"learning_rate": 6.121365000690766e-07, |
|
"loss": 0.4598, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.715647523831667, |
|
"grad_norm": 2.1963675547860166, |
|
"learning_rate": 6.063024446924198e-07, |
|
"loss": 0.4421, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 1.7170425482445943, |
|
"grad_norm": 2.0991295738118843, |
|
"learning_rate": 6.004945288570813e-07, |
|
"loss": 0.4881, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.7184375726575216, |
|
"grad_norm": 2.2657270322156275, |
|
"learning_rate": 5.947127871162455e-07, |
|
"loss": 0.4533, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 1.7198325970704489, |
|
"grad_norm": 2.0317554787541385, |
|
"learning_rate": 5.889572538673826e-07, |
|
"loss": 0.4133, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.721227621483376, |
|
"grad_norm": 2.181744908024485, |
|
"learning_rate": 5.832279633520411e-07, |
|
"loss": 0.4401, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 1.7226226458963032, |
|
"grad_norm": 2.0739837998198767, |
|
"learning_rate": 5.775249496556406e-07, |
|
"loss": 0.4857, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.7240176703092303, |
|
"grad_norm": 2.1598290833897575, |
|
"learning_rate": 5.718482467072695e-07, |
|
"loss": 0.4431, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.7254126947221575, |
|
"grad_norm": 2.1157200647986643, |
|
"learning_rate": 5.661978882794883e-07, |
|
"loss": 0.4292, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.7268077191350848, |
|
"grad_norm": 2.1415635571508482, |
|
"learning_rate": 5.60573907988124e-07, |
|
"loss": 0.436, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 1.728202743548012, |
|
"grad_norm": 2.1240606052923767, |
|
"learning_rate": 5.54976339292072e-07, |
|
"loss": 0.4379, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.7295977679609393, |
|
"grad_norm": 2.0868385511709686, |
|
"learning_rate": 5.494052154930956e-07, |
|
"loss": 0.5001, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 1.7309927923738666, |
|
"grad_norm": 2.109154503050066, |
|
"learning_rate": 5.438605697356309e-07, |
|
"loss": 0.4067, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.7323878167867939, |
|
"grad_norm": 1.9920843328096538, |
|
"learning_rate": 5.383424350065825e-07, |
|
"loss": 0.4641, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 1.7337828411997211, |
|
"grad_norm": 2.0920755339457116, |
|
"learning_rate": 5.328508441351382e-07, |
|
"loss": 0.4055, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.7351778656126482, |
|
"grad_norm": 2.139045511713234, |
|
"learning_rate": 5.273858297925649e-07, |
|
"loss": 0.4874, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 1.7365728900255755, |
|
"grad_norm": 2.123440484636655, |
|
"learning_rate": 5.219474244920164e-07, |
|
"loss": 0.4354, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.7379679144385025, |
|
"grad_norm": 2.1771103649698045, |
|
"learning_rate": 5.165356605883432e-07, |
|
"loss": 0.4151, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.7393629388514298, |
|
"grad_norm": 2.1281745438674404, |
|
"learning_rate": 5.111505702778935e-07, |
|
"loss": 0.4411, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.740757963264357, |
|
"grad_norm": 1.9713491600361932, |
|
"learning_rate": 5.057921855983288e-07, |
|
"loss": 0.3942, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.7421529876772843, |
|
"grad_norm": 2.044049483117014, |
|
"learning_rate": 5.004605384284295e-07, |
|
"loss": 0.4238, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.7435480120902116, |
|
"grad_norm": 2.0491078987608837, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.4466, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.7449430365031389, |
|
"grad_norm": 2.0685568916077965, |
|
"learning_rate": 4.898775833372055e-07, |
|
"loss": 0.43, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.7463380609160661, |
|
"grad_norm": 2.2550610819046044, |
|
"learning_rate": 4.846263383773364e-07, |
|
"loss": 0.4687, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.7477330853289934, |
|
"grad_norm": 2.1006531168627323, |
|
"learning_rate": 4.794019568496683e-07, |
|
"loss": 0.4575, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.7491281097419205, |
|
"grad_norm": 2.1037421201005064, |
|
"learning_rate": 4.7420446983575286e-07, |
|
"loss": 0.4148, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.7505231341548477, |
|
"grad_norm": 2.191926859388401, |
|
"learning_rate": 4.690339082571366e-07, |
|
"loss": 0.4326, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.7519181585677748, |
|
"grad_norm": 1.967041448878273, |
|
"learning_rate": 4.638903028751796e-07, |
|
"loss": 0.4645, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.753313182980702, |
|
"grad_norm": 2.1553532027152826, |
|
"learning_rate": 4.5877368429086857e-07, |
|
"loss": 0.3935, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.7547082073936293, |
|
"grad_norm": 2.0123299142859303, |
|
"learning_rate": 4.5368408294463873e-07, |
|
"loss": 0.4569, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.7561032318065566, |
|
"grad_norm": 2.1934970199026176, |
|
"learning_rate": 4.486215291161894e-07, |
|
"loss": 0.4907, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.7574982562194839, |
|
"grad_norm": 2.151146138020403, |
|
"learning_rate": 4.4358605292430743e-07, |
|
"loss": 0.4436, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.7588932806324111, |
|
"grad_norm": 2.2698087475883293, |
|
"learning_rate": 4.385776843266842e-07, |
|
"loss": 0.4743, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.7602883050453384, |
|
"grad_norm": 2.268096663009867, |
|
"learning_rate": 4.335964531197401e-07, |
|
"loss": 0.4965, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.7616833294582657, |
|
"grad_norm": 2.133924279141534, |
|
"learning_rate": 4.286423889384456e-07, |
|
"loss": 0.4296, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.7630783538711927, |
|
"grad_norm": 2.1019100692382913, |
|
"learning_rate": 4.2371552125614614e-07, |
|
"loss": 0.4642, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.76447337828412, |
|
"grad_norm": 2.128549712904592, |
|
"learning_rate": 4.1881587938438606e-07, |
|
"loss": 0.4555, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.765868402697047, |
|
"grad_norm": 2.0813994372296705, |
|
"learning_rate": 4.139434924727359e-07, |
|
"loss": 0.4449, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.7672634271099743, |
|
"grad_norm": 2.133944501402786, |
|
"learning_rate": 4.090983895086137e-07, |
|
"loss": 0.4128, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.7686584515229016, |
|
"grad_norm": 2.103631001632505, |
|
"learning_rate": 4.0428059931712116e-07, |
|
"loss": 0.4334, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.7700534759358288, |
|
"grad_norm": 2.066477638022606, |
|
"learning_rate": 3.994901505608628e-07, |
|
"loss": 0.436, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.7714485003487561, |
|
"grad_norm": 2.194181829948266, |
|
"learning_rate": 3.94727071739785e-07, |
|
"loss": 0.4376, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.7728435247616834, |
|
"grad_norm": 2.0725658101591806, |
|
"learning_rate": 3.899913911909986e-07, |
|
"loss": 0.4075, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.7742385491746107, |
|
"grad_norm": 1.9580748592599533, |
|
"learning_rate": 3.8528313708861173e-07, |
|
"loss": 0.4488, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.775633573587538, |
|
"grad_norm": 2.261238952398715, |
|
"learning_rate": 3.8060233744356634e-07, |
|
"loss": 0.461, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.777028598000465, |
|
"grad_norm": 2.2710082458435155, |
|
"learning_rate": 3.759490201034677e-07, |
|
"loss": 0.4508, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.7784236224133922, |
|
"grad_norm": 1.9789141505409449, |
|
"learning_rate": 3.7132321275241966e-07, |
|
"loss": 0.453, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.7798186468263193, |
|
"grad_norm": 2.2126452553672475, |
|
"learning_rate": 3.6672494291086156e-07, |
|
"loss": 0.4554, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.7812136712392466, |
|
"grad_norm": 1.9936682658714493, |
|
"learning_rate": 3.6215423793540036e-07, |
|
"loss": 0.4386, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.7826086956521738, |
|
"grad_norm": 2.184960862216077, |
|
"learning_rate": 3.576111250186526e-07, |
|
"loss": 0.4894, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.784003720065101, |
|
"grad_norm": 2.2698572395940553, |
|
"learning_rate": 3.530956311890815e-07, |
|
"loss": 0.4553, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.7853987444780284, |
|
"grad_norm": 1.9219568300840275, |
|
"learning_rate": 3.486077833108342e-07, |
|
"loss": 0.414, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.7867937688909556, |
|
"grad_norm": 2.1229280934149712, |
|
"learning_rate": 3.441476080835826e-07, |
|
"loss": 0.4794, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.788188793303883, |
|
"grad_norm": 2.078715218304194, |
|
"learning_rate": 3.397151320423647e-07, |
|
"loss": 0.4163, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.7895838177168102, |
|
"grad_norm": 2.2246453997801328, |
|
"learning_rate": 3.353103815574282e-07, |
|
"loss": 0.4323, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.7909788421297372, |
|
"grad_norm": 2.2738150018866463, |
|
"learning_rate": 3.309333828340722e-07, |
|
"loss": 0.4629, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.7923738665426645, |
|
"grad_norm": 2.2022495508806004, |
|
"learning_rate": 3.2658416191249045e-07, |
|
"loss": 0.4476, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.7937688909555918, |
|
"grad_norm": 2.079838874938454, |
|
"learning_rate": 3.2226274466761906e-07, |
|
"loss": 0.4355, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.7951639153685188, |
|
"grad_norm": 2.1980853821963273, |
|
"learning_rate": 3.179691568089799e-07, |
|
"loss": 0.4844, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.796558939781446, |
|
"grad_norm": 2.104507635473491, |
|
"learning_rate": 3.137034238805292e-07, |
|
"loss": 0.4641, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.7979539641943734, |
|
"grad_norm": 2.1609985762509174, |
|
"learning_rate": 3.0946557126050435e-07, |
|
"loss": 0.4353, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.7993489886073006, |
|
"grad_norm": 2.1808994040352903, |
|
"learning_rate": 3.0525562416127473e-07, |
|
"loss": 0.4474, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.800744013020228, |
|
"grad_norm": 2.089132424900587, |
|
"learning_rate": 3.010736076291909e-07, |
|
"loss": 0.3931, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.8021390374331552, |
|
"grad_norm": 1.9289962008895816, |
|
"learning_rate": 2.9691954654443353e-07, |
|
"loss": 0.4305, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.8035340618460824, |
|
"grad_norm": 2.0842770826082386, |
|
"learning_rate": 2.927934656208681e-07, |
|
"loss": 0.4049, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.8049290862590095, |
|
"grad_norm": 2.0122506975099927, |
|
"learning_rate": 2.88695389405898e-07, |
|
"loss": 0.4269, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.8063241106719368, |
|
"grad_norm": 2.063376075115741, |
|
"learning_rate": 2.8462534228031636e-07, |
|
"loss": 0.4754, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.807719135084864, |
|
"grad_norm": 2.167029255388058, |
|
"learning_rate": 2.8058334845816214e-07, |
|
"loss": 0.5274, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.809114159497791, |
|
"grad_norm": 2.24210170332635, |
|
"learning_rate": 2.765694319865775e-07, |
|
"loss": 0.4648, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.8105091839107184, |
|
"grad_norm": 2.285072471770254, |
|
"learning_rate": 2.725836167456608e-07, |
|
"loss": 0.4601, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.8119042083236456, |
|
"grad_norm": 1.8382699307655532, |
|
"learning_rate": 2.6862592644832875e-07, |
|
"loss": 0.385, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.813299232736573, |
|
"grad_norm": 2.0290063843910424, |
|
"learning_rate": 2.646963846401729e-07, |
|
"loss": 0.451, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.8146942571495002, |
|
"grad_norm": 2.012644575855064, |
|
"learning_rate": 2.6079501469932154e-07, |
|
"loss": 0.4129, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.8160892815624274, |
|
"grad_norm": 2.0525851758055444, |
|
"learning_rate": 2.569218398362971e-07, |
|
"loss": 0.4314, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.8174843059753547, |
|
"grad_norm": 1.9495707816729695, |
|
"learning_rate": 2.530768830938818e-07, |
|
"loss": 0.4533, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.8188793303882818, |
|
"grad_norm": 2.215837334297456, |
|
"learning_rate": 2.49260167346978e-07, |
|
"loss": 0.3923, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.820274354801209, |
|
"grad_norm": 1.834187483715057, |
|
"learning_rate": 2.4547171530247437e-07, |
|
"loss": 0.4466, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.8216693792141363, |
|
"grad_norm": 1.9292756082760862, |
|
"learning_rate": 2.417115494991107e-07, |
|
"loss": 0.4087, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.8230644036270633, |
|
"grad_norm": 2.0829059283179583, |
|
"learning_rate": 2.3797969230733853e-07, |
|
"loss": 0.4821, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.8244594280399906, |
|
"grad_norm": 2.269315325383343, |
|
"learning_rate": 2.3427616592919587e-07, |
|
"loss": 0.5061, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.8258544524529179, |
|
"grad_norm": 2.231129621626873, |
|
"learning_rate": 2.3060099239817059e-07, |
|
"loss": 0.4408, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.8272494768658452, |
|
"grad_norm": 2.036459196177826, |
|
"learning_rate": 2.2695419357906978e-07, |
|
"loss": 0.443, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.8286445012787724, |
|
"grad_norm": 2.142821360259979, |
|
"learning_rate": 2.2333579116788962e-07, |
|
"loss": 0.4114, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.8300395256916997, |
|
"grad_norm": 2.3397446775684276, |
|
"learning_rate": 2.197458066916891e-07, |
|
"loss": 0.483, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.831434550104627, |
|
"grad_norm": 1.6794181268501756, |
|
"learning_rate": 2.161842615084564e-07, |
|
"loss": 0.442, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.832829574517554, |
|
"grad_norm": 1.9926444296965193, |
|
"learning_rate": 2.1265117680698665e-07, |
|
"loss": 0.4395, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.8342245989304813, |
|
"grad_norm": 2.205617293991677, |
|
"learning_rate": 2.0914657360675606e-07, |
|
"loss": 0.4567, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.8356196233434086, |
|
"grad_norm": 1.9745222058356298, |
|
"learning_rate": 2.0567047275779184e-07, |
|
"loss": 0.4169, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.8370146477563356, |
|
"grad_norm": 1.9096372104370183, |
|
"learning_rate": 2.0222289494055357e-07, |
|
"loss": 0.4349, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.8384096721692629, |
|
"grad_norm": 2.0974005709989676, |
|
"learning_rate": 1.988038606658066e-07, |
|
"loss": 0.4603, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 1.8398046965821901, |
|
"grad_norm": 2.1449356644662183, |
|
"learning_rate": 1.9541339027450256e-07, |
|
"loss": 0.4345, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.8411997209951174, |
|
"grad_norm": 2.0964057418342597, |
|
"learning_rate": 1.9205150393765692e-07, |
|
"loss": 0.4481, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 1.8425947454080447, |
|
"grad_norm": 1.984763144961962, |
|
"learning_rate": 1.887182216562289e-07, |
|
"loss": 0.4643, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.843989769820972, |
|
"grad_norm": 2.233536151283806, |
|
"learning_rate": 1.8541356326100436e-07, |
|
"loss": 0.4577, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 1.8453847942338992, |
|
"grad_norm": 2.179287384209138, |
|
"learning_rate": 1.821375484124721e-07, |
|
"loss": 0.4779, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.8467798186468263, |
|
"grad_norm": 2.167929996549515, |
|
"learning_rate": 1.7889019660071728e-07, |
|
"loss": 0.449, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 1.8481748430597535, |
|
"grad_norm": 2.0761119846456513, |
|
"learning_rate": 1.7567152714529468e-07, |
|
"loss": 0.4373, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.8495698674726808, |
|
"grad_norm": 2.2529101497738013, |
|
"learning_rate": 1.7248155919512067e-07, |
|
"loss": 0.4246, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.8509648918856079, |
|
"grad_norm": 2.1661008201597487, |
|
"learning_rate": 1.693203117283543e-07, |
|
"loss": 0.4932, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.8523599162985351, |
|
"grad_norm": 2.151700601180302, |
|
"learning_rate": 1.6618780355229069e-07, |
|
"loss": 0.4591, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 1.8537549407114624, |
|
"grad_norm": 2.0281053867258447, |
|
"learning_rate": 1.6308405330324294e-07, |
|
"loss": 0.4462, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.8551499651243897, |
|
"grad_norm": 2.1499188174186252, |
|
"learning_rate": 1.6000907944643428e-07, |
|
"loss": 0.4401, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 1.856544989537317, |
|
"grad_norm": 2.2303858185021124, |
|
"learning_rate": 1.5696290027588933e-07, |
|
"loss": 0.4671, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.8579400139502442, |
|
"grad_norm": 2.05207937944285, |
|
"learning_rate": 1.5394553391432142e-07, |
|
"loss": 0.4419, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 1.8593350383631715, |
|
"grad_norm": 2.1335914632220363, |
|
"learning_rate": 1.5095699831302935e-07, |
|
"loss": 0.4559, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.8607300627760985, |
|
"grad_norm": 2.1156083069649356, |
|
"learning_rate": 1.479973112517863e-07, |
|
"loss": 0.3962, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 1.8621250871890258, |
|
"grad_norm": 2.1890879940111434, |
|
"learning_rate": 1.450664903387372e-07, |
|
"loss": 0.4032, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 1.863520111601953, |
|
"grad_norm": 1.9867732268542835, |
|
"learning_rate": 1.4216455301029274e-07, |
|
"loss": 0.4072, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.8649151360148801, |
|
"grad_norm": 2.17706468053253, |
|
"learning_rate": 1.3929151653102546e-07, |
|
"loss": 0.4359, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 1.8663101604278074, |
|
"grad_norm": 2.1342756504255305, |
|
"learning_rate": 1.3644739799356765e-07, |
|
"loss": 0.4028, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 1.8677051848407347, |
|
"grad_norm": 2.0015663881713945, |
|
"learning_rate": 1.3363221431850816e-07, |
|
"loss": 0.4239, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 1.869100209253662, |
|
"grad_norm": 1.9342131413372554, |
|
"learning_rate": 1.308459822542951e-07, |
|
"loss": 0.4067, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 1.8704952336665892, |
|
"grad_norm": 2.2918737457866127, |
|
"learning_rate": 1.2808871837713177e-07, |
|
"loss": 0.4981, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.8718902580795165, |
|
"grad_norm": 2.0962472188544368, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 0.4133, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 1.8732852824924437, |
|
"grad_norm": 2.225773475829192, |
|
"learning_rate": 1.2266116062696954e-07, |
|
"loss": 0.4306, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 1.8746803069053708, |
|
"grad_norm": 1.9721376525666336, |
|
"learning_rate": 1.1999089904428385e-07, |
|
"loss": 0.4544, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 1.876075331318298, |
|
"grad_norm": 2.1332846342167455, |
|
"learning_rate": 1.1734967022908217e-07, |
|
"loss": 0.3989, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 1.8774703557312253, |
|
"grad_norm": 1.9410944812060686, |
|
"learning_rate": 1.1473748989489775e-07, |
|
"loss": 0.407, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.8788653801441524, |
|
"grad_norm": 1.9919021228347704, |
|
"learning_rate": 1.121543735824443e-07, |
|
"loss": 0.4283, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 1.8802604045570797, |
|
"grad_norm": 2.085986467469432, |
|
"learning_rate": 1.096003366595233e-07, |
|
"loss": 0.46, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 1.881655428970007, |
|
"grad_norm": 2.0781150667754935, |
|
"learning_rate": 1.0707539432093517e-07, |
|
"loss": 0.4333, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 1.8830504533829342, |
|
"grad_norm": 2.0437754943186914, |
|
"learning_rate": 1.0457956158838545e-07, |
|
"loss": 0.4215, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 1.8844454777958615, |
|
"grad_norm": 2.1132790289858705, |
|
"learning_rate": 1.0211285331039933e-07, |
|
"loss": 0.4667, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.8858405022087887, |
|
"grad_norm": 2.2230225558665864, |
|
"learning_rate": 9.967528416222838e-08, |
|
"loss": 0.4549, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 1.887235526621716, |
|
"grad_norm": 2.1783739759463385, |
|
"learning_rate": 9.72668686457684e-08, |
|
"loss": 0.4569, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 1.888630551034643, |
|
"grad_norm": 2.137850433771528, |
|
"learning_rate": 9.488762108946891e-08, |
|
"loss": 0.3936, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 1.8900255754475703, |
|
"grad_norm": 1.8473066765734945, |
|
"learning_rate": 9.25375556482505e-08, |
|
"loss": 0.4036, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 1.8914205998604976, |
|
"grad_norm": 1.9731724772291126, |
|
"learning_rate": 9.02166863034204e-08, |
|
"loss": 0.4374, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.8928156242734246, |
|
"grad_norm": 2.0133494359381197, |
|
"learning_rate": 8.792502686258752e-08, |
|
"loss": 0.4398, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 1.894210648686352, |
|
"grad_norm": 2.060188087569894, |
|
"learning_rate": 8.566259095958207e-08, |
|
"loss": 0.42, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 1.8956056730992792, |
|
"grad_norm": 2.05937443834135, |
|
"learning_rate": 8.342939205437384e-08, |
|
"loss": 0.4899, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 1.8970006975122065, |
|
"grad_norm": 2.137588600649725, |
|
"learning_rate": 8.122544343299343e-08, |
|
"loss": 0.4246, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 1.8983957219251337, |
|
"grad_norm": 2.0768764161061473, |
|
"learning_rate": 7.905075820744956e-08, |
|
"loss": 0.4156, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.899790746338061, |
|
"grad_norm": 2.1826612042473648, |
|
"learning_rate": 7.690534931565519e-08, |
|
"loss": 0.4056, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 1.9011857707509883, |
|
"grad_norm": 2.104115929738705, |
|
"learning_rate": 7.478922952134815e-08, |
|
"loss": 0.4129, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 1.9025807951639153, |
|
"grad_norm": 2.127950643895458, |
|
"learning_rate": 7.270241141401568e-08, |
|
"loss": 0.5071, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 1.9039758195768426, |
|
"grad_norm": 2.3752086764361984, |
|
"learning_rate": 7.064490740882057e-08, |
|
"loss": 0.4423, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 1.9053708439897699, |
|
"grad_norm": 2.009116532809666, |
|
"learning_rate": 6.861672974652622e-08, |
|
"loss": 0.4315, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.906765868402697, |
|
"grad_norm": 2.053589930668402, |
|
"learning_rate": 6.661789049342282e-08, |
|
"loss": 0.4372, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 1.9081608928156242, |
|
"grad_norm": 2.1185512360662195, |
|
"learning_rate": 6.464840154125795e-08, |
|
"loss": 0.4441, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 1.9095559172285514, |
|
"grad_norm": 2.289173094731034, |
|
"learning_rate": 6.270827460716499e-08, |
|
"loss": 0.4599, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 1.9109509416414787, |
|
"grad_norm": 1.9993535700562155, |
|
"learning_rate": 6.079752123359261e-08, |
|
"loss": 0.4065, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 1.912345966054406, |
|
"grad_norm": 2.0575734889429147, |
|
"learning_rate": 5.891615278823537e-08, |
|
"loss": 0.4273, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.9137409904673333, |
|
"grad_norm": 1.8757933128037505, |
|
"learning_rate": 5.7064180463969886e-08, |
|
"loss": 0.4753, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 1.9151360148802605, |
|
"grad_norm": 2.0999441835058485, |
|
"learning_rate": 5.5241615278783245e-08, |
|
"loss": 0.4113, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 1.9165310392931876, |
|
"grad_norm": 1.9826164775573454, |
|
"learning_rate": 5.34484680757108e-08, |
|
"loss": 0.4652, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 1.9179260637061148, |
|
"grad_norm": 2.2099157937252203, |
|
"learning_rate": 5.168474952277125e-08, |
|
"loss": 0.476, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 1.9193210881190421, |
|
"grad_norm": 2.2415586636650144, |
|
"learning_rate": 4.995047011290166e-08, |
|
"loss": 0.4438, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.9207161125319692, |
|
"grad_norm": 2.0173519912409477, |
|
"learning_rate": 4.8245640163896433e-08, |
|
"loss": 0.3857, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 1.9221111369448964, |
|
"grad_norm": 2.0653872634015644, |
|
"learning_rate": 4.657026981834623e-08, |
|
"loss": 0.4163, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 1.9235061613578237, |
|
"grad_norm": 2.0853147966134076, |
|
"learning_rate": 4.4924369043575776e-08, |
|
"loss": 0.438, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 1.924901185770751, |
|
"grad_norm": 2.0891024495727852, |
|
"learning_rate": 4.330794763158619e-08, |
|
"loss": 0.4566, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 1.9262962101836782, |
|
"grad_norm": 2.014329211388496, |
|
"learning_rate": 4.172101519899607e-08, |
|
"loss": 0.4004, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.9276912345966055, |
|
"grad_norm": 2.099355843113096, |
|
"learning_rate": 4.0163581186984935e-08, |
|
"loss": 0.4773, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 1.9290862590095328, |
|
"grad_norm": 2.19449600992824, |
|
"learning_rate": 3.8635654861235994e-08, |
|
"loss": 0.4552, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 1.93048128342246, |
|
"grad_norm": 2.232744089829509, |
|
"learning_rate": 3.713724531188234e-08, |
|
"loss": 0.4947, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 1.931876307835387, |
|
"grad_norm": 2.1497800555978865, |
|
"learning_rate": 3.566836145345032e-08, |
|
"loss": 0.4119, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 1.9332713322483144, |
|
"grad_norm": 2.0540515877712973, |
|
"learning_rate": 3.4229012024811215e-08, |
|
"loss": 0.4251, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.9346663566612414, |
|
"grad_norm": 1.971100926846232, |
|
"learning_rate": 3.28192055891241e-08, |
|
"loss": 0.4448, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 1.9360613810741687, |
|
"grad_norm": 2.140868647356388, |
|
"learning_rate": 3.143895053378698e-08, |
|
"loss": 0.4333, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 1.937456405487096, |
|
"grad_norm": 2.2347840225168056, |
|
"learning_rate": 3.0088255070389037e-08, |
|
"loss": 0.4744, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 1.9388514299000232, |
|
"grad_norm": 2.1961087854054915, |
|
"learning_rate": 2.876712723465791e-08, |
|
"loss": 0.4018, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 1.9402464543129505, |
|
"grad_norm": 2.0748231221186337, |
|
"learning_rate": 2.7475574886414726e-08, |
|
"loss": 0.4332, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.9416414787258778, |
|
"grad_norm": 2.080513122073544, |
|
"learning_rate": 2.6213605709525803e-08, |
|
"loss": 0.4138, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 1.943036503138805, |
|
"grad_norm": 1.8940699773649592, |
|
"learning_rate": 2.498122721185825e-08, |
|
"loss": 0.3667, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 1.9444315275517323, |
|
"grad_norm": 2.065116925752544, |
|
"learning_rate": 2.3778446725233328e-08, |
|
"loss": 0.4528, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 1.9458265519646594, |
|
"grad_norm": 2.2286153954981915, |
|
"learning_rate": 2.2605271405385932e-08, |
|
"loss": 0.3904, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 1.9472215763775866, |
|
"grad_norm": 1.7968513593721984, |
|
"learning_rate": 2.1461708231919064e-08, |
|
"loss": 0.408, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.9486166007905137, |
|
"grad_norm": 2.110245061358655, |
|
"learning_rate": 2.0347764008262216e-08, |
|
"loss": 0.4298, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 1.950011625203441, |
|
"grad_norm": 2.14786841024935, |
|
"learning_rate": 1.926344536163416e-08, |
|
"loss": 0.5449, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 1.9514066496163682, |
|
"grad_norm": 2.130629015124878, |
|
"learning_rate": 1.820875874300021e-08, |
|
"loss": 0.3951, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 1.9528016740292955, |
|
"grad_norm": 2.155436350433079, |
|
"learning_rate": 1.7183710427035595e-08, |
|
"loss": 0.4529, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 1.9541966984422228, |
|
"grad_norm": 2.1972601350182854, |
|
"learning_rate": 1.6188306512087692e-08, |
|
"loss": 0.4549, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.95559172285515, |
|
"grad_norm": 2.2464198601839005, |
|
"learning_rate": 1.5222552920138855e-08, |
|
"loss": 0.4751, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 1.9569867472680773, |
|
"grad_norm": 2.1327761308340305, |
|
"learning_rate": 1.4286455396773092e-08, |
|
"loss": 0.4438, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 1.9583817716810046, |
|
"grad_norm": 2.057020013675667, |
|
"learning_rate": 1.338001951113943e-08, |
|
"loss": 0.444, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 1.9597767960939316, |
|
"grad_norm": 2.1678792562294373, |
|
"learning_rate": 1.250325065592195e-08, |
|
"loss": 0.4264, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 1.961171820506859, |
|
"grad_norm": 1.8983518942589257, |
|
"learning_rate": 1.1656154047303691e-08, |
|
"loss": 0.4593, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.962566844919786, |
|
"grad_norm": 2.1610659684462603, |
|
"learning_rate": 1.083873472494057e-08, |
|
"loss": 0.4464, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 1.9639618693327132, |
|
"grad_norm": 2.0869753487925906, |
|
"learning_rate": 1.005099755192529e-08, |
|
"loss": 0.45, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 1.9653568937456405, |
|
"grad_norm": 2.0578950075416764, |
|
"learning_rate": 9.292947214764036e-09, |
|
"loss": 0.4871, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 1.9667519181585678, |
|
"grad_norm": 2.3034725391793183, |
|
"learning_rate": 8.564588223346493e-09, |
|
"loss": 0.4249, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 1.968146942571495, |
|
"grad_norm": 1.9173773413193362, |
|
"learning_rate": 7.865924910916977e-09, |
|
"loss": 0.4233, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.9695419669844223, |
|
"grad_norm": 2.0570262788637517, |
|
"learning_rate": 7.196961434052796e-09, |
|
"loss": 0.4357, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 1.9709369913973496, |
|
"grad_norm": 2.0091815939076976, |
|
"learning_rate": 6.557701772635372e-09, |
|
"loss": 0.4814, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 1.9723320158102768, |
|
"grad_norm": 2.1197632136213302, |
|
"learning_rate": 5.9481497298291556e-09, |
|
"loss": 0.4307, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 1.9737270402232039, |
|
"grad_norm": 2.1470802379271383, |
|
"learning_rate": 5.368308932058863e-09, |
|
"loss": 0.3866, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 1.9751220646361312, |
|
"grad_norm": 1.838815471286048, |
|
"learning_rate": 4.818182828985607e-09, |
|
"loss": 0.4183, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.9765170890490582, |
|
"grad_norm": 2.070812126823317, |
|
"learning_rate": 4.2977746934907975e-09, |
|
"loss": 0.4314, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 1.9779121134619855, |
|
"grad_norm": 2.1089607715907364, |
|
"learning_rate": 3.807087621650607e-09, |
|
"loss": 0.4657, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 1.9793071378749127, |
|
"grad_norm": 2.0156870049179796, |
|
"learning_rate": 3.3461245327237603e-09, |
|
"loss": 0.4398, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 1.98070216228784, |
|
"grad_norm": 1.959812635023234, |
|
"learning_rate": 2.9148881691298812e-09, |
|
"loss": 0.4006, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 1.9820971867007673, |
|
"grad_norm": 2.1219950274732833, |
|
"learning_rate": 2.5133810964350635e-09, |
|
"loss": 0.4378, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.9834922111136946, |
|
"grad_norm": 2.293659588891337, |
|
"learning_rate": 2.1416057033352144e-09, |
|
"loss": 0.4599, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 1.9848872355266218, |
|
"grad_norm": 1.7483406974593192, |
|
"learning_rate": 1.7995642016449544e-09, |
|
"loss": 0.4657, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 1.986282259939549, |
|
"grad_norm": 2.2362141922397267, |
|
"learning_rate": 1.4872586262792975e-09, |
|
"loss": 0.4324, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 1.9876772843524761, |
|
"grad_norm": 2.115899803265782, |
|
"learning_rate": 1.2046908352475462e-09, |
|
"loss": 0.4817, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 1.9890723087654034, |
|
"grad_norm": 2.2142030661330776, |
|
"learning_rate": 9.518625096366364e-10, |
|
"loss": 0.4473, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.9904673331783305, |
|
"grad_norm": 2.1437905315255703, |
|
"learning_rate": 7.287751536050324e-10, |
|
"loss": 0.411, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 1.9918623575912577, |
|
"grad_norm": 2.037019408233888, |
|
"learning_rate": 5.354300943727353e-10, |
|
"loss": 0.4586, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 1.993257382004185, |
|
"grad_norm": 2.2737343174448714, |
|
"learning_rate": 3.718284822118445e-10, |
|
"loss": 0.4408, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.9946524064171123, |
|
"grad_norm": 1.8775148208475847, |
|
"learning_rate": 2.379712904426734e-10, |
|
"loss": 0.3978, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 1.9960474308300395, |
|
"grad_norm": 2.229374174784266, |
|
"learning_rate": 1.3385931542486686e-10, |
|
"loss": 0.4362, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.9974424552429668, |
|
"grad_norm": 2.0489067262541956, |
|
"learning_rate": 5.949317655462583e-11, |
|
"loss": 0.4613, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 1.998837479655894, |
|
"grad_norm": 2.161620948843524, |
|
"learning_rate": 1.4873316260266558e-11, |
|
"loss": 0.4646, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.998837479655894, |
|
"step": 1432, |
|
"total_flos": 1.7074790695593574e+17, |
|
"train_loss": 0.6007801830851832, |
|
"train_runtime": 11727.8019, |
|
"train_samples_per_second": 2.934, |
|
"train_steps_per_second": 0.122 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1432, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 400, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.7074790695593574e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|