{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 0, |
|
"global_step": 277, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0036101083032490976, |
|
"grad_norm": 1.1218351125717163, |
|
"learning_rate": 1e-05, |
|
"loss": 2.188, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.007220216606498195, |
|
"grad_norm": 1.0706913471221924, |
|
"learning_rate": 9.96389891696751e-06, |
|
"loss": 2.1819, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.010830324909747292, |
|
"grad_norm": 1.0887945890426636, |
|
"learning_rate": 9.92779783393502e-06, |
|
"loss": 2.1912, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01444043321299639, |
|
"grad_norm": 1.0803953409194946, |
|
"learning_rate": 9.891696750902527e-06, |
|
"loss": 2.2241, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.018050541516245487, |
|
"grad_norm": 1.0402294397354126, |
|
"learning_rate": 9.855595667870036e-06, |
|
"loss": 2.2483, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.021660649819494584, |
|
"grad_norm": 1.0075435638427734, |
|
"learning_rate": 9.819494584837546e-06, |
|
"loss": 2.2007, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02527075812274368, |
|
"grad_norm": 0.9559407830238342, |
|
"learning_rate": 9.783393501805055e-06, |
|
"loss": 2.1599, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02888086642599278, |
|
"grad_norm": 0.940712571144104, |
|
"learning_rate": 9.747292418772564e-06, |
|
"loss": 2.1482, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.032490974729241874, |
|
"grad_norm": 0.8784303069114685, |
|
"learning_rate": 9.711191335740074e-06, |
|
"loss": 2.1772, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.036101083032490974, |
|
"grad_norm": 0.8126964569091797, |
|
"learning_rate": 9.675090252707581e-06, |
|
"loss": 2.1021, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.039711191335740074, |
|
"grad_norm": 0.7782945036888123, |
|
"learning_rate": 9.63898916967509e-06, |
|
"loss": 2.0967, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04332129963898917, |
|
"grad_norm": 0.7165591716766357, |
|
"learning_rate": 9.6028880866426e-06, |
|
"loss": 2.0123, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04693140794223827, |
|
"grad_norm": 0.6946887373924255, |
|
"learning_rate": 9.56678700361011e-06, |
|
"loss": 2.0483, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05054151624548736, |
|
"grad_norm": 0.6478677988052368, |
|
"learning_rate": 9.530685920577619e-06, |
|
"loss": 2.0618, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.05415162454873646, |
|
"grad_norm": 0.6255316138267517, |
|
"learning_rate": 9.494584837545126e-06, |
|
"loss": 2.0202, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.05776173285198556, |
|
"grad_norm": 0.5520123839378357, |
|
"learning_rate": 9.458483754512636e-06, |
|
"loss": 1.9815, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.061371841155234655, |
|
"grad_norm": 0.554833710193634, |
|
"learning_rate": 9.422382671480145e-06, |
|
"loss": 2.0234, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.06498194945848375, |
|
"grad_norm": 0.5348688364028931, |
|
"learning_rate": 9.386281588447654e-06, |
|
"loss": 2.0197, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06859205776173286, |
|
"grad_norm": 0.4817121922969818, |
|
"learning_rate": 9.350180505415164e-06, |
|
"loss": 1.9425, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.07220216606498195, |
|
"grad_norm": 0.44491052627563477, |
|
"learning_rate": 9.314079422382673e-06, |
|
"loss": 1.9109, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07581227436823104, |
|
"grad_norm": 0.43117740750312805, |
|
"learning_rate": 9.27797833935018e-06, |
|
"loss": 1.8809, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.07942238267148015, |
|
"grad_norm": 0.4064043164253235, |
|
"learning_rate": 9.24187725631769e-06, |
|
"loss": 1.7955, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.08303249097472924, |
|
"grad_norm": 0.4002125859260559, |
|
"learning_rate": 9.2057761732852e-06, |
|
"loss": 1.8682, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.08664259927797834, |
|
"grad_norm": 0.4068574905395508, |
|
"learning_rate": 9.169675090252709e-06, |
|
"loss": 1.9182, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.09025270758122744, |
|
"grad_norm": 0.39439308643341064, |
|
"learning_rate": 9.133574007220218e-06, |
|
"loss": 1.8909, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.09386281588447654, |
|
"grad_norm": 0.4042982757091522, |
|
"learning_rate": 9.097472924187727e-06, |
|
"loss": 1.9381, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09747292418772563, |
|
"grad_norm": 0.3561984598636627, |
|
"learning_rate": 9.061371841155235e-06, |
|
"loss": 1.839, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.10108303249097472, |
|
"grad_norm": 0.39211294054985046, |
|
"learning_rate": 9.025270758122744e-06, |
|
"loss": 1.8968, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.10469314079422383, |
|
"grad_norm": 0.36844152212142944, |
|
"learning_rate": 8.989169675090254e-06, |
|
"loss": 1.8805, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.10830324909747292, |
|
"grad_norm": 0.3706235885620117, |
|
"learning_rate": 8.953068592057763e-06, |
|
"loss": 1.8828, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.11191335740072202, |
|
"grad_norm": 0.3436739444732666, |
|
"learning_rate": 8.916967509025272e-06, |
|
"loss": 1.8159, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.11552346570397112, |
|
"grad_norm": 0.33314648270606995, |
|
"learning_rate": 8.88086642599278e-06, |
|
"loss": 1.8116, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.11913357400722022, |
|
"grad_norm": 0.3236274719238281, |
|
"learning_rate": 8.84476534296029e-06, |
|
"loss": 1.7399, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.12274368231046931, |
|
"grad_norm": 0.30125582218170166, |
|
"learning_rate": 8.808664259927798e-06, |
|
"loss": 1.7315, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.1263537906137184, |
|
"grad_norm": 0.31622451543807983, |
|
"learning_rate": 8.772563176895308e-06, |
|
"loss": 1.7964, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1299638989169675, |
|
"grad_norm": 0.30419063568115234, |
|
"learning_rate": 8.736462093862817e-06, |
|
"loss": 1.7526, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.13357400722021662, |
|
"grad_norm": 0.30785295367240906, |
|
"learning_rate": 8.700361010830326e-06, |
|
"loss": 1.7974, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1371841155234657, |
|
"grad_norm": 0.27913904190063477, |
|
"learning_rate": 8.664259927797834e-06, |
|
"loss": 1.7141, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1407942238267148, |
|
"grad_norm": 0.2888409495353699, |
|
"learning_rate": 8.628158844765343e-06, |
|
"loss": 1.7374, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1444043321299639, |
|
"grad_norm": 0.29159021377563477, |
|
"learning_rate": 8.592057761732853e-06, |
|
"loss": 1.7762, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.148014440433213, |
|
"grad_norm": 0.28307339549064636, |
|
"learning_rate": 8.55595667870036e-06, |
|
"loss": 1.7281, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.15162454873646208, |
|
"grad_norm": 0.28662118315696716, |
|
"learning_rate": 8.519855595667871e-06, |
|
"loss": 1.7282, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1552346570397112, |
|
"grad_norm": 0.29065990447998047, |
|
"learning_rate": 8.483754512635379e-06, |
|
"loss": 1.7636, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1588447653429603, |
|
"grad_norm": 0.2732274532318115, |
|
"learning_rate": 8.447653429602888e-06, |
|
"loss": 1.6769, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1624548736462094, |
|
"grad_norm": 0.28228530287742615, |
|
"learning_rate": 8.411552346570398e-06, |
|
"loss": 1.7207, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.16606498194945848, |
|
"grad_norm": 0.2729104161262512, |
|
"learning_rate": 8.375451263537907e-06, |
|
"loss": 1.6888, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.16967509025270758, |
|
"grad_norm": 0.26737287640571594, |
|
"learning_rate": 8.339350180505416e-06, |
|
"loss": 1.6536, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.17328519855595667, |
|
"grad_norm": 0.2611989378929138, |
|
"learning_rate": 8.303249097472926e-06, |
|
"loss": 1.6595, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.17689530685920576, |
|
"grad_norm": 0.2761898338794708, |
|
"learning_rate": 8.267148014440433e-06, |
|
"loss": 1.6674, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.18050541516245489, |
|
"grad_norm": 0.26143068075180054, |
|
"learning_rate": 8.231046931407943e-06, |
|
"loss": 1.6382, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.18411552346570398, |
|
"grad_norm": 0.2734948992729187, |
|
"learning_rate": 8.194945848375452e-06, |
|
"loss": 1.7228, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.18772563176895307, |
|
"grad_norm": 0.2682507634162903, |
|
"learning_rate": 8.158844765342961e-06, |
|
"loss": 1.6465, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.19133574007220217, |
|
"grad_norm": 0.2903922498226166, |
|
"learning_rate": 8.12274368231047e-06, |
|
"loss": 1.6962, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.19494584837545126, |
|
"grad_norm": 0.2923874855041504, |
|
"learning_rate": 8.086642599277978e-06, |
|
"loss": 1.7269, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.19855595667870035, |
|
"grad_norm": 0.26083049178123474, |
|
"learning_rate": 8.050541516245488e-06, |
|
"loss": 1.6442, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.20216606498194944, |
|
"grad_norm": 0.24434912204742432, |
|
"learning_rate": 8.014440433212997e-06, |
|
"loss": 1.586, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.20577617328519857, |
|
"grad_norm": 0.24050913751125336, |
|
"learning_rate": 7.978339350180506e-06, |
|
"loss": 1.6119, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.20938628158844766, |
|
"grad_norm": 0.24940010905265808, |
|
"learning_rate": 7.942238267148014e-06, |
|
"loss": 1.5994, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.21299638989169675, |
|
"grad_norm": 0.24224700033664703, |
|
"learning_rate": 7.906137184115525e-06, |
|
"loss": 1.5855, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.21660649819494585, |
|
"grad_norm": 0.2521527409553528, |
|
"learning_rate": 7.870036101083033e-06, |
|
"loss": 1.5845, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.22021660649819494, |
|
"grad_norm": 0.24293367564678192, |
|
"learning_rate": 7.833935018050542e-06, |
|
"loss": 1.5839, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.22382671480144403, |
|
"grad_norm": 0.23730704188346863, |
|
"learning_rate": 7.797833935018051e-06, |
|
"loss": 1.5611, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.22743682310469315, |
|
"grad_norm": 0.23844872415065765, |
|
"learning_rate": 7.76173285198556e-06, |
|
"loss": 1.5947, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.23104693140794225, |
|
"grad_norm": 0.243188738822937, |
|
"learning_rate": 7.72563176895307e-06, |
|
"loss": 1.58, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.23465703971119134, |
|
"grad_norm": 0.22330617904663086, |
|
"learning_rate": 7.68953068592058e-06, |
|
"loss": 1.5329, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.23826714801444043, |
|
"grad_norm": 0.2230277955532074, |
|
"learning_rate": 7.653429602888087e-06, |
|
"loss": 1.5251, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.24187725631768953, |
|
"grad_norm": 0.23090355098247528, |
|
"learning_rate": 7.617328519855596e-06, |
|
"loss": 1.5686, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.24548736462093862, |
|
"grad_norm": 0.23035725951194763, |
|
"learning_rate": 7.5812274368231055e-06, |
|
"loss": 1.5596, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2490974729241877, |
|
"grad_norm": 0.2521900236606598, |
|
"learning_rate": 7.545126353790614e-06, |
|
"loss": 1.57, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.2527075812274368, |
|
"grad_norm": 0.23036813735961914, |
|
"learning_rate": 7.509025270758123e-06, |
|
"loss": 1.5087, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2563176895306859, |
|
"grad_norm": 0.24895897507667542, |
|
"learning_rate": 7.472924187725632e-06, |
|
"loss": 1.5433, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.259927797833935, |
|
"grad_norm": 0.2232387810945511, |
|
"learning_rate": 7.436823104693142e-06, |
|
"loss": 1.5283, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.26353790613718414, |
|
"grad_norm": 0.23834098875522614, |
|
"learning_rate": 7.40072202166065e-06, |
|
"loss": 1.5719, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.26714801444043323, |
|
"grad_norm": 0.24203678965568542, |
|
"learning_rate": 7.36462093862816e-06, |
|
"loss": 1.5768, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.27075812274368233, |
|
"grad_norm": 0.2327604442834854, |
|
"learning_rate": 7.328519855595668e-06, |
|
"loss": 1.5387, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2743682310469314, |
|
"grad_norm": 0.2127024084329605, |
|
"learning_rate": 7.2924187725631776e-06, |
|
"loss": 1.4921, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2779783393501805, |
|
"grad_norm": 0.2509189248085022, |
|
"learning_rate": 7.256317689530686e-06, |
|
"loss": 1.565, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.2815884476534296, |
|
"grad_norm": 0.23548553884029388, |
|
"learning_rate": 7.220216606498196e-06, |
|
"loss": 1.5078, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.2851985559566787, |
|
"grad_norm": 0.23434241116046906, |
|
"learning_rate": 7.184115523465705e-06, |
|
"loss": 1.5232, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.2888086642599278, |
|
"grad_norm": 0.22626014053821564, |
|
"learning_rate": 7.148014440433214e-06, |
|
"loss": 1.5143, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2924187725631769, |
|
"grad_norm": 0.232896089553833, |
|
"learning_rate": 7.1119133574007225e-06, |
|
"loss": 1.5495, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.296028880866426, |
|
"grad_norm": 0.22791652381420135, |
|
"learning_rate": 7.075812274368231e-06, |
|
"loss": 1.5021, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2996389891696751, |
|
"grad_norm": 0.2307957261800766, |
|
"learning_rate": 7.039711191335741e-06, |
|
"loss": 1.5721, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.30324909747292417, |
|
"grad_norm": 0.26458626985549927, |
|
"learning_rate": 7.00361010830325e-06, |
|
"loss": 1.5046, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.30685920577617326, |
|
"grad_norm": 0.20370201766490936, |
|
"learning_rate": 6.967509025270759e-06, |
|
"loss": 1.4624, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.3104693140794224, |
|
"grad_norm": 0.2249036282300949, |
|
"learning_rate": 6.9314079422382674e-06, |
|
"loss": 1.5127, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.3140794223826715, |
|
"grad_norm": 0.22176551818847656, |
|
"learning_rate": 6.895306859205777e-06, |
|
"loss": 1.4924, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.3176895306859206, |
|
"grad_norm": 0.19935470819473267, |
|
"learning_rate": 6.859205776173285e-06, |
|
"loss": 1.4207, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3212996389891697, |
|
"grad_norm": 0.21199044585227966, |
|
"learning_rate": 6.8231046931407954e-06, |
|
"loss": 1.4677, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.3249097472924188, |
|
"grad_norm": 0.2081277221441269, |
|
"learning_rate": 6.787003610108304e-06, |
|
"loss": 1.443, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.3285198555956679, |
|
"grad_norm": 0.20040768384933472, |
|
"learning_rate": 6.750902527075813e-06, |
|
"loss": 1.474, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.33212996389891697, |
|
"grad_norm": 0.2211485207080841, |
|
"learning_rate": 6.714801444043322e-06, |
|
"loss": 1.4466, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.33574007220216606, |
|
"grad_norm": 0.21461904048919678, |
|
"learning_rate": 6.678700361010831e-06, |
|
"loss": 1.4769, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.33935018050541516, |
|
"grad_norm": 0.20499303936958313, |
|
"learning_rate": 6.6425992779783395e-06, |
|
"loss": 1.4367, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.34296028880866425, |
|
"grad_norm": 0.2087281048297882, |
|
"learning_rate": 6.606498194945848e-06, |
|
"loss": 1.4919, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.34657039711191334, |
|
"grad_norm": 0.22192475199699402, |
|
"learning_rate": 6.570397111913358e-06, |
|
"loss": 1.4722, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.35018050541516244, |
|
"grad_norm": 0.21253931522369385, |
|
"learning_rate": 6.534296028880867e-06, |
|
"loss": 1.4654, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.35379061371841153, |
|
"grad_norm": 0.19912089407444, |
|
"learning_rate": 6.498194945848376e-06, |
|
"loss": 1.4187, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.3574007220216607, |
|
"grad_norm": 0.1975143700838089, |
|
"learning_rate": 6.4620938628158845e-06, |
|
"loss": 1.4604, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.36101083032490977, |
|
"grad_norm": 0.1989564597606659, |
|
"learning_rate": 6.425992779783395e-06, |
|
"loss": 1.4471, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.36462093862815886, |
|
"grad_norm": 0.19660496711730957, |
|
"learning_rate": 6.389891696750903e-06, |
|
"loss": 1.4487, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.36823104693140796, |
|
"grad_norm": 0.20031259953975677, |
|
"learning_rate": 6.3537906137184125e-06, |
|
"loss": 1.4529, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.37184115523465705, |
|
"grad_norm": 0.22296781837940216, |
|
"learning_rate": 6.317689530685921e-06, |
|
"loss": 1.5193, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.37545126353790614, |
|
"grad_norm": 0.23671561479568481, |
|
"learning_rate": 6.28158844765343e-06, |
|
"loss": 1.5017, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.37906137184115524, |
|
"grad_norm": 0.20504920184612274, |
|
"learning_rate": 6.245487364620939e-06, |
|
"loss": 1.453, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.38267148014440433, |
|
"grad_norm": 0.2209184765815735, |
|
"learning_rate": 6.209386281588449e-06, |
|
"loss": 1.4509, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3862815884476534, |
|
"grad_norm": 0.20382221043109894, |
|
"learning_rate": 6.173285198555957e-06, |
|
"loss": 1.4356, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.3898916967509025, |
|
"grad_norm": 0.2022784799337387, |
|
"learning_rate": 6.137184115523466e-06, |
|
"loss": 1.4164, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3935018050541516, |
|
"grad_norm": 0.1905573010444641, |
|
"learning_rate": 6.101083032490975e-06, |
|
"loss": 1.4002, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.3971119133574007, |
|
"grad_norm": 0.20327116549015045, |
|
"learning_rate": 6.064981949458484e-06, |
|
"loss": 1.3884, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.4007220216606498, |
|
"grad_norm": 0.19279006123542786, |
|
"learning_rate": 6.028880866425994e-06, |
|
"loss": 1.4392, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.4043321299638989, |
|
"grad_norm": 0.19752241671085358, |
|
"learning_rate": 5.992779783393502e-06, |
|
"loss": 1.4033, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.40794223826714804, |
|
"grad_norm": 0.20032241940498352, |
|
"learning_rate": 5.956678700361012e-06, |
|
"loss": 1.4291, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.41155234657039713, |
|
"grad_norm": 0.21105395257472992, |
|
"learning_rate": 5.92057761732852e-06, |
|
"loss": 1.4483, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.4151624548736462, |
|
"grad_norm": 0.19472186267375946, |
|
"learning_rate": 5.8844765342960295e-06, |
|
"loss": 1.4071, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.4187725631768953, |
|
"grad_norm": 0.1926383376121521, |
|
"learning_rate": 5.848375451263538e-06, |
|
"loss": 1.4419, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.4223826714801444, |
|
"grad_norm": 0.18957462906837463, |
|
"learning_rate": 5.812274368231048e-06, |
|
"loss": 1.4055, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.4259927797833935, |
|
"grad_norm": 0.19267480075359344, |
|
"learning_rate": 5.776173285198557e-06, |
|
"loss": 1.4112, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.4296028880866426, |
|
"grad_norm": 0.18771202862262726, |
|
"learning_rate": 5.740072202166066e-06, |
|
"loss": 1.4294, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.4332129963898917, |
|
"grad_norm": 0.19841831922531128, |
|
"learning_rate": 5.7039711191335744e-06, |
|
"loss": 1.4272, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.4368231046931408, |
|
"grad_norm": 0.20646512508392334, |
|
"learning_rate": 5.667870036101083e-06, |
|
"loss": 1.3947, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.4404332129963899, |
|
"grad_norm": 0.20326825976371765, |
|
"learning_rate": 5.631768953068592e-06, |
|
"loss": 1.4393, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.44404332129963897, |
|
"grad_norm": 0.19117839634418488, |
|
"learning_rate": 5.595667870036101e-06, |
|
"loss": 1.3686, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.44765342960288806, |
|
"grad_norm": 0.19725248217582703, |
|
"learning_rate": 5.559566787003611e-06, |
|
"loss": 1.4325, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.45126353790613716, |
|
"grad_norm": 0.20171114802360535, |
|
"learning_rate": 5.523465703971119e-06, |
|
"loss": 1.3884, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.4548736462093863, |
|
"grad_norm": 0.21069733798503876, |
|
"learning_rate": 5.487364620938629e-06, |
|
"loss": 1.4115, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.4584837545126354, |
|
"grad_norm": 0.19288866221904755, |
|
"learning_rate": 5.451263537906137e-06, |
|
"loss": 1.4237, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.4620938628158845, |
|
"grad_norm": 0.21202199161052704, |
|
"learning_rate": 5.415162454873647e-06, |
|
"loss": 1.4356, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.4657039711191336, |
|
"grad_norm": 0.19240565598011017, |
|
"learning_rate": 5.379061371841156e-06, |
|
"loss": 1.3668, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.4693140794223827, |
|
"grad_norm": 0.20009998977184296, |
|
"learning_rate": 5.342960288808665e-06, |
|
"loss": 1.4231, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.4729241877256318, |
|
"grad_norm": 0.19136758148670197, |
|
"learning_rate": 5.306859205776174e-06, |
|
"loss": 1.4169, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.47653429602888087, |
|
"grad_norm": 0.21354779601097107, |
|
"learning_rate": 5.270758122743683e-06, |
|
"loss": 1.43, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.48014440433212996, |
|
"grad_norm": 0.20004898309707642, |
|
"learning_rate": 5.2346570397111915e-06, |
|
"loss": 1.4465, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.48375451263537905, |
|
"grad_norm": 0.21821723878383636, |
|
"learning_rate": 5.1985559566787e-06, |
|
"loss": 1.4209, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.48736462093862815, |
|
"grad_norm": 0.19806598126888275, |
|
"learning_rate": 5.16245487364621e-06, |
|
"loss": 1.3948, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.49097472924187724, |
|
"grad_norm": 0.202366441488266, |
|
"learning_rate": 5.126353790613719e-06, |
|
"loss": 1.3961, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.49458483754512633, |
|
"grad_norm": 0.20319777727127075, |
|
"learning_rate": 5.090252707581228e-06, |
|
"loss": 1.4122, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.4981949458483754, |
|
"grad_norm": 0.1868642121553421, |
|
"learning_rate": 5.054151624548736e-06, |
|
"loss": 1.3635, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.5018050541516246, |
|
"grad_norm": 0.2048720419406891, |
|
"learning_rate": 5.018050541516246e-06, |
|
"loss": 1.4029, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.5054151624548736, |
|
"grad_norm": 0.19640134274959564, |
|
"learning_rate": 4.981949458483755e-06, |
|
"loss": 1.3501, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.5090252707581228, |
|
"grad_norm": 0.19755016267299652, |
|
"learning_rate": 4.9458483754512636e-06, |
|
"loss": 1.3714, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.5126353790613718, |
|
"grad_norm": 0.2008562535047531, |
|
"learning_rate": 4.909747292418773e-06, |
|
"loss": 1.371, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.516245487364621, |
|
"grad_norm": 0.20218950510025024, |
|
"learning_rate": 4.873646209386282e-06, |
|
"loss": 1.4145, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.51985559566787, |
|
"grad_norm": 0.19422906637191772, |
|
"learning_rate": 4.837545126353791e-06, |
|
"loss": 1.3604, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.5234657039711191, |
|
"grad_norm": 0.20418153703212738, |
|
"learning_rate": 4.8014440433213e-06, |
|
"loss": 1.3712, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.5270758122743683, |
|
"grad_norm": 0.20942777395248413, |
|
"learning_rate": 4.765342960288809e-06, |
|
"loss": 1.402, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.5306859205776173, |
|
"grad_norm": 0.18804116547107697, |
|
"learning_rate": 4.729241877256318e-06, |
|
"loss": 1.3649, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.5342960288808665, |
|
"grad_norm": 0.22690172493457794, |
|
"learning_rate": 4.693140794223827e-06, |
|
"loss": 1.3621, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.5379061371841155, |
|
"grad_norm": 0.18934603035449982, |
|
"learning_rate": 4.6570397111913365e-06, |
|
"loss": 1.3749, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.5415162454873647, |
|
"grad_norm": 0.18682512640953064, |
|
"learning_rate": 4.620938628158845e-06, |
|
"loss": 1.3191, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.5451263537906137, |
|
"grad_norm": 0.19843855500221252, |
|
"learning_rate": 4.584837545126354e-06, |
|
"loss": 1.3605, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.5487364620938628, |
|
"grad_norm": 0.18685825169086456, |
|
"learning_rate": 4.548736462093864e-06, |
|
"loss": 1.3702, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.5523465703971119, |
|
"grad_norm": 0.19559639692306519, |
|
"learning_rate": 4.512635379061372e-06, |
|
"loss": 1.3844, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.555956678700361, |
|
"grad_norm": 0.2046169936656952, |
|
"learning_rate": 4.4765342960288814e-06, |
|
"loss": 1.3849, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.5595667870036101, |
|
"grad_norm": 0.20239655673503876, |
|
"learning_rate": 4.44043321299639e-06, |
|
"loss": 1.3784, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.5631768953068592, |
|
"grad_norm": 0.19824427366256714, |
|
"learning_rate": 4.404332129963899e-06, |
|
"loss": 1.3398, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.5667870036101083, |
|
"grad_norm": 0.1943778544664383, |
|
"learning_rate": 4.3682310469314086e-06, |
|
"loss": 1.3683, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.5703971119133574, |
|
"grad_norm": 0.2199310064315796, |
|
"learning_rate": 4.332129963898917e-06, |
|
"loss": 1.4149, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.5740072202166066, |
|
"grad_norm": 0.21495173871517181, |
|
"learning_rate": 4.296028880866426e-06, |
|
"loss": 1.4314, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.5776173285198556, |
|
"grad_norm": 0.2000962346792221, |
|
"learning_rate": 4.259927797833936e-06, |
|
"loss": 1.3201, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.5812274368231047, |
|
"grad_norm": 0.22053833305835724, |
|
"learning_rate": 4.223826714801444e-06, |
|
"loss": 1.4319, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.5848375451263538, |
|
"grad_norm": 0.20683500170707703, |
|
"learning_rate": 4.1877256317689535e-06, |
|
"loss": 1.3582, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.5884476534296029, |
|
"grad_norm": 0.1908424198627472, |
|
"learning_rate": 4.151624548736463e-06, |
|
"loss": 1.349, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.592057761732852, |
|
"grad_norm": 0.20667223632335663, |
|
"learning_rate": 4.115523465703971e-06, |
|
"loss": 1.3862, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.5956678700361011, |
|
"grad_norm": 0.20890134572982788, |
|
"learning_rate": 4.079422382671481e-06, |
|
"loss": 1.3584, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.5992779783393501, |
|
"grad_norm": 0.20082373917102814, |
|
"learning_rate": 4.043321299638989e-06, |
|
"loss": 1.2859, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.6028880866425993, |
|
"grad_norm": 0.19916357100009918, |
|
"learning_rate": 4.0072202166064985e-06, |
|
"loss": 1.3676, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.6064981949458483, |
|
"grad_norm": 0.20149283111095428, |
|
"learning_rate": 3.971119133574007e-06, |
|
"loss": 1.3477, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.6101083032490975, |
|
"grad_norm": 0.20036858320236206, |
|
"learning_rate": 3.935018050541516e-06, |
|
"loss": 1.3326, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.6137184115523465, |
|
"grad_norm": 0.20373424887657166, |
|
"learning_rate": 3.898916967509026e-06, |
|
"loss": 1.3656, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.6173285198555957, |
|
"grad_norm": 0.1955791711807251, |
|
"learning_rate": 3.862815884476535e-06, |
|
"loss": 1.3238, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.6209386281588448, |
|
"grad_norm": 0.20329371094703674, |
|
"learning_rate": 3.826714801444043e-06, |
|
"loss": 1.3233, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.6245487364620939, |
|
"grad_norm": 0.19480253756046295, |
|
"learning_rate": 3.7906137184115527e-06, |
|
"loss": 1.3483, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.628158844765343, |
|
"grad_norm": 0.20373401045799255, |
|
"learning_rate": 3.7545126353790616e-06, |
|
"loss": 1.3276, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.631768953068592, |
|
"grad_norm": 0.21212375164031982, |
|
"learning_rate": 3.718411552346571e-06, |
|
"loss": 1.3661, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.6353790613718412, |
|
"grad_norm": 0.196991965174675, |
|
"learning_rate": 3.68231046931408e-06, |
|
"loss": 1.3723, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.6389891696750902, |
|
"grad_norm": 0.19335833191871643, |
|
"learning_rate": 3.6462093862815888e-06, |
|
"loss": 1.3218, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.6425992779783394, |
|
"grad_norm": 0.20568343997001648, |
|
"learning_rate": 3.610108303249098e-06, |
|
"loss": 1.3535, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.6462093862815884, |
|
"grad_norm": 0.20011267066001892, |
|
"learning_rate": 3.574007220216607e-06, |
|
"loss": 1.3505, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.6498194945848376, |
|
"grad_norm": 0.21634064614772797, |
|
"learning_rate": 3.5379061371841155e-06, |
|
"loss": 1.3959, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.6534296028880866, |
|
"grad_norm": 0.19454725086688995, |
|
"learning_rate": 3.501805054151625e-06, |
|
"loss": 1.3235, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.6570397111913358, |
|
"grad_norm": 0.206266850233078, |
|
"learning_rate": 3.4657039711191337e-06, |
|
"loss": 1.3527, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.6606498194945848, |
|
"grad_norm": 0.20744888484477997, |
|
"learning_rate": 3.4296028880866426e-06, |
|
"loss": 1.3609, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.6642599277978339, |
|
"grad_norm": 0.19529123604297638, |
|
"learning_rate": 3.393501805054152e-06, |
|
"loss": 1.3409, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.6678700361010831, |
|
"grad_norm": 0.20020008087158203, |
|
"learning_rate": 3.357400722021661e-06, |
|
"loss": 1.3426, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.6714801444043321, |
|
"grad_norm": 0.23242731392383575, |
|
"learning_rate": 3.3212996389891698e-06, |
|
"loss": 1.3632, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.6750902527075813, |
|
"grad_norm": 0.19558390974998474, |
|
"learning_rate": 3.285198555956679e-06, |
|
"loss": 1.356, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.6787003610108303, |
|
"grad_norm": 0.21582651138305664, |
|
"learning_rate": 3.249097472924188e-06, |
|
"loss": 1.3416, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.6823104693140795, |
|
"grad_norm": 0.19333289563655853, |
|
"learning_rate": 3.2129963898916973e-06, |
|
"loss": 1.2946, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.6859205776173285, |
|
"grad_norm": 0.19065658748149872, |
|
"learning_rate": 3.1768953068592062e-06, |
|
"loss": 1.3152, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.6895306859205776, |
|
"grad_norm": 0.19440875947475433, |
|
"learning_rate": 3.140794223826715e-06, |
|
"loss": 1.3243, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.6931407942238267, |
|
"grad_norm": 0.18864405155181885, |
|
"learning_rate": 3.1046931407942245e-06, |
|
"loss": 1.3322, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.6967509025270758, |
|
"grad_norm": 0.1976729929447174, |
|
"learning_rate": 3.068592057761733e-06, |
|
"loss": 1.3288, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.7003610108303249, |
|
"grad_norm": 0.19966968894004822, |
|
"learning_rate": 3.032490974729242e-06, |
|
"loss": 1.3215, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.703971119133574, |
|
"grad_norm": 0.19344742596149445, |
|
"learning_rate": 2.996389891696751e-06, |
|
"loss": 1.3391, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.7075812274368231, |
|
"grad_norm": 0.1955103874206543, |
|
"learning_rate": 2.96028880866426e-06, |
|
"loss": 1.3181, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.7111913357400722, |
|
"grad_norm": 0.21449753642082214, |
|
"learning_rate": 2.924187725631769e-06, |
|
"loss": 1.3647, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.7148014440433214, |
|
"grad_norm": 0.1900106519460678, |
|
"learning_rate": 2.8880866425992783e-06, |
|
"loss": 1.3671, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.7184115523465704, |
|
"grad_norm": 0.20474538207054138, |
|
"learning_rate": 2.8519855595667872e-06, |
|
"loss": 1.3417, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.7220216606498195, |
|
"grad_norm": 0.2103157937526703, |
|
"learning_rate": 2.815884476534296e-06, |
|
"loss": 1.348, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.7256317689530686, |
|
"grad_norm": 0.2024817168712616, |
|
"learning_rate": 2.7797833935018055e-06, |
|
"loss": 1.3295, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.7292418772563177, |
|
"grad_norm": 0.19940879940986633, |
|
"learning_rate": 2.7436823104693144e-06, |
|
"loss": 1.3012, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.7328519855595668, |
|
"grad_norm": 0.1980341076850891, |
|
"learning_rate": 2.7075812274368237e-06, |
|
"loss": 1.3099, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.7364620938628159, |
|
"grad_norm": 0.201655313372612, |
|
"learning_rate": 2.6714801444043326e-06, |
|
"loss": 1.3601, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.740072202166065, |
|
"grad_norm": 0.2011563926935196, |
|
"learning_rate": 2.6353790613718415e-06, |
|
"loss": 1.3372, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.7436823104693141, |
|
"grad_norm": 0.19773615896701813, |
|
"learning_rate": 2.59927797833935e-06, |
|
"loss": 1.3081, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.7472924187725631, |
|
"grad_norm": 0.20711122453212738, |
|
"learning_rate": 2.5631768953068593e-06, |
|
"loss": 1.3639, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.7509025270758123, |
|
"grad_norm": 0.19647595286369324, |
|
"learning_rate": 2.527075812274368e-06, |
|
"loss": 1.305, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.7545126353790613, |
|
"grad_norm": 0.19561149179935455, |
|
"learning_rate": 2.4909747292418775e-06, |
|
"loss": 1.2962, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.7581227436823105, |
|
"grad_norm": 0.21176967024803162, |
|
"learning_rate": 2.4548736462093864e-06, |
|
"loss": 1.3721, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.7617328519855595, |
|
"grad_norm": 0.2063320130109787, |
|
"learning_rate": 2.4187725631768953e-06, |
|
"loss": 1.2738, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.7653429602888087, |
|
"grad_norm": 0.20307239890098572, |
|
"learning_rate": 2.3826714801444047e-06, |
|
"loss": 1.3045, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.7689530685920578, |
|
"grad_norm": 0.21186378598213196, |
|
"learning_rate": 2.3465703971119136e-06, |
|
"loss": 1.3652, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.7725631768953068, |
|
"grad_norm": 0.19658677279949188, |
|
"learning_rate": 2.3104693140794225e-06, |
|
"loss": 1.3755, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.776173285198556, |
|
"grad_norm": 0.21189101040363312, |
|
"learning_rate": 2.274368231046932e-06, |
|
"loss": 1.3618, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.779783393501805, |
|
"grad_norm": 0.2035369873046875, |
|
"learning_rate": 2.2382671480144407e-06, |
|
"loss": 1.3704, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.7833935018050542, |
|
"grad_norm": 0.19990962743759155, |
|
"learning_rate": 2.2021660649819496e-06, |
|
"loss": 1.3603, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.7870036101083032, |
|
"grad_norm": 0.19858066737651825, |
|
"learning_rate": 2.1660649819494585e-06, |
|
"loss": 1.3509, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.7906137184115524, |
|
"grad_norm": 0.21003876626491547, |
|
"learning_rate": 2.129963898916968e-06, |
|
"loss": 1.3435, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.7942238267148014, |
|
"grad_norm": 0.19455671310424805, |
|
"learning_rate": 2.0938628158844768e-06, |
|
"loss": 1.3381, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.7978339350180506, |
|
"grad_norm": 0.20084233582019806, |
|
"learning_rate": 2.0577617328519857e-06, |
|
"loss": 1.3275, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.8014440433212996, |
|
"grad_norm": 0.20646274089813232, |
|
"learning_rate": 2.0216606498194946e-06, |
|
"loss": 1.3127, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.8050541516245487, |
|
"grad_norm": 0.1980086714029312, |
|
"learning_rate": 1.9855595667870035e-06, |
|
"loss": 1.355, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.8086642599277978, |
|
"grad_norm": 0.19509509205818176, |
|
"learning_rate": 1.949458483754513e-06, |
|
"loss": 1.3236, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.8122743682310469, |
|
"grad_norm": 0.19844523072242737, |
|
"learning_rate": 1.9133574007220217e-06, |
|
"loss": 1.3071, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.8158844765342961, |
|
"grad_norm": 0.18677139282226562, |
|
"learning_rate": 1.8772563176895308e-06, |
|
"loss": 1.3124, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.8194945848375451, |
|
"grad_norm": 0.2121885120868683, |
|
"learning_rate": 1.84115523465704e-06, |
|
"loss": 1.2569, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.8231046931407943, |
|
"grad_norm": 0.21662482619285583, |
|
"learning_rate": 1.805054151624549e-06, |
|
"loss": 1.2857, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.8267148014440433, |
|
"grad_norm": 0.20647631585597992, |
|
"learning_rate": 1.7689530685920577e-06, |
|
"loss": 1.3425, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.8303249097472925, |
|
"grad_norm": 0.21089228987693787, |
|
"learning_rate": 1.7328519855595669e-06, |
|
"loss": 1.3092, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.8339350180505415, |
|
"grad_norm": 0.19865262508392334, |
|
"learning_rate": 1.696750902527076e-06, |
|
"loss": 1.3075, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.8375451263537906, |
|
"grad_norm": 0.20838730037212372, |
|
"learning_rate": 1.6606498194945849e-06, |
|
"loss": 1.3776, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.8411552346570397, |
|
"grad_norm": 0.20943178236484528, |
|
"learning_rate": 1.624548736462094e-06, |
|
"loss": 1.3552, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.8447653429602888, |
|
"grad_norm": 0.19375556707382202, |
|
"learning_rate": 1.5884476534296031e-06, |
|
"loss": 1.318, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.8483754512635379, |
|
"grad_norm": 0.23595024645328522, |
|
"learning_rate": 1.5523465703971122e-06, |
|
"loss": 1.3337, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.851985559566787, |
|
"grad_norm": 0.20870931446552277, |
|
"learning_rate": 1.516245487364621e-06, |
|
"loss": 1.3896, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.855595667870036, |
|
"grad_norm": 0.1979263424873352, |
|
"learning_rate": 1.48014440433213e-06, |
|
"loss": 1.3133, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.8592057761732852, |
|
"grad_norm": 0.2063916027545929, |
|
"learning_rate": 1.4440433212996392e-06, |
|
"loss": 1.3236, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.8628158844765343, |
|
"grad_norm": 0.20475609600543976, |
|
"learning_rate": 1.407942238267148e-06, |
|
"loss": 1.3094, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.8664259927797834, |
|
"grad_norm": 0.21313494443893433, |
|
"learning_rate": 1.3718411552346572e-06, |
|
"loss": 1.3487, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.8700361010830325, |
|
"grad_norm": 0.20646831393241882, |
|
"learning_rate": 1.3357400722021663e-06, |
|
"loss": 1.3421, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.8736462093862816, |
|
"grad_norm": 0.2132684737443924, |
|
"learning_rate": 1.299638989169675e-06, |
|
"loss": 1.3254, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.8772563176895307, |
|
"grad_norm": 0.20300714671611786, |
|
"learning_rate": 1.263537906137184e-06, |
|
"loss": 1.2963, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.8808664259927798, |
|
"grad_norm": 0.20637428760528564, |
|
"learning_rate": 1.2274368231046932e-06, |
|
"loss": 1.3531, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.8844765342960289, |
|
"grad_norm": 0.20430119335651398, |
|
"learning_rate": 1.1913357400722023e-06, |
|
"loss": 1.3097, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.8880866425992779, |
|
"grad_norm": 0.2088884711265564, |
|
"learning_rate": 1.1552346570397112e-06, |
|
"loss": 1.3079, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.8916967509025271, |
|
"grad_norm": 0.24322503805160522, |
|
"learning_rate": 1.1191335740072204e-06, |
|
"loss": 1.3841, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.8953068592057761, |
|
"grad_norm": 0.2223634272813797, |
|
"learning_rate": 1.0830324909747293e-06, |
|
"loss": 1.2808, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.8989169675090253, |
|
"grad_norm": 0.20814542472362518, |
|
"learning_rate": 1.0469314079422384e-06, |
|
"loss": 1.3186, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.9025270758122743, |
|
"grad_norm": 0.203385591506958, |
|
"learning_rate": 1.0108303249097473e-06, |
|
"loss": 1.2978, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.9061371841155235, |
|
"grad_norm": 0.19583970308303833, |
|
"learning_rate": 9.747292418772564e-07, |
|
"loss": 1.3459, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.9097472924187726, |
|
"grad_norm": 0.1949346959590912, |
|
"learning_rate": 9.386281588447654e-07, |
|
"loss": 1.287, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.9133574007220217, |
|
"grad_norm": 0.2315586656332016, |
|
"learning_rate": 9.025270758122745e-07, |
|
"loss": 1.3514, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.9169675090252708, |
|
"grad_norm": 0.20318041741847992, |
|
"learning_rate": 8.664259927797834e-07, |
|
"loss": 1.2926, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.9205776173285198, |
|
"grad_norm": 0.20969466865062714, |
|
"learning_rate": 8.303249097472924e-07, |
|
"loss": 1.3309, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.924187725631769, |
|
"grad_norm": 0.20613840222358704, |
|
"learning_rate": 7.942238267148016e-07, |
|
"loss": 1.3204, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.927797833935018, |
|
"grad_norm": 0.2004636526107788, |
|
"learning_rate": 7.581227436823105e-07, |
|
"loss": 1.3267, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.9314079422382672, |
|
"grad_norm": 0.21113616228103638, |
|
"learning_rate": 7.220216606498196e-07, |
|
"loss": 1.2908, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.9350180505415162, |
|
"grad_norm": 0.1977560967206955, |
|
"learning_rate": 6.859205776173286e-07, |
|
"loss": 1.3163, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.9386281588447654, |
|
"grad_norm": 0.2119782716035843, |
|
"learning_rate": 6.498194945848375e-07, |
|
"loss": 1.3165, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.9422382671480144, |
|
"grad_norm": 0.19387714564800262, |
|
"learning_rate": 6.137184115523466e-07, |
|
"loss": 1.2911, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.9458483754512635, |
|
"grad_norm": 0.21642278134822845, |
|
"learning_rate": 5.776173285198556e-07, |
|
"loss": 1.303, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.9494584837545126, |
|
"grad_norm": 0.21271206438541412, |
|
"learning_rate": 5.415162454873646e-07, |
|
"loss": 1.3273, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.9530685920577617, |
|
"grad_norm": 0.21591399610042572, |
|
"learning_rate": 5.054151624548736e-07, |
|
"loss": 1.3196, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.9566787003610109, |
|
"grad_norm": 0.2054390162229538, |
|
"learning_rate": 4.693140794223827e-07, |
|
"loss": 1.3363, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.9602888086642599, |
|
"grad_norm": 0.2292117476463318, |
|
"learning_rate": 4.332129963898917e-07, |
|
"loss": 1.3768, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.9638989169675091, |
|
"grad_norm": 0.2526095509529114, |
|
"learning_rate": 3.971119133574008e-07, |
|
"loss": 1.4071, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.9675090252707581, |
|
"grad_norm": 0.21546489000320435, |
|
"learning_rate": 3.610108303249098e-07, |
|
"loss": 1.3661, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.9711191335740073, |
|
"grad_norm": 0.20055970549583435, |
|
"learning_rate": 3.2490974729241875e-07, |
|
"loss": 1.3288, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.9747292418772563, |
|
"grad_norm": 0.25362637639045715, |
|
"learning_rate": 2.888086642599278e-07, |
|
"loss": 1.4092, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.9783393501805054, |
|
"grad_norm": 0.2010817676782608, |
|
"learning_rate": 2.527075812274368e-07, |
|
"loss": 1.3286, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.9819494584837545, |
|
"grad_norm": 0.20375026762485504, |
|
"learning_rate": 2.1660649819494586e-07, |
|
"loss": 1.3446, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.9855595667870036, |
|
"grad_norm": 0.2000964730978012, |
|
"learning_rate": 1.805054151624549e-07, |
|
"loss": 1.3223, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.9891696750902527, |
|
"grad_norm": 0.19899840652942657, |
|
"learning_rate": 1.444043321299639e-07, |
|
"loss": 1.3393, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.9927797833935018, |
|
"grad_norm": 0.20534487068653107, |
|
"learning_rate": 1.0830324909747293e-07, |
|
"loss": 1.3059, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.9963898916967509, |
|
"grad_norm": 0.20792889595031738, |
|
"learning_rate": 7.220216606498195e-08, |
|
"loss": 1.3125, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.20410171151161194, |
|
"learning_rate": 3.6101083032490976e-08, |
|
"loss": 1.3321, |
|
"step": 277 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 277, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 0, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.827066833587405e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |