{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 15.0, |
|
"eval_steps": 500, |
|
"global_step": 3075, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04878048780487805, |
|
"grad_norm": 0.4036892056465149, |
|
"learning_rate": 0.00019999478113897612, |
|
"loss": 1.0282, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0975609756097561, |
|
"grad_norm": 0.3629762828350067, |
|
"learning_rate": 0.0001999791251006346, |
|
"loss": 0.7875, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.14634146341463414, |
|
"grad_norm": 0.4877622425556183, |
|
"learning_rate": 0.0001999530335191093, |
|
"loss": 0.5942, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1951219512195122, |
|
"grad_norm": 0.4466260075569153, |
|
"learning_rate": 0.00019991650911776695, |
|
"loss": 0.3866, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.24390243902439024, |
|
"grad_norm": 0.649118959903717, |
|
"learning_rate": 0.000199869555708923, |
|
"loss": 0.3928, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2926829268292683, |
|
"grad_norm": 0.8762800097465515, |
|
"learning_rate": 0.0001998121781934438, |
|
"loss": 0.3258, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.34146341463414637, |
|
"grad_norm": 0.9195622801780701, |
|
"learning_rate": 0.0001997443825602349, |
|
"loss": 0.2885, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.3902439024390244, |
|
"grad_norm": 0.5856262445449829, |
|
"learning_rate": 0.00019966617588561609, |
|
"loss": 0.2888, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.43902439024390244, |
|
"grad_norm": 0.5520443320274353, |
|
"learning_rate": 0.00019957756633258265, |
|
"loss": 0.2242, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.4878048780487805, |
|
"grad_norm": 0.9435800909996033, |
|
"learning_rate": 0.00019947856314995349, |
|
"loss": 0.1629, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.5365853658536586, |
|
"grad_norm": 0.9416623115539551, |
|
"learning_rate": 0.00019936917667140555, |
|
"loss": 0.1555, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5853658536585366, |
|
"grad_norm": 0.802065372467041, |
|
"learning_rate": 0.0001992494183143955, |
|
"loss": 0.1339, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.6341463414634146, |
|
"grad_norm": 0.7007794380187988, |
|
"learning_rate": 0.00019911930057896774, |
|
"loss": 0.1191, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.6829268292682927, |
|
"grad_norm": 0.6755990386009216, |
|
"learning_rate": 0.00019897883704644983, |
|
"loss": 0.1571, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.7317073170731707, |
|
"grad_norm": 1.6951078176498413, |
|
"learning_rate": 0.00019882804237803488, |
|
"loss": 0.1309, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.7804878048780488, |
|
"grad_norm": 0.567158579826355, |
|
"learning_rate": 0.0001986669323132512, |
|
"loss": 0.0766, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.8292682926829268, |
|
"grad_norm": 0.8820038437843323, |
|
"learning_rate": 0.0001984955236683196, |
|
"loss": 0.0839, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.8780487804878049, |
|
"grad_norm": 0.6520794034004211, |
|
"learning_rate": 0.00019831383433439797, |
|
"loss": 0.0863, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.926829268292683, |
|
"grad_norm": 0.45519864559173584, |
|
"learning_rate": 0.00019812188327571399, |
|
"loss": 0.0889, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.975609756097561, |
|
"grad_norm": 0.614235520362854, |
|
"learning_rate": 0.00019791969052758562, |
|
"loss": 0.0725, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.024390243902439, |
|
"grad_norm": 0.2764686644077301, |
|
"learning_rate": 0.00019770727719432994, |
|
"loss": 0.0407, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.0731707317073171, |
|
"grad_norm": 0.6082726716995239, |
|
"learning_rate": 0.00019748466544706022, |
|
"loss": 0.044, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.1219512195121952, |
|
"grad_norm": 0.9295619130134583, |
|
"learning_rate": 0.00019725187852137195, |
|
"loss": 0.0675, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.170731707317073, |
|
"grad_norm": 0.3758924603462219, |
|
"learning_rate": 0.00019700894071491732, |
|
"loss": 0.0439, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.2195121951219512, |
|
"grad_norm": 0.46514585614204407, |
|
"learning_rate": 0.00019675587738486936, |
|
"loss": 0.0398, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.2682926829268293, |
|
"grad_norm": 0.5870018005371094, |
|
"learning_rate": 0.0001964927149452751, |
|
"loss": 0.0406, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.3170731707317074, |
|
"grad_norm": 0.30292996764183044, |
|
"learning_rate": 0.00019621948086429844, |
|
"loss": 0.028, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.3658536585365852, |
|
"grad_norm": 0.47037121653556824, |
|
"learning_rate": 0.00019593620366135337, |
|
"loss": 0.0239, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.4146341463414633, |
|
"grad_norm": 0.4176475405693054, |
|
"learning_rate": 0.00019564291290412688, |
|
"loss": 0.0281, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.4634146341463414, |
|
"grad_norm": 0.3179157078266144, |
|
"learning_rate": 0.00019533963920549306, |
|
"loss": 0.0281, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.5121951219512195, |
|
"grad_norm": 0.5817562937736511, |
|
"learning_rate": 0.00019502641422031763, |
|
"loss": 0.0296, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.5609756097560976, |
|
"grad_norm": 0.7409655451774597, |
|
"learning_rate": 0.00019470327064215383, |
|
"loss": 0.029, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.6097560975609757, |
|
"grad_norm": 0.4418310225009918, |
|
"learning_rate": 0.00019437024219983028, |
|
"loss": 0.0583, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.6585365853658538, |
|
"grad_norm": 0.31637728214263916, |
|
"learning_rate": 0.0001940273636539301, |
|
"loss": 0.0354, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.7073170731707317, |
|
"grad_norm": 0.22175493836402893, |
|
"learning_rate": 0.00019367467079316279, |
|
"loss": 0.0514, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.7560975609756098, |
|
"grad_norm": 0.6636152267456055, |
|
"learning_rate": 0.00019331220043062894, |
|
"loss": 0.034, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.8048780487804879, |
|
"grad_norm": 0.8424332141876221, |
|
"learning_rate": 0.00019293999039997746, |
|
"loss": 0.0299, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.8536585365853657, |
|
"grad_norm": 0.6435155272483826, |
|
"learning_rate": 0.00019255807955145677, |
|
"loss": 0.0508, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.9024390243902438, |
|
"grad_norm": 0.7734220027923584, |
|
"learning_rate": 0.00019216650774785972, |
|
"loss": 0.035, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.951219512195122, |
|
"grad_norm": 0.2854250967502594, |
|
"learning_rate": 0.0001917653158603628, |
|
"loss": 0.0339, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.6165639758110046, |
|
"learning_rate": 0.0001913545457642601, |
|
"loss": 0.0323, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.048780487804878, |
|
"grad_norm": 0.2167205959558487, |
|
"learning_rate": 0.00019093424033459248, |
|
"loss": 0.026, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.097560975609756, |
|
"grad_norm": 0.2723434269428253, |
|
"learning_rate": 0.0001905044434416725, |
|
"loss": 0.0176, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.1463414634146343, |
|
"grad_norm": 0.4085879325866699, |
|
"learning_rate": 0.00019006519994650513, |
|
"loss": 0.0138, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.1951219512195124, |
|
"grad_norm": 0.17931397259235382, |
|
"learning_rate": 0.00018961655569610557, |
|
"loss": 0.0358, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.2439024390243905, |
|
"grad_norm": 0.3886450231075287, |
|
"learning_rate": 0.00018915855751871363, |
|
"loss": 0.0209, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.292682926829268, |
|
"grad_norm": 0.11022531986236572, |
|
"learning_rate": 0.0001886912532189061, |
|
"loss": 0.0101, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.341463414634146, |
|
"grad_norm": 0.14626094698905945, |
|
"learning_rate": 0.00018821469157260685, |
|
"loss": 0.0156, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.3902439024390243, |
|
"grad_norm": 0.18015721440315247, |
|
"learning_rate": 0.00018772892232199592, |
|
"loss": 0.0156, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.4390243902439024, |
|
"grad_norm": 0.3262254595756531, |
|
"learning_rate": 0.00018723399617031751, |
|
"loss": 0.0295, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.4878048780487805, |
|
"grad_norm": 0.10063759982585907, |
|
"learning_rate": 0.00018672996477658767, |
|
"loss": 0.0164, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.5365853658536586, |
|
"grad_norm": 0.11382050812244415, |
|
"learning_rate": 0.00018621688075020227, |
|
"loss": 0.0207, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.5853658536585367, |
|
"grad_norm": 0.12307539582252502, |
|
"learning_rate": 0.0001856947976454459, |
|
"loss": 0.0326, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.6341463414634148, |
|
"grad_norm": 0.18902955949306488, |
|
"learning_rate": 0.00018516376995590187, |
|
"loss": 0.0144, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.682926829268293, |
|
"grad_norm": 0.18087974190711975, |
|
"learning_rate": 0.00018462385310876443, |
|
"loss": 0.0111, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.7317073170731705, |
|
"grad_norm": 0.2894444465637207, |
|
"learning_rate": 0.00018407510345905332, |
|
"loss": 0.0081, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.7804878048780486, |
|
"grad_norm": 0.24273361265659332, |
|
"learning_rate": 0.0001835175782837318, |
|
"loss": 0.0301, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.8292682926829267, |
|
"grad_norm": 0.069428451359272, |
|
"learning_rate": 0.00018295133577572799, |
|
"loss": 0.0234, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.8780487804878048, |
|
"grad_norm": 0.1845165342092514, |
|
"learning_rate": 0.00018237643503786095, |
|
"loss": 0.0112, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.926829268292683, |
|
"grad_norm": 0.06373828649520874, |
|
"learning_rate": 0.00018179293607667178, |
|
"loss": 0.0241, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.975609756097561, |
|
"grad_norm": 0.09466666728258133, |
|
"learning_rate": 0.0001812008997961602, |
|
"loss": 0.0151, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.024390243902439, |
|
"grad_norm": 0.5068451762199402, |
|
"learning_rate": 0.00018060038799142759, |
|
"loss": 0.02, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.073170731707317, |
|
"grad_norm": 0.17547158896923065, |
|
"learning_rate": 0.00017999146334222695, |
|
"loss": 0.0111, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.1219512195121952, |
|
"grad_norm": 0.24108292162418365, |
|
"learning_rate": 0.00017937418940642074, |
|
"loss": 0.0095, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.1707317073170733, |
|
"grad_norm": 0.24457822740077972, |
|
"learning_rate": 0.00017874863061334657, |
|
"loss": 0.0134, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.2195121951219514, |
|
"grad_norm": 0.2185467779636383, |
|
"learning_rate": 0.00017811485225709256, |
|
"loss": 0.0135, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.2682926829268295, |
|
"grad_norm": 0.12849357724189758, |
|
"learning_rate": 0.00017747292048968187, |
|
"loss": 0.0154, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.317073170731707, |
|
"grad_norm": 0.09158976376056671, |
|
"learning_rate": 0.0001768229023141682, |
|
"loss": 0.0137, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.3658536585365852, |
|
"grad_norm": 0.09520118683576584, |
|
"learning_rate": 0.00017616486557764187, |
|
"loss": 0.0147, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.4146341463414633, |
|
"grad_norm": 0.11151307821273804, |
|
"learning_rate": 0.00017549887896414851, |
|
"loss": 0.0168, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.4634146341463414, |
|
"grad_norm": 0.1175757572054863, |
|
"learning_rate": 0.00017482501198751965, |
|
"loss": 0.015, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.5121951219512195, |
|
"grad_norm": 0.12314116209745407, |
|
"learning_rate": 0.00017414333498411733, |
|
"loss": 0.0179, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.5609756097560976, |
|
"grad_norm": 0.08803991228342056, |
|
"learning_rate": 0.00017345391910549238, |
|
"loss": 0.0105, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.6097560975609757, |
|
"grad_norm": 0.06382381916046143, |
|
"learning_rate": 0.000172756836310958, |
|
"loss": 0.0106, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.658536585365854, |
|
"grad_norm": 0.47523975372314453, |
|
"learning_rate": 0.0001720521593600787, |
|
"loss": 0.0085, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.7073170731707314, |
|
"grad_norm": 0.1268441379070282, |
|
"learning_rate": 0.000171339961805076, |
|
"loss": 0.0155, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.7560975609756095, |
|
"grad_norm": 0.23719309270381927, |
|
"learning_rate": 0.000170620317983151, |
|
"loss": 0.015, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.8048780487804876, |
|
"grad_norm": 0.0664207935333252, |
|
"learning_rate": 0.00016989330300872576, |
|
"loss": 0.0179, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.8536585365853657, |
|
"grad_norm": 0.3121427893638611, |
|
"learning_rate": 0.00016915899276560237, |
|
"loss": 0.0138, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.902439024390244, |
|
"grad_norm": 0.0784049853682518, |
|
"learning_rate": 0.00016841746389904304, |
|
"loss": 0.0114, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.951219512195122, |
|
"grad_norm": 0.7239044904708862, |
|
"learning_rate": 0.0001676687938077698, |
|
"loss": 0.0251, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.2333860546350479, |
|
"learning_rate": 0.00016691306063588583, |
|
"loss": 0.0135, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.048780487804878, |
|
"grad_norm": 0.7087911367416382, |
|
"learning_rate": 0.00016615034326471898, |
|
"loss": 0.0195, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.097560975609756, |
|
"grad_norm": 0.07190815359354019, |
|
"learning_rate": 0.00016538072130458853, |
|
"loss": 0.0095, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.146341463414634, |
|
"grad_norm": 0.33951500058174133, |
|
"learning_rate": 0.00016460427508649546, |
|
"loss": 0.0131, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.195121951219512, |
|
"grad_norm": 0.06381627917289734, |
|
"learning_rate": 0.00016382108565373785, |
|
"loss": 0.0119, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.2439024390243905, |
|
"grad_norm": 0.44717633724212646, |
|
"learning_rate": 0.00016303123475345182, |
|
"loss": 0.0127, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.2926829268292686, |
|
"grad_norm": 0.6674973368644714, |
|
"learning_rate": 0.00016223480482807894, |
|
"loss": 0.0111, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.341463414634147, |
|
"grad_norm": 0.3303108215332031, |
|
"learning_rate": 0.00016143187900676112, |
|
"loss": 0.0159, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.390243902439025, |
|
"grad_norm": 0.2972947359085083, |
|
"learning_rate": 0.0001606225410966638, |
|
"loss": 0.0086, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.439024390243903, |
|
"grad_norm": 0.059205561876297, |
|
"learning_rate": 0.00015980687557422854, |
|
"loss": 0.0104, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.487804878048781, |
|
"grad_norm": 0.08515360206365585, |
|
"learning_rate": 0.00015898496757635536, |
|
"loss": 0.0079, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.536585365853659, |
|
"grad_norm": 0.14600469172000885, |
|
"learning_rate": 0.00015815690289151658, |
|
"loss": 0.0101, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.585365853658536, |
|
"grad_norm": 0.05901546776294708, |
|
"learning_rate": 0.0001573227679508024, |
|
"loss": 0.0075, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.634146341463414, |
|
"grad_norm": 0.3404502868652344, |
|
"learning_rate": 0.00015648264981889934, |
|
"loss": 0.0158, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.682926829268292, |
|
"grad_norm": 0.0968211218714714, |
|
"learning_rate": 0.00015563663618500302, |
|
"loss": 0.0092, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.7317073170731705, |
|
"grad_norm": 0.08040373027324677, |
|
"learning_rate": 0.00015478481535366494, |
|
"loss": 0.0088, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.780487804878049, |
|
"grad_norm": 0.7208348512649536, |
|
"learning_rate": 0.00015392727623557585, |
|
"loss": 0.0132, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.829268292682927, |
|
"grad_norm": 0.26821044087409973, |
|
"learning_rate": 0.00015306410833828535, |
|
"loss": 0.0113, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.878048780487805, |
|
"grad_norm": 0.09240720421075821, |
|
"learning_rate": 0.00015219540175685938, |
|
"loss": 0.0096, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.926829268292683, |
|
"grad_norm": 0.3397311270236969, |
|
"learning_rate": 0.00015132124716447627, |
|
"loss": 0.0072, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.975609756097561, |
|
"grad_norm": 0.07910922169685364, |
|
"learning_rate": 0.00015044173580296266, |
|
"loss": 0.0115, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 5.024390243902439, |
|
"grad_norm": 0.05629584938287735, |
|
"learning_rate": 0.00014955695947326987, |
|
"loss": 0.0066, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 5.073170731707317, |
|
"grad_norm": 0.7522541880607605, |
|
"learning_rate": 0.00014866701052589193, |
|
"loss": 0.0159, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 5.121951219512195, |
|
"grad_norm": 0.09192178398370743, |
|
"learning_rate": 0.0001477719818512263, |
|
"loss": 0.0085, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 5.170731707317073, |
|
"grad_norm": 0.3984428346157074, |
|
"learning_rate": 0.00014687196686987832, |
|
"loss": 0.0091, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 5.219512195121951, |
|
"grad_norm": 0.4188997149467468, |
|
"learning_rate": 0.00014596705952291017, |
|
"loss": 0.0083, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 5.2682926829268295, |
|
"grad_norm": 0.07686212658882141, |
|
"learning_rate": 0.00014505735426203543, |
|
"loss": 0.0064, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 5.317073170731708, |
|
"grad_norm": 0.38933801651000977, |
|
"learning_rate": 0.00014414294603976076, |
|
"loss": 0.0086, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 5.365853658536586, |
|
"grad_norm": 0.0843789279460907, |
|
"learning_rate": 0.00014322393029947468, |
|
"loss": 0.0074, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.414634146341464, |
|
"grad_norm": 0.19899946451187134, |
|
"learning_rate": 0.0001423004029654859, |
|
"loss": 0.0102, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.463414634146342, |
|
"grad_norm": 0.06790963560342789, |
|
"learning_rate": 0.00014137246043301042, |
|
"loss": 0.0102, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.512195121951219, |
|
"grad_norm": 0.08267515152692795, |
|
"learning_rate": 0.00014044019955811065, |
|
"loss": 0.009, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.560975609756097, |
|
"grad_norm": 0.3616844713687897, |
|
"learning_rate": 0.00013950371764758542, |
|
"loss": 0.0119, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.609756097560975, |
|
"grad_norm": 0.052261725068092346, |
|
"learning_rate": 0.0001385631124488136, |
|
"loss": 0.0078, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.658536585365853, |
|
"grad_norm": 0.07086987048387527, |
|
"learning_rate": 0.00013761848213955134, |
|
"loss": 0.0112, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.7073170731707314, |
|
"grad_norm": 0.05841855704784393, |
|
"learning_rate": 0.00013666992531768482, |
|
"loss": 0.0095, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.7560975609756095, |
|
"grad_norm": 0.2936477065086365, |
|
"learning_rate": 0.00013571754099093848, |
|
"loss": 0.0082, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.804878048780488, |
|
"grad_norm": 0.7195134162902832, |
|
"learning_rate": 0.0001347614285665412, |
|
"loss": 0.0093, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.853658536585366, |
|
"grad_norm": 0.09665755927562714, |
|
"learning_rate": 0.00013380168784085027, |
|
"loss": 0.0125, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.902439024390244, |
|
"grad_norm": 0.08146821707487106, |
|
"learning_rate": 0.0001328384189889351, |
|
"loss": 0.0094, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.951219512195122, |
|
"grad_norm": 1.6456434726715088, |
|
"learning_rate": 0.00013187172255412096, |
|
"loss": 0.0104, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"grad_norm": 0.46437689661979675, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 0.0103, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 6.048780487804878, |
|
"grad_norm": 0.40744251012802124, |
|
"learning_rate": 0.00012992845088737322, |
|
"loss": 0.0105, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 6.097560975609756, |
|
"grad_norm": 0.09202691912651062, |
|
"learning_rate": 0.00012895207848873487, |
|
"loss": 0.0088, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 6.146341463414634, |
|
"grad_norm": 0.41562455892562866, |
|
"learning_rate": 0.00012797268415261682, |
|
"loss": 0.0105, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 6.195121951219512, |
|
"grad_norm": 0.061398524791002274, |
|
"learning_rate": 0.0001269903701054776, |
|
"loss": 0.0084, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 6.2439024390243905, |
|
"grad_norm": 0.0851488932967186, |
|
"learning_rate": 0.00012600523887852706, |
|
"loss": 0.0081, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 6.2926829268292686, |
|
"grad_norm": 0.4565141201019287, |
|
"learning_rate": 0.00012501739329702453, |
|
"loss": 0.0122, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 6.341463414634147, |
|
"grad_norm": 0.06614430993795395, |
|
"learning_rate": 0.00012402693646954607, |
|
"loss": 0.0079, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 6.390243902439025, |
|
"grad_norm": 0.3719758987426758, |
|
"learning_rate": 0.00012303397177722234, |
|
"loss": 0.0096, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 6.439024390243903, |
|
"grad_norm": 0.06896215677261353, |
|
"learning_rate": 0.0001220386028629479, |
|
"loss": 0.0059, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 6.487804878048781, |
|
"grad_norm": 0.05981654301285744, |
|
"learning_rate": 0.00012104093362056341, |
|
"loss": 0.007, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 6.536585365853659, |
|
"grad_norm": 0.0625968649983406, |
|
"learning_rate": 0.00012004106818401135, |
|
"loss": 0.0104, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 6.585365853658536, |
|
"grad_norm": 0.04070024937391281, |
|
"learning_rate": 0.00011903911091646684, |
|
"loss": 0.0082, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 6.634146341463414, |
|
"grad_norm": 0.05095053091645241, |
|
"learning_rate": 0.00011803516639944452, |
|
"loss": 0.008, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 6.682926829268292, |
|
"grad_norm": 0.058545567095279694, |
|
"learning_rate": 0.00011702933942188252, |
|
"loss": 0.0079, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 6.7317073170731705, |
|
"grad_norm": 0.06065903976559639, |
|
"learning_rate": 0.0001160217349692051, |
|
"loss": 0.0075, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 6.780487804878049, |
|
"grad_norm": 0.13544753193855286, |
|
"learning_rate": 0.00011501245821236439, |
|
"loss": 0.0075, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 6.829268292682927, |
|
"grad_norm": 0.06476018577814102, |
|
"learning_rate": 0.00011400161449686293, |
|
"loss": 0.0086, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 6.878048780487805, |
|
"grad_norm": 0.0454210601747036, |
|
"learning_rate": 0.00011298930933175804, |
|
"loss": 0.0074, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 6.926829268292683, |
|
"grad_norm": 0.05906907469034195, |
|
"learning_rate": 0.00011197564837864922, |
|
"loss": 0.007, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 6.975609756097561, |
|
"grad_norm": 0.2888021171092987, |
|
"learning_rate": 0.00011096073744064919, |
|
"loss": 0.0076, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 7.024390243902439, |
|
"grad_norm": 0.05438758432865143, |
|
"learning_rate": 0.00010994468245134071, |
|
"loss": 0.008, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 7.073170731707317, |
|
"grad_norm": 0.04347096383571625, |
|
"learning_rate": 0.00010892758946371944, |
|
"loss": 0.006, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 7.121951219512195, |
|
"grad_norm": 0.06834473460912704, |
|
"learning_rate": 0.00010790956463912438, |
|
"loss": 0.0079, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 7.170731707317073, |
|
"grad_norm": 0.04391489177942276, |
|
"learning_rate": 0.00010689071423615709, |
|
"loss": 0.0061, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 7.219512195121951, |
|
"grad_norm": 0.0990622267127037, |
|
"learning_rate": 0.00010587114459959071, |
|
"loss": 0.0071, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 7.2682926829268295, |
|
"grad_norm": 0.048575662076473236, |
|
"learning_rate": 0.00010485096214927002, |
|
"loss": 0.0066, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 7.317073170731708, |
|
"grad_norm": 0.05991552770137787, |
|
"learning_rate": 0.00010383027336900355, |
|
"loss": 0.0075, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 7.365853658536586, |
|
"grad_norm": 0.08045367151498795, |
|
"learning_rate": 0.00010280918479544914, |
|
"loss": 0.0062, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 7.414634146341464, |
|
"grad_norm": 0.044166866689920425, |
|
"learning_rate": 0.00010178780300699395, |
|
"loss": 0.008, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 7.463414634146342, |
|
"grad_norm": 0.06291426718235016, |
|
"learning_rate": 0.00010076623461263018, |
|
"loss": 0.0066, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 7.512195121951219, |
|
"grad_norm": 0.056730519980192184, |
|
"learning_rate": 9.974458624082727e-05, |
|
"loss": 0.0063, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 7.560975609756097, |
|
"grad_norm": 0.18145255744457245, |
|
"learning_rate": 9.872296452840264e-05, |
|
"loss": 0.007, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 7.609756097560975, |
|
"grad_norm": 0.038042183965444565, |
|
"learning_rate": 9.770147610939097e-05, |
|
"loss": 0.0071, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 7.658536585365853, |
|
"grad_norm": 0.09980273991823196, |
|
"learning_rate": 9.668022760391418e-05, |
|
"loss": 0.0083, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 7.7073170731707314, |
|
"grad_norm": 0.04334135353565216, |
|
"learning_rate": 9.565932560705277e-05, |
|
"loss": 0.0073, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 7.7560975609756095, |
|
"grad_norm": 0.08293572813272476, |
|
"learning_rate": 9.463887667771946e-05, |
|
"loss": 0.007, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 7.804878048780488, |
|
"grad_norm": 0.06575772166252136, |
|
"learning_rate": 9.361898732753716e-05, |
|
"loss": 0.007, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 7.853658536585366, |
|
"grad_norm": 0.13714659214019775, |
|
"learning_rate": 9.259976400972147e-05, |
|
"loss": 0.0071, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 7.902439024390244, |
|
"grad_norm": 0.05362860858440399, |
|
"learning_rate": 9.15813131079693e-05, |
|
"loss": 0.0069, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 7.951219512195122, |
|
"grad_norm": 0.036402106285095215, |
|
"learning_rate": 9.056374092535504e-05, |
|
"loss": 0.0068, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 0.06819606572389603, |
|
"learning_rate": 8.954715367323468e-05, |
|
"loss": 0.0074, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 8.048780487804878, |
|
"grad_norm": 0.051237184554338455, |
|
"learning_rate": 8.853165746015997e-05, |
|
"loss": 0.0061, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 8.097560975609756, |
|
"grad_norm": 0.04173099994659424, |
|
"learning_rate": 8.751735828080308e-05, |
|
"loss": 0.0071, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 8.146341463414634, |
|
"grad_norm": 0.04740411415696144, |
|
"learning_rate": 8.650436200489303e-05, |
|
"loss": 0.0062, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 8.195121951219512, |
|
"grad_norm": 0.07791474461555481, |
|
"learning_rate": 8.549277436616551e-05, |
|
"loss": 0.0063, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 8.24390243902439, |
|
"grad_norm": 0.10498108714818954, |
|
"learning_rate": 8.448270095132652e-05, |
|
"loss": 0.0061, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 8.292682926829269, |
|
"grad_norm": 0.07452121376991272, |
|
"learning_rate": 8.347424718903151e-05, |
|
"loss": 0.0063, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 8.341463414634147, |
|
"grad_norm": 0.07632943242788315, |
|
"learning_rate": 8.246751833888122e-05, |
|
"loss": 0.0067, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 8.390243902439025, |
|
"grad_norm": 0.040240757167339325, |
|
"learning_rate": 8.146261948043485e-05, |
|
"loss": 0.0063, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 8.439024390243903, |
|
"grad_norm": 0.06094524264335632, |
|
"learning_rate": 8.0459655502242e-05, |
|
"loss": 0.0061, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 8.487804878048781, |
|
"grad_norm": 0.09173769503831863, |
|
"learning_rate": 7.945873109089503e-05, |
|
"loss": 0.0066, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 8.536585365853659, |
|
"grad_norm": 0.07450529932975769, |
|
"learning_rate": 7.845995072010188e-05, |
|
"loss": 0.0065, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 8.585365853658537, |
|
"grad_norm": 0.07175474613904953, |
|
"learning_rate": 7.74634186397815e-05, |
|
"loss": 0.0063, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 8.634146341463415, |
|
"grad_norm": 0.1444873958826065, |
|
"learning_rate": 7.646923886518264e-05, |
|
"loss": 0.0066, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 8.682926829268293, |
|
"grad_norm": 0.06203252822160721, |
|
"learning_rate": 7.54775151660267e-05, |
|
"loss": 0.0061, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 8.731707317073171, |
|
"grad_norm": 0.07840217649936676, |
|
"learning_rate": 7.448835105567696e-05, |
|
"loss": 0.0066, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 8.78048780487805, |
|
"grad_norm": 0.05611024424433708, |
|
"learning_rate": 7.350184978033386e-05, |
|
"loss": 0.0063, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 8.829268292682928, |
|
"grad_norm": 0.05446132645010948, |
|
"learning_rate": 7.251811430825846e-05, |
|
"loss": 0.0072, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 8.878048780487806, |
|
"grad_norm": 0.06147909164428711, |
|
"learning_rate": 7.153724731902506e-05, |
|
"loss": 0.0063, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 8.926829268292684, |
|
"grad_norm": 0.137003555893898, |
|
"learning_rate": 7.055935119280369e-05, |
|
"loss": 0.0066, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 8.975609756097562, |
|
"grad_norm": 0.15653973817825317, |
|
"learning_rate": 6.958452799967385e-05, |
|
"loss": 0.007, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 9.024390243902438, |
|
"grad_norm": 0.06786433607339859, |
|
"learning_rate": 6.861287948897091e-05, |
|
"loss": 0.0069, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 9.073170731707316, |
|
"grad_norm": 0.07620502263307571, |
|
"learning_rate": 6.764450707866576e-05, |
|
"loss": 0.0062, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 9.121951219512194, |
|
"grad_norm": 0.05350011587142944, |
|
"learning_rate": 6.667951184477893e-05, |
|
"loss": 0.0059, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 9.170731707317072, |
|
"grad_norm": 0.05874161049723625, |
|
"learning_rate": 6.57179945108307e-05, |
|
"loss": 0.0054, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 9.21951219512195, |
|
"grad_norm": 0.07297080755233765, |
|
"learning_rate": 6.476005543732782e-05, |
|
"loss": 0.0067, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 9.268292682926829, |
|
"grad_norm": 0.08384808897972107, |
|
"learning_rate": 6.380579461128819e-05, |
|
"loss": 0.0065, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 9.317073170731707, |
|
"grad_norm": 0.06501758098602295, |
|
"learning_rate": 6.285531163580449e-05, |
|
"loss": 0.0058, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 9.365853658536585, |
|
"grad_norm": 0.051126834005117416, |
|
"learning_rate": 6.190870571964774e-05, |
|
"loss": 0.0061, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 9.414634146341463, |
|
"grad_norm": 0.07436596602201462, |
|
"learning_rate": 6.096607566691235e-05, |
|
"loss": 0.0064, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 9.463414634146341, |
|
"grad_norm": 0.0661911889910698, |
|
"learning_rate": 6.002751986670323e-05, |
|
"loss": 0.0061, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 9.512195121951219, |
|
"grad_norm": 0.058736588805913925, |
|
"learning_rate": 5.909313628286601e-05, |
|
"loss": 0.0063, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 9.560975609756097, |
|
"grad_norm": 0.051081206649541855, |
|
"learning_rate": 5.816302244376206e-05, |
|
"loss": 0.0074, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 9.609756097560975, |
|
"grad_norm": 0.06269817799329758, |
|
"learning_rate": 5.7237275432088664e-05, |
|
"loss": 0.0057, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 9.658536585365853, |
|
"grad_norm": 0.0546838752925396, |
|
"learning_rate": 5.63159918747457e-05, |
|
"loss": 0.0063, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 9.707317073170731, |
|
"grad_norm": 0.09113246202468872, |
|
"learning_rate": 5.539926793275021e-05, |
|
"loss": 0.0067, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 9.75609756097561, |
|
"grad_norm": 0.06070900708436966, |
|
"learning_rate": 5.448719929119915e-05, |
|
"loss": 0.0065, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 9.804878048780488, |
|
"grad_norm": 0.06381196528673172, |
|
"learning_rate": 5.3579881149282204e-05, |
|
"loss": 0.0063, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 9.853658536585366, |
|
"grad_norm": 0.05914902687072754, |
|
"learning_rate": 5.267740821034524e-05, |
|
"loss": 0.0059, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 9.902439024390244, |
|
"grad_norm": 0.0414993092417717, |
|
"learning_rate": 5.177987467200504e-05, |
|
"loss": 0.0065, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 9.951219512195122, |
|
"grad_norm": 0.051615435630083084, |
|
"learning_rate": 5.088737421631767e-05, |
|
"loss": 0.0061, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.08131363987922668, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.0058, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 10.048780487804878, |
|
"grad_norm": 0.06244876608252525, |
|
"learning_rate": 4.911784464470623e-05, |
|
"loss": 0.0057, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 10.097560975609756, |
|
"grad_norm": 0.060385096818208694, |
|
"learning_rate": 4.8241000227360354e-05, |
|
"loss": 0.0062, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 10.146341463414634, |
|
"grad_norm": 0.052863430231809616, |
|
"learning_rate": 4.73695582705455e-05, |
|
"loss": 0.0062, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 10.195121951219512, |
|
"grad_norm": 0.07917828112840652, |
|
"learning_rate": 4.650360973295086e-05, |
|
"loss": 0.0061, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 10.24390243902439, |
|
"grad_norm": 0.054362453520298004, |
|
"learning_rate": 4.56432449998779e-05, |
|
"loss": 0.0069, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 10.292682926829269, |
|
"grad_norm": 0.06768287718296051, |
|
"learning_rate": 4.478855387380605e-05, |
|
"loss": 0.0062, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 10.341463414634147, |
|
"grad_norm": 0.06945596635341644, |
|
"learning_rate": 4.3939625565019416e-05, |
|
"loss": 0.0062, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 10.390243902439025, |
|
"grad_norm": 0.07058496028184891, |
|
"learning_rate": 4.30965486822953e-05, |
|
"loss": 0.0064, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 10.439024390243903, |
|
"grad_norm": 0.06080787256360054, |
|
"learning_rate": 4.225941122365529e-05, |
|
"loss": 0.0058, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 10.487804878048781, |
|
"grad_norm": 0.059627074748277664, |
|
"learning_rate": 4.142830056718052e-05, |
|
"loss": 0.0062, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 10.536585365853659, |
|
"grad_norm": 0.05086175352334976, |
|
"learning_rate": 4.0603303461891253e-05, |
|
"loss": 0.0056, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 10.585365853658537, |
|
"grad_norm": 0.1199130192399025, |
|
"learning_rate": 3.978450601869217e-05, |
|
"loss": 0.0062, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 10.634146341463415, |
|
"grad_norm": 0.1347583681344986, |
|
"learning_rate": 3.8971993701384554e-05, |
|
"loss": 0.0064, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 10.682926829268293, |
|
"grad_norm": 0.050621725618839264, |
|
"learning_rate": 3.816585131774571e-05, |
|
"loss": 0.0057, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 10.731707317073171, |
|
"grad_norm": 0.04940760135650635, |
|
"learning_rate": 3.736616301067694e-05, |
|
"loss": 0.0062, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 10.78048780487805, |
|
"grad_norm": 0.052099861204624176, |
|
"learning_rate": 3.657301224942098e-05, |
|
"loss": 0.0058, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 10.829268292682928, |
|
"grad_norm": 0.08436452597379684, |
|
"learning_rate": 3.578648182084975e-05, |
|
"loss": 0.0058, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 10.878048780487806, |
|
"grad_norm": 0.0537634901702404, |
|
"learning_rate": 3.5006653820823135e-05, |
|
"loss": 0.0056, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 10.926829268292684, |
|
"grad_norm": 0.06204523518681526, |
|
"learning_rate": 3.423360964562026e-05, |
|
"loss": 0.0058, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 10.975609756097562, |
|
"grad_norm": 0.1142859160900116, |
|
"learning_rate": 3.346742998344348e-05, |
|
"loss": 0.0062, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 11.024390243902438, |
|
"grad_norm": 0.06821559369564056, |
|
"learning_rate": 3.270819480599625e-05, |
|
"loss": 0.006, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 11.073170731707316, |
|
"grad_norm": 0.05169494450092316, |
|
"learning_rate": 3.195598336013615e-05, |
|
"loss": 0.0053, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 11.121951219512194, |
|
"grad_norm": 0.05910908058285713, |
|
"learning_rate": 3.121087415960304e-05, |
|
"loss": 0.0059, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 11.170731707317072, |
|
"grad_norm": 0.07483071833848953, |
|
"learning_rate": 3.0472944976824224e-05, |
|
"loss": 0.0058, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 11.21951219512195, |
|
"grad_norm": 0.0738058090209961, |
|
"learning_rate": 2.9742272834796813e-05, |
|
"loss": 0.0055, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 11.268292682926829, |
|
"grad_norm": 0.09469424933195114, |
|
"learning_rate": 2.901893399904797e-05, |
|
"loss": 0.0062, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 11.317073170731707, |
|
"grad_norm": 0.05931226164102554, |
|
"learning_rate": 2.830300396967487e-05, |
|
"loss": 0.0059, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 11.365853658536585, |
|
"grad_norm": 0.07385493069887161, |
|
"learning_rate": 2.7594557473464066e-05, |
|
"loss": 0.0059, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 11.414634146341463, |
|
"grad_norm": 0.08171911537647247, |
|
"learning_rate": 2.6893668456091625e-05, |
|
"loss": 0.0057, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 11.463414634146341, |
|
"grad_norm": 0.05290233716368675, |
|
"learning_rate": 2.620041007440508e-05, |
|
"loss": 0.0064, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 11.512195121951219, |
|
"grad_norm": 0.06285463273525238, |
|
"learning_rate": 2.5514854688787405e-05, |
|
"loss": 0.0062, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 11.560975609756097, |
|
"grad_norm": 0.06230178475379944, |
|
"learning_rate": 2.4837073855604188e-05, |
|
"loss": 0.0061, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 11.609756097560975, |
|
"grad_norm": 0.05689137428998947, |
|
"learning_rate": 2.4167138319734905e-05, |
|
"loss": 0.006, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 11.658536585365853, |
|
"grad_norm": 0.07083682715892792, |
|
"learning_rate": 2.350511800718871e-05, |
|
"loss": 0.0057, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 11.707317073170731, |
|
"grad_norm": 0.056767258793115616, |
|
"learning_rate": 2.2851082017805703e-05, |
|
"loss": 0.0056, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 11.75609756097561, |
|
"grad_norm": 0.06561973690986633, |
|
"learning_rate": 2.2205098618044583e-05, |
|
"loss": 0.0058, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 11.804878048780488, |
|
"grad_norm": 0.05094975233078003, |
|
"learning_rate": 2.1567235233857054e-05, |
|
"loss": 0.0057, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 11.853658536585366, |
|
"grad_norm": 0.0570388026535511, |
|
"learning_rate": 2.0937558443650206e-05, |
|
"loss": 0.006, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 11.902439024390244, |
|
"grad_norm": 0.06019320338964462, |
|
"learning_rate": 2.031613397133724e-05, |
|
"loss": 0.006, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 11.951219512195122, |
|
"grad_norm": 0.09152466058731079, |
|
"learning_rate": 1.9703026679477256e-05, |
|
"loss": 0.0059, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 0.06767109036445618, |
|
"learning_rate": 1.9098300562505266e-05, |
|
"loss": 0.0058, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 12.048780487804878, |
|
"grad_norm": 0.05434449389576912, |
|
"learning_rate": 1.8502018740052496e-05, |
|
"loss": 0.0057, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 12.097560975609756, |
|
"grad_norm": 0.061113059520721436, |
|
"learning_rate": 1.7914243450358216e-05, |
|
"loss": 0.0054, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 12.146341463414634, |
|
"grad_norm": 0.05813341587781906, |
|
"learning_rate": 1.73350360437734e-05, |
|
"loss": 0.0057, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 12.195121951219512, |
|
"grad_norm": 0.07934972643852234, |
|
"learning_rate": 1.676445697635728e-05, |
|
"loss": 0.0057, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 12.24390243902439, |
|
"grad_norm": 0.08221197873353958, |
|
"learning_rate": 1.6202565803566917e-05, |
|
"loss": 0.0056, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 12.292682926829269, |
|
"grad_norm": 0.07954658567905426, |
|
"learning_rate": 1.564942117404119e-05, |
|
"loss": 0.0056, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 12.341463414634147, |
|
"grad_norm": 0.07448497414588928, |
|
"learning_rate": 1.510508082347899e-05, |
|
"loss": 0.0058, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 12.390243902439025, |
|
"grad_norm": 0.06170118972659111, |
|
"learning_rate": 1.4569601568613144e-05, |
|
"loss": 0.0058, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 12.439024390243903, |
|
"grad_norm": 0.05958204343914986, |
|
"learning_rate": 1.4043039301279903e-05, |
|
"loss": 0.0058, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 12.487804878048781, |
|
"grad_norm": 0.05719635635614395, |
|
"learning_rate": 1.352544898258511e-05, |
|
"loss": 0.0056, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 12.536585365853659, |
|
"grad_norm": 0.056574221700429916, |
|
"learning_rate": 1.301688463716757e-05, |
|
"loss": 0.0063, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 12.585365853658537, |
|
"grad_norm": 0.14904461801052094, |
|
"learning_rate": 1.2517399347560167e-05, |
|
"loss": 0.0059, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 12.634146341463415, |
|
"grad_norm": 0.03999248892068863, |
|
"learning_rate": 1.2027045248649015e-05, |
|
"loss": 0.0057, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 12.682926829268293, |
|
"grad_norm": 0.05714278668165207, |
|
"learning_rate": 1.1545873522232053e-05, |
|
"loss": 0.0056, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 12.731707317073171, |
|
"grad_norm": 0.06596580147743225, |
|
"learning_rate": 1.1073934391676667e-05, |
|
"loss": 0.0058, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 12.78048780487805, |
|
"grad_norm": 0.06003756448626518, |
|
"learning_rate": 1.0611277116677487e-05, |
|
"loss": 0.0059, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 12.829268292682928, |
|
"grad_norm": 0.05459544062614441, |
|
"learning_rate": 1.0157949988114967e-05, |
|
"loss": 0.0059, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 12.878048780487806, |
|
"grad_norm": 0.061511751264333725, |
|
"learning_rate": 9.714000323014705e-06, |
|
"loss": 0.0058, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 12.926829268292684, |
|
"grad_norm": 0.048676956444978714, |
|
"learning_rate": 9.279474459608805e-06, |
|
"loss": 0.0059, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 12.975609756097562, |
|
"grad_norm": 0.05746001750230789, |
|
"learning_rate": 8.854417752499111e-06, |
|
"loss": 0.0057, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 13.024390243902438, |
|
"grad_norm": 0.05339556187391281, |
|
"learning_rate": 8.43887456792326e-06, |
|
"loss": 0.0057, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 13.073170731707316, |
|
"grad_norm": 0.06085890159010887, |
|
"learning_rate": 8.032888279123829e-06, |
|
"loss": 0.0056, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 13.121951219512194, |
|
"grad_norm": 0.06632421165704727, |
|
"learning_rate": 7.636501261821238e-06, |
|
"loss": 0.0057, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 13.170731707317072, |
|
"grad_norm": 0.07371345162391663, |
|
"learning_rate": 7.249754889790539e-06, |
|
"loss": 0.0058, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 13.21951219512195, |
|
"grad_norm": 0.06796526908874512, |
|
"learning_rate": 6.872689530543086e-06, |
|
"loss": 0.0059, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 13.268292682926829, |
|
"grad_norm": 0.05836968868970871, |
|
"learning_rate": 6.505344541113046e-06, |
|
"loss": 0.0058, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 13.317073170731707, |
|
"grad_norm": 0.05631383880972862, |
|
"learning_rate": 6.147758263949321e-06, |
|
"loss": 0.0055, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 13.365853658536585, |
|
"grad_norm": 0.057296037673950195, |
|
"learning_rate": 5.7999680229136375e-06, |
|
"loss": 0.0057, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 13.414634146341463, |
|
"grad_norm": 0.059217844158411026, |
|
"learning_rate": 5.462010119384664e-06, |
|
"loss": 0.0056, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 13.463414634146341, |
|
"grad_norm": 0.052348434925079346, |
|
"learning_rate": 5.1339198284689916e-06, |
|
"loss": 0.0057, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 13.512195121951219, |
|
"grad_norm": 0.04569809511303902, |
|
"learning_rate": 4.815731395319278e-06, |
|
"loss": 0.0055, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 13.560975609756097, |
|
"grad_norm": 0.05811962112784386, |
|
"learning_rate": 4.507478031559731e-06, |
|
"loss": 0.0057, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 13.609756097560975, |
|
"grad_norm": 0.04801028221845627, |
|
"learning_rate": 4.209191911819688e-06, |
|
"loss": 0.0055, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 13.658536585365853, |
|
"grad_norm": 0.04857472702860832, |
|
"learning_rate": 3.9209041703752395e-06, |
|
"loss": 0.0056, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 13.707317073170731, |
|
"grad_norm": 0.09129537642002106, |
|
"learning_rate": 3.6426448978995056e-06, |
|
"loss": 0.006, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 13.75609756097561, |
|
"grad_norm": 0.06310546398162842, |
|
"learning_rate": 3.3744431383219367e-06, |
|
"loss": 0.0055, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 13.804878048780488, |
|
"grad_norm": 0.055802859365940094, |
|
"learning_rate": 3.116326885796728e-06, |
|
"loss": 0.0056, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 13.853658536585366, |
|
"grad_norm": 0.0634494498372078, |
|
"learning_rate": 2.868323081780877e-06, |
|
"loss": 0.0055, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 13.902439024390244, |
|
"grad_norm": 0.05695090815424919, |
|
"learning_rate": 2.6304576122221035e-06, |
|
"loss": 0.0055, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 13.951219512195122, |
|
"grad_norm": 0.06994400918483734, |
|
"learning_rate": 2.402755304856974e-06, |
|
"loss": 0.0057, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 0.0930507555603981, |
|
"learning_rate": 2.1852399266194314e-06, |
|
"loss": 0.006, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 14.048780487804878, |
|
"grad_norm": 0.056339725852012634, |
|
"learning_rate": 1.9779341811600794e-06, |
|
"loss": 0.0056, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 14.097560975609756, |
|
"grad_norm": 0.05481722205877304, |
|
"learning_rate": 1.7808597064764009e-06, |
|
"loss": 0.0056, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 14.146341463414634, |
|
"grad_norm": 0.07605459541082382, |
|
"learning_rate": 1.5940370726542863e-06, |
|
"loss": 0.0055, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 14.195121951219512, |
|
"grad_norm": 0.07780090719461441, |
|
"learning_rate": 1.417485779720995e-06, |
|
"loss": 0.0055, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 14.24390243902439, |
|
"grad_norm": 0.06754057854413986, |
|
"learning_rate": 1.2512242556097398e-06, |
|
"loss": 0.0055, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 14.292682926829269, |
|
"grad_norm": 0.055912312120199203, |
|
"learning_rate": 1.0952698542362805e-06, |
|
"loss": 0.0056, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 14.341463414634147, |
|
"grad_norm": 0.06355050206184387, |
|
"learning_rate": 9.496388536875623e-07, |
|
"loss": 0.0058, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 14.390243902439025, |
|
"grad_norm": 0.05702829360961914, |
|
"learning_rate": 8.143464545226298e-07, |
|
"loss": 0.0057, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 14.439024390243903, |
|
"grad_norm": 0.05529710277915001, |
|
"learning_rate": 6.894067781860635e-07, |
|
"loss": 0.0057, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 14.487804878048781, |
|
"grad_norm": 0.073829285800457, |
|
"learning_rate": 5.74832865534014e-07, |
|
"loss": 0.0055, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 14.536585365853659, |
|
"grad_norm": 0.054862912744283676, |
|
"learning_rate": 4.7063667547301294e-07, |
|
"loss": 0.0058, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 14.585365853658537, |
|
"grad_norm": 0.07511288672685623, |
|
"learning_rate": 3.768290837117605e-07, |
|
"loss": 0.0056, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 14.634146341463415, |
|
"grad_norm": 0.05845462158322334, |
|
"learning_rate": 2.934198816259559e-07, |
|
"loss": 0.0055, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 14.682926829268293, |
|
"grad_norm": 0.0624593086540699, |
|
"learning_rate": 2.2041777523627017e-07, |
|
"loss": 0.0057, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 14.731707317073171, |
|
"grad_norm": 0.056128259748220444, |
|
"learning_rate": 1.5783038429965092e-07, |
|
"loss": 0.0055, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 14.78048780487805, |
|
"grad_norm": 0.0808698758482933, |
|
"learning_rate": 1.0566424151401411e-07, |
|
"loss": 0.0057, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 14.829268292682928, |
|
"grad_norm": 0.0584842748939991, |
|
"learning_rate": 6.392479183633394e-08, |
|
"loss": 0.0055, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 14.878048780487806, |
|
"grad_norm": 0.07366840541362762, |
|
"learning_rate": 3.2616391914364054e-08, |
|
"loss": 0.0055, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 14.926829268292684, |
|
"grad_norm": 0.06805972754955292, |
|
"learning_rate": 1.1742309631845861e-08, |
|
"loss": 0.0055, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 14.975609756097562, |
|
"grad_norm": 0.057060301303863525, |
|
"learning_rate": 1.304723767492355e-09, |
|
"loss": 0.0056, |
|
"step": 3070 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3075, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 15, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.23238405675008e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |