{
  "best_metric": 0.8874096274375916,
  "best_model_checkpoint": "/workspace/previous_works/MedBLIP/output/MedBLIP-0007/checkpoint-10000",
  "epoch": 3.0,
  "eval_steps": 10000,
  "global_step": 14319,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0031426775612822125, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 6.4738, "step": 15 },
    { "epoch": 0.006285355122564425, "grad_norm": 143.78610229492188, "learning_rate": 1.3966480446927375e-07, "loss": 6.4427, "step": 30 },
    { "epoch": 0.009428032683846637, "grad_norm": 125.2808609008789, "learning_rate": 3.3519553072625703e-07, "loss": 6.1729, "step": 45 },
    { "epoch": 0.01257071024512885, "grad_norm": 55.64238357543945, "learning_rate": 5.446927374301677e-07, "loss": 5.6704, "step": 60 },
    { "epoch": 0.01571338780641106, "grad_norm": 58.784568786621094, "learning_rate": 7.402234636871509e-07, "loss": 4.6903, "step": 75 },
    { "epoch": 0.018856065367693273, "grad_norm": 101.14881896972656, "learning_rate": 9.497206703910615e-07, "loss": 4.009, "step": 90 },
    { "epoch": 0.02199874292897549, "grad_norm": 40.7690315246582, "learning_rate": 1.159217877094972e-06, "loss": 3.3713, "step": 105 },
    { "epoch": 0.0251414204902577, "grad_norm": 22.23933219909668, "learning_rate": 1.3687150837988828e-06, "loss": 2.9785, "step": 120 },
    { "epoch": 0.028284098051539912, "grad_norm": 16.25871467590332, "learning_rate": 1.5782122905027933e-06, "loss": 2.8118, "step": 135 },
    { "epoch": 0.03142677561282212, "grad_norm": 23.991317749023438, "learning_rate": 1.787709497206704e-06, "loss": 2.6758, "step": 150 },
    { "epoch": 0.034569453174104335, "grad_norm": 27.5322265625, "learning_rate": 1.9972067039106146e-06, "loss": 2.5263, "step": 165 },
    { "epoch": 0.03771213073538655, "grad_norm": 18.619874954223633, "learning_rate": 2.2067039106145253e-06, "loss": 2.4554, "step": 180 },
    { "epoch": 0.04085480829666876, "grad_norm": 17.445724487304688, "learning_rate": 2.416201117318436e-06, "loss": 2.3279, "step": 195 },
    { "epoch": 0.04399748585795098, "grad_norm": 19.981834411621094, "learning_rate": 2.6256983240223464e-06, "loss": 2.2568, "step": 210 },
    { "epoch": 0.04714016341923319, "grad_norm": 15.571565628051758, "learning_rate": 2.8351955307262576e-06, "loss": 2.1021, "step": 225 },
    { "epoch": 0.0502828409805154, "grad_norm": 13.631041526794434, "learning_rate": 3.044692737430168e-06, "loss": 1.9724, "step": 240 },
    { "epoch": 0.05342551854179761, "grad_norm": 10.806873321533203, "learning_rate": 3.2541899441340786e-06, "loss": 1.8049, "step": 255 },
    { "epoch": 0.056568196103079824, "grad_norm": 14.863541603088379, "learning_rate": 3.4636871508379893e-06, "loss": 1.7358, "step": 270 },
    { "epoch": 0.059710873664362035, "grad_norm": 12.929265022277832, "learning_rate": 3.6731843575418996e-06, "loss": 1.6953, "step": 285 },
    { "epoch": 0.06285355122564425, "grad_norm": 12.591845512390137, "learning_rate": 3.88268156424581e-06, "loss": 1.6874, "step": 300 },
    { "epoch": 0.06599622878692646, "grad_norm": 13.552529335021973, "learning_rate": 4.0921787709497215e-06, "loss": 1.6485, "step": 315 },
    { "epoch": 0.06913890634820867, "grad_norm": 9.15485668182373, "learning_rate": 4.301675977653632e-06, "loss": 1.6044, "step": 330 },
    { "epoch": 0.07228158390949088, "grad_norm": 7.887852668762207, "learning_rate": 4.511173184357542e-06, "loss": 1.4664, "step": 345 },
    { "epoch": 0.0754242614707731, "grad_norm": 12.257076263427734, "learning_rate": 4.7206703910614525e-06, "loss": 1.5338, "step": 360 },
    { "epoch": 0.0785669390320553, "grad_norm": 8.181825637817383, "learning_rate": 4.930167597765364e-06, "loss": 1.4954, "step": 375 },
    { "epoch": 0.08170961659333752, "grad_norm": 9.838497161865234, "learning_rate": 5.139664804469274e-06, "loss": 1.4217, "step": 390 },
    { "epoch": 0.08485229415461974, "grad_norm": 15.192863464355469, "learning_rate": 5.349162011173184e-06, "loss": 1.4725, "step": 405 },
    { "epoch": 0.08799497171590195, "grad_norm": 7.013632297515869, "learning_rate": 5.558659217877096e-06, "loss": 1.3989, "step": 420 },
    { "epoch": 0.09113764927718417, "grad_norm": 8.598616600036621, "learning_rate": 5.768156424581007e-06, "loss": 1.4828, "step": 435 },
    { "epoch": 0.09428032683846638, "grad_norm": 5.625289440155029, "learning_rate": 5.977653631284917e-06, "loss": 1.4513, "step": 450 },
    { "epoch": 0.09742300439974859, "grad_norm": 7.280072212219238, "learning_rate": 6.187150837988828e-06, "loss": 1.4092, "step": 465 },
    { "epoch": 0.1005656819610308, "grad_norm": 7.608973979949951, "learning_rate": 6.396648044692738e-06, "loss": 1.4124, "step": 480 },
    { "epoch": 0.10370835952231301, "grad_norm": 8.430741310119629, "learning_rate": 6.606145251396649e-06, "loss": 1.4466, "step": 495 },
    { "epoch": 0.10685103708359522, "grad_norm": 8.642643928527832, "learning_rate": 6.815642458100559e-06, "loss": 1.3558, "step": 510 },
    { "epoch": 0.10999371464487744, "grad_norm": 6.632693290710449, "learning_rate": 7.02513966480447e-06, "loss": 1.4129, "step": 525 },
    { "epoch": 0.11313639220615965, "grad_norm": 12.695171356201172, "learning_rate": 7.2346368715083805e-06, "loss": 1.3444, "step": 540 },
    { "epoch": 0.11627906976744186, "grad_norm": 5.393228054046631, "learning_rate": 7.444134078212291e-06, "loss": 1.3485, "step": 555 },
    { "epoch": 0.11942174732872407, "grad_norm": 8.173951148986816, "learning_rate": 7.653631284916202e-06, "loss": 1.4175, "step": 570 },
    { "epoch": 0.12256442489000628, "grad_norm": 14.139352798461914, "learning_rate": 7.863128491620112e-06, "loss": 1.3797, "step": 585 },
    { "epoch": 0.1257071024512885, "grad_norm": 4.9999613761901855, "learning_rate": 8.072625698324023e-06, "loss": 1.3716, "step": 600 },
    { "epoch": 0.12884978001257072, "grad_norm": 4.344189643859863, "learning_rate": 8.282122905027935e-06, "loss": 1.3011, "step": 615 },
    { "epoch": 0.13199245757385292, "grad_norm": 11.033843040466309, "learning_rate": 8.491620111731845e-06, "loss": 1.3672, "step": 630 },
    { "epoch": 0.13513513513513514, "grad_norm": 4.3888068199157715, "learning_rate": 8.701117318435755e-06, "loss": 1.2773, "step": 645 },
    { "epoch": 0.13827781269641734, "grad_norm": 4.085406303405762, "learning_rate": 8.910614525139666e-06, "loss": 1.3057, "step": 660 },
    { "epoch": 0.14142049025769957, "grad_norm": 11.730964660644531, "learning_rate": 9.120111731843576e-06, "loss": 1.3752, "step": 675 },
    { "epoch": 0.14456316781898176, "grad_norm": 4.986327648162842, "learning_rate": 9.329608938547486e-06, "loss": 1.3055, "step": 690 },
    { "epoch": 0.147705845380264, "grad_norm": 5.643145561218262, "learning_rate": 9.539106145251398e-06, "loss": 1.3608, "step": 705 },
    { "epoch": 0.1508485229415462, "grad_norm": 12.661754608154297, "learning_rate": 9.748603351955308e-06, "loss": 1.4083, "step": 720 },
    { "epoch": 0.1539912005028284, "grad_norm": 6.018293857574463, "learning_rate": 9.958100558659219e-06, "loss": 1.2323, "step": 735 },
    { "epoch": 0.1571338780641106, "grad_norm": 4.150609493255615, "learning_rate": 1.0167597765363129e-05, "loss": 1.3068, "step": 750 },
    { "epoch": 0.16027655562539284, "grad_norm": 4.229065895080566, "learning_rate": 1.037709497206704e-05, "loss": 1.3257, "step": 765 },
    { "epoch": 0.16341923318667503, "grad_norm": 9.254191398620605, "learning_rate": 1.0586592178770951e-05, "loss": 1.2674, "step": 780 },
    { "epoch": 0.16656191074795726, "grad_norm": 3.52528977394104, "learning_rate": 1.0796089385474862e-05, "loss": 1.271, "step": 795 },
    { "epoch": 0.16970458830923948, "grad_norm": 6.313427448272705, "learning_rate": 1.1005586592178772e-05, "loss": 1.2948, "step": 810 },
    { "epoch": 0.17284726587052168, "grad_norm": 3.691848039627075, "learning_rate": 1.1215083798882682e-05, "loss": 1.292, "step": 825 },
    { "epoch": 0.1759899434318039, "grad_norm": 3.9371867179870605, "learning_rate": 1.1424581005586593e-05, "loss": 1.3135, "step": 840 },
    { "epoch": 0.1791326209930861, "grad_norm": 3.455486297607422, "learning_rate": 1.1634078212290503e-05, "loss": 1.2495, "step": 855 },
    { "epoch": 0.18227529855436833, "grad_norm": 10.44939136505127, "learning_rate": 1.1843575418994415e-05, "loss": 1.356, "step": 870 },
    { "epoch": 0.18541797611565053, "grad_norm": 3.6111254692077637, "learning_rate": 1.2053072625698325e-05, "loss": 1.3196, "step": 885 },
    { "epoch": 0.18856065367693275, "grad_norm": 3.531568765640259, "learning_rate": 1.2262569832402236e-05, "loss": 1.2307, "step": 900 },
    { "epoch": 0.19170333123821495, "grad_norm": 3.490081310272217, "learning_rate": 1.2472067039106146e-05, "loss": 1.2445, "step": 915 },
    { "epoch": 0.19484600879949718, "grad_norm": 5.011629581451416, "learning_rate": 1.2681564245810056e-05, "loss": 1.3192, "step": 930 },
    { "epoch": 0.19798868636077938, "grad_norm": 7.499322414398193, "learning_rate": 1.2891061452513967e-05, "loss": 1.2827, "step": 945 },
    { "epoch": 0.2011313639220616, "grad_norm": 3.792930841445923, "learning_rate": 1.3100558659217879e-05, "loss": 1.336, "step": 960 },
    { "epoch": 0.2042740414833438, "grad_norm": 4.43565559387207, "learning_rate": 1.3310055865921789e-05, "loss": 1.3371, "step": 975 },
    { "epoch": 0.20741671904462602, "grad_norm": 4.2933173179626465, "learning_rate": 1.3519553072625699e-05, "loss": 1.3118, "step": 990 },
    { "epoch": 0.21055939660590822, "grad_norm": 3.68387770652771, "learning_rate": 1.372905027932961e-05, "loss": 1.3975, "step": 1005 },
    { "epoch": 0.21370207416719045, "grad_norm": 5.9917707443237305, "learning_rate": 1.393854748603352e-05, "loss": 1.2626, "step": 1020 },
    { "epoch": 0.21684475172847265, "grad_norm": 3.1892921924591064, "learning_rate": 1.414804469273743e-05, "loss": 1.2781, "step": 1035 },
    { "epoch": 0.21998742928975487, "grad_norm": 3.4965710639953613, "learning_rate": 1.4357541899441342e-05, "loss": 1.2557, "step": 1050 },
    { "epoch": 0.2231301068510371, "grad_norm": 7.907847881317139, "learning_rate": 1.4567039106145252e-05, "loss": 1.2341, "step": 1065 },
    { "epoch": 0.2262727844123193, "grad_norm": 3.6374635696411133, "learning_rate": 1.4776536312849163e-05, "loss": 1.3225, "step": 1080 },
    { "epoch": 0.22941546197360152, "grad_norm": 6.52725887298584, "learning_rate": 1.4986033519553073e-05, "loss": 1.2137, "step": 1095 },
    { "epoch": 0.23255813953488372, "grad_norm": 4.659281253814697, "learning_rate": 1.5195530726256983e-05, "loss": 1.3373, "step": 1110 },
    { "epoch": 0.23570081709616594, "grad_norm": 3.1662540435791016, "learning_rate": 1.5405027932960895e-05, "loss": 1.3348, "step": 1125 },
    { "epoch": 0.23884349465744814, "grad_norm": 3.8003756999969482, "learning_rate": 1.5614525139664806e-05, "loss": 1.2948, "step": 1140 },
    { "epoch": 0.24198617221873037, "grad_norm": 3.0090460777282715, "learning_rate": 1.5824022346368716e-05, "loss": 1.2724, "step": 1155 },
    { "epoch": 0.24512884978001256, "grad_norm": 3.1680350303649902, "learning_rate": 1.6033519553072626e-05, "loss": 1.2402, "step": 1170 },
    { "epoch": 0.2482715273412948, "grad_norm": 3.944425106048584, "learning_rate": 1.6243016759776537e-05, "loss": 1.303, "step": 1185 },
    { "epoch": 0.251414204902577, "grad_norm": 8.234729766845703, "learning_rate": 1.6452513966480447e-05, "loss": 1.2196, "step": 1200 },
    { "epoch": 0.2545568824638592, "grad_norm": 6.725916862487793, "learning_rate": 1.6662011173184357e-05, "loss": 1.3213, "step": 1215 },
    { "epoch": 0.25769956002514144, "grad_norm": 3.2696242332458496, "learning_rate": 1.687150837988827e-05, "loss": 1.2611, "step": 1230 },
    { "epoch": 0.2608422375864236, "grad_norm": 3.0667693614959717, "learning_rate": 1.708100558659218e-05, "loss": 1.22, "step": 1245 },
    { "epoch": 0.26398491514770583, "grad_norm": 3.5260438919067383, "learning_rate": 1.729050279329609e-05, "loss": 1.3142, "step": 1260 },
    { "epoch": 0.26712759270898806, "grad_norm": 3.51269793510437, "learning_rate": 1.7500000000000002e-05, "loss": 1.2031, "step": 1275 },
    { "epoch": 0.2702702702702703, "grad_norm": 2.9083192348480225, "learning_rate": 1.7709497206703912e-05, "loss": 1.1711, "step": 1290 },
    { "epoch": 0.27341294783155246, "grad_norm": 2.915454626083374, "learning_rate": 1.7918994413407822e-05, "loss": 1.2452, "step": 1305 },
    { "epoch": 0.2765556253928347, "grad_norm": 4.736166954040527, "learning_rate": 1.8128491620111733e-05, "loss": 1.2251, "step": 1320 },
    { "epoch": 0.2796983029541169, "grad_norm": 2.881985664367676, "learning_rate": 1.8337988826815643e-05, "loss": 1.2541, "step": 1335 },
    { "epoch": 0.28284098051539913, "grad_norm": 4.29525899887085, "learning_rate": 1.8547486033519553e-05, "loss": 1.2336, "step": 1350 },
    { "epoch": 0.28598365807668136, "grad_norm": 3.046929359436035, "learning_rate": 1.8756983240223464e-05, "loss": 1.2576, "step": 1365 },
    { "epoch": 0.2891263356379635, "grad_norm": 4.023289680480957, "learning_rate": 1.8966480446927374e-05, "loss": 1.2371, "step": 1380 },
    { "epoch": 0.29226901319924575, "grad_norm": 6.221644878387451, "learning_rate": 1.9175977653631284e-05, "loss": 1.1985, "step": 1395 },
    { "epoch": 0.295411690760528, "grad_norm": 6.021862030029297, "learning_rate": 1.9385474860335195e-05, "loss": 1.2741, "step": 1410 },
    { "epoch": 0.2985543683218102, "grad_norm": 6.475122451782227, "learning_rate": 1.959497206703911e-05, "loss": 1.2405, "step": 1425 },
    { "epoch": 0.3016970458830924, "grad_norm": 6.075967788696289, "learning_rate": 1.980446927374302e-05, "loss": 1.1969, "step": 1440 },
    { "epoch": 0.3048397234443746, "grad_norm": 2.591923713684082, "learning_rate": 1.9999999702856602e-05, "loss": 1.1979, "step": 1455 },
    { "epoch": 0.3079824010056568, "grad_norm": 6.351642608642578, "learning_rate": 1.9999923931385287e-05, "loss": 1.2887, "step": 1470 },
    { "epoch": 0.31112507856693905, "grad_norm": 2.5136075019836426, "learning_rate": 1.9999714446548786e-05, "loss": 1.2974, "step": 1485 },
    { "epoch": 0.3142677561282212, "grad_norm": 3.683798313140869, "learning_rate": 1.9999371251148214e-05, "loss": 1.2073, "step": 1500 },
    { "epoch": 0.31741043368950345, "grad_norm": 6.616783142089844, "learning_rate": 1.9998894349772585e-05, "loss": 1.224, "step": 1515 },
    { "epoch": 0.32055311125078567, "grad_norm": 3.386265277862549, "learning_rate": 1.9998283748798757e-05, "loss": 1.2237, "step": 1530 },
    { "epoch": 0.3236957888120679, "grad_norm": 3.2038071155548096, "learning_rate": 1.9997539456391343e-05, "loss": 1.2805, "step": 1545 },
    { "epoch": 0.32683846637335007, "grad_norm": 5.434456825256348, "learning_rate": 1.9996661482502602e-05, "loss": 1.295, "step": 1560 },
    { "epoch": 0.3299811439346323, "grad_norm": 3.0071728229522705, "learning_rate": 1.999564983887231e-05, "loss": 1.2207, "step": 1575 },
    { "epoch": 0.3331238214959145, "grad_norm": 4.010280609130859, "learning_rate": 1.9994504539027594e-05, "loss": 1.2005, "step": 1590 },
    { "epoch": 0.33626649905719674, "grad_norm": 4.2148237228393555, "learning_rate": 1.9993225598282768e-05, "loss": 1.1608, "step": 1605 },
    { "epoch": 0.33940917661847897, "grad_norm": 5.541611194610596, "learning_rate": 1.9991813033739097e-05, "loss": 1.1649, "step": 1620 },
    { "epoch": 0.34255185417976114, "grad_norm": 5.37580680847168, "learning_rate": 1.9990266864284607e-05, "loss": 1.2087, "step": 1635 },
    { "epoch": 0.34569453174104336, "grad_norm": 3.72402286529541, "learning_rate": 1.998858711059381e-05, "loss": 1.2606, "step": 1650 },
    { "epoch": 0.3488372093023256, "grad_norm": 3.596006393432617, "learning_rate": 1.9986773795127425e-05, "loss": 1.2509, "step": 1665 },
    { "epoch": 0.3519798868636078, "grad_norm": 2.3336808681488037, "learning_rate": 1.9984826942132087e-05, "loss": 1.2389, "step": 1680 },
    { "epoch": 0.35512256442489, "grad_norm": 2.766104221343994, "learning_rate": 1.9982746577640024e-05, "loss": 1.2334, "step": 1695 },
    { "epoch": 0.3582652419861722, "grad_norm": 2.6721198558807373, "learning_rate": 1.9980532729468698e-05, "loss": 1.2069, "step": 1710 },
    { "epoch": 0.36140791954745444, "grad_norm": 2.4807119369506836, "learning_rate": 1.9978185427220447e-05, "loss": 1.264, "step": 1725 },
    { "epoch": 0.36455059710873666, "grad_norm": 2.6071507930755615, "learning_rate": 1.9975704702282074e-05, "loss": 1.2745, "step": 1740 },
    { "epoch": 0.36769327467001883, "grad_norm": 2.5577876567840576, "learning_rate": 1.9973090587824443e-05, "loss": 1.2244, "step": 1755 },
    { "epoch": 0.37083595223130106, "grad_norm": 5.360003471374512, "learning_rate": 1.997034311880202e-05, "loss": 1.1723, "step": 1770 },
    { "epoch": 0.3739786297925833, "grad_norm": 2.606682777404785, "learning_rate": 1.996746233195242e-05, "loss": 1.2349, "step": 1785 },
    { "epoch": 0.3771213073538655, "grad_norm": 2.898453712463379, "learning_rate": 1.9964448265795903e-05, "loss": 1.2428, "step": 1800 },
    { "epoch": 0.3802639849151477, "grad_norm": 2.8664214611053467, "learning_rate": 1.9961300960634876e-05, "loss": 1.2338, "step": 1815 },
    { "epoch": 0.3834066624764299, "grad_norm": 3.170452117919922, "learning_rate": 1.9958020458553326e-05, "loss": 1.1729, "step": 1830 },
    { "epoch": 0.38654934003771213, "grad_norm": 2.6928646564483643, "learning_rate": 1.995460680341629e-05, "loss": 1.2791, "step": 1845 },
    { "epoch": 0.38969201759899436, "grad_norm": 5.1588287353515625, "learning_rate": 1.9951060040869245e-05, "loss": 1.1712, "step": 1860 },
    { "epoch": 0.3928346951602766, "grad_norm": 4.63333797454834, "learning_rate": 1.994738021833751e-05, "loss": 1.285, "step": 1875 },
    { "epoch": 0.39597737272155875, "grad_norm": 2.860305070877075, "learning_rate": 1.9943567385025605e-05, "loss": 1.1853, "step": 1890 },
    { "epoch": 0.399120050282841, "grad_norm": 3.4676976203918457, "learning_rate": 1.9939621591916587e-05, "loss": 1.2164, "step": 1905 },
    { "epoch": 0.4022627278441232, "grad_norm": 4.743997573852539, "learning_rate": 1.9935542891771392e-05, "loss": 1.2182, "step": 1920 },
    { "epoch": 0.40540540540540543, "grad_norm": 4.906563758850098, "learning_rate": 1.9931331339128107e-05, "loss": 1.1844, "step": 1935 },
    { "epoch": 0.4085480829666876, "grad_norm": 2.863513946533203, "learning_rate": 1.9926986990301244e-05, "loss": 1.2124, "step": 1950 },
    { "epoch": 0.4116907605279698, "grad_norm": 2.1002469062805176, "learning_rate": 1.9922509903381e-05, "loss": 1.1589, "step": 1965 },
    { "epoch": 0.41483343808925205, "grad_norm": 5.061679840087891, "learning_rate": 1.991790013823246e-05, "loss": 1.2208, "step": 1980 },
    { "epoch": 0.4179761156505343, "grad_norm": 5.046812534332275, "learning_rate": 1.9913157756494822e-05, "loss": 1.2122, "step": 1995 },
    { "epoch": 0.42111879321181644, "grad_norm": 2.990056037902832, "learning_rate": 1.9908282821580544e-05, "loss": 1.2296, "step": 2010 },
    { "epoch": 0.42426147077309867, "grad_norm": 2.477032423019409, "learning_rate": 1.9903275398674523e-05, "loss": 1.2259, "step": 2025 },
    { "epoch": 0.4274041483343809, "grad_norm": 2.7786192893981934, "learning_rate": 1.9898135554733198e-05, "loss": 1.2062, "step": 2040 },
    { "epoch": 0.4305468258956631, "grad_norm": 2.325221061706543, "learning_rate": 1.989286335848368e-05, "loss": 1.2676, "step": 2055 },
    { "epoch": 0.4336895034569453, "grad_norm": 2.26428484916687, "learning_rate": 1.988745888042281e-05, "loss": 1.1832, "step": 2070 },
    { "epoch": 0.4368321810182275, "grad_norm": 2.361360788345337, "learning_rate": 1.988192219281624e-05, "loss": 1.1814, "step": 2085 },
    { "epoch": 0.43997485857950974, "grad_norm": 2.2548787593841553, "learning_rate": 1.987625336969744e-05, "loss": 1.1371, "step": 2100 },
    { "epoch": 0.44311753614079197, "grad_norm": 2.9371745586395264, "learning_rate": 1.9870452486866733e-05, "loss": 1.1185, "step": 2115 },
    { "epoch": 0.4462602137020742, "grad_norm": 2.250372886657715, "learning_rate": 1.9864519621890263e-05, "loss": 1.1919, "step": 2130 },
    { "epoch": 0.44940289126335636, "grad_norm": 2.822431802749634, "learning_rate": 1.9858454854098968e-05, "loss": 1.1479, "step": 2145 },
    { "epoch": 0.4525455688246386, "grad_norm": 5.055064678192139, "learning_rate": 1.985225826458751e-05, "loss": 1.2237, "step": 2160 },
    { "epoch": 0.4556882463859208, "grad_norm": 2.3341803550720215, "learning_rate": 1.9845929936213216e-05, "loss": 1.1988, "step": 2175 },
    { "epoch": 0.45883092394720304, "grad_norm": 2.5791430473327637, "learning_rate": 1.9839469953594927e-05, "loss": 1.19, "step": 2190 },
    { "epoch": 0.4619736015084852, "grad_norm": 3.167426824569702, "learning_rate": 1.9832878403111908e-05, "loss": 1.1852, "step": 2205 },
    { "epoch": 0.46511627906976744, "grad_norm": 4.721558570861816, "learning_rate": 1.9826155372902667e-05, "loss": 1.198, "step": 2220 },
    { "epoch": 0.46825895663104966, "grad_norm": 2.5482370853424072, "learning_rate": 1.9819300952863783e-05, "loss": 1.1782, "step": 2235 },
    { "epoch": 0.4714016341923319, "grad_norm": 3.1147351264953613, "learning_rate": 1.9812315234648714e-05, "loss": 1.2138, "step": 2250 },
    { "epoch": 0.47454431175361406, "grad_norm": 4.730167865753174, "learning_rate": 1.980519831166656e-05, "loss": 1.1622, "step": 2265 },
    { "epoch": 0.4776869893148963, "grad_norm": 1.9187180995941162, "learning_rate": 1.979795027908082e-05, "loss": 1.1624, "step": 2280 },
    { "epoch": 0.4808296668761785, "grad_norm": 2.371293067932129, "learning_rate": 1.9790571233808108e-05, "loss": 1.1823, "step": 2295 },
    { "epoch": 0.48397234443746073, "grad_norm": 2.103717803955078, "learning_rate": 1.978306127451687e-05, "loss": 1.2298, "step": 2310 },
    { "epoch": 0.4871150219987429, "grad_norm": 3.5898678302764893, "learning_rate": 1.9775420501626073e-05, "loss": 1.2612, "step": 2325 },
    { "epoch": 0.49025769956002513, "grad_norm": 2.680027723312378, "learning_rate": 1.976764901730383e-05, "loss": 1.2161, "step": 2340 },
    { "epoch": 0.49340037712130735, "grad_norm": 2.514291286468506, "learning_rate": 1.9759746925466073e-05, "loss": 1.1772, "step": 2355 },
    { "epoch": 0.4965430546825896, "grad_norm": 2.453972816467285, "learning_rate": 1.9751714331775125e-05, "loss": 1.1982, "step": 2370 },
    { "epoch": 0.4996857322438718, "grad_norm": 6.017053604125977, "learning_rate": 1.9743551343638324e-05, "loss": 1.181, "step": 2385 },
    { "epoch": 0.502828409805154, "grad_norm": 2.46274733543396, "learning_rate": 1.973525807020656e-05, "loss": 1.1537, "step": 2400 },
    { "epoch": 0.5059710873664363, "grad_norm": 2.6295454502105713, "learning_rate": 1.972683462237282e-05, "loss": 1.1961, "step": 2415 },
    { "epoch": 0.5091137649277184, "grad_norm": 5.519942760467529, "learning_rate": 1.9718281112770723e-05, "loss": 1.1998, "step": 2430 },
    { "epoch": 0.5122564424890006, "grad_norm": 2.5744876861572266, "learning_rate": 1.9709597655772993e-05, "loss": 1.1511, "step": 2445 },
    { "epoch": 0.5153991200502829, "grad_norm": 2.621332883834839, "learning_rate": 1.970078436748994e-05, "loss": 1.1398, "step": 2460 },
    { "epoch": 0.518541797611565, "grad_norm": 5.679701328277588, "learning_rate": 1.9691841365767904e-05, "loss": 1.1899, "step": 2475 },
    { "epoch": 0.5216844751728472, "grad_norm": 2.662759780883789, "learning_rate": 1.9682768770187675e-05, "loss": 1.1442, "step": 2490 },
    { "epoch": 0.5248271527341295, "grad_norm": 2.7816829681396484, "learning_rate": 1.967356670206291e-05, "loss": 1.2164, "step": 2505 },
    { "epoch": 0.5279698302954117, "grad_norm": 5.280804634094238, "learning_rate": 1.9664235284438495e-05, "loss": 1.186, "step": 2520 },
    { "epoch": 0.531112507856694, "grad_norm": 4.3773112297058105, "learning_rate": 1.96547746420889e-05, "loss": 1.2107, "step": 2535 },
    { "epoch": 0.5342551854179761, "grad_norm": 2.773582696914673, "learning_rate": 1.9645184901516525e-05, "loss": 1.2216, "step": 2550 },
    { "epoch": 0.5373978629792583, "grad_norm": 2.643582820892334, "learning_rate": 1.9635466190949987e-05, "loss": 1.1888, "step": 2565 },
    { "epoch": 0.5405405405405406, "grad_norm": 2.277792453765869, "learning_rate": 1.962561864034243e-05, "loss": 1.225, "step": 2580 },
    { "epoch": 0.5436832181018227, "grad_norm": 2.580367088317871, "learning_rate": 1.9615642381369765e-05, "loss": 1.2064, "step": 2595 },
    { "epoch": 0.5468258956631049, "grad_norm": 4.304893493652344, "learning_rate": 1.9605537547428922e-05, "loss": 1.1652, "step": 2610 },
    { "epoch": 0.5499685732243872, "grad_norm": 2.5215561389923096, "learning_rate": 1.959530427363607e-05, "loss": 1.1909, "step": 2625 },
    { "epoch": 0.5531112507856694, "grad_norm": 3.4145307540893555, "learning_rate": 1.958494269682479e-05, "loss": 1.1211, "step": 2640 },
    { "epoch": 0.5562539283469516, "grad_norm": 4.1698079109191895, "learning_rate": 1.9574452955544274e-05, "loss": 1.1235, "step": 2655 },
    { "epoch": 0.5593966059082338, "grad_norm": 4.188419818878174, "learning_rate": 1.9563835190057442e-05, "loss": 1.1925, "step": 2670 },
    { "epoch": 0.562539283469516, "grad_norm": 4.869725704193115, "learning_rate": 1.9553089542339093e-05, "loss": 1.172, "step": 2685 },
    { "epoch": 0.5656819610307983, "grad_norm": 4.675424575805664, "learning_rate": 1.9542216156073994e-05, "loss": 1.218, "step": 2700 },
    { "epoch": 0.5688246385920804, "grad_norm": 2.655273914337158, "learning_rate": 1.9531215176654953e-05, "loss": 1.128, "step": 2715 },
    { "epoch": 0.5719673161533627, "grad_norm": 3.345499277114868, "learning_rate": 1.9520086751180894e-05, "loss": 1.2236, "step": 2730 },
    { "epoch": 0.5751099937146449, "grad_norm": 2.5058796405792236, "learning_rate": 1.9508831028454862e-05, "loss": 1.1895, "step": 2745 },
    { "epoch": 0.578252671275927, "grad_norm": 2.2655258178710938, "learning_rate": 1.9497448158982062e-05, "loss": 1.1486, "step": 2760 },
    { "epoch": 0.5813953488372093, "grad_norm": 4.1772894859313965, "learning_rate": 1.948593829496783e-05, "loss": 1.1616, "step": 2775 },
    { "epoch": 0.5845380263984915, "grad_norm": 2.290619373321533, "learning_rate": 1.9474301590315605e-05, "loss": 1.1508, "step": 2790 },
    { "epoch": 0.5876807039597737, "grad_norm": 2.4885716438293457, "learning_rate": 1.9462538200624855e-05, "loss": 1.1727, "step": 2805 },
    { "epoch": 0.590823381521056, "grad_norm": 2.564606189727783, "learning_rate": 1.9450648283189035e-05, "loss": 1.2178, "step": 2820 },
    { "epoch": 0.5939660590823381, "grad_norm": 2.756286859512329, "learning_rate": 1.9438631996993424e-05, "loss": 1.2143, "step": 2835 },
    { "epoch": 0.5971087366436204, "grad_norm": 4.876669883728027, "learning_rate": 1.9426489502713062e-05, "loss": 1.1908, "step": 2850 },
    { "epoch": 0.6002514142049026, "grad_norm": 1.9889987707138062, "learning_rate": 1.9414220962710556e-05, "loss": 1.1404, "step": 2865 },
    { "epoch": 0.6033940917661847, "grad_norm": 2.6921072006225586, "learning_rate": 1.9401826541033935e-05, "loss": 1.2159, "step": 2880 },
    { "epoch": 0.606536769327467, "grad_norm": 1.9745949506759644, "learning_rate": 1.9389306403414438e-05, "loss": 1.1828, "step": 2895 },
    { "epoch": 0.6096794468887492, "grad_norm": 2.012610912322998, "learning_rate": 1.9376660717264316e-05, "loss": 1.1413, "step": 2910 },
    { "epoch": 0.6128221244500315, "grad_norm": 3.3427295684814453, "learning_rate": 1.9363889651674575e-05, "loss": 1.1888, "step": 2925 },
    { "epoch": 0.6159648020113137, "grad_norm": 3.9471700191497803, "learning_rate": 1.935099337741274e-05, "loss": 1.1882, "step": 2940 },
    { "epoch": 0.6191074795725958, "grad_norm": 2.1639480590820312, "learning_rate": 1.9337972066920533e-05, "loss": 1.1518, "step": 2955 },
    { "epoch": 0.6222501571338781, "grad_norm": 2.18038272857666, "learning_rate": 1.9324825894311613e-05, "loss": 1.1424, "step": 2970 },
    { "epoch": 0.6253928346951603, "grad_norm": 2.4745776653289795, "learning_rate": 1.9311555035369206e-05, "loss": 1.2065, "step": 2985 },
    { "epoch": 0.6285355122564424, "grad_norm": 2.4123799800872803, "learning_rate": 1.9298159667543795e-05, "loss": 1.1677, "step": 3000 },
    { "epoch": 0.6316781898177247, "grad_norm": 2.8787992000579834, "learning_rate": 1.92846399699507e-05, "loss": 1.2035, "step": 3015 },
    { "epoch": 0.6348208673790069, "grad_norm": 2.3905246257781982, "learning_rate": 1.927099612336773e-05, "loss": 1.1933, "step": 3030 },
    { "epoch": 0.6379635449402892, "grad_norm": 3.8858561515808105, "learning_rate": 1.9257228310232738e-05, "loss": 1.1913, "step": 3045 },
    { "epoch": 0.6411062225015713, "grad_norm": 3.567950963973999, "learning_rate": 1.924333671464118e-05, "loss": 1.2417, "step": 3060 },
    { "epoch": 0.6442489000628535, "grad_norm": 1.8170944452285767, "learning_rate": 1.9229321522343677e-05, "loss": 1.1476, "step": 3075 },
    { "epoch": 0.6473915776241358, "grad_norm": 3.044593095779419, "learning_rate": 1.9215182920743502e-05, "loss": 1.1939, "step": 3090 },
    { "epoch": 0.650534255185418, "grad_norm": 3.13097882270813, "learning_rate": 1.9200921098894094e-05, "loss": 1.2007, "step": 3105 },
    { "epoch": 0.6536769327467001, "grad_norm": 2.1468851566314697, "learning_rate": 1.918653624749652e-05, "loss": 1.1587, "step": 3120 },
    { "epoch": 0.6568196103079824, "grad_norm": 2.9996323585510254, "learning_rate": 1.9172028558896935e-05, "loss": 1.1762, "step": 3135 },
    { "epoch": 0.6599622878692646, "grad_norm": 2.5897107124328613, "learning_rate": 1.9157398227084002e-05, "loss": 1.1334, "step": 3150 },
    { "epoch": 0.6631049654305469, "grad_norm": 3.80074143409729, "learning_rate": 1.91426454476863e-05, "loss": 1.2642, "step": 3165 },
    { "epoch": 0.666247642991829, "grad_norm": 1.9756152629852295, "learning_rate": 1.9127770417969703e-05, "loss": 1.1418, "step": 3180 },
    { "epoch": 0.6693903205531112, "grad_norm": 1.7655247449874878, "learning_rate": 1.9112773336834754e-05, "loss": 1.1605, "step": 3195 },
    { "epoch": 0.6725329981143935, "grad_norm": 2.2654428482055664, "learning_rate": 1.9097654404814e-05, "loss": 1.2366, "step": 3210 },
    { "epoch": 0.6756756756756757, "grad_norm": 2.2166404724121094, "learning_rate": 1.9082413824069302e-05, "loss": 1.1469, "step": 3225 },
    { "epoch": 0.6788183532369579, "grad_norm": 2.409104824066162, "learning_rate": 1.9067051798389148e-05, "loss": 1.1303, "step": 3240 },
    { "epoch": 0.6819610307982401, "grad_norm": 2.2395966053009033, "learning_rate": 1.905156853318591e-05, "loss": 1.1507, "step": 3255 },
    { "epoch": 0.6851037083595223, "grad_norm": 2.369009017944336, "learning_rate": 1.903596423549312e-05, "loss": 1.1468, "step": 3270 },
    { "epoch": 0.6882463859208046, "grad_norm": 4.745908260345459, "learning_rate": 1.9020239113962677e-05, "loss": 1.1675, "step": 3285 },
    { "epoch": 0.6913890634820867, "grad_norm": 2.9272751808166504, "learning_rate": 1.900439337886207e-05, "loss": 1.1604, "step": 3300 },
    { "epoch": 0.6945317410433689, "grad_norm": 1.923449993133545, "learning_rate": 1.898842724207157e-05, "loss": 1.136, "step": 3315 },
    { "epoch": 0.6976744186046512, "grad_norm": 2.657238006591797, "learning_rate": 1.897234091708139e-05, "loss": 1.212, "step": 3330 },
    { "epoch": 0.7008170961659334, "grad_norm": 2.484586715698242, "learning_rate": 1.8956134618988828e-05, "loss": 1.1938, "step": 3345 },
    { "epoch": 0.7039597737272156, "grad_norm": 2.127962589263916, "learning_rate": 1.89398085644954e-05, "loss": 1.138, "step": 3360 },
    { "epoch": 0.7071024512884978, "grad_norm": 4.683898448944092, "learning_rate": 1.8923362971903932e-05, "loss": 1.2393, "step": 3375 },
    { "epoch": 0.71024512884978, "grad_norm": 2.090588092803955, "learning_rate": 1.8906798061115654e-05, "loss": 1.1721, "step": 3390 },
    { "epoch": 0.7133878064110623, "grad_norm": 2.328129768371582, "learning_rate": 1.8890114053627244e-05, "loss": 1.1351, "step": 3405 },
    { "epoch": 0.7165304839723444, "grad_norm": 2.18684458732605, "learning_rate": 1.8873311172527883e-05, "loss": 1.1793, "step": 3420 },
    { "epoch": 0.7196731615336267, "grad_norm": 3.865600109100342, "learning_rate": 1.885638964249625e-05, "loss": 1.1605, "step": 3435 },
    { "epoch": 0.7228158390949089, "grad_norm": 1.9220081567764282, "learning_rate": 1.8839349689797542e-05, "loss": 1.1056, "step": 3450 },
    { "epoch": 0.725958516656191, "grad_norm": 2.822507858276367, "learning_rate": 1.8822191542280433e-05, "loss": 1.2242, "step": 3465 },
    { "epoch": 0.7291011942174733, "grad_norm": 2.1855289936065674, "learning_rate": 1.880491542937404e-05, "loss": 1.1582, "step": 3480 },
    { "epoch": 0.7322438717787555, "grad_norm": 2.4138951301574707, "learning_rate": 1.8787521582084833e-05, "loss": 1.1889, "step": 3495 },
    { "epoch": 0.7353865493400377, "grad_norm": 4.04121732711792, "learning_rate": 1.8770010232993562e-05, "loss": 1.1682, "step": 3510 },
    { "epoch": 0.73852922690132, "grad_norm": 2.7555859088897705, "learning_rate": 1.875238161625216e-05, "loss": 1.1242, "step": 3525 },
    { "epoch": 0.7416719044626021, "grad_norm": 2.1336309909820557, "learning_rate": 1.873463596758058e-05, "loss": 1.1823, "step": 3540 },
    { "epoch": 0.7448145820238844, "grad_norm": 2.1928298473358154, "learning_rate": 1.871677352426367e-05, "loss": 1.1785, "step": 3555 },
    { "epoch": 0.7479572595851666, "grad_norm": 1.8896750211715698, "learning_rate": 1.8698794525147988e-05, "loss": 1.1871, "step": 3570 },
    { "epoch": 0.7510999371464487, "grad_norm": 2.354185104370117, "learning_rate": 1.8680699210638608e-05, "loss": 1.072, "step": 3585 },
    { "epoch": 0.754242614707731, "grad_norm": 2.3728153705596924, "learning_rate": 1.8662487822695906e-05, "loss": 1.1607, "step": 3600 },
    { "epoch": 0.7573852922690132, "grad_norm": 1.997661828994751, "learning_rate": 1.8644160604832344e-05, "loss": 1.1824, "step": 3615 },
    { "epoch": 0.7605279698302954, "grad_norm": 2.4399378299713135, "learning_rate": 1.8625717802109173e-05, "loss": 1.1794, "step": 3630 },
    { "epoch": 0.7636706473915776, "grad_norm": 2.1330130100250244, "learning_rate": 1.860715966113319e-05, "loss": 1.2458, "step": 3645 },
    { "epoch": 0.7668133249528598, "grad_norm": 2.0650925636291504, "learning_rate": 1.8588486430053442e-05, "loss": 1.1785, "step": 3660 },
    { "epoch": 0.7699560025141421, "grad_norm": 2.570436477661133, "learning_rate": 1.856969835855787e-05, "loss": 1.154, "step": 3675 },
    { "epoch": 0.7730986800754243, "grad_norm": 4.064576625823975, "learning_rate": 1.855079569787002e-05, "loss": 1.1833, "step": 3690 },
    { "epoch": 0.7762413576367064, "grad_norm": 2.199369430541992, "learning_rate": 1.8531778700745652e-05, "loss": 1.1681, "step": 3705 },
    { "epoch": 0.7793840351979887, "grad_norm": 2.3457729816436768, "learning_rate": 1.851264762146937e-05, "loss": 1.1177, "step": 3720 },
    { "epoch": 0.7825267127592709, "grad_norm": 1.790176272392273, "learning_rate": 1.849340271585121e-05, "loss": 1.1603, "step": 3735 },
    { "epoch": 0.7856693903205532, "grad_norm": 1.7440158128738403, "learning_rate": 1.8474044241223247e-05, "loss": 1.1867, "step": 3750 },
    { "epoch": 0.7888120678818353, "grad_norm": 2.208021640777588, "learning_rate": 1.8454572456436126e-05, "loss": 1.1381, "step": 3765 },
    { "epoch": 0.7919547454431175, "grad_norm": 2.744593858718872, "learning_rate": 1.8434987621855615e-05, "loss": 1.1186, "step": 3780 },
    { "epoch": 0.7950974230043998, "grad_norm": 2.6648318767547607, "learning_rate": 1.841528999935912e-05, "loss": 1.1345, "step": 3795 },
    { "epoch": 0.798240100565682, "grad_norm": 2.1822752952575684, "learning_rate": 1.8395479852332185e-05, "loss": 1.2148, "step": 3810 },
    { "epoch": 0.8013827781269641, "grad_norm": 2.2398853302001953, "learning_rate": 1.8375557445664962e-05, "loss": 1.1915, "step": 3825 },
    { "epoch": 0.8045254556882464, "grad_norm": 3.89241886138916, "learning_rate": 1.835552304574869e-05, "loss": 1.2088, "step": 3840 },
    { "epoch": 0.8076681332495286, "grad_norm": 2.0591068267822266, "learning_rate": 1.8335376920472098e-05, "loss": 1.2107, "step": 3855 },
    { "epoch": 0.8108108108108109, "grad_norm": 2.421823501586914, "learning_rate": 1.831511933921787e-05, "loss": 1.1696, "step": 3870 },
    { "epoch": 0.813953488372093, "grad_norm": 3.9787890911102295, "learning_rate": 1.829475057285899e-05, "loss": 1.1756, "step": 3885 },
    { "epoch": 0.8170961659333752, "grad_norm": 2.6043264865875244, "learning_rate": 1.8274270893755166e-05, "loss": 1.1729, "step": 3900 },
    { "epoch": 0.8202388434946575, "grad_norm": 2.131624221801758, "learning_rate": 1.825368057574916e-05, "loss": 1.1465, "step": 3915 },
    { "epoch": 0.8233815210559396, "grad_norm": 2.2352840900421143, "learning_rate": 1.823297989416314e-05, "loss": 1.1126, "step": 3930 },
    { "epoch": 0.8265241986172219, "grad_norm": 1.9127349853515625, "learning_rate": 1.821216912579499e-05, "loss": 1.1081, "step": 3945 },
    { "epoch": 0.8296668761785041, "grad_norm": 1.8138941526412964, "learning_rate": 1.819124854891461e-05, "loss": 1.1369, "step": 3960 },
    { "epoch": 0.8328095537397863, "grad_norm": 3.4601540565490723, "learning_rate": 1.817021844326021e-05, "loss": 1.1378, "step": 3975 },
    { "epoch": 0.8359522313010685, "grad_norm": 2.171710968017578, "learning_rate": 1.814907909003454e-05, "loss": 1.146, "step": 3990 },
    { "epoch": 0.8390949088623507, "grad_norm": 2.376568078994751, "learning_rate": 1.8127830771901157e-05, "loss": 1.1759, "step": 4005 },
    { "epoch": 0.8422375864236329, "grad_norm": 4.197197437286377, "learning_rate": 1.810647377298064e-05, "loss": 1.1319, "step": 4020 },
    { "epoch": 0.8453802639849152, "grad_norm": 1.8338217735290527, "learning_rate": 1.8085008378846777e-05, "loss": 1.126, "step": 4035 },
    { "epoch": 0.8485229415461973, "grad_norm": 4.26235818862915, "learning_rate": 1.806343487652276e-05, "loss": 1.1889, "step": 4050 },
    { "epoch": 0.8516656191074796, "grad_norm": 2.5566670894622803, "learning_rate": 1.8041753554477344e-05, "loss": 1.1377, "step": 4065 },
    { "epoch": 0.8548082966687618, "grad_norm": 3.85239839553833, "learning_rate": 1.8019964702620987e-05, "loss": 1.1859, "step": 4080 },
    { "epoch": 0.857950974230044, "grad_norm": 1.832269549369812, "learning_rate": 1.7998068612301976e-05, "loss": 1.1816, "step": 4095 },
    { "epoch": 0.8610936517913262, "grad_norm": 2.670729160308838, "learning_rate": 1.7976065576302528e-05, "loss": 1.1686, "step": 4110 },
    { "epoch": 0.8642363293526084, "grad_norm": 2.3271324634552, "learning_rate": 1.7953955888834885e-05, "loss": 1.1683, "step": 4125 },
    { "epoch": 0.8673790069138906, "grad_norm": 2.2425904273986816, "learning_rate": 1.7931739845537365e-05, "loss": 1.182, "step": 4140 },
    { "epoch": 0.8705216844751729, "grad_norm": 1.5735398530960083, "learning_rate": 1.7909417743470415e-05, "loss": 1.0888, "step": 4155 },
    { "epoch": 0.873664362036455, "grad_norm": 2.4160172939300537, "learning_rate": 1.7886989881112647e-05, "loss": 1.1943, "step": 4170 },
    { "epoch": 0.8768070395977373, "grad_norm": 2.265516519546509, "learning_rate": 1.786445655835684e-05, "loss": 1.1819, "step": 4185 },
    { "epoch": 0.8799497171590195, "grad_norm": 3.3854482173919678, "learning_rate": 1.784181807650592e-05, "loss": 1.1299, "step": 4200 },
    { "epoch": 0.8830923947203017, "grad_norm": 1.209488868713379, "learning_rate": 1.7819074738268956e-05, "loss": 1.0379, "step": 4215 },
    { "epoch": 0.8862350722815839, "grad_norm": 2.617237091064453, "learning_rate": 1.7796226847757084e-05, "loss": 1.2145, "step": 4230 },
    { "epoch": 0.8893777498428661, "grad_norm": 4.028698921203613, "learning_rate": 1.7773274710479473e-05, "loss": 1.117, "step": 4245 },
    { "epoch": 0.8925204274041484, "grad_norm": 2.382817506790161, "learning_rate": 1.7750218633339194e-05, "loss": 1.1122, "step": 4260 },
    { "epoch": 0.8956631049654306, "grad_norm": 2.063629150390625, "learning_rate": 1.7727058924629165e-05, "loss": 1.1553, "step": 4275 },
    { "epoch": 0.8988057825267127, "grad_norm": 1.7411893606185913, "learning_rate": 1.7703795894028003e-05, "loss": 1.1544, "step": 4290 },
    { "epoch": 0.901948460087995, "grad_norm": 1.9747669696807861, "learning_rate": 1.768042985259588e-05, "loss": 1.1875, "step": 4305 },
    { "epoch": 0.9050911376492772, "grad_norm": 2.338653326034546, "learning_rate": 1.765696111277038e-05, "loss": 1.1609, "step": 4320 },
    { "epoch": 0.9082338152105593, "grad_norm": 2.3144114017486572, "learning_rate": 1.7633389988362305e-05, "loss": 1.1843, "step": 4335 },
    { "epoch": 0.9113764927718416, "grad_norm": 2.072577953338623, "learning_rate": 1.760971679455149e-05, "loss": 1.1109, "step": 4350 },
    { "epoch": 0.9145191703331238, "grad_norm": 2.2952466011047363, "learning_rate": 1.758594184788258e-05, "loss": 1.1714, "step": 4365 },
    { "epoch": 0.9176618478944061, "grad_norm": 2.254108428955078, "learning_rate": 1.7562065466260816e-05, "loss": 1.1627, "step": 4380 },
    { "epoch": 0.9208045254556882, "grad_norm": 2.286694049835205, "learning_rate": 1.753808796894775e-05, "loss": 1.135, "step": 4395 },
    { "epoch": 0.9239472030169704, "grad_norm": 3.5377025604248047, "learning_rate": 1.751400967655701e-05, "loss": 1.1172, "step": 4410 },
    { "epoch": 0.9270898805782527, "grad_norm": 1.9915852546691895, "learning_rate": 1.7489830911049995e-05, "loss": 1.1235, "step": 4425 },
    { "epoch": 0.9302325581395349, "grad_norm": 1.916908860206604, "learning_rate": 1.746555199573158e-05, "loss": 1.1604, "step": 4440 },
    { "epoch": 0.933375235700817, "grad_norm": 2.122201442718506, "learning_rate": 1.7441173255245773e-05, "loss": 1.17, "step": 4455 },
    { "epoch": 0.9365179132620993, "grad_norm": 2.4088730812072754, "learning_rate": 1.74166950155714e-05, "loss": 1.1222, "step": 4470 },
    { "epoch": 0.9396605908233815, "grad_norm": 2.2933411598205566, "learning_rate": 1.7392117604017733e-05, "loss": 1.1322, "step": 4485 },
    { "epoch": 0.9428032683846638, "grad_norm": 2.084245204925537, "learning_rate": 1.7367441349220108e-05, "loss": 1.139, "step": 4500 },
    { "epoch": 0.9459459459459459, "grad_norm": 2.220381498336792, "learning_rate": 1.734266658113555e-05, "loss": 1.1533, "step": 4515 },
    { "epoch": 0.9490886235072281, "grad_norm": 2.4368200302124023, "learning_rate": 1.731779363103833e-05, "loss": 1.1276, "step": 4530 },
    { "epoch": 0.9522313010685104, "grad_norm": 3.9131979942321777, "learning_rate": 1.729282283151557e-05, "loss": 1.2134, "step": 4545 },
    { "epoch": 0.9553739786297926, "grad_norm": 2.0956265926361084, "learning_rate": 1.7267754516462773e-05, "loss": 1.1336, "step": 4560 },
    { "epoch": 0.9585166561910748, "grad_norm": 2.3662939071655273, "learning_rate": 1.724258902107936e-05, "loss": 1.1807, "step": 4575 },
    { "epoch": 0.961659333752357, "grad_norm": 2.2511234283447266, "learning_rate": 1.721732668186421e-05, "loss": 1.1407, "step": 4590 },
    { "epoch": 0.9648020113136392, "grad_norm": 2.2256953716278076, "learning_rate": 1.7191967836611115e-05, "loss": 1.1797, "step": 4605 },
    { "epoch": 0.9679446888749215, "grad_norm": 1.6837149858474731, "learning_rate": 1.716651282440432e-05, "loss": 1.1586, "step": 4620 },
    { "epoch": 0.9710873664362036, "grad_norm": 1.941137433052063, "learning_rate": 1.7140961985613952e-05, "loss": 1.1739, "step": 4635 },
    { "epoch": 0.9742300439974858, "grad_norm": 3.3290293216705322, "learning_rate": 1.711531566189146e-05, "loss": 1.1511, "step": 4650 },
    { "epoch": 0.9773727215587681, "grad_norm": 2.164433002471924, "learning_rate": 1.7089574196165087e-05, "loss": 1.1144, "step": 4665 },
    { "epoch": 0.9805153991200503, "grad_norm": 2.117098569869995, "learning_rate": 1.706373793263525e-05, "loss": 1.0804, "step": 4680 },
    { "epoch": 0.9836580766813325, "grad_norm": 2.2358627319335938, "learning_rate": 1.7037807216769952e-05, "loss": 1.158, "step": 4695 },
    { "epoch": 0.9868007542426147, "grad_norm": 2.1659979820251465, "learning_rate": 1.7011782395300152e-05, "loss": 1.1633, "step": 4710 },
    { "epoch": 0.9899434318038969, "grad_norm": 1.9542781114578247, "learning_rate": 1.6985663816215156e-05, "loss": 1.2018, "step": 4725 },
    { "epoch": 0.9930861093651792, "grad_norm": 2.3751957416534424, "learning_rate": 1.6959451828757915e-05, "loss": 1.1924, "step": 4740 },
    { "epoch": 0.9962287869264613, "grad_norm": 2.5773661136627197, "learning_rate": 1.6933146783420412e-05, "loss": 1.2159, "step": 4755 },
    { "epoch": 0.9993714644877436, "grad_norm": 1.986519694328308, "learning_rate": 1.6906749031938932e-05, "loss": 1.1635, "step": 4770 },
    { "epoch": 1.0025141420490258, "grad_norm": 2.238046646118164, "learning_rate": 1.688025892728938e-05, "loss": 1.1517, "step": 4785 },
    { "epoch": 1.005656819610308, "grad_norm": 2.3779406547546387, "learning_rate": 1.685367682368255e-05, "loss": 1.1341, "step": 4800 },
    { "epoch": 1.0087994971715901, "grad_norm": 2.1286470890045166, "learning_rate": 1.6827003076559403e-05, "loss": 1.1293, "step": 4815 },
    { "epoch": 1.0119421747328725, "grad_norm": 3.604013442993164, "learning_rate": 1.68002380425863e-05, "loss": 1.1733, "step": 4830 },
    { "epoch": 1.0150848522941547, "grad_norm": 1.7821085453033447, "learning_rate": 1.6773382079650237e-05, "loss": 1.1215, "step": 4845 },
    { "epoch": 1.0182275298554369, "grad_norm": 2.2310407161712646, "learning_rate": 1.6746435546854068e-05, "loss": 1.1364, "step": 4860 },
    { "epoch": 1.021370207416719, "grad_norm": 2.5142269134521484, "learning_rate": 1.6719398804511682e-05, "loss": 1.1585, "step": 4875 },
    { "epoch": 1.0245128849780012, "grad_norm": 2.2651455402374268, "learning_rate": 1.6692272214143224e-05, "loss": 1.1408, "step": 4890 },
    { "epoch": 1.0276555625392834, "grad_norm": 2.247772693634033, "learning_rate": 1.6665056138470212e-05, "loss": 1.1134, "step": 4905 },
    { "epoch": 1.0307982401005658, "grad_norm": 2.3006253242492676, "learning_rate": 1.6637750941410725e-05, "loss": 1.1252, "step": 4920 },
    { "epoch": 1.033940917661848, "grad_norm": 1.8263660669326782, "learning_rate": 1.6610356988074517e-05, "loss": 1.1427, "step": 4935 },
    { "epoch": 1.03708359522313, "grad_norm": 2.199976921081543, "learning_rate": 1.658287464475814e-05, "loss": 1.1208, "step": 4950 },
    { "epoch": 1.0402262727844123, "grad_norm": 2.1440887451171875, "learning_rate": 1.6555304278940053e-05, "loss": 1.1305, "step": 4965 },
    { "epoch": 1.0433689503456944, "grad_norm": 2.2581751346588135, "learning_rate": 1.65276462592757e-05, "loss": 1.1683, "step": 4980 },
    { "epoch": 1.0465116279069768, "grad_norm": 1.7286102771759033, "learning_rate": 1.6499900955592587e-05, "loss": 1.1857, "step": 4995 },
    { "epoch": 1.049654305468259, "grad_norm": 1.8500277996063232, "learning_rate": 1.6472068738885322e-05, "loss": 1.0997, "step": 5010 },
    { "epoch": 1.0527969830295412, "grad_norm": 2.4594027996063232, "learning_rate": 1.644414998131067e-05, "loss": 1.0588, "step": 5025 },
    { "epoch": 1.0559396605908233, "grad_norm": 2.3058676719665527, "learning_rate": 1.6416145056182577e-05, "loss": 1.1304, "step": 5040 },
    { "epoch": 1.0590823381521055, "grad_norm": 1.9614354372024536, "learning_rate": 1.6388054337967173e-05, "loss": 1.1542, "step": 5055 },
    { "epoch": 1.062225015713388, "grad_norm": 2.4512290954589844, "learning_rate": 1.635987820227776e-05, "loss": 1.1455, "step": 5070 },
    { "epoch": 1.06536769327467, "grad_norm": 3.3408472537994385, "learning_rate": 1.6331617025869787e-05, "loss": 1.1008, "step": 5085 },
    { "epoch": 1.0685103708359522, "grad_norm": 3.3527655601501465, "learning_rate": 1.6303271186635837e-05, "loss": 1.1311, "step": 5100 },
    { "epoch": 1.0716530483972344, "grad_norm": 2.1265969276428223, "learning_rate": 1.627484106360055e-05, "loss": 1.1328, "step": 5115 },
    { "epoch": 1.0747957259585166, "grad_norm": 2.1224098205566406, "learning_rate": 1.624632703691556e-05, "loss": 1.1432, "step": 5130 },
    { "epoch": 1.077938403519799, "grad_norm": 2.1932625770568848, "learning_rate": 1.6217729487854404e-05, "loss": 1.1336, "step": 5145 },
    { "epoch": 1.0810810810810811, "grad_norm": 1.477196455001831, "learning_rate": 1.6189048798807453e-05, "loss": 1.0739, "step": 5160 },
    { "epoch": 1.0842237586423633, "grad_norm": 2.2613472938537598, "learning_rate": 1.616028535327677e-05, "loss": 1.0342, "step": 5175 },
    { "epoch": 1.0873664362036455, "grad_norm": 2.0612030029296875, "learning_rate": 1.6131439535870982e-05, "loss": 1.2227, "step": 5190 },
    { "epoch": 1.0905091137649277, "grad_norm": 2.0832862854003906, "learning_rate": 1.6102511732300156e-05, "loss": 1.1417, "step": 5205 },
    { "epoch": 1.0936517913262098, "grad_norm": 2.326160192489624, "learning_rate": 1.607350232937064e-05, "loss": 1.1104, "step": 5220 },
    { "epoch": 1.0967944688874922, "grad_norm": 1.928688883781433, "learning_rate": 1.6044411714979866e-05, "loss": 1.1503, "step": 5235 },
    { "epoch": 1.0999371464487744, "grad_norm": 2.209092617034912, "learning_rate": 1.6015240278111184e-05, "loss": 1.0927, "step": 5250 },
    { "epoch": 1.1030798240100566, "grad_norm": 2.2068710327148438, "learning_rate": 1.5987941027952896e-05, "loss": 1.097, "step": 5265 },
    { "epoch": 1.1062225015713387, "grad_norm": 2.1571450233459473, "learning_rate": 1.5958614441290784e-05, "loss": 1.0841, "step": 5280 },
    { "epoch": 1.109365179132621, "grad_norm": 2.010093927383423, "learning_rate": 1.5929208179383714e-05, "loss": 1.1261, "step": 5295 },
    { "epoch": 1.1125078566939033, "grad_norm": 2.4886395931243896, "learning_rate": 1.58997226354357e-05, "loss": 1.1357, "step": 5310 },
    { "epoch": 1.1156505342551855, "grad_norm": 1.658639907836914, "learning_rate": 1.5870158203710876e-05, "loss": 1.0573, "step": 5325 },
    { "epoch": 1.1187932118164676, "grad_norm": 2.1249542236328125, "learning_rate": 1.584051527952821e-05, "loss": 1.1161, "step": 5340 },
    { "epoch": 1.1219358893777498, "grad_norm": 2.086566209793091, "learning_rate": 1.5810794259256238e-05, "loss": 1.1282, "step": 5355 },
    { "epoch": 1.125078566939032, "grad_norm": 2.5150198936462402, "learning_rate": 1.5780995540307734e-05, "loss": 1.1408, "step": 5370 },
    { "epoch": 1.1282212445003144, "grad_norm": 3.7854840755462646, "learning_rate": 1.575111952113444e-05, "loss": 1.1125, "step": 5385 },
    { "epoch": 1.1313639220615965, "grad_norm": 2.2000720500946045, "learning_rate": 1.5721166601221697e-05, "loss": 1.099, "step": 5400 },
    { "epoch": 1.1345065996228787, "grad_norm": 2.055802345275879, "learning_rate": 1.5691137181083126e-05, "loss": 1.1057, "step": 5415 },
    { "epoch": 1.1376492771841609, "grad_norm": 1.9730473756790161, "learning_rate": 1.5661031662255254e-05, "loss": 1.0642, "step": 5430 },
    { "epoch": 1.140791954745443, "grad_norm": 2.1444499492645264, "learning_rate": 1.563085044729218e-05, "loss": 1.1095, "step": 5445 },
    { "epoch": 1.1439346323067254, "grad_norm": 3.2961201667785645, "learning_rate": 1.5600593939760146e-05, "loss": 1.1613, "step": 5460 },
    { "epoch": 1.1470773098680076, "grad_norm": 2.405123710632324, "learning_rate": 1.557026254423218e-05, "loss": 1.1211, "step": 5475 },
    { "epoch": 1.1502199874292898, "grad_norm": 3.308258533477783, "learning_rate": 1.5539856666282655e-05, "loss": 1.1381, "step": 5490 },
    { "epoch": 1.153362664990572, "grad_norm": 2.2963814735412598, "learning_rate": 1.5509376712481898e-05, "loss": 1.1293, "step": 5505 },
    { "epoch": 1.156505342551854,
|
"grad_norm": 1.9797133207321167, |
|
"learning_rate": 1.5478823090390726e-05, |
|
"loss": 1.0968, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 1.1596480201131363, |
|
"grad_norm": 2.6032137870788574, |
|
"learning_rate": 1.5448196208555014e-05, |
|
"loss": 1.1462, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 1.1627906976744187, |
|
"grad_norm": 1.9197511672973633, |
|
"learning_rate": 1.5417496476500212e-05, |
|
"loss": 1.0224, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.1659333752357008, |
|
"grad_norm": 2.2676188945770264, |
|
"learning_rate": 1.538672430472589e-05, |
|
"loss": 1.0625, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 1.169076052796983, |
|
"grad_norm": 2.4030728340148926, |
|
"learning_rate": 1.535588010470026e-05, |
|
"loss": 1.1171, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 1.1722187303582652, |
|
"grad_norm": 1.387372374534607, |
|
"learning_rate": 1.5324964288854615e-05, |
|
"loss": 1.0836, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 1.1753614079195476, |
|
"grad_norm": 2.1212260723114014, |
|
"learning_rate": 1.5293977270577895e-05, |
|
"loss": 1.1069, |
|
"step": 5610 |
|
}, |
|
{
"epoch": 1.1785040854808297,
"grad_norm": 2.2747840881347656,
"learning_rate": 1.526291946421109e-05,
"loss": 1.134,
"step": 5625
},
{
"epoch": 1.181646763042112,
"grad_norm": 1.7659993171691895,
"learning_rate": 1.5231791285041741e-05,
"loss": 1.1512,
"step": 5640
},
{
"epoch": 1.184789440603394,
"grad_norm": 1.815944790840149,
"learning_rate": 1.5200593149298375e-05,
"loss": 1.0769,
"step": 5655
},
{
"epoch": 1.1879321181646763,
"grad_norm": 2.154893636703491,
"learning_rate": 1.5169325474144936e-05,
"loss": 1.1198,
"step": 5670
},
{
"epoch": 1.1910747957259584,
"grad_norm": 1.9412492513656616,
"learning_rate": 1.5137988677675215e-05,
"loss": 1.115,
"step": 5685
},
{
"epoch": 1.1942174732872408,
"grad_norm": 1.8665941953659058,
"learning_rate": 1.5106583178907244e-05,
"loss": 1.1067,
"step": 5700
},
{
"epoch": 1.197360150848523,
"grad_norm": 1.9800127744674683,
"learning_rate": 1.5075109397777714e-05,
"loss": 1.0744,
"step": 5715
},
{
"epoch": 1.2005028284098052,
"grad_norm": 3.109827756881714,
"learning_rate": 1.5043567755136346e-05,
"loss": 1.0699,
"step": 5730
},
{
"epoch": 1.2036455059710873,
"grad_norm": 1.9781428575515747,
"learning_rate": 1.5011958672740261e-05,
"loss": 1.1205,
"step": 5745
},
{
"epoch": 1.2067881835323695,
"grad_norm": 2.4044976234436035,
"learning_rate": 1.498028257324836e-05,
"loss": 1.11,
"step": 5760
},
{
"epoch": 1.2099308610936519,
"grad_norm": 2.0579380989074707,
"learning_rate": 1.4948539880215642e-05,
"loss": 1.1586,
"step": 5775
},
{
"epoch": 1.213073538654934,
"grad_norm": 3.235210418701172,
"learning_rate": 1.4916731018087576e-05,
"loss": 1.1164,
"step": 5790
},
{
"epoch": 1.2162162162162162,
"grad_norm": 2.258272647857666,
"learning_rate": 1.4884856412194386e-05,
"loss": 1.1057,
"step": 5805
},
{
"epoch": 1.2193588937774984,
"grad_norm": 3.207092761993408,
"learning_rate": 1.4852916488745409e-05,
"loss": 1.1112,
"step": 5820
},
{
"epoch": 1.2225015713387806,
"grad_norm": 2.802191972732544,
"learning_rate": 1.4820911674823345e-05,
"loss": 1.0854,
"step": 5835
},
{
"epoch": 1.2256442489000627,
"grad_norm": 2.0806314945220947,
"learning_rate": 1.4788842398378595e-05,
"loss": 1.1596,
"step": 5850
},
{
"epoch": 1.2287869264613451,
"grad_norm": 2.3855271339416504,
"learning_rate": 1.4756709088223508e-05,
"loss": 1.1108,
"step": 5865
},
{
"epoch": 1.2319296040226273,
"grad_norm": 2.3936338424682617,
"learning_rate": 1.4724512174026653e-05,
"loss": 1.1988,
"step": 5880
},
{
"epoch": 1.2350722815839095,
"grad_norm": 1.9991942644119263,
"learning_rate": 1.4692252086307079e-05,
"loss": 1.1588,
"step": 5895
},
{
"epoch": 1.2382149591451916,
"grad_norm": 2.2678310871124268,
"learning_rate": 1.465992925642856e-05,
"loss": 1.084,
"step": 5910
},
{
"epoch": 1.241357636706474,
"grad_norm": 2.767444610595703,
"learning_rate": 1.4627544116593816e-05,
"loss": 1.1242,
"step": 5925
},
{
"epoch": 1.2445003142677562,
"grad_norm": 1.6328760385513306,
"learning_rate": 1.4595097099838747e-05,
"loss": 1.087,
"step": 5940
},
{
"epoch": 1.2476429918290384,
"grad_norm": 1.8721544742584229,
"learning_rate": 1.4562588640026629e-05,
"loss": 1.1078,
"step": 5955
},
{
"epoch": 1.2507856693903205,
"grad_norm": 2.0327460765838623,
"learning_rate": 1.453001917184233e-05,
"loss": 1.0601,
"step": 5970
},
{
"epoch": 1.2539283469516027,
"grad_norm": 1.8650814294815063,
"learning_rate": 1.4497389130786474e-05,
"loss": 1.0553,
"step": 5985
},
{
"epoch": 1.2570710245128849,
"grad_norm": 2.416186809539795,
"learning_rate": 1.4464698953169649e-05,
"loss": 1.1542,
"step": 6000
},
{
"epoch": 1.260213702074167,
"grad_norm": 3.0162692070007324,
"learning_rate": 1.443194907610654e-05,
"loss": 1.1424,
"step": 6015
},
{
"epoch": 1.2633563796354494,
"grad_norm": 2.833125352859497,
"learning_rate": 1.4399139937510107e-05,
"loss": 1.1691,
"step": 6030
},
{
"epoch": 1.2664990571967316,
"grad_norm": 1.900730848312378,
"learning_rate": 1.4366271976085719e-05,
"loss": 1.0463,
"step": 6045
},
{
"epoch": 1.2696417347580138,
"grad_norm": 2.243030071258545,
"learning_rate": 1.4333345631325294e-05,
"loss": 1.1671,
"step": 6060
},
{
"epoch": 1.2727844123192962,
"grad_norm": 2.186521530151367,
"learning_rate": 1.430036134350142e-05,
"loss": 1.1653,
"step": 6075
},
{
"epoch": 1.2759270898805783,
"grad_norm": 1.8847732543945312,
"learning_rate": 1.4267319553661456e-05,
"loss": 1.0842,
"step": 6090
},
{
"epoch": 1.2790697674418605,
"grad_norm": 2.303593158721924,
"learning_rate": 1.4234220703621658e-05,
"loss": 1.0481,
"step": 6105
},
{
"epoch": 1.2822124450031427,
"grad_norm": 3.213543653488159,
"learning_rate": 1.4201065235961258e-05,
"loss": 1.1446,
"step": 6120
},
{
"epoch": 1.2853551225644249,
"grad_norm": 2.283177137374878,
"learning_rate": 1.4167853594016539e-05,
"loss": 1.114,
"step": 6135
},
{
"epoch": 1.288497800125707,
"grad_norm": 3.1280677318573,
"learning_rate": 1.4134586221874922e-05,
"loss": 1.1568,
"step": 6150
},
{
"epoch": 1.2916404776869892,
"grad_norm": 3.3405239582061768,
"learning_rate": 1.410126356436902e-05,
"loss": 1.0806,
"step": 6165
},
{
"epoch": 1.2947831552482716,
"grad_norm": 2.9373440742492676,
"learning_rate": 1.406788606707069e-05,
"loss": 1.128,
"step": 6180
},
{
"epoch": 1.2979258328095538,
"grad_norm": 2.1123921871185303,
"learning_rate": 1.4034454176285073e-05,
"loss": 1.017,
"step": 6195
},
{
"epoch": 1.301068510370836,
"grad_norm": 2.298046827316284,
"learning_rate": 1.4000968339044637e-05,
"loss": 1.157,
"step": 6210
},
{
"epoch": 1.304211187932118,
"grad_norm": 3.053687572479248,
"learning_rate": 1.3967429003103175e-05,
"loss": 1.1575,
"step": 6225
},
{
"epoch": 1.3073538654934005,
"grad_norm": 2.9560487270355225,
"learning_rate": 1.3933836616929856e-05,
"loss": 1.1414,
"step": 6240
},
{
"epoch": 1.3104965430546827,
"grad_norm": 2.401231050491333,
"learning_rate": 1.3900191629703192e-05,
"loss": 1.0565,
"step": 6255
},
{
"epoch": 1.3136392206159648,
"grad_norm": 2.4497265815734863,
"learning_rate": 1.3866494491305058e-05,
"loss": 1.1296,
"step": 6270
},
{
"epoch": 1.316781898177247,
"grad_norm": 2.0227253437042236,
"learning_rate": 1.3832745652314652e-05,
"loss": 1.1229,
"step": 6285
},
{
"epoch": 1.3199245757385292,
"grad_norm": 1.9386115074157715,
"learning_rate": 1.3798945564002493e-05,
"loss": 1.0128,
"step": 6300
},
{
"epoch": 1.3230672532998113,
"grad_norm": 1.967246413230896,
"learning_rate": 1.376509467832437e-05,
"loss": 1.1388,
"step": 6315
},
{
"epoch": 1.3262099308610937,
"grad_norm": 3.249537706375122,
"learning_rate": 1.3731193447915316e-05,
"loss": 1.0584,
"step": 6330
},
{
"epoch": 1.329352608422376,
"grad_norm": 2.7894890308380127,
"learning_rate": 1.3697242326083536e-05,
"loss": 1.1075,
"step": 6345
},
{
"epoch": 1.332495285983658,
"grad_norm": 3.2315704822540283,
"learning_rate": 1.3663241766804359e-05,
"loss": 1.0972,
"step": 6360
},
{
"epoch": 1.3356379635449402,
"grad_norm": 2.295436143875122,
"learning_rate": 1.3629192224714163e-05,
"loss": 1.1194,
"step": 6375
},
{
"epoch": 1.3387806411062226,
"grad_norm": 1.2325416803359985,
"learning_rate": 1.3595094155104297e-05,
"loss": 1.099,
"step": 6390
},
{
"epoch": 1.3419233186675048,
"grad_norm": 2.3862216472625732,
"learning_rate": 1.3560948013914997e-05,
"loss": 1.0684,
"step": 6405
},
{
"epoch": 1.345065996228787,
"grad_norm": 2.1027228832244873,
"learning_rate": 1.3526754257729273e-05,
"loss": 1.0776,
"step": 6420
},
{
"epoch": 1.3482086737900691,
"grad_norm": 2.421541213989258,
"learning_rate": 1.3492513343766831e-05,
"loss": 1.1078,
"step": 6435
},
{
"epoch": 1.3513513513513513,
"grad_norm": 2.7943713665008545,
"learning_rate": 1.3458225729877933e-05,
"loss": 1.1076,
"step": 6450
},
{
"epoch": 1.3544940289126335,
"grad_norm": 2.077890396118164,
"learning_rate": 1.3423891874537289e-05,
"loss": 1.1609,
"step": 6465
},
{
"epoch": 1.3576367064739157,
"grad_norm": 2.0880138874053955,
"learning_rate": 1.3389512236837931e-05,
"loss": 1.0667,
"step": 6480
},
{
"epoch": 1.360779384035198,
"grad_norm": 2.1045663356781006,
"learning_rate": 1.3355087276485055e-05,
"loss": 1.113,
"step": 6495
},
{
"epoch": 1.3639220615964802,
"grad_norm": 2.125664472579956,
"learning_rate": 1.3320617453789894e-05,
"loss": 1.0619,
"step": 6510
},
{
"epoch": 1.3670647391577624,
"grad_norm": 2.1109349727630615,
"learning_rate": 1.3286103229663554e-05,
"loss": 1.1235,
"step": 6525
},
{
"epoch": 1.3702074167190446,
"grad_norm": 2.0411548614501953,
"learning_rate": 1.3251545065610848e-05,
"loss": 1.0872,
"step": 6540
},
{
"epoch": 1.373350094280327,
"grad_norm": 3.083622694015503,
"learning_rate": 1.3216943423724135e-05,
"loss": 1.117,
"step": 6555
},
{
"epoch": 1.3764927718416091,
"grad_norm": 1.8469839096069336,
"learning_rate": 1.3182298766677136e-05,
"loss": 1.1404,
"step": 6570
},
{
"epoch": 1.3796354494028913,
"grad_norm": 2.121103525161743,
"learning_rate": 1.3147611557718733e-05,
"loss": 1.1007,
"step": 6585
},
{
"epoch": 1.3827781269641735,
"grad_norm": 2.183732271194458,
"learning_rate": 1.3112882260666805e-05,
"loss": 1.1362,
"step": 6600
},
{
"epoch": 1.3859208045254556,
"grad_norm": 2.0963168144226074,
"learning_rate": 1.3078111339902004e-05,
"loss": 1.1019,
"step": 6615
},
{
"epoch": 1.3890634820867378,
"grad_norm": 3.269718885421753,
"learning_rate": 1.3043299260361545e-05,
"loss": 1.1073,
"step": 6630
},
{
"epoch": 1.3922061596480202,
"grad_norm": 2.942408323287964,
"learning_rate": 1.3008446487532998e-05,
"loss": 1.0783,
"step": 6645
},
{
"epoch": 1.3953488372093024,
"grad_norm": 2.6441168785095215,
"learning_rate": 1.2973553487448068e-05,
"loss": 1.1656,
"step": 6660
},
{
"epoch": 1.3984915147705845,
"grad_norm": 2.102250099182129,
"learning_rate": 1.2938620726676345e-05,
"loss": 1.1408,
"step": 6675
},
{
"epoch": 1.4016341923318667,
"grad_norm": 3.086590051651001,
"learning_rate": 1.2903648672319084e-05,
"loss": 1.1059,
"step": 6690
},
{
"epoch": 1.404776869893149,
"grad_norm": 2.3277883529663086,
"learning_rate": 1.2868637792002952e-05,
"loss": 1.1665,
"step": 6705
},
{
"epoch": 1.4079195474544313,
"grad_norm": 1.6698582172393799,
"learning_rate": 1.2833588553873768e-05,
"loss": 1.0649,
"step": 6720
},
{
"epoch": 1.4110622250157134,
"grad_norm": 2.26218581199646,
"learning_rate": 1.2798501426590262e-05,
"loss": 1.1894,
"step": 6735
},
{
"epoch": 1.4142049025769956,
"grad_norm": 1.8945990800857544,
"learning_rate": 1.276337687931778e-05,
"loss": 1.1507,
"step": 6750
},
{
"epoch": 1.4173475801382778,
"grad_norm": 2.481565475463867,
"learning_rate": 1.2728215381722044e-05,
"loss": 1.0937,
"step": 6765
},
{
"epoch": 1.42049025769956,
"grad_norm": 3.565307140350342,
"learning_rate": 1.2693017403962842e-05,
"loss": 1.197,
"step": 6780
},
{
"epoch": 1.4236329352608421,
"grad_norm": 2.697265863418579,
"learning_rate": 1.2657783416687763e-05,
"loss": 1.0842,
"step": 6795
},
{
"epoch": 1.4267756128221245,
"grad_norm": 2.244889736175537,
"learning_rate": 1.2622513891025889e-05,
"loss": 1.0942,
"step": 6810
},
{
"epoch": 1.4299182903834067,
"grad_norm": 2.428379774093628,
"learning_rate": 1.2587209298581506e-05,
"loss": 1.131,
"step": 6825
},
{
"epoch": 1.4330609679446888,
"grad_norm": 2.320680618286133,
"learning_rate": 1.2551870111427791e-05,
"loss": 1.1279,
"step": 6840
},
{
"epoch": 1.436203645505971,
"grad_norm": 2.004258632659912,
"learning_rate": 1.25164968021005e-05,
"loss": 1.0718,
"step": 6855
},
{
"epoch": 1.4393463230672534,
"grad_norm": 3.006464719772339,
"learning_rate": 1.2481089843591655e-05,
"loss": 1.1785,
"step": 6870
},
{
"epoch": 1.4424890006285356,
"grad_norm": 2.481252670288086,
"learning_rate": 1.2445649709343217e-05,
"loss": 1.1382,
"step": 6885
},
{
"epoch": 1.4456316781898177,
"grad_norm": 2.247004985809326,
"learning_rate": 1.2410176873240751e-05,
"loss": 1.1179,
"step": 6900
},
{
"epoch": 1.4487743557511,
"grad_norm": 2.7122623920440674,
"learning_rate": 1.237467180960709e-05,
"loss": 1.0569,
"step": 6915
},
{
"epoch": 1.451917033312382,
"grad_norm": 2.119673013687134,
"learning_rate": 1.2339134993196007e-05,
"loss": 1.0939,
"step": 6930
},
{
"epoch": 1.4550597108736643,
"grad_norm": 2.4448978900909424,
"learning_rate": 1.2303566899185842e-05,
"loss": 1.0598,
"step": 6945
},
{
"epoch": 1.4582023884349467,
"grad_norm": 2.211289405822754,
"learning_rate": 1.2267968003173166e-05,
"loss": 1.1679,
"step": 6960
},
{
"epoch": 1.4613450659962288,
"grad_norm": 1.9416890144348145,
"learning_rate": 1.2232338781166422e-05,
"loss": 1.0983,
"step": 6975
},
{
"epoch": 1.464487743557511,
"grad_norm": 2.0862271785736084,
"learning_rate": 1.2196679709579546e-05,
"loss": 1.0911,
"step": 6990
},
{
"epoch": 1.4676304211187932,
"grad_norm": 2.4440407752990723,
"learning_rate": 1.2160991265225621e-05,
"loss": 1.1073,
"step": 7005
},
{
"epoch": 1.4707730986800756,
"grad_norm": 2.1422340869903564,
"learning_rate": 1.2125273925310465e-05,
"loss": 1.0904,
"step": 7020
},
{
"epoch": 1.4739157762413577,
"grad_norm": 2.2515573501586914,
"learning_rate": 1.2089528167426294e-05,
"loss": 1.1244,
"step": 7035
},
{
"epoch": 1.47705845380264,
"grad_norm": 2.149007558822632,
"learning_rate": 1.2053754469545291e-05,
"loss": 1.0963,
"step": 7050
},
{
"epoch": 1.480201131363922,
"grad_norm": 3.1129860877990723,
"learning_rate": 1.2017953310013255e-05,
"loss": 1.1655,
"step": 7065
},
{
"epoch": 1.4833438089252042,
"grad_norm": 1.7863973379135132,
"learning_rate": 1.1982125167543175e-05,
"loss": 1.0844,
"step": 7080
},
{
"epoch": 1.4864864864864864,
"grad_norm": 2.28974986076355,
"learning_rate": 1.1946270521208845e-05,
"loss": 1.0585,
"step": 7095
},
{
"epoch": 1.4896291640477686,
"grad_norm": 2.3061442375183105,
"learning_rate": 1.1910389850438457e-05,
"loss": 1.1377,
"step": 7110
},
{
"epoch": 1.492771841609051,
"grad_norm": 1.6387587785720825,
"learning_rate": 1.1874483635008183e-05,
"loss": 1.1109,
"step": 7125
},
{
"epoch": 1.4959145191703331,
"grad_norm": 1.6554300785064697,
"learning_rate": 1.1838552355035761e-05,
"loss": 1.0686,
"step": 7140
},
{
"epoch": 1.4990571967316153,
"grad_norm": 2.2356905937194824,
"learning_rate": 1.1802596490974088e-05,
"loss": 1.0877,
"step": 7155
},
{
"epoch": 1.5021998742928977,
"grad_norm": 2.838385820388794,
"learning_rate": 1.1766616523604774e-05,
"loss": 1.1015,
"step": 7170
},
{
"epoch": 1.5053425518541799,
"grad_norm": 1.8725031614303589,
"learning_rate": 1.1730612934031737e-05,
"loss": 1.0806,
"step": 7185
},
{
"epoch": 1.508485229415462,
"grad_norm": 2.853379011154175,
"learning_rate": 1.1694586203674742e-05,
"loss": 1.0796,
"step": 7200
},
{
"epoch": 1.5116279069767442,
"grad_norm": 2.5570309162139893,
"learning_rate": 1.1658536814263e-05,
"loss": 1.0773,
"step": 7215
},
{
"epoch": 1.5147705845380264,
"grad_norm": 2.4062445163726807,
"learning_rate": 1.1622465247828681e-05,
"loss": 0.9971,
"step": 7230
},
{
"epoch": 1.5179132620993085,
"grad_norm": 2.4074044227600098,
"learning_rate": 1.1586371986700522e-05,
"loss": 1.0774,
"step": 7245
},
{
"epoch": 1.5210559396605907,
"grad_norm": 2.0330376625061035,
"learning_rate": 1.155025751349732e-05,
"loss": 1.1209,
"step": 7260
},
{
"epoch": 1.5241986172218729,
"grad_norm": 2.229322671890259,
"learning_rate": 1.1516531959834856e-05,
"loss": 1.1011,
"step": 7275
},
{
"epoch": 1.5273412947831553,
"grad_norm": 2.2459731101989746,
"learning_rate": 1.1480377846161259e-05,
"loss": 1.095,
"step": 7290
},
{
"epoch": 1.5304839723444374,
"grad_norm": 1.9737571477890015,
"learning_rate": 1.1444203937706724e-05,
"loss": 1.0764,
"step": 7305
},
{
"epoch": 1.5336266499057196,
"grad_norm": 2.0487782955169678,
"learning_rate": 1.1408010718168447e-05,
"loss": 1.1322,
"step": 7320
},
{
"epoch": 1.536769327467002,
"grad_norm": 2.7282018661499023,
"learning_rate": 1.1371798671501813e-05,
"loss": 1.1322,
"step": 7335
},
{
"epoch": 1.5399120050282842,
"grad_norm": 3.496860980987549,
"learning_rate": 1.133556828191398e-05,
"loss": 1.0829,
"step": 7350
},
{
"epoch": 1.5430546825895664,
"grad_norm": 1.9811838865280151,
"learning_rate": 1.1299320033857355e-05,
"loss": 1.0813,
"step": 7365
},
{
"epoch": 1.5461973601508485,
"grad_norm": 2.5440211296081543,
"learning_rate": 1.1263054412023158e-05,
"loss": 1.0347,
"step": 7380
},
{
"epoch": 1.5493400377121307,
"grad_norm": 1.8516343832015991,
"learning_rate": 1.1226771901334901e-05,
"loss": 1.0719,
"step": 7395
},
{
"epoch": 1.5524827152734129,
"grad_norm": 2.147573709487915,
"learning_rate": 1.1190472986941939e-05,
"loss": 1.068,
"step": 7410
},
{
"epoch": 1.555625392834695,
"grad_norm": 1.999443531036377,
"learning_rate": 1.1154158154212964e-05,
"loss": 1.0674,
"step": 7425
},
{
"epoch": 1.5587680703959774,
"grad_norm": 1.5025156736373901,
"learning_rate": 1.111782788872952e-05,
"loss": 1.1003,
"step": 7440
},
{
"epoch": 1.5619107479572596,
"grad_norm": 2.162722587585449,
"learning_rate": 1.1081482676279504e-05,
"loss": 1.078,
"step": 7455
},
{
"epoch": 1.5650534255185418,
"grad_norm": 2.049753427505493,
"learning_rate": 1.1045123002850683e-05,
"loss": 1.1825,
"step": 7470
},
{
"epoch": 1.5681961030798242,
"grad_norm": 2.4640021324157715,
"learning_rate": 1.1008749354624181e-05,
"loss": 1.0942,
"step": 7485
},
{
"epoch": 1.5713387806411063,
"grad_norm": 2.2149126529693604,
"learning_rate": 1.0972362217967998e-05,
"loss": 1.1415,
"step": 7500
},
{
"epoch": 1.5744814582023885,
"grad_norm": 1.953383207321167,
"learning_rate": 1.0935962079430468e-05,
"loss": 1.0875,
"step": 7515
},
{
"epoch": 1.5776241357636707,
"grad_norm": 2.371173620223999,
"learning_rate": 1.0899549425733811e-05,
"loss": 1.1068,
"step": 7530
},
{
"epoch": 1.5807668133249528,
"grad_norm": 2.135591983795166,
"learning_rate": 1.0863124743767566e-05,
"loss": 1.1094,
"step": 7545
},
{
"epoch": 1.583909490886235,
"grad_norm": 2.1090261936187744,
"learning_rate": 1.0826688520582124e-05,
"loss": 1.0459,
"step": 7560
},
{
"epoch": 1.5870521684475172,
"grad_norm": 2.8963959217071533,
"learning_rate": 1.0790241243382187e-05,
"loss": 1.1019,
"step": 7575
},
{
"epoch": 1.5901948460087993,
"grad_norm": 2.2832436561584473,
"learning_rate": 1.0753783399520281e-05,
"loss": 1.1125,
"step": 7590
},
{
"epoch": 1.5933375235700817,
"grad_norm": 2.1328582763671875,
"learning_rate": 1.0717315476490205e-05,
"loss": 1.1088,
"step": 7605
},
{
"epoch": 1.596480201131364,
"grad_norm": 1.7953342199325562,
"learning_rate": 1.0680837961920538e-05,
"loss": 1.0812,
"step": 7620
},
{
"epoch": 1.5996228786926463,
"grad_norm": 2.0836551189422607,
"learning_rate": 1.064435134356812e-05,
"loss": 1.1015,
"step": 7635
},
{
"epoch": 1.6027655562539285,
"grad_norm": 3.2132134437561035,
"learning_rate": 1.0607856109311508e-05,
"loss": 1.1377,
"step": 7650
},
{
"epoch": 1.6059082338152106,
"grad_norm": 2.149661064147949,
"learning_rate": 1.0571352747144477e-05,
"loss": 1.0525,
"step": 7665
},
{
"epoch": 1.6090509113764928,
"grad_norm": 1.8691046237945557,
"learning_rate": 1.053484174516948e-05,
"loss": 1.0954,
"step": 7680
},
{
"epoch": 1.612193588937775,
"grad_norm": 1.9283403158187866,
"learning_rate": 1.049832359159112e-05,
"loss": 1.1199,
"step": 7695
},
{
"epoch": 1.6153362664990571,
"grad_norm": 1.9671255350112915,
"learning_rate": 1.0461798774709635e-05,
"loss": 1.068,
"step": 7710
},
{
"epoch": 1.6184789440603393,
"grad_norm": 2.721006393432617,
"learning_rate": 1.0425267782914359e-05,
"loss": 1.1274,
"step": 7725
},
{
"epoch": 1.6216216216216215,
"grad_norm": 2.0059800148010254,
"learning_rate": 1.0388731104677191e-05,
"loss": 1.0832,
"step": 7740
},
{
"epoch": 1.6247642991829039,
"grad_norm": 2.346212863922119,
"learning_rate": 1.0352189228546068e-05,
"loss": 1.1092,
"step": 7755
},
{
"epoch": 1.627906976744186,
"grad_norm": 1.6566693782806396,
"learning_rate": 1.031564264313843e-05,
"loss": 1.033,
"step": 7770
},
{
"epoch": 1.6310496543054682,
"grad_norm": 2.4089715480804443,
"learning_rate": 1.0279091837134681e-05,
"loss": 1.114,
"step": 7785
},
{
"epoch": 1.6341923318667506,
"grad_norm": 2.9500551223754883,
"learning_rate": 1.0242537299271675e-05,
"loss": 1.1282,
"step": 7800
},
{
"epoch": 1.6373350094280328,
"grad_norm": 1.8416279554367065,
"learning_rate": 1.0205979518336148e-05,
"loss": 1.0857,
"step": 7815
},
{
"epoch": 1.640477686989315,
"grad_norm": 2.1354591846466064,
"learning_rate": 1.0169418983158218e-05,
"loss": 1.0807,
"step": 7830
},
{
"epoch": 1.6436203645505971,
"grad_norm": 1.705767035484314,
"learning_rate": 1.013285618260482e-05,
"loss": 1.0273,
"step": 7845
},
{
"epoch": 1.6467630421118793,
"grad_norm": 1.9959582090377808,
"learning_rate": 1.009629160557318e-05,
"loss": 1.0705,
"step": 7860
},
{
"epoch": 1.6499057196731615,
"grad_norm": 2.004734754562378,
"learning_rate": 1.0059725740984285e-05,
"loss": 1.1216,
"step": 7875
},
{
"epoch": 1.6530483972344436,
"grad_norm": 2.1259968280792236,
"learning_rate": 1.0023159077776332e-05,
"loss": 1.1618,
"step": 7890
},
{
"epoch": 1.6561910747957258,
"grad_norm": 1.855248212814331,
"learning_rate": 9.986592104898202e-06,
"loss": 1.1007,
"step": 7905
},
{
"epoch": 1.6593337523570082,
"grad_norm": 2.1948564052581787,
"learning_rate": 9.950025311302914e-06,
"loss": 1.1203,
"step": 7920
},
{
"epoch": 1.6624764299182904,
"grad_norm": 2.140425205230713,
"learning_rate": 9.913459185941085e-06,
"loss": 1.0788,
"step": 7935
},
{
"epoch": 1.6656191074795728,
"grad_norm": 3.158501148223877,
"learning_rate": 9.87689421775441e-06,
"loss": 1.0806,
"step": 7950
},
{
"epoch": 1.668761785040855,
"grad_norm": 1.9126044511795044,
"learning_rate": 9.840330895669096e-06,
"loss": 1.086,
"step": 7965
},
{
"epoch": 1.671904462602137,
"grad_norm": 2.586374044418335,
"learning_rate": 9.803769708589352e-06,
"loss": 1.0511,
"step": 7980
},
{
"epoch": 1.6750471401634193,
"grad_norm": 2.192415475845337,
"learning_rate": 9.767211145390827e-06,
"loss": 1.1149,
"step": 7995
},
{
"epoch": 1.6781898177247014,
"grad_norm": 2.12808895111084,
"learning_rate": 9.73065569491409e-06,
"loss": 1.1015,
"step": 8010
},
{
"epoch": 1.6813324952859836,
"grad_norm": 2.3615734577178955,
"learning_rate": 9.694103845958095e-06,
"loss": 1.126,
"step": 8025
},
{
"epoch": 1.6844751728472658,
"grad_norm": 1.6970425844192505,
"learning_rate": 9.657556087273632e-06,
"loss": 1.0458,
"step": 8040
},
{
"epoch": 1.687617850408548,
"grad_norm": 2.4654476642608643,
"learning_rate": 9.621012907556793e-06,
"loss": 1.1316,
"step": 8055
},
{
"epoch": 1.6907605279698303,
"grad_norm": 2.2715861797332764,
"learning_rate": 9.584474795442454e-06,
"loss": 1.0997,
"step": 8070
},
{
"epoch": 1.6939032055311125,
"grad_norm": 2.117677688598633,
"learning_rate": 9.547942239497716e-06,
"loss": 1.0896,
"step": 8085
},
{
"epoch": 1.6970458830923947,
"grad_norm": 1.9031733274459839,
"learning_rate": 9.511415728215407e-06,
"loss": 1.0468,
"step": 8100
},
{
"epoch": 1.700188560653677,
"grad_norm": 3.147984504699707,
"learning_rate": 9.474895750007508e-06,
"loss": 1.0695,
"step": 8115
},
{
"epoch": 1.7033312382149592,
"grad_norm": 2.6544809341430664,
"learning_rate": 9.438382793198655e-06,
"loss": 1.1083,
"step": 8130
},
{
"epoch": 1.7064739157762414,
"grad_norm": 2.071148633956909,
"learning_rate": 9.401877346019587e-06,
"loss": 1.1407,
"step": 8145
},
{
"epoch": 1.7096165933375236,
"grad_norm": 2.063387155532837,
"learning_rate": 9.36537989660065e-06,
"loss": 1.0749,
"step": 8160
},
{
"epoch": 1.7127592708988058,
"grad_norm": 1.9773895740509033,
"learning_rate": 9.328890932965225e-06,
"loss": 1.0723,
"step": 8175
},
{
"epoch": 1.715901948460088,
"grad_norm": 3.0571677684783936,
"learning_rate": 9.29241094302324e-06,
"loss": 1.1296,
"step": 8190
},
{
"epoch": 1.71904462602137,
"grad_norm": 2.203171730041504,
"learning_rate": 9.255940414564621e-06,
"loss": 1.1401,
"step": 8205
},
{
"epoch": 1.7221873035826523,
"grad_norm": 1.9974406957626343,
"learning_rate": 9.21947983525279e-06,
"loss": 1.1158,
"step": 8220
},
{
"epoch": 1.7253299811439347,
"grad_norm": 2.110973834991455,
"learning_rate": 9.183029692618134e-06,
"loss": 1.0684,
"step": 8235
},
{
"epoch": 1.7284726587052168,
"grad_norm": 1.8630388975143433,
"learning_rate": 9.146590474051483e-06,
"loss": 1.0376,
"step": 8250
},
{
"epoch": 1.7316153362664992,
"grad_norm": 2.1341989040374756,
"learning_rate": 9.110162666797595e-06,
"loss": 1.0955,
"step": 8265
},
{
"epoch": 1.7347580138277814,
"grad_norm": 2.5358388423919678,
"learning_rate": 9.076174105269065e-06,
"loss": 1.1651,
"step": 8280
},
{
"epoch": 1.7379006913890636,
"grad_norm": 2.8575525283813477,
"learning_rate": 9.039769740923183e-06,
"loss": 1.1082,
"step": 8295
},
{
"epoch": 1.7410433689503457,
"grad_norm": 2.287961959838867,
"learning_rate": 9.003378216236902e-06,
"loss": 1.1151,
"step": 8310
},
{
"epoch": 1.744186046511628,
"grad_norm": 1.2058899402618408,
"learning_rate": 8.967000017817247e-06,
"loss": 1.0793,
"step": 8325
},
{
"epoch": 1.74732872407291,
"grad_norm": 2.0018842220306396,
"learning_rate": 8.930635632093046e-06,
"loss": 1.0819,
"step": 8340
},
{
"epoch": 1.7504714016341922,
"grad_norm": 1.9017311334609985,
"learning_rate": 8.894285545308437e-06,
"loss": 1.0658,
"step": 8355
},
{
"epoch": 1.7536140791954744,
"grad_norm": 1.9596132040023804,
"learning_rate": 8.857950243516364e-06,
"loss": 1.1065,
"step": 8370
},
{
"epoch": 1.7567567567567568,
"grad_norm": 1.979177713394165,
"learning_rate": 8.821630212572074e-06,
"loss": 1.0362,
"step": 8385
},
{
"epoch": 1.759899434318039,
"grad_norm": 2.1770269870758057,
"learning_rate": 8.785325938126608e-06,
"loss": 1.0974,
"step": 8400
},
{
"epoch": 1.7630421118793211,
"grad_norm": 2.125962734222412,
"learning_rate": 8.749037905620334e-06,
"loss": 1.1541,
"step": 8415
},
{
"epoch": 1.7661847894406035,
"grad_norm": 2.0153768062591553,
"learning_rate": 8.712766600276435e-06,
"loss": 1.1233,
"step": 8430
},
{
"epoch": 1.7693274670018857,
"grad_norm": 2.15311336517334,
"learning_rate": 8.676512507094438e-06,
"loss": 1.0341,
"step": 8445
},
{
"epoch": 1.7724701445631679,
"grad_norm": 2.8032867908477783,
"learning_rate": 8.640276110843702e-06,
"loss": 1.0944,
"step": 8460
},
{
"epoch": 1.77561282212445,
"grad_norm": 2.2355639934539795,
"learning_rate": 8.604057896056965e-06,
"loss": 1.1147,
"step": 8475
},
{
"epoch": 1.7787554996857322,
"grad_norm": 2.222898006439209,
"learning_rate": 8.567858347023843e-06,
"loss": 1.1063,
"step": 8490
},
{
"epoch": 1.7818981772470144,
"grad_norm": 2.043031692504883,
"learning_rate": 8.531677947784383e-06,
"loss": 1.1142,
"step": 8505
},
{
"epoch": 1.7850408548082966,
"grad_norm": 2.3380000591278076,
"learning_rate": 8.495517182122546e-06,
"loss": 1.1379,
"step": 8520
},
{
"epoch": 1.7881835323695787,
"grad_norm": 1.9827522039413452,
"learning_rate": 8.459376533559784e-06,
"loss": 1.1067,
"step": 8535
},
{
"epoch": 1.7913262099308611,
"grad_norm": 2.2196788787841797,
"learning_rate": 8.423256485348543e-06,
"loss": 1.0563,
"step": 8550
},
{
"epoch": 1.7944688874921433,
"grad_norm": 2.038891315460205,
"learning_rate": 8.387157520465816e-06,
"loss": 1.1151,
"step": 8565
},
{
"epoch": 1.7976115650534257,
"grad_norm": 2.597698211669922,
"learning_rate": 8.351080121606685e-06,
"loss": 1.1025,
"step": 8580
},
{
"epoch": 1.8007542426147078,
"grad_norm": 2.8073418140411377,
"learning_rate": 8.315024771177862e-06,
"loss": 1.136,
"step": 8595
},
{
"epoch": 1.80389692017599,
"grad_norm": 2.143312692642212,
"learning_rate": 8.278991951291234e-06,
"loss": 1.0655,
"step": 8610
},
{
"epoch": 1.8070395977372722,
"grad_norm": 1.5993940830230713,
"learning_rate": 8.242982143757429e-06,
"loss": 1.1023,
"step": 8625
},
{
"epoch": 1.8101822752985544,
"grad_norm": 2.127690553665161,
"learning_rate": 8.20699583007936e-06,
"loss": 1.104,
"step": 8640
},
{
"epoch": 1.8133249528598365,
"grad_norm": 2.57829213142395,
"learning_rate": 8.171033491445806e-06,
"loss": 1.0658,
"step": 8655
},
{
"epoch": 1.8164676304211187,
"grad_norm": 2.4844679832458496,
"learning_rate": 8.13509560872495e-06,
"loss": 1.1207,
"step": 8670
},
{
"epoch": 1.8196103079824009,
"grad_norm": 1.7117849588394165,
"learning_rate": 8.099182662457972e-06,
"loss": 1.1019,
"step": 8685
},
{
"epoch": 1.8227529855436833,
"grad_norm": 1.6486339569091797,
"learning_rate": 8.063295132852616e-06,
"loss": 1.1566,
"step": 8700
},
{
"epoch": 1.8258956631049654,
"grad_norm": 2.264791965484619,
"learning_rate": 8.027433499776768e-06,
"loss": 1.1269,
"step": 8715
},
{
"epoch": 1.8290383406662476,
"grad_norm": 2.0826590061187744,
"learning_rate": 7.99159824275204e-06,
"loss": 1.1668,
"step": 8730
},
{
"epoch": 1.83218101822753,
"grad_norm": 3.4813215732574463,
"learning_rate": 7.955789840947357e-06,
"loss": 1.0613,
"step": 8745
},
{
"epoch": 1.8353236957888122,
"grad_norm": 1.950697898864746,
"learning_rate": 7.920008773172549e-06,
"loss": 1.1151,
"step": 8760
},
{
"epoch": 1.8384663733500943,
"grad_norm": 2.761364459991455,
"learning_rate": 7.884255517871955e-06,
"loss": 1.0418,
"step": 8775
},
{
"epoch": 1.8416090509113765,
"grad_norm": 2.035162925720215,
"learning_rate": 7.848530553118024e-06,
"loss": 1.1395,
"step": 8790
},
{
"epoch": 1.8447517284726587,
"grad_norm": 2.061817169189453,
"learning_rate": 7.812834356604913e-06,
"loss": 1.1337,
"step": 8805
},
{
"epoch": 1.8478944060339408,
"grad_norm": 2.214123010635376,
"learning_rate": 7.777167405642109e-06,
"loss": 1.09,
"step": 8820
},
{
"epoch": 1.851037083595223,
"grad_norm": 2.1376426219940186,
"learning_rate": 7.741530177148041e-06,
"loss": 1.0908,
"step": 8835
},
{
"epoch": 1.8541797611565052,
"grad_norm": 1.6116961240768433,
"learning_rate": 7.70592314764371e-06,
"loss": 1.1011,
"step": 8850
},
{
"epoch": 1.8573224387177876,
"grad_norm": 2.253972291946411,
"learning_rate": 7.670346793246317e-06,
"loss": 1.0856,
"step": 8865
},
{
"epoch": 1.8604651162790697,
"grad_norm": 1.9321953058242798,
"learning_rate": 7.634801589662882e-06,
"loss": 1.0851,
"step": 8880
},
{
"epoch": 1.8636077938403521,
"grad_norm": 2.758145332336426,
"learning_rate": 7.599288012183897e-06,
"loss": 1.0952,
"step": 8895
},
{
"epoch": 1.8667504714016343,
"grad_norm": 3.209368944168091,
"learning_rate": 7.563806535676974e-06,
"loss": 1.1081,
"step": 8910
},
{
"epoch": 1.8698931489629165,
"grad_norm": 3.290693998336792,
"learning_rate": 7.528357634580486e-06,
"loss": 1.0403,
"step": 8925
},
{
"epoch": 1.8730358265241986,
"grad_norm": 2.0125486850738525,
"learning_rate": 7.4929417828972205e-06,
"loss": 1.0801,
"step": 8940
},
{
"epoch": 1.8761785040854808,
"grad_norm": 2.3519296646118164,
"learning_rate": 7.457559454188052e-06,
"loss": 1.1264,
"step": 8955
},
{
"epoch": 1.879321181646763,
"grad_norm": 2.394660472869873,
"learning_rate": 7.422211121565607e-06,
"loss": 1.1271,
"step": 8970
},
{
"epoch": 1.8824638592080452,
"grad_norm": 2.127760648727417,
"learning_rate": 7.386897257687924e-06,
"loss": 1.1308,
"step": 8985
},
{
"epoch": 1.8856065367693273,
"grad_norm": 2.5212738513946533,
"learning_rate": 7.3516183347521575e-06,
"loss": 1.0758,
"step": 9000
},
{
"epoch": 1.8887492143306097,
"grad_norm": 2.113893508911133,
"learning_rate": 7.316374824488247e-06,
"loss": 1.1463,
"step": 9015
},
{
"epoch": 1.8918918918918919,
"grad_norm": 1.9831585884094238,
"learning_rate": 7.281167198152615e-06,
"loss": 1.1409,
"step": 9030
},
{
"epoch": 1.895034569453174,
"grad_norm": 2.290170192718506,
"learning_rate": 7.245995926521854e-06,
"loss": 1.1156,
"step": 9045
},
{
"epoch": 1.8981772470144564,
"grad_norm": 2.2289717197418213,
"learning_rate": 7.2108614798864585e-06,
"loss": 1.0908,
"step": 9060
},
{
"epoch": 1.9013199245757386,
"grad_norm": 2.7847399711608887,
"learning_rate": 7.17576432804451e-06,
"loss": 1.1137,
"step": 9075
},
{
"epoch": 1.9044626021370208,
"grad_norm": 2.2532174587249756,
"learning_rate": 7.140704940295404e-06,
"loss": 1.0993,
"step": 9090
},
{
"epoch": 1.907605279698303,
"grad_norm": 3.097644329071045,
"learning_rate": 7.1056837854335804e-06,
"loss": 1.1322,
"step": 9105
},
{
"epoch": 1.9107479572595851,
"grad_norm": 2.270334005355835,
"learning_rate": 7.0707013317422446e-06,
"loss": 1.0477,
"step": 9120
},
{
"epoch": 1.9138906348208673,
"grad_norm": 2.698101282119751,
"learning_rate": 7.035758046987122e-06,
"loss": 1.0722,
"step": 9135
},
{
"epoch": 1.9170333123821495,
"grad_norm": 2.2851641178131104,
"learning_rate": 7.000854398410182e-06,
"loss": 1.1588,
"step": 9150
},
{
"epoch": 1.9201759899434316,
"grad_norm": 2.714940071105957,
"learning_rate": 6.965990852723404e-06,
"loss": 1.0664,
"step": 9165
},
{
"epoch": 1.923318667504714,
"grad_norm": 2.079648971557617,
"learning_rate": 6.9311678761025416e-06,
"loss": 1.0701,
"step": 9180
},
{
"epoch": 1.9264613450659962,
"grad_norm": 1.5704066753387451,
"learning_rate": 6.896385934180867e-06,
"loss": 1.085,
"step": 9195
},
{
"epoch": 1.9296040226272786,
"grad_norm": 2.2583060264587402,
"learning_rate": 6.861645492042977e-06,
"loss": 1.1032,
"step": 9210
},
{
"epoch": 1.9327467001885608,
"grad_norm": 2.0253028869628906,
"learning_rate": 6.826947014218543e-06,
"loss": 1.1232,
"step": 9225
},
{
"epoch": 1.935889377749843,
"grad_norm": 3.0294480323791504,
"learning_rate": 6.792290964676123e-06,
"loss": 1.1583,
"step": 9240
},
{
"epoch": 1.939032055311125,
"grad_norm": 2.480926036834717,
"learning_rate": 6.7576778068169335e-06,
"loss": 1.126,
"step": 9255
},
{
"epoch": 1.9421747328724073,
"grad_norm": 2.393428087234497,
"learning_rate": 6.725411298947324e-06,
"loss": 1.0878,
"step": 9270
},
{
"epoch": 1.9453174104336894,
"grad_norm": 2.0476040840148926,
"learning_rate": 6.690882376871407e-06,
"loss": 1.0794,
"step": 9285
},
{
"epoch": 1.9484600879949716,
"grad_norm": 2.075193166732788,
"learning_rate": 6.656397702457396e-06,
"loss": 1.0825,
"step": 9300
},
{
"epoch": 1.9516027655562538,
"grad_norm": 2.0196661949157715,
"learning_rate": 6.621957736814983e-06,
"loss": 1.0755,
"step": 9315
},
{
"epoch": 1.9547454431175362,
"grad_norm": 2.4812753200531006,
"learning_rate": 6.58756294045604e-06,
"loss": 1.0883,
"step": 9330
},
{
"epoch": 1.9578881206788183,
"grad_norm": 2.0828120708465576,
"learning_rate": 6.553213773288465e-06,
"loss": 1.1287,
"step": 9345
},
{
"epoch": 1.9610307982401005,
"grad_norm": 2.273803949356079,
"learning_rate": 6.518910694610023e-06,
"loss": 1.0481,
"step": 9360
},
{
"epoch": 1.964173475801383,
"grad_norm": 3.1020278930664062,
"learning_rate": 6.4846541631022155e-06,
"loss": 1.078,
"step": 9375
},
{
"epoch": 1.967316153362665,
"grad_norm": 2.7231500148773193,
"learning_rate": 6.4504446368241315e-06,
"loss": 1.0389,
"step": 9390
},
{
"epoch": 1.9704588309239472,
"grad_norm": 2.4498469829559326,
"learning_rate": 6.416282573206341e-06,
"loss": 1.1052,
"step": 9405
},
{
"epoch": 1.9736015084852294,
"grad_norm": 2.266944408416748,
"learning_rate": 6.382168429044769e-06,
"loss": 1.1163,
"step": 9420
},
{
"epoch": 1.9767441860465116,
"grad_norm": 2.877861976623535,
"learning_rate": 6.34810266049459e-06,
"loss": 1.1106,
"step": 9435
},
{
"epoch": 1.9798868636077938,
"grad_norm": 2.256692886352539,
"learning_rate": 6.314085723064118e-06,
"loss": 1.0929,
"step": 9450
},
{
"epoch": 1.983029541169076,
"grad_norm": 2.337369203567505,
"learning_rate": 6.2801180716087315e-06,
"loss": 1.1019,
"step": 9465
},
{
"epoch": 1.9861722187303583,
"grad_norm": 3.803745746612549,
"learning_rate": 6.246200160324789e-06,
"loss": 1.0641,
"step": 9480
},
{
"epoch": 1.9893148962916405,
"grad_norm": 2.354900598526001,
"learning_rate": 6.21233244274354e-06,
"loss": 1.1353,
"step": 9495
},
{
"epoch": 1.9924575738529227,
"grad_norm": 2.0362517833709717,
"learning_rate": 6.178515371725083e-06,
"loss": 1.0638,
"step": 9510
},
{
"epoch": 1.995600251414205,
"grad_norm": 1.7420244216918945,
"learning_rate": 6.144749399452294e-06,
"loss": 1.0539,
"step": 9525
},
{
"epoch": 1.9987429289754872,
"grad_norm": 1.9186304807662964,
"learning_rate": 6.11103497742478e-06,
"loss": 1.0553,
"step": 9540
},
{
"epoch": 2.0018856065367694,
"grad_norm": 1.9180148839950562,
"learning_rate": 6.077372556452858e-06,
"loss": 1.078,
"step": 9555
},
{
"epoch": 2.0050282840980516,
"grad_norm": 1.8619567155838013,
"learning_rate": 6.043762586651511e-06,
"loss": 1.0238,
"step": 9570
},
{
"epoch": 2.0081709616593337,
"grad_norm": 2.054584503173828,
"learning_rate": 6.010205517434373e-06,
"loss": 1.0361,
"step": 9585
},
{
"epoch": 2.011313639220616,
"grad_norm": 1.6725273132324219,
"learning_rate": 5.97670179750772e-06,
"loss": 1.0048,
"step": 9600
},
{
"epoch": 2.014456316781898,
"grad_norm": 2.8712847232818604,
"learning_rate": 5.943251874864476e-06,
"loss": 1.0199,
"step": 9615
},
{
"epoch": 2.0175989943431802,
"grad_norm": 2.331249237060547,
"learning_rate": 5.9098561967782165e-06,
"loss": 1.0697,
"step": 9630
},
{
"epoch": 2.0207416719044624,
"grad_norm": 2.359811782836914,
"learning_rate": 5.876515209797189e-06,
"loss": 1.0052,
"step": 9645
},
{
"epoch": 2.023884349465745,
"grad_norm": 3.5132248401641846,
"learning_rate": 5.843229359738336e-06,
"loss": 1.0845,
"step": 9660
},
{
"epoch": 2.027027027027027,
"grad_norm": 1.9716084003448486,
"learning_rate": 5.809999091681349e-06,
"loss": 1.1177,
"step": 9675
},
{
"epoch": 2.0301697045883094,
"grad_norm": 2.3846986293792725,
"learning_rate": 5.776824849962706e-06,
"loss": 1.0848,
"step": 9690
},
{
"epoch": 2.0333123821495915,
"grad_norm": 1.9413962364196777,
"learning_rate": 5.743707078169731e-06,
"loss": 1.0824,
"step": 9705
},
{
"epoch": 2.0364550597108737,
"grad_norm": 1.9300832748413086,
"learning_rate": 5.710646219134666e-06,
"loss": 1.0474,
"step": 9720
},
{
"epoch": 2.039597737272156,
"grad_norm": 2.1981208324432373,
"learning_rate": 5.67764271492874e-06,
"loss": 1.1053,
"step": 9735
},
{
"epoch": 2.042740414833438,
"grad_norm": 2.165830612182617,
"learning_rate": 5.644697006856268e-06,
"loss": 1.0203,
"step": 9750
},
{
"epoch": 2.04588309239472,
"grad_norm": 2.567518711090088,
"learning_rate": 5.61180953544876e-06,
"loss": 1.0509,
"step": 9765
},
{
"epoch": 2.0490257699560024,
"grad_norm": 2.09709095954895,
"learning_rate": 5.578980740459009e-06,
"loss": 1.0906,
"step": 9780
},
{
"epoch": 2.0521684475172846,
"grad_norm": 1.5784721374511719,
"learning_rate": 5.546211060855211e-06,
"loss": 1.0694,
"step": 9795
},
{
"epoch": 2.0553111250785667,
"grad_norm": 3.4078545570373535,
"learning_rate": 5.513500934815113e-06,
"loss": 1.0352,
"step": 9810
},
{
"epoch": 2.0584538026398493,
"grad_norm": 2.0774762630462646,
"learning_rate": 5.4808507997201544e-06,
"loss": 1.0513,
"step": 9825
},
{
"epoch": 2.0615964802011315,
"grad_norm": 3.19948148727417,
"learning_rate": 5.448261092149596e-06,
"loss": 1.0432,
"step": 9840
},
{
"epoch": 2.0647391577624137,
"grad_norm": 2.4168691635131836,
"learning_rate": 5.415732247874696e-06,
"loss": 1.0354,
"step": 9855
},
{
"epoch": 2.067881835323696,
"grad_norm": 1.7410550117492676,
"learning_rate": 5.383264701852886e-06,
"loss": 1.0389,
"step": 9870
},
{
"epoch": 2.071024512884978,
"grad_norm": 2.2156288623809814,
"learning_rate": 5.350858888221957e-06,
"loss": 1.0614,
"step": 9885
},
{
"epoch": 2.07416719044626,
"grad_norm": 3.3456056118011475,
"learning_rate": 5.318515240294241e-06,
"loss": 1.099,
"step": 9900
},
{
"epoch": 2.0773098680075424,
"grad_norm": 3.286379814147949,
"learning_rate": 5.28623419055083e-06,
"loss": 1.0667,
"step": 9915
},
{
"epoch": 2.0804525455688245,
"grad_norm": 2.086221218109131,
"learning_rate": 5.2540161706357855e-06,
"loss": 1.0458,
"step": 9930
},
{
"epoch": 2.0835952231301067,
"grad_norm": 3.3937273025512695,
"learning_rate": 5.221861611350371e-06,
"loss": 1.0696,
"step": 9945
},
{
"epoch": 2.086737900691389,
"grad_norm": 2.162592887878418,
"learning_rate": 5.189770942647291e-06,
"loss": 1.034,
"step": 9960
},
{
"epoch": 2.0898805782526715,
"grad_norm": 2.144193649291992,
"learning_rate": 5.157744593624939e-06,
"loss": 1.0997,
"step": 9975
},
{
"epoch": 2.0930232558139537,
"grad_norm": 2.1846065521240234,
"learning_rate": 5.125782992521664e-06,
"loss": 1.034,
"step": 9990
},
{
"epoch": 2.0951183741881416,
"eval_accuracy": 0.8874096274375916,
"eval_loss": 1.1195236444473267,
"eval_runtime": 3488.4257,
"eval_samples_per_second": 1.372,
"eval_steps_per_second": 0.343,
"step": 10000
},
{
"epoch": 2.096165933375236,
"grad_norm": 1.9800641536712646,
"learning_rate": 5.09388656671004e-06,
"loss": 1.0096,
"step": 10005
},
{
"epoch": 2.099308610936518,
"grad_norm": 2.919948101043701,
"learning_rate": 5.062055742691154e-06,
"loss": 1.0455,
"step": 10020
},
{
"epoch": 2.1024512884978,
"grad_norm": 2.2051897048950195,
"learning_rate": 5.0302909460889045e-06,
"loss": 1.0675,
"step": 10035
},
{
"epoch": 2.1055939660590823,
"grad_norm": 3.871448278427124,
"learning_rate": 4.998592601644306e-06,
"loss": 1.0813,
"step": 10050
},
{
"epoch": 2.1087366436203645,
"grad_norm": 2.2782132625579834,
"learning_rate": 4.9669611332098155e-06,
"loss": 1.0833,
"step": 10065
},
{
"epoch": 2.1118793211816467,
"grad_norm": 2.384378433227539,
"learning_rate": 4.935396963743661e-06,
"loss": 1.1207,
"step": 10080
},
{
"epoch": 2.115021998742929,
"grad_norm": 2.561387300491333,
"learning_rate": 4.9039005153041755e-06,
"loss": 1.0639,
"step": 10095
},
{
"epoch": 2.118164676304211,
"grad_norm": 3.0452916622161865,
"learning_rate": 4.872472209044182e-06,
"loss": 1.0579,
"step": 10110
},
{
"epoch": 2.121307353865493,
"grad_norm": 2.2836525440216064,
"learning_rate": 4.841112465205337e-06,
"loss": 1.0984,
"step": 10125
},
{
"epoch": 2.124450031426776,
"grad_norm": 3.4332149028778076,
"learning_rate": 4.809821703112518e-06,
"loss": 1.0762,
"step": 10140
},
{
"epoch": 2.127592708988058,
"grad_norm": 2.4914424419403076,
"learning_rate": 4.778600341168208e-06,
"loss": 1.0371,
"step": 10155
},
{
"epoch": 2.13073538654934,
"grad_norm": 3.004932165145874,
"learning_rate": 4.747448796846932e-06,
"loss": 1.084,
"step": 10170
},
{
"epoch": 2.1338780641106223,
"grad_norm": 2.9469964504241943,
"learning_rate": 4.716367486689636e-06,
"loss": 1.0769,
"step": 10185
},
{
"epoch": 2.1370207416719045,
"grad_norm": 1.8354699611663818,
"learning_rate": 4.685356826298143e-06,
"loss": 1.0308,
"step": 10200
},
{
"epoch": 2.1401634192331866,
"grad_norm": 1.9137917757034302,
"learning_rate": 4.654417230329575e-06,
"loss": 1.0568,
"step": 10215
},
{
"epoch": 2.143306096794469,
"grad_norm": 2.0806703567504883,
"learning_rate": 4.6235491124908295e-06,
"loss": 1.0535,
"step": 10230
},
{
"epoch": 2.146448774355751,
"grad_norm": 3.0409634113311768,
"learning_rate": 4.5927528855330425e-06,
"loss": 1.0421,
"step": 10245
},
{
"epoch": 2.149591451917033,
"grad_norm": 2.494769811630249,
"learning_rate": 4.562028961246065e-06,
"loss": 1.0494,
"step": 10260
},
{
"epoch": 2.1527341294783153,
"grad_norm": 2.0933990478515625,
"learning_rate": 4.5334188940857585e-06,
"loss": 1.1007,
"step": 10275
},
{
"epoch": 2.155876807039598,
"grad_norm": 2.1855649948120117,
"learning_rate": 4.502835919016285e-06,
"loss": 1.1062,
"step": 10290
},
{
"epoch": 2.15901948460088,
"grad_norm": 1.9125105142593384,
"learning_rate": 4.472326448936779e-06,
"loss": 1.0911,
"step": 10305
},
{
"epoch": 2.1621621621621623,
"grad_norm": 2.457319736480713,
"learning_rate": 4.441890891802735e-06,
"loss": 1.0553,
"step": 10320
},
{
"epoch": 2.1653048397234445,
"grad_norm": 1.8059813976287842,
"learning_rate": 4.411529654581325e-06,
"loss": 1.0426,
"step": 10335
},
{
"epoch": 2.1684475172847266,
"grad_norm": 1.840040922164917,
"learning_rate": 4.381243143245957e-06,
"loss": 1.0701,
"step": 10350
},
{
"epoch": 2.171590194846009,
"grad_norm": 2.875662326812744,
"learning_rate": 4.351031762770848e-06,
"loss": 1.0217,
"step": 10365
},
{
"epoch": 2.174732872407291,
"grad_norm": 2.7019827365875244,
"learning_rate": 4.320895917125604e-06,
"loss": 1.0767,
"step": 10380
},
{
"epoch": 2.177875549968573,
"grad_norm": 2.331372022628784,
"learning_rate": 4.290836009269824e-06,
"loss": 1.0414,
"step": 10395
},
{
"epoch": 2.1810182275298553,
"grad_norm": 1.7536449432373047,
"learning_rate": 4.260852441147711e-06,
"loss": 1.0328,
"step": 10410
},
{
"epoch": 2.1841609050911375,
"grad_norm": 2.233278512954712,
"learning_rate": 4.230945613682696e-06,
"loss": 1.0464,
"step": 10425
},
{
"epoch": 2.1873035826524196,
"grad_norm": 2.3253679275512695,
"learning_rate": 4.2011159267720645e-06,
"loss": 1.0734,
"step": 10440
},
{
"epoch": 2.1904462602137023,
"grad_norm": 2.1976492404937744,
"learning_rate": 4.171363779281644e-06,
"loss": 1.0089,
"step": 10455
},
{
"epoch": 2.1935889377749844,
"grad_norm": 1.6631523370742798,
"learning_rate": 4.141689569040433e-06,
"loss": 1.0598,
"step": 10470
},
{
"epoch": 2.1967316153362666,
"grad_norm": 2.9074153900146484,
"learning_rate": 4.112093692835306e-06,
"loss": 1.0564,
"step": 10485
},
{
"epoch": 2.1998742928975488,
"grad_norm": 2.0175225734710693,
"learning_rate": 4.082576546405683e-06,
"loss": 1.009,
"step": 10500
},
{
"epoch": 2.203016970458831,
"grad_norm": 2.74937105178833,
"learning_rate": 4.053138524438268e-06,
"loss": 1.0614,
"step": 10515
},
{
"epoch": 2.206159648020113,
"grad_norm": 1.9003492593765259,
"learning_rate": 4.0237800205617585e-06,
"loss": 1.0904,
"step": 10530
},
{
"epoch": 2.2093023255813953,
"grad_norm": 2.6317954063415527,
"learning_rate": 3.994501427341575e-06,
"loss": 1.0716,
"step": 10545
},
{
"epoch": 2.2124450031426774,
"grad_norm": 1.7894374132156372,
"learning_rate": 3.965303136274614e-06,
"loss": 1.1267,
"step": 10560
},
{
"epoch": 2.2155876807039596,
"grad_norm": 2.2193076610565186,
"learning_rate": 3.936185537784019e-06,
"loss": 1.0509,
"step": 10575
},
{
"epoch": 2.218730358265242,
"grad_norm": 2.435281753540039,
"learning_rate": 3.9071490212139665e-06,
"loss": 1.0699,
"step": 10590
},
{
"epoch": 2.2218730358265244,
"grad_norm": 2.5646512508392334,
"learning_rate": 3.878193974824444e-06,
"loss": 1.0761,
"step": 10605
},
{
"epoch": 2.2250157133878066,
"grad_norm": 2.3595800399780273,
"learning_rate": 3.849320785786062e-06,
"loss": 1.0842,
"step": 10620
},
{
"epoch": 2.2281583909490887,
"grad_norm": 2.8445732593536377,
"learning_rate": 3.820529840174885e-06,
"loss": 1.0093,
"step": 10635
},
{
"epoch": 2.231301068510371,
"grad_norm": 2.228774309158325,
"learning_rate": 3.7918215229672606e-06,
"loss": 1.1004,
"step": 10650
},
{
"epoch": 2.234443746071653,
"grad_norm": 2.617530584335327,
"learning_rate": 3.7631962180346905e-06,
"loss": 1.0769,
"step": 10665
},
{
"epoch": 2.2375864236329353,
"grad_norm": 2.4638450145721436,
"learning_rate": 3.73465430813866e-06,
"loss": 1.0703,
"step": 10680
},
{
"epoch": 2.2407291011942174,
"grad_norm": 2.0117123126983643,
"learning_rate": 3.706196174925558e-06,
"loss": 1.0354,
"step": 10695
},
{
"epoch": 2.2438717787554996,
"grad_norm": 2.4193451404571533,
|
"learning_rate": 3.6778221989215537e-06, |
|
"loss": 1.0742, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 2.2470144563167818, |
|
"grad_norm": 2.2464051246643066, |
|
"learning_rate": 3.6495327595275223e-06, |
|
"loss": 1.0563, |
|
"step": 10725 |
|
}, |
|
{ |
|
"epoch": 2.250157133878064, |
|
"grad_norm": 2.0514557361602783, |
|
"learning_rate": 3.6213282350139455e-06, |
|
"loss": 1.078, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 2.253299811439346, |
|
"grad_norm": 3.3520901203155518, |
|
"learning_rate": 3.593209002515885e-06, |
|
"loss": 1.0656, |
|
"step": 10755 |
|
}, |
|
{ |
|
"epoch": 2.2564424890006287, |
|
"grad_norm": 1.7619084119796753, |
|
"learning_rate": 3.56517543802792e-06, |
|
"loss": 1.0591, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 2.259585166561911, |
|
"grad_norm": 2.095613718032837, |
|
"learning_rate": 3.5372279163991285e-06, |
|
"loss": 1.0567, |
|
"step": 10785 |
|
}, |
|
{ |
|
"epoch": 2.262727844123193, |
|
"grad_norm": 2.4718666076660156, |
|
"learning_rate": 3.5093668113280688e-06, |
|
"loss": 1.0847, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 2.2658705216844752, |
|
"grad_norm": 3.0717573165893555, |
|
"learning_rate": 3.481592495357786e-06, |
|
"loss": 1.125, |
|
"step": 10815 |
|
}, |
|
{ |
|
"epoch": 2.2690131992457574, |
|
"grad_norm": 2.8368752002716064, |
|
"learning_rate": 3.4539053398708313e-06, |
|
"loss": 1.0832, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 2.2721558768070396, |
|
"grad_norm": 2.2466132640838623, |
|
"learning_rate": 3.4263057150842927e-06, |
|
"loss": 1.0572, |
|
"step": 10845 |
|
}, |
|
{ |
|
"epoch": 2.2752985543683217, |
|
"grad_norm": 2.3740978240966797, |
|
"learning_rate": 3.398793990044851e-06, |
|
"loss": 1.0886, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 2.278441231929604, |
|
"grad_norm": 2.0145745277404785, |
|
"learning_rate": 3.3713705326238354e-06, |
|
"loss": 0.9999, |
|
"step": 10875 |
|
}, |
|
{ |
|
"epoch": 2.281583909490886, |
|
"grad_norm": 2.8063840866088867, |
|
"learning_rate": 3.3440357095123142e-06, |
|
"loss": 1.029, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 2.2847265870521687, |
|
"grad_norm": 2.4196486473083496, |
|
"learning_rate": 3.3167898862161896e-06, |
|
"loss": 1.0736, |
|
"step": 10905 |
|
}, |
|
{ |
|
"epoch": 2.287869264613451, |
|
"grad_norm": 2.1821863651275635, |
|
"learning_rate": 3.289633427051295e-06, |
|
"loss": 1.0106, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 2.291011942174733, |
|
"grad_norm": 2.0182254314422607, |
|
"learning_rate": 3.262566695138557e-06, |
|
"loss": 1.0717, |
|
"step": 10935 |
|
}, |
|
{ |
|
"epoch": 2.294154619736015, |
|
"grad_norm": 3.4760282039642334, |
|
"learning_rate": 3.2355900523991057e-06, |
|
"loss": 1.081, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 2.2972972972972974, |
|
"grad_norm": 1.5960103273391724, |
|
"learning_rate": 3.2087038595494567e-06, |
|
"loss": 0.9858, |
|
"step": 10965 |
|
}, |
|
{ |
|
"epoch": 2.3004399748585795, |
|
"grad_norm": 2.3802037239074707, |
|
"learning_rate": 3.18190847609667e-06, |
|
"loss": 1.1179, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 2.3035826524198617, |
|
"grad_norm": 2.2108407020568848, |
|
"learning_rate": 3.155204260333561e-06, |
|
"loss": 1.0281, |
|
"step": 10995 |
|
}, |
|
{ |
|
"epoch": 2.306725329981144, |
|
"grad_norm": 2.4052934646606445, |
|
"learning_rate": 3.1285915693339074e-06, |
|
"loss": 1.0728, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 2.309868007542426, |
|
"grad_norm": 2.564016342163086, |
|
"learning_rate": 3.1020707589476638e-06, |
|
"loss": 1.0539, |
|
"step": 11025 |
|
}, |
|
{ |
|
"epoch": 2.313010685103708, |
|
"grad_norm": 1.9333407878875732, |
|
"learning_rate": 3.0756421837962013e-06, |
|
"loss": 1.0316, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 2.3161533626649904, |
|
"grad_norm": 2.317944049835205, |
|
"learning_rate": 3.0493061972675808e-06, |
|
"loss": 1.0568, |
|
"step": 11055 |
|
}, |
|
{ |
|
"epoch": 2.3192960402262726, |
|
"grad_norm": 1.7732441425323486, |
|
"learning_rate": 3.0230631515118236e-06, |
|
"loss": 1.0107, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 2.322438717787555, |
|
"grad_norm": 2.109452724456787, |
|
"learning_rate": 2.9969133974361985e-06, |
|
"loss": 1.0543, |
|
"step": 11085 |
|
}, |
|
{ |
|
"epoch": 2.3255813953488373, |
|
"grad_norm": 2.198519468307495, |
|
"learning_rate": 2.970857284700517e-06, |
|
"loss": 1.0958, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 2.3287240729101195, |
|
"grad_norm": 2.7568089962005615, |
|
"learning_rate": 2.9448951617124878e-06, |
|
"loss": 1.0141, |
|
"step": 11115 |
|
}, |
|
{ |
|
"epoch": 2.3318667504714017, |
|
"grad_norm": 3.3935024738311768, |
|
"learning_rate": 2.919027375623028e-06, |
|
"loss": 1.0298, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 2.335009428032684, |
|
"grad_norm": 2.245539903640747, |
|
"learning_rate": 2.893254272321655e-06, |
|
"loss": 1.0677, |
|
"step": 11145 |
|
}, |
|
{ |
|
"epoch": 2.338152105593966, |
|
"grad_norm": 3.133040428161621, |
|
"learning_rate": 2.8675761964318148e-06, |
|
"loss": 1.0253, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 2.341294783155248, |
|
"grad_norm": 2.1391761302948, |
|
"learning_rate": 2.8419934913063175e-06, |
|
"loss": 1.0297, |
|
"step": 11175 |
|
}, |
|
{ |
|
"epoch": 2.3444374607165304, |
|
"grad_norm": 3.1094396114349365, |
|
"learning_rate": 2.8165064990227255e-06, |
|
"loss": 1.0369, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 2.3475801382778125, |
|
"grad_norm": 3.4849114418029785, |
|
"learning_rate": 2.791115560378781e-06, |
|
"loss": 1.0666, |
|
"step": 11205 |
|
}, |
|
{ |
|
"epoch": 2.350722815839095, |
|
"grad_norm": 2.6266283988952637, |
|
"learning_rate": 2.7658210148878515e-06, |
|
"loss": 1.1017, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 2.3538654934003773, |
|
"grad_norm": 3.290055990219116, |
|
"learning_rate": 2.7406232007743892e-06, |
|
"loss": 1.017, |
|
"step": 11235 |
|
}, |
|
{ |
|
"epoch": 2.3570081709616595, |
|
"grad_norm": 3.80057954788208, |
|
"learning_rate": 2.7155224549694114e-06, |
|
"loss": 1.0566, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 2.3601508485229417, |
|
"grad_norm": 2.253140926361084, |
|
"learning_rate": 2.690519113105988e-06, |
|
"loss": 1.1236, |
|
"step": 11265 |
|
}, |
|
{ |
|
"epoch": 2.363293526084224, |
|
"grad_norm": 2.4676170349121094, |
|
"learning_rate": 2.667270835657341e-06, |
|
"loss": 1.0442, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 2.366436203645506, |
|
"grad_norm": 2.411430597305298, |
|
"learning_rate": 2.6424567549411838e-06, |
|
"loss": 1.0645, |
|
"step": 11295 |
|
}, |
|
{ |
|
"epoch": 2.369578881206788, |
|
"grad_norm": 2.473489284515381, |
|
"learning_rate": 2.6177410551600802e-06, |
|
"loss": 1.1046, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 2.3727215587680703, |
|
"grad_norm": 2.323939800262451, |
|
"learning_rate": 2.593124066798477e-06, |
|
"loss": 1.0366, |
|
"step": 11325 |
|
}, |
|
{ |
|
"epoch": 2.3758642363293525, |
|
"grad_norm": 2.2324061393737793, |
|
"learning_rate": 2.5686061190209134e-06, |
|
"loss": 1.036, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 2.3790069138906347, |
|
"grad_norm": 3.393950939178467, |
|
"learning_rate": 2.5441875396676186e-06, |
|
"loss": 1.0795, |
|
"step": 11355 |
|
}, |
|
{ |
|
"epoch": 2.382149591451917, |
|
"grad_norm": 2.0376970767974854, |
|
"learning_rate": 2.5198686552501196e-06, |
|
"loss": 1.0682, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 2.385292269013199, |
|
"grad_norm": 2.5424187183380127, |
|
"learning_rate": 2.4956497909468713e-06, |
|
"loss": 1.0599, |
|
"step": 11385 |
|
}, |
|
{ |
|
"epoch": 2.3884349465744816, |
|
"grad_norm": 2.3934738636016846, |
|
"learning_rate": 2.4715312705989236e-06, |
|
"loss": 1.0725, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 2.391577624135764, |
|
"grad_norm": 2.5349321365356445, |
|
"learning_rate": 2.447513416705579e-06, |
|
"loss": 0.977, |
|
"step": 11415 |
|
}, |
|
{ |
|
"epoch": 2.394720301697046, |
|
"grad_norm": 2.4864916801452637, |
|
"learning_rate": 2.4235965504200974e-06, |
|
"loss": 1.0452, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 2.397862979258328, |
|
"grad_norm": 2.2827324867248535, |
|
"learning_rate": 2.3997809915453686e-06, |
|
"loss": 1.0459, |
|
"step": 11445 |
|
}, |
|
{ |
|
"epoch": 2.4010056568196103, |
|
"grad_norm": 2.4698305130004883, |
|
"learning_rate": 2.376067058529672e-06, |
|
"loss": 1.0619, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 2.4041483343808925, |
|
"grad_norm": 2.9925546646118164, |
|
"learning_rate": 2.3524550684623927e-06, |
|
"loss": 1.0541, |
|
"step": 11475 |
|
}, |
|
{ |
|
"epoch": 2.4072910119421747, |
|
"grad_norm": 2.6975173950195312, |
|
"learning_rate": 2.328945337069809e-06, |
|
"loss": 1.1299, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 2.410433689503457, |
|
"grad_norm": 2.239131212234497, |
|
"learning_rate": 2.305538178710831e-06, |
|
"loss": 1.0085, |
|
"step": 11505 |
|
}, |
|
{ |
|
"epoch": 2.413576367064739, |
|
"grad_norm": 2.0249500274658203, |
|
"learning_rate": 2.282233906372835e-06, |
|
"loss": 1.0513, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 2.4167190446260216, |
|
"grad_norm": 2.393962860107422, |
|
"learning_rate": 2.2590328316674582e-06, |
|
"loss": 1.0077, |
|
"step": 11535 |
|
}, |
|
{ |
|
"epoch": 2.4198617221873038, |
|
"grad_norm": 3.019960880279541, |
|
"learning_rate": 2.2359352648264387e-06, |
|
"loss": 1.0446, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 2.423004399748586, |
|
"grad_norm": 2.6998488903045654, |
|
"learning_rate": 2.2129415146974665e-06, |
|
"loss": 1.0773, |
|
"step": 11565 |
|
}, |
|
{ |
|
"epoch": 2.426147077309868, |
|
"grad_norm": 3.3861067295074463, |
|
"learning_rate": 2.190051888740048e-06, |
|
"loss": 1.103, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 2.4292897548711503, |
|
"grad_norm": 2.977205514907837, |
|
"learning_rate": 2.1672666930214047e-06, |
|
"loss": 1.0307, |
|
"step": 11595 |
|
}, |
|
{ |
|
"epoch": 2.4324324324324325, |
|
"grad_norm": 3.0046980381011963, |
|
"learning_rate": 2.1445862322123734e-06, |
|
"loss": 1.0402, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 2.4355751099937146, |
|
"grad_norm": 2.431718111038208, |
|
"learning_rate": 2.1220108095833345e-06, |
|
"loss": 1.0802, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 2.438717787554997, |
|
"grad_norm": 1.7797611951828003, |
|
"learning_rate": 2.099540727000159e-06, |
|
"loss": 1.0833, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 2.441860465116279, |
|
"grad_norm": 2.224626302719116, |
|
"learning_rate": 2.077176284920166e-06, |
|
"loss": 1.0358, |
|
"step": 11655 |
|
}, |
|
{ |
|
"epoch": 2.445003142677561, |
|
"grad_norm": 2.7776806354522705, |
|
"learning_rate": 2.054917782388113e-06, |
|
"loss": 1.0251, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 2.4481458202388433, |
|
"grad_norm": 2.1015563011169434, |
|
"learning_rate": 2.0327655170321915e-06, |
|
"loss": 1.0326, |
|
"step": 11685 |
|
}, |
|
{ |
|
"epoch": 2.4512884978001255, |
|
"grad_norm": 3.1175501346588135, |
|
"learning_rate": 2.010719785060048e-06, |
|
"loss": 1.0697, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 2.454431175361408, |
|
"grad_norm": 2.0512046813964844, |
|
"learning_rate": 1.9887808812548272e-06, |
|
"loss": 1.063, |
|
"step": 11715 |
|
}, |
|
{ |
|
"epoch": 2.4575738529226903, |
|
"grad_norm": 2.4269354343414307, |
|
"learning_rate": 1.966949098971227e-06, |
|
"loss": 1.0623, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 2.4607165304839724, |
|
"grad_norm": 2.026348352432251, |
|
"learning_rate": 1.945224730131576e-06, |
|
"loss": 1.0249, |
|
"step": 11745 |
|
}, |
|
{ |
|
"epoch": 2.4638592080452546, |
|
"grad_norm": 2.1796414852142334, |
|
"learning_rate": 1.9236080652219248e-06, |
|
"loss": 1.0627, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 2.4670018856065368, |
|
"grad_norm": 2.8909108638763428, |
|
"learning_rate": 1.9020993932881803e-06, |
|
"loss": 0.9954, |
|
"step": 11775 |
|
}, |
|
{ |
|
"epoch": 2.470144563167819, |
|
"grad_norm": 1.939376950263977, |
|
"learning_rate": 1.8806990019322169e-06, |
|
"loss": 0.9819, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 2.473287240729101, |
|
"grad_norm": 2.434191942214966, |
|
"learning_rate": 1.8594071773080492e-06, |
|
"loss": 1.0858, |
|
"step": 11805 |
|
}, |
|
{ |
|
"epoch": 2.4764299182903833, |
|
"grad_norm": 2.3099939823150635, |
|
"learning_rate": 1.8382242041179876e-06, |
|
"loss": 1.0571, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 2.4795725958516655, |
|
"grad_norm": 2.7791974544525146, |
|
"learning_rate": 1.8171503656088574e-06, |
|
"loss": 1.0598, |
|
"step": 11835 |
|
}, |
|
{ |
|
"epoch": 2.482715273412948, |
|
"grad_norm": 2.9822537899017334, |
|
"learning_rate": 1.7961859435681882e-06, |
|
"loss": 1.029, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 2.4858579509742302, |
|
"grad_norm": 2.644841194152832, |
|
"learning_rate": 1.7753312183204486e-06, |
|
"loss": 1.12, |
|
"step": 11865 |
|
}, |
|
{ |
|
"epoch": 2.4890006285355124, |
|
"grad_norm": 2.436312437057495, |
|
"learning_rate": 1.7545864687233127e-06, |
|
"loss": 1.0353, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 2.4921433060967946, |
|
"grad_norm": 2.3745834827423096, |
|
"learning_rate": 1.7339519721639164e-06, |
|
"loss": 1.0758, |
|
"step": 11895 |
|
}, |
|
{ |
|
"epoch": 2.4952859836580767, |
|
"grad_norm": 2.9353575706481934, |
|
"learning_rate": 1.713428004555162e-06, |
|
"loss": 1.0455, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 2.498428661219359, |
|
"grad_norm": 2.2262260913848877, |
|
"learning_rate": 1.693014840332009e-06, |
|
"loss": 1.0345, |
|
"step": 11925 |
|
}, |
|
{ |
|
"epoch": 2.501571338780641, |
|
"grad_norm": 2.1293249130249023, |
|
"learning_rate": 1.6727127524478227e-06, |
|
"loss": 1.0262, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 2.5047140163419233, |
|
"grad_norm": 2.169814348220825, |
|
"learning_rate": 1.6525220123707176e-06, |
|
"loss": 1.0121, |
|
"step": 11955 |
|
}, |
|
{ |
|
"epoch": 2.5078566939032054, |
|
"grad_norm": 2.504615545272827, |
|
"learning_rate": 1.6324428900799273e-06, |
|
"loss": 1.0202, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 2.5109993714644876, |
|
"grad_norm": 2.8434998989105225, |
|
"learning_rate": 1.6124756540621966e-06, |
|
"loss": 1.0042, |
|
"step": 11985 |
|
}, |
|
{ |
|
"epoch": 2.5141420490257698, |
|
"grad_norm": 1.939417839050293, |
|
"learning_rate": 1.5926205713081876e-06, |
|
"loss": 1.1108, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 2.517284726587052, |
|
"grad_norm": 2.569380283355713, |
|
"learning_rate": 1.572877907308913e-06, |
|
"loss": 1.0067, |
|
"step": 12015 |
|
}, |
|
{ |
|
"epoch": 2.520427404148334, |
|
"grad_norm": 2.747483253479004, |
|
"learning_rate": 1.5532479260521849e-06, |
|
"loss": 1.0509, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 2.5235700817096167, |
|
"grad_norm": 1.7206883430480957, |
|
"learning_rate": 1.533730890019086e-06, |
|
"loss": 1.0901, |
|
"step": 12045 |
|
}, |
|
{ |
|
"epoch": 2.526712759270899, |
|
"grad_norm": 2.222593069076538, |
|
"learning_rate": 1.5143270601804582e-06, |
|
"loss": 1.0182, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 2.529855436832181, |
|
"grad_norm": 2.6068830490112305, |
|
"learning_rate": 1.4950366959934116e-06, |
|
"loss": 1.0768, |
|
"step": 12075 |
|
}, |
|
{ |
|
"epoch": 2.5329981143934632, |
|
"grad_norm": 2.674351692199707, |
|
"learning_rate": 1.475860055397862e-06, |
|
"loss": 1.0114, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 2.5361407919547454, |
|
"grad_norm": 3.2743771076202393, |
|
"learning_rate": 1.4567973948130731e-06, |
|
"loss": 1.0805, |
|
"step": 12105 |
|
}, |
|
{ |
|
"epoch": 2.5392834695160276, |
|
"grad_norm": 3.0732719898223877, |
|
"learning_rate": 1.4378489691342345e-06, |
|
"loss": 1.0802, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 2.5424261470773097, |
|
"grad_norm": 2.502260446548462, |
|
"learning_rate": 1.4190150317290485e-06, |
|
"loss": 1.0701, |
|
"step": 12135 |
|
}, |
|
{ |
|
"epoch": 2.5455688246385924, |
|
"grad_norm": 2.1941142082214355, |
|
"learning_rate": 1.4002958344343465e-06, |
|
"loss": 1.0671, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 2.5487115021998745, |
|
"grad_norm": 2.5131101608276367, |
|
"learning_rate": 1.3816916275527193e-06, |
|
"loss": 1.0483, |
|
"step": 12165 |
|
}, |
|
{ |
|
"epoch": 2.5518541797611567, |
|
"grad_norm": 2.770385503768921, |
|
"learning_rate": 1.36320265984917e-06, |
|
"loss": 1.0708, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 2.554996857322439, |
|
"grad_norm": 2.3241212368011475, |
|
"learning_rate": 1.3448291785477874e-06, |
|
"loss": 1.0521, |
|
"step": 12195 |
|
}, |
|
{ |
|
"epoch": 2.558139534883721, |
|
"grad_norm": 3.1057024002075195, |
|
"learning_rate": 1.3265714293284414e-06, |
|
"loss": 1.0705, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 2.561282212445003, |
|
"grad_norm": 1.8851613998413086, |
|
"learning_rate": 1.3084296563235033e-06, |
|
"loss": 1.037, |
|
"step": 12225 |
|
}, |
|
{ |
|
"epoch": 2.5644248900062854, |
|
"grad_norm": 2.893693685531616, |
|
"learning_rate": 1.2904041021145597e-06, |
|
"loss": 1.0582, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 2.5675675675675675, |
|
"grad_norm": 2.2626419067382812, |
|
"learning_rate": 1.2724950077292042e-06, |
|
"loss": 1.0213, |
|
"step": 12255 |
|
}, |
|
{ |
|
"epoch": 2.5707102451288497, |
|
"grad_norm": 2.1927521228790283, |
|
"learning_rate": 1.2547026126377849e-06, |
|
"loss": 1.0542, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 2.573852922690132, |
|
"grad_norm": 2.205655097961426, |
|
"learning_rate": 1.2382018758160751e-06, |
|
"loss": 1.0494, |
|
"step": 12285 |
|
}, |
|
{ |
|
"epoch": 2.576995600251414, |
|
"grad_norm": 2.98274827003479, |
|
"learning_rate": 1.2206357725870321e-06, |
|
"loss": 1.0926, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 2.5801382778126962, |
|
"grad_norm": 2.7129874229431152, |
|
"learning_rate": 1.2031870620844844e-06, |
|
"loss": 1.0396, |
|
"step": 12315 |
|
}, |
|
{ |
|
"epoch": 2.5832809553739784, |
|
"grad_norm": 2.8410415649414062, |
|
"learning_rate": 1.1858559776227863e-06, |
|
"loss": 1.0353, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 2.586423632935261, |
|
"grad_norm": 2.896530866622925, |
|
"learning_rate": 1.1686427509434617e-06, |
|
"loss": 1.1285, |
|
"step": 12345 |
|
}, |
|
{ |
|
"epoch": 2.589566310496543, |
|
"grad_norm": 2.223740339279175, |
|
"learning_rate": 1.1515476122121094e-06, |
|
"loss": 1.0727, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 2.5927089880578253, |
|
"grad_norm": 2.6268301010131836, |
|
"learning_rate": 1.1345707900153203e-06, |
|
"loss": 1.016, |
|
"step": 12375 |
|
}, |
|
{ |
|
"epoch": 2.5958516656191075, |
|
"grad_norm": 2.7400355339050293, |
|
"learning_rate": 1.1177125113576237e-06, |
|
"loss": 1.0317, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 2.5989943431803897, |
|
"grad_norm": 2.5901083946228027, |
|
"learning_rate": 1.100973001658453e-06, |
|
"loss": 1.0526, |
|
"step": 12405 |
|
}, |
|
{ |
|
"epoch": 2.602137020741672, |
|
"grad_norm": 2.201157569885254, |
|
"learning_rate": 1.084352484749126e-06, |
|
"loss": 1.0881, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 2.605279698302954, |
|
"grad_norm": 3.0149240493774414, |
|
"learning_rate": 1.067851182869859e-06, |
|
"loss": 1.02, |
|
"step": 12435 |
|
}, |
|
{ |
|
"epoch": 2.608422375864236, |
|
"grad_norm": 2.6754496097564697, |
|
"learning_rate": 1.051469316666791e-06, |
|
"loss": 1.0289, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 2.611565053425519, |
|
"grad_norm": 2.752760648727417, |
|
"learning_rate": 1.0352071051890333e-06, |
|
"loss": 1.0687, |
|
"step": 12465 |
|
}, |
|
{ |
|
"epoch": 2.614707730986801, |
|
"grad_norm": 2.3588600158691406, |
|
"learning_rate": 1.0190647658857433e-06, |
|
"loss": 1.0514, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 2.617850408548083, |
|
"grad_norm": 2.367644786834717, |
|
"learning_rate": 1.0030425146032141e-06, |
|
"loss": 0.9977, |
|
"step": 12495 |
|
}, |
|
{ |
|
"epoch": 2.6209930861093653, |
|
"grad_norm": 3.080481767654419, |
|
"learning_rate": 9.8714056558199e-07, |
|
"loss": 1.1113, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 2.6241357636706475, |
|
"grad_norm": 3.0548975467681885, |
|
"learning_rate": 9.713591314539938e-07, |
|
"loss": 1.0369, |
|
"step": 12525 |
|
}, |
|
{ |
|
"epoch": 2.6272784412319297, |
|
"grad_norm": 2.131619930267334, |
|
"learning_rate": 9.556984232397037e-07, |
|
"loss": 1.0072, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 2.630421118793212, |
|
"grad_norm": 2.963510036468506, |
|
"learning_rate": 9.40158650345312e-07, |
|
"loss": 1.0475, |
|
"step": 12555 |
|
}, |
|
{ |
|
"epoch": 2.633563796354494, |
|
"grad_norm": 2.7937393188476562, |
|
"learning_rate": 9.24740020559931e-07, |
|
"loss": 1.0605, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 2.636706473915776, |
|
"grad_norm": 2.5828826427459717, |
|
"learning_rate": 9.094427400528128e-07, |
|
"loss": 1.0657, |
|
"step": 12585 |
|
}, |
|
{ |
|
"epoch": 2.6398491514770583, |
|
"grad_norm": 2.3063597679138184, |
|
"learning_rate": 8.942670133706044e-07, |
|
"loss": 1.015, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 2.6429918290383405, |
|
"grad_norm": 2.2541069984436035, |
|
"learning_rate": 8.792130434345947e-07, |
|
"loss": 1.0939, |
|
"step": 12615 |
|
}, |
|
{ |
|
"epoch": 2.6461345065996227, |
|
"grad_norm": 2.0006487369537354, |
|
"learning_rate": 8.642810315380168e-07, |
|
"loss": 1.0214, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 2.649277184160905, |
|
"grad_norm": 2.5538716316223145, |
|
"learning_rate": 8.494711773433395e-07, |
|
"loss": 1.0569, |
|
"step": 12645 |
|
}, |
|
{ |
|
"epoch": 2.6524198617221875, |
|
"grad_norm": 2.3980712890625, |
|
"learning_rate": 8.347836788796148e-07, |
|
"loss": 1.033, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 2.6555625392834696, |
|
"grad_norm": 2.408301830291748, |
|
"learning_rate": 8.202187325398248e-07, |
|
"loss": 1.0013, |
|
"step": 12675 |
|
}, |
|
{ |
|
"epoch": 2.658705216844752, |
|
"grad_norm": 1.9503532648086548, |
|
"learning_rate": 8.057765330782486e-07, |
|
"loss": 0.9753, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 2.661847894406034, |
|
"grad_norm": 2.117751359939575, |
|
"learning_rate": 7.914572736078618e-07, |
|
"loss": 1.091, |
|
"step": 12705 |
|
}, |
|
{ |
|
"epoch": 2.664990571967316, |
|
"grad_norm": 2.651381731033325, |
|
"learning_rate": 7.772611455977586e-07, |
|
"loss": 1.0366, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 2.6681332495285983, |
|
"grad_norm": 2.568176031112671, |
|
"learning_rate": 7.631883388705897e-07, |
|
"loss": 1.0775, |
|
"step": 12735 |
|
}, |
|
{ |
|
"epoch": 2.6712759270898805, |
|
"grad_norm": 2.860661268234253, |
|
"learning_rate": 7.492390416000217e-07, |
|
"loss": 1.0576, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 2.6744186046511627, |
|
"grad_norm": 2.9709537029266357, |
|
"learning_rate": 7.354134403082191e-07, |
|
"loss": 1.0724, |
|
"step": 12765 |
|
}, |
|
{ |
|
"epoch": 2.6775612822124453, |
|
"grad_norm": 1.8460935354232788, |
|
"learning_rate": 7.217117198633561e-07, |
|
"loss": 1.0227, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 2.6807039597737274, |
|
"grad_norm": 2.6005988121032715, |
|
"learning_rate": 7.081340634771394e-07, |
|
"loss": 1.0787, |
|
"step": 12795 |
|
}, |
|
{ |
|
"epoch": 2.6838466373350096, |
|
"grad_norm": 2.098374366760254, |
|
"learning_rate": 6.946806527023686e-07, |
|
"loss": 1.0402, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 2.686989314896292, |
|
"grad_norm": 2.3875677585601807, |
|
"learning_rate": 6.813516674304888e-07, |
|
"loss": 1.0367, |
|
"step": 12825 |
|
}, |
|
{ |
|
"epoch": 2.690131992457574, |
|
"grad_norm": 1.9632166624069214, |
|
"learning_rate": 6.681472858892068e-07, |
|
"loss": 1.0026, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 2.693274670018856, |
|
"grad_norm": 2.6119518280029297, |
|
"learning_rate": 6.550676846400927e-07, |
|
"loss": 1.0865, |
|
"step": 12855 |
|
}, |
|
{ |
|
"epoch": 2.6964173475801383, |
|
"grad_norm": 2.205327272415161, |
|
"learning_rate": 6.421130385762275e-07, |
|
"loss": 1.0165, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 2.6995600251414205, |
|
"grad_norm": 3.523000478744507, |
|
"learning_rate": 6.292835209198622e-07, |
|
"loss": 1.0645, |
|
"step": 12885 |
|
}, |
|
{ |
|
"epoch": 2.7027027027027026, |
|
"grad_norm": 2.800053119659424, |
|
"learning_rate": 6.165793032200984e-07, |
|
"loss": 1.028, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 2.705845380263985, |
|
"grad_norm": 2.6920459270477295, |
|
"learning_rate": 6.040005553505979e-07, |
|
"loss": 1.0499, |
|
"step": 12915 |
|
}, |
|
{ |
|
"epoch": 2.708988057825267, |
|
"grad_norm": 2.7640254497528076, |
|
"learning_rate": 5.915474455073111e-07, |
|
"loss": 1.0495, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 2.712130735386549, |
|
"grad_norm": 2.8138866424560547, |
|
"learning_rate": 5.792201402062281e-07, |
|
"loss": 1.0698, |
|
"step": 12945 |
|
}, |
|
{ |
|
"epoch": 2.7152734129478313, |
|
"grad_norm": 3.2253429889678955, |
|
"learning_rate": 5.670188042811498e-07, |
|
"loss": 1.0635, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 2.718416090509114, |
|
"grad_norm": 2.7197041511535645, |
|
"learning_rate": 5.549436008814868e-07, |
|
"loss": 1.0507, |
|
"step": 12975 |
|
}, |
|
{ |
|
"epoch": 2.721558768070396, |
|
"grad_norm": 2.0629515647888184, |
|
"learning_rate": 5.429946914700745e-07, |
|
"loss": 1.0665, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 2.7247014456316783, |
|
"grad_norm": 1.5886244773864746, |
|
"learning_rate": 5.311722358210147e-07, |
|
"loss": 1.0672, |
|
"step": 13005 |
|
}, |
|
{ |
|
"epoch": 2.7278441231929604, |
|
"grad_norm": 2.3268556594848633, |
|
"learning_rate": 5.194763920175461e-07, |
|
"loss": 1.0415, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 2.7309868007542426, |
|
"grad_norm": 2.352407693862915, |
|
"learning_rate": 5.079073164499216e-07, |
|
"loss": 1.0628, |
|
"step": 13035 |
|
}, |
|
{ |
|
"epoch": 2.7341294783155248, |
|
"grad_norm": 2.2687618732452393, |
|
"learning_rate": 4.964651638133211e-07, |
|
"loss": 1.1221, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 2.737272155876807, |
|
"grad_norm": 2.1698989868164062, |
|
"learning_rate": 4.851500871057791e-07, |
|
"loss": 1.0301, |
|
"step": 13065 |
|
}, |
|
{ |
|
"epoch": 2.740414833438089, |
|
"grad_norm": 2.6248748302459717, |
|
"learning_rate": 4.7396223762614545e-07, |
|
"loss": 1.1054, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 2.7435575109993717, |
|
"grad_norm": 2.520930767059326, |
|
"learning_rate": 4.629017649720602e-07, |
|
"loss": 1.0437, |
|
"step": 13095 |
|
}, |
|
{ |
|
"epoch": 2.746700188560654, |
|
"grad_norm": 3.120213508605957, |
|
"learning_rate": 4.519688170379499e-07, |
|
"loss": 1.0116, |
|
"step": 13110 |
|
}, |
|
{ |
|
"epoch": 2.749842866121936, |
|
"grad_norm": 2.3425211906433105, |
|
"learning_rate": 4.411635400130521e-07, |
|
"loss": 1.0608, |
|
"step": 13125 |
|
}, |
|
{ |
|
"epoch": 2.7529855436832182, |
|
"grad_norm": 2.232712745666504, |
|
"learning_rate": 4.3048607837945954e-07, |
|
"loss": 1.0401, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 2.7561282212445004, |
|
"grad_norm": 2.2147791385650635, |
|
"learning_rate": 4.1993657491019355e-07, |
|
"loss": 1.0217, |
|
"step": 13155 |
|
}, |
|
{ |
|
"epoch": 2.7592708988057826, |
|
"grad_norm": 2.526965618133545, |
|
"learning_rate": 4.095151706672884e-07, |
|
"loss": 1.0915, |
|
"step": 13170 |
|
}, |
|
{ |
|
"epoch": 2.7624135763670647, |
|
"grad_norm": 1.9940340518951416, |
|
"learning_rate": 3.9922200499990404e-07, |
|
"loss": 1.0553, |
|
"step": 13185 |
|
}, |
|
{ |
|
"epoch": 2.765556253928347, |
|
"grad_norm": 2.973738431930542, |
|
"learning_rate": 3.890572155424721e-07, |
|
"loss": 1.0615, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 2.768698931489629, |
|
"grad_norm": 2.3078935146331787, |
|
"learning_rate": 3.790209382128451e-07, |
|
"loss": 1.0285, |
|
"step": 13215 |
|
}, |
|
{ |
|
"epoch": 2.7718416090509113, |
|
"grad_norm": 2.037052869796753, |
|
"learning_rate": 3.691133072104891e-07, |
|
"loss": 1.0545, |
|
"step": 13230 |
|
}, |
|
{ |
|
"epoch": 2.7749842866121934, |
|
"grad_norm": 2.9037740230560303, |
|
"learning_rate": 3.59334455014676e-07, |
|
"loss": 1.0151, |
|
"step": 13245 |
|
}, |
|
{ |
|
"epoch": 2.7781269641734756, |
|
"grad_norm": 2.245582103729248, |
|
"learning_rate": 3.4968451238272527e-07, |
|
"loss": 1.0715, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 2.7812696417347578, |
|
"grad_norm": 2.7545788288116455, |
|
"learning_rate": 3.4079431818805573e-07, |
|
"loss": 1.0315, |
|
"step": 13275 |
|
}, |
|
{ |
|
"epoch": 2.7844123192960404, |
|
"grad_norm": 3.0721166133880615, |
|
"learning_rate": 3.313939650727771e-07, |
|
"loss": 1.0796, |
|
"step": 13290 |
|
}, |
|
{ |
|
"epoch": 2.7875549968573226, |
|
"grad_norm": 2.1562039852142334, |
|
"learning_rate": 3.22122895125897e-07, |
|
"loss": 1.0105, |
|
"step": 13305 |
|
}, |
|
{ |
|
"epoch": 2.7906976744186047, |
|
"grad_norm": 2.4602880477905273, |
|
"learning_rate": 3.129812323149528e-07, |
|
"loss": 1.0805, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 2.793840351979887, |
|
"grad_norm": 2.485700845718384, |
|
"learning_rate": 3.039690988771271e-07, |
|
"loss": 1.1113, |
|
"step": 13335 |
|
}, |
|
{ |
|
"epoch": 2.796983029541169, |
|
"grad_norm": 2.2793211936950684, |
|
"learning_rate": 2.950866153176024e-07, |
|
"loss": 1.0269, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 2.8001257071024512, |
|
"grad_norm": 2.4900388717651367, |
|
"learning_rate": 2.8633390040796014e-07, |
|
"loss": 1.0586, |
|
"step": 13365 |
|
}, |
|
{ |
|
"epoch": 2.8032683846637334, |
|
"grad_norm": 2.7282629013061523, |
|
"learning_rate": 2.7771107118458874e-07, |
|
"loss": 1.0752, |
|
"step": 13380 |
|
}, |
|
{ |
|
"epoch": 2.8064110622250156, |
|
"grad_norm": 2.986842632293701, |
|
"learning_rate": 2.692182429471157e-07, |
|
"loss": 1.0826, |
|
"step": 13395 |
|
}, |
|
{ |
|
"epoch": 2.809553739786298, |
|
"grad_norm": 1.311810851097107, |
|
"learning_rate": 2.608555292568671e-07, |
|
"loss": 1.0583, |
|
"step": 13410 |
|
}, |
|
{ |
|
"epoch": 2.8126964173475804, |
|
"grad_norm": 2.533581256866455, |
|
"learning_rate": 2.526230419353537e-07, |
|
"loss": 1.0604, |
|
"step": 13425 |
|
}, |
|
{ |
|
"epoch": 2.8158390949088625, |
|
"grad_norm": 2.3201465606689453, |
|
"learning_rate": 2.445208910627717e-07, |
|
"loss": 1.0868, |
|
"step": 13440 |
|
}, |
|
{ |
|
"epoch": 2.8189817724701447, |
|
"grad_norm": 2.0772528648376465, |
|
"learning_rate": 2.3654918497653334e-07, |
|
"loss": 1.0854, |
|
"step": 13455 |
|
}, |
|
{ |
|
"epoch": 2.822124450031427, |
|
"grad_norm": 2.761883020401001, |
|
"learning_rate": 2.287080302698097e-07, |
|
"loss": 1.0408, |
|
"step": 13470 |
|
}, |
|
{ |
|
"epoch": 2.825267127592709, |
|
"grad_norm": 2.217202663421631, |
|
"learning_rate": 2.2099753179011807e-07, |
|
"loss": 1.0712, |
|
"step": 13485 |
|
}, |
|
{ |
|
"epoch": 2.828409805153991, |
|
"grad_norm": 2.955160140991211, |
|
"learning_rate": 2.134177926379144e-07, |
|
"loss": 1.0678, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 2.8315524827152734, |
|
"grad_norm": 2.9105584621429443, |
|
"learning_rate": 2.0596891416521215e-07, |
|
"loss": 1.1113, |
|
"step": 13515 |
|
}, |
|
{ |
|
"epoch": 2.8346951602765555, |
|
"grad_norm": 2.016284704208374, |
|
"learning_rate": 1.9865099597422887e-07, |
|
"loss": 1.0373, |
|
"step": 13530 |
|
}, |
|
{ |
|
"epoch": 2.8378378378378377, |
|
"grad_norm": 2.6535935401916504, |
|
"learning_rate": 1.9146413591605516e-07, |
|
"loss": 1.0911, |
|
"step": 13545 |
|
}, |
|
{ |
|
"epoch": 2.84098051539912, |
|
"grad_norm": 3.204658031463623, |
|
"learning_rate": 1.844084300893456e-07, |
|
"loss": 1.0636, |
|
"step": 13560 |
|
}, |
|
{ |
|
"epoch": 2.844123192960402, |
|
"grad_norm": 2.872605085372925, |
|
"learning_rate": 1.7748397283903874e-07, |
|
"loss": 1.093, |
|
"step": 13575 |
|
}, |
|
{ |
|
"epoch": 2.8472658705216842, |
|
"grad_norm": 2.8483595848083496, |
|
"learning_rate": 1.7069085675508356e-07, |
|
"loss": 1.0961, |
|
"step": 13590 |
|
}, |
|
{ |
|
"epoch": 2.850408548082967, |
|
"grad_norm": 2.082428455352783, |
|
"learning_rate": 1.6402917267121398e-07, |
|
"loss": 1.0593, |
|
"step": 13605 |
|
}, |
|
{ |
|
"epoch": 2.853551225644249, |
|
"grad_norm": 2.1791653633117676, |
|
"learning_rate": 1.5749900966372523e-07, |
|
"loss": 1.0552, |
|
"step": 13620 |
|
}, |
|
{ |
|
"epoch": 2.856693903205531, |
|
"grad_norm": 1.7146304845809937, |
|
"learning_rate": 1.5110045505029036e-07, |
|
"loss": 1.0435, |
|
"step": 13635 |
|
}, |
|
{ |
|
"epoch": 2.8598365807668134, |
|
"grad_norm": 2.64682674407959, |
|
"learning_rate": 1.4483359438878574e-07, |
|
"loss": 1.023, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 2.8629792583280955, |
|
"grad_norm": 2.390026569366455, |
|
"learning_rate": 1.386985114761541e-07, |
|
"loss": 1.0559, |
|
"step": 13665 |
|
}, |
|
{ |
|
"epoch": 2.8661219358893777, |
|
"grad_norm": 2.797459602355957, |
|
"learning_rate": 1.326952883472743e-07, |
|
"loss": 1.1022, |
|
"step": 13680 |
|
}, |
|
{ |
|
"epoch": 2.86926461345066, |
|
"grad_norm": 1.6154651641845703, |
|
"learning_rate": 1.2682400527387674e-07, |
|
"loss": 1.0415, |
|
"step": 13695 |
|
}, |
|
{ |
|
"epoch": 2.872407291011942, |
|
"grad_norm": 2.7609522342681885, |
|
"learning_rate": 1.2108474076345855e-07, |
|
"loss": 1.0901, |
|
"step": 13710 |
|
}, |
|
{ |
|
"epoch": 2.8755499685732246, |
|
"grad_norm": 2.618767499923706, |
|
"learning_rate": 1.1547757155824124e-07, |
|
"loss": 1.0118, |
|
"step": 13725 |
|
}, |
|
{ |
|
"epoch": 2.878692646134507, |
|
"grad_norm": 2.90389347076416, |
|
"learning_rate": 1.1000257263414139e-07, |
|
"loss": 1.0729, |
|
"step": 13740 |
|
}, |
|
{ |
|
"epoch": 2.881835323695789, |
|
"grad_norm": 2.361311435699463, |
|
"learning_rate": 1.0465981719976814e-07, |
|
"loss": 1.0597, |
|
"step": 13755 |
|
}, |
|
{ |
|
"epoch": 2.884978001257071, |
|
"grad_norm": 1.77293062210083, |
|
"learning_rate": 9.944937669544741e-08, |
|
"loss": 1.0155, |
|
"step": 13770 |
|
}, |
|
{ |
|
"epoch": 2.8881206788183533, |
|
"grad_norm": 2.5535075664520264, |
|
"learning_rate": 9.437132079226142e-08, |
|
"loss": 1.0966, |
|
"step": 13785 |
|
}, |
|
{ |
|
"epoch": 2.8912633563796355, |
|
"grad_norm": 2.4783518314361572, |
|
"learning_rate": 8.942571739112171e-08, |
|
"loss": 1.0554, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 2.8944060339409177, |
|
"grad_norm": 1.9884682893753052, |
|
"learning_rate": 8.46126326218577e-08, |
|
"loss": 0.9958, |
|
"step": 13815 |
|
}, |
|
{ |
|
"epoch": 2.8975487115022, |
|
"grad_norm": 1.811047911643982, |
|
"learning_rate": 7.99321308423362e-08, |
|
"loss": 1.0913, |
|
"step": 13830 |
|
}, |
|
{ |
|
"epoch": 2.900691389063482, |
|
"grad_norm": 2.5902936458587646, |
|
"learning_rate": 7.538427463759768e-08, |
|
"loss": 1.0423, |
|
"step": 13845 |
|
}, |
|
{ |
|
"epoch": 2.903834066624764, |
|
"grad_norm": 2.312286853790283, |
|
"learning_rate": 7.096912481901919e-08, |
|
"loss": 1.0178, |
|
"step": 13860 |
|
}, |
|
{ |
|
"epoch": 2.9069767441860463, |
|
"grad_norm": 1.918196201324463, |
|
"learning_rate": 6.668674042350498e-08, |
|
"loss": 1.053, |
|
"step": 13875 |
|
}, |
|
{ |
|
"epoch": 2.9101194217473285, |
|
"grad_norm": 1.8703136444091797, |
|
"learning_rate": 6.253717871269271e-08, |
|
"loss": 1.0538, |
|
"step": 13890 |
|
}, |
|
{ |
|
"epoch": 2.9132620993086107, |
|
"grad_norm": 2.3557560443878174, |
|
"learning_rate": 5.8520495172190714e-08, |
|
"loss": 1.0793, |
|
"step": 13905 |
|
}, |
|
{ |
|
"epoch": 2.9164047768698933, |
|
"grad_norm": 2.194120407104492, |
|
"learning_rate": 5.4636743510835256e-08, |
|
"loss": 1.0484, |
|
"step": 13920 |
|
}, |
|
{ |
|
"epoch": 2.9195474544311755, |
|
"grad_norm": 1.7967839241027832, |
|
"learning_rate": 5.088597565997111e-08, |
|
"loss": 1.0467, |
|
"step": 13935 |
|
}, |
|
{ |
|
"epoch": 2.9226901319924576, |
|
"grad_norm": 1.9517879486083984, |
|
"learning_rate": 4.72682417727599e-08, |
|
"loss": 1.0425, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 2.92583280955374, |
|
"grad_norm": 2.5437710285186768, |
|
"learning_rate": 4.37835902235062e-08, |
|
"loss": 1.0693, |
|
"step": 13965 |
|
}, |
|
{ |
|
"epoch": 2.928975487115022, |
|
"grad_norm": 2.4388108253479004, |
|
"learning_rate": 4.043206760701246e-08, |
|
"loss": 1.1279, |
|
"step": 13980 |
|
}, |
|
{ |
|
"epoch": 2.932118164676304, |
|
"grad_norm": 2.955836057662964, |
|
"learning_rate": 3.7213718737958425e-08, |
|
"loss": 1.0869, |
|
"step": 13995 |
|
}, |
|
{ |
|
"epoch": 2.9352608422375863, |
|
"grad_norm": 2.5022730827331543, |
|
"learning_rate": 3.412858665029495e-08, |
|
"loss": 1.0562, |
|
"step": 14010 |
|
}, |
|
{ |
|
"epoch": 2.9384035197988685, |
|
"grad_norm": 2.614485502243042, |
|
"learning_rate": 3.117671259667554e-08, |
|
"loss": 1.0316, |
|
"step": 14025 |
|
}, |
|
{ |
|
"epoch": 2.941546197360151, |
|
"grad_norm": 1.9243937730789185, |
|
"learning_rate": 2.8358136047902385e-08, |
|
"loss": 1.0355, |
|
"step": 14040 |
|
}, |
|
{ |
|
"epoch": 2.9446888749214333, |
|
"grad_norm": 2.475581407546997, |
|
"learning_rate": 2.5672894692397865e-08, |
|
"loss": 1.0007, |
|
"step": 14055 |
|
}, |
|
{ |
|
"epoch": 2.9478315524827154, |
|
"grad_norm": 2.3144288063049316, |
|
"learning_rate": 2.312102443569941e-08, |
|
"loss": 1.0889, |
|
"step": 14070 |
|
}, |
|
{ |
|
"epoch": 2.9509742300439976, |
|
"grad_norm": 1.9073032140731812, |
|
"learning_rate": 2.0702559399985444e-08, |
|
"loss": 1.0502, |
|
"step": 14085 |
|
}, |
|
{ |
|
"epoch": 2.95411690760528, |
|
"grad_norm": 2.616192579269409, |
|
"learning_rate": 1.8417531923609066e-08, |
|
"loss": 1.0767, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 2.957259585166562, |
|
"grad_norm": 2.3188884258270264, |
|
"learning_rate": 1.6265972560675082e-08, |
|
"loss": 1.0091, |
|
"step": 14115 |
|
}, |
|
{ |
|
"epoch": 2.960402262727844, |
|
"grad_norm": 2.5977001190185547, |
|
"learning_rate": 1.424791008062587e-08, |
|
"loss": 1.0617, |
|
"step": 14130 |
|
}, |
|
{ |
|
"epoch": 2.9635449402891263, |
|
"grad_norm": 2.5720694065093994, |
|
"learning_rate": 1.2363371467861706e-08, |
|
"loss": 1.0382, |
|
"step": 14145 |
|
}, |
|
{ |
|
"epoch": 2.9666876178504085, |
|
"grad_norm": 2.201608657836914, |
|
"learning_rate": 1.061238192137104e-08, |
|
"loss": 1.0358, |
|
"step": 14160 |
|
}, |
|
{ |
|
"epoch": 2.9698302954116906, |
|
"grad_norm": 2.547421455383301, |
|
"learning_rate": 8.994964854404098e-09, |
|
"loss": 1.0699, |
|
"step": 14175 |
|
}, |
|
{ |
|
"epoch": 2.972972972972973, |
|
"grad_norm": 2.6077804565429688, |
|
"learning_rate": 7.51114189415314e-09, |
|
"loss": 1.0301, |
|
"step": 14190 |
|
}, |
|
{ |
|
"epoch": 2.976115650534255, |
|
"grad_norm": 2.2802724838256836, |
|
"learning_rate": 6.16093288146491e-09, |
|
"loss": 1.0323, |
|
"step": 14205 |
|
}, |
|
{ |
|
"epoch": 2.979258328095537, |
|
"grad_norm": 3.556347608566284, |
|
"learning_rate": 4.9443558705764e-09, |
|
"loss": 1.057, |
|
"step": 14220 |
|
}, |
|
{ |
|
"epoch": 2.9824010056568198, |
|
"grad_norm": 2.6890947818756104, |
|
"learning_rate": 3.86142712887172e-09, |
|
"loss": 1.0276, |
|
"step": 14235 |
|
}, |
|
{ |
|
"epoch": 2.985543683218102, |
|
"grad_norm": 1.9261900186538696, |
|
"learning_rate": 2.912161136665592e-09, |
|
"loss": 0.9678, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 2.988686360779384, |
|
"grad_norm": 1.9330463409423828, |
|
"learning_rate": 2.096570587010183e-09, |
|
"loss": 1.07, |
|
"step": 14265 |
|
}, |
|
{ |
|
"epoch": 2.9918290383406663, |
|
"grad_norm": 2.8647425174713135, |
|
"learning_rate": 1.4559673424208165e-09, |
|
"loss": 1.0322, |
|
"step": 14280 |
|
}, |
|
{ |
|
"epoch": 2.9949717159019484, |
|
"grad_norm": 2.028707504272461, |
|
"learning_rate": 8.98845328231035e-10, |
|
"loss": 1.0043, |
|
"step": 14295 |
|
}, |
|
{ |
|
"epoch": 2.9981143934632306, |
|
"grad_norm": 2.955282688140869, |
|
"learning_rate": 4.754256775185706e-10, |
|
"loss": 1.0309, |
|
"step": 14310 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 14319, |
|
"total_flos": 3.0631036932776264e+18, |
|
"train_loss": 1.1692348350084263, |
|
"train_runtime": 44795.2526, |
|
"train_samples_per_second": 1.279, |
|
"train_steps_per_second": 0.32 |
|
} |
|
], |
|
"logging_steps": 15, |
|
"max_steps": 14319, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.0631036932776264e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
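
The object above follows the layout that the Hugging Face `transformers` Trainer writes to `trainer_state.json`. The sketch below is an editorial addition, not part of the trainer output: a minimal way to read the state back and summarize the logged loss curve, assuming the JSON is saved locally as `trainer_state.json`. It uses only the Python standard library; note that the final `log_history` entry is the run summary and carries `train_loss` but no per-step `loss`, so it is filtered out.

    # Minimal sketch (not part of the trainer output) for inspecting the
    # state above; assumes it is saved locally as "trainer_state.json".
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    # Per-step entries carry "loss"; the final summary entry does not.
    points = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]

    first_step, first_loss = points[0]
    last_step, last_loss = points[-1]
    print(f"{len(points)} logged points over {state['num_train_epochs']} epochs")
    print(f"loss {first_loss:.4f} @ step {first_step} -> {last_loss:.4f} @ step {last_step}")
    print(f"reported train_loss: {state['log_history'][-1]['train_loss']:.4f}")
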
|