{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.20017926501344488,
  "eval_steps": 500,
  "global_step": 1340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00014938751120406333,
      "grad_norm": 0.5712769031524658,
      "learning_rate": 2e-05,
      "loss": 1.7864,
      "step": 1
    },
    {
      "epoch": 0.00029877502240812666,
      "grad_norm": 0.8328073620796204,
      "learning_rate": 4e-05,
      "loss": 1.954,
      "step": 2
    },
    {
      "epoch": 0.00044816253361219,
      "grad_norm": 0.3498156666755676,
      "learning_rate": 6e-05,
      "loss": 1.0704,
      "step": 3
    },
    {
      "epoch": 0.0005975500448162533,
      "grad_norm": 0.46848297119140625,
      "learning_rate": 8e-05,
      "loss": 1.3401,
      "step": 4
    },
    {
      "epoch": 0.0007469375560203167,
      "grad_norm": 0.5157446265220642,
      "learning_rate": 0.0001,
      "loss": 1.2173,
      "step": 5
    },
    {
      "epoch": 0.00089632506722438,
      "grad_norm": 0.34888526797294617,
      "learning_rate": 0.00012,
      "loss": 1.2156,
      "step": 6
    },
    {
      "epoch": 0.0010457125784284435,
      "grad_norm": 0.25781184434890747,
      "learning_rate": 0.00014,
      "loss": 1.1595,
      "step": 7
    },
    {
      "epoch": 0.0011951000896325067,
      "grad_norm": 0.39395585656166077,
      "learning_rate": 0.00016,
      "loss": 1.447,
      "step": 8
    },
    {
      "epoch": 0.00134448760083657,
      "grad_norm": 0.42594340443611145,
      "learning_rate": 0.00018,
      "loss": 0.8779,
      "step": 9
    },
    {
      "epoch": 0.0014938751120406335,
      "grad_norm": 0.29849162697792053,
      "learning_rate": 0.0002,
      "loss": 1.214,
      "step": 10
    },
    {
      "epoch": 0.0016432626232446967,
      "grad_norm": 0.206129252910614,
      "learning_rate": 0.00019999998895420804,
      "loss": 1.222,
      "step": 11
    },
    {
      "epoch": 0.00179265013444876,
      "grad_norm": 0.20143035054206848,
      "learning_rate": 0.0001999999558168346,
      "loss": 1.2729,
      "step": 12
    },
    {
      "epoch": 0.0019420376456528235,
      "grad_norm": 0.3609657883644104,
      "learning_rate": 0.00019999990058788703,
      "loss": 1.14,
      "step": 13
    },
    {
      "epoch": 0.002091425156856887,
      "grad_norm": 0.22608326375484467,
      "learning_rate": 0.00019999982326737747,
      "loss": 1.2138,
      "step": 14
    },
    {
      "epoch": 0.00224081266806095,
      "grad_norm": 0.40498849749565125,
      "learning_rate": 0.00019999972385532303,
      "loss": 1.4689,
      "step": 15
    },
    {
      "epoch": 0.0023902001792650133,
      "grad_norm": 0.215151846408844,
      "learning_rate": 0.0001999996023517457,
      "loss": 1.0072,
      "step": 16
    },
    {
      "epoch": 0.0025395876904690767,
      "grad_norm": 0.28476935625076294,
      "learning_rate": 0.0001999994587566723,
      "loss": 0.9636,
      "step": 17
    },
    {
      "epoch": 0.00268897520167314,
      "grad_norm": 0.2294227033853531,
      "learning_rate": 0.0001999992930701345,
      "loss": 0.8722,
      "step": 18
    },
    {
      "epoch": 0.0028383627128772035,
      "grad_norm": 0.24489282071590424,
      "learning_rate": 0.00019999910529216902,
      "loss": 0.913,
      "step": 19
    },
    {
      "epoch": 0.002987750224081267,
      "grad_norm": 0.24592871963977814,
      "learning_rate": 0.00019999889542281728,
      "loss": 1.2072,
      "step": 20
    },
    {
      "epoch": 0.00313713773528533,
      "grad_norm": 0.19988738000392914,
      "learning_rate": 0.0001999986634621256,
      "loss": 1.0463,
      "step": 21
    },
    {
      "epoch": 0.0032865252464893933,
      "grad_norm": 0.16961582005023956,
      "learning_rate": 0.00019999840941014525,
      "loss": 0.8549,
      "step": 22
    },
    {
      "epoch": 0.0034359127576934568,
      "grad_norm": 0.1631389707326889,
      "learning_rate": 0.0001999981332669324,
      "loss": 0.8112,
      "step": 23
    },
    {
      "epoch": 0.00358530026889752,
      "grad_norm": 0.1402096003293991,
      "learning_rate": 0.00019999783503254803,
      "loss": 1.0202,
      "step": 24
    },
    {
      "epoch": 0.0037346877801015836,
      "grad_norm": 0.3608367443084717,
      "learning_rate": 0.000199997514707058,
      "loss": 1.4076,
      "step": 25
    },
    {
      "epoch": 0.003884075291305647,
      "grad_norm": 0.15870416164398193,
      "learning_rate": 0.0001999971722905331,
      "loss": 0.9372,
      "step": 26
    },
    {
      "epoch": 0.00403346280250971,
      "grad_norm": 0.21479852497577667,
      "learning_rate": 0.00019999680778304897,
      "loss": 1.0237,
      "step": 27
    },
    {
      "epoch": 0.004182850313713774,
      "grad_norm": 0.6759325861930847,
      "learning_rate": 0.00019999642118468614,
      "loss": 1.7162,
      "step": 28
    },
    {
      "epoch": 0.004332237824917837,
      "grad_norm": 0.15937522053718567,
      "learning_rate": 0.00019999601249552998,
      "loss": 0.7851,
      "step": 29
    },
    {
      "epoch": 0.0044816253361219,
      "grad_norm": 0.12428741902112961,
      "learning_rate": 0.00019999558171567082,
      "loss": 0.9342,
      "step": 30
    },
    {
      "epoch": 0.004631012847325963,
      "grad_norm": 0.2223120778799057,
      "learning_rate": 0.0001999951288452038,
      "loss": 1.1871,
      "step": 31
    },
    {
      "epoch": 0.004780400358530027,
      "grad_norm": 0.19341352581977844,
      "learning_rate": 0.000199994653884229,
      "loss": 0.9797,
      "step": 32
    },
    {
      "epoch": 0.00492978786973409,
      "grad_norm": 0.41403812170028687,
      "learning_rate": 0.0001999941568328513,
      "loss": 0.9629,
      "step": 33
    },
    {
      "epoch": 0.005079175380938153,
      "grad_norm": 0.29959213733673096,
      "learning_rate": 0.00019999363769118055,
      "loss": 1.231,
      "step": 34
    },
    {
      "epoch": 0.005228562892142217,
      "grad_norm": 0.17178373038768768,
      "learning_rate": 0.00019999309645933142,
      "loss": 0.7834,
      "step": 35
    },
    {
      "epoch": 0.00537795040334628,
      "grad_norm": 0.2903209626674652,
      "learning_rate": 0.00019999253313742344,
      "loss": 1.2852,
      "step": 36
    },
    {
      "epoch": 0.005527337914550344,
      "grad_norm": 0.3148847818374634,
      "learning_rate": 0.00019999194772558112,
      "loss": 1.376,
      "step": 37
    },
    {
      "epoch": 0.005676725425754407,
      "grad_norm": 0.4141716957092285,
      "learning_rate": 0.00019999134022393375,
      "loss": 0.9684,
      "step": 38
    },
    {
      "epoch": 0.0058261129369584705,
      "grad_norm": 0.2509758770465851,
      "learning_rate": 0.00019999071063261554,
      "loss": 1.1722,
      "step": 39
    },
    {
      "epoch": 0.005975500448162534,
      "grad_norm": 0.1800830066204071,
      "learning_rate": 0.0001999900589517656,
      "loss": 1.0264,
      "step": 40
    },
    {
      "epoch": 0.006124887959366597,
      "grad_norm": 0.19982534646987915,
      "learning_rate": 0.00019998938518152787,
      "loss": 0.7343,
      "step": 41
    },
    {
      "epoch": 0.00627427547057066,
      "grad_norm": 0.1816824972629547,
      "learning_rate": 0.0001999886893220512,
      "loss": 0.9136,
      "step": 42
    },
    {
      "epoch": 0.006423662981774723,
      "grad_norm": 0.216154083609581,
      "learning_rate": 0.0001999879713734893,
      "loss": 1.0128,
      "step": 43
    },
    {
      "epoch": 0.006573050492978787,
      "grad_norm": 0.16445758938789368,
      "learning_rate": 0.0001999872313360008,
      "loss": 0.9593,
      "step": 44
    },
    {
      "epoch": 0.00672243800418285,
      "grad_norm": 0.14644479751586914,
      "learning_rate": 0.00019998646920974919,
      "loss": 0.9424,
      "step": 45
    },
    {
      "epoch": 0.0068718255153869135,
      "grad_norm": 0.3356267809867859,
      "learning_rate": 0.00019998568499490283,
      "loss": 0.8878,
      "step": 46
    },
    {
      "epoch": 0.007021213026590977,
      "grad_norm": 0.4974273443222046,
      "learning_rate": 0.00019998487869163497,
      "loss": 1.338,
      "step": 47
    },
    {
      "epoch": 0.00717060053779504,
      "grad_norm": 0.1881432980298996,
      "learning_rate": 0.00019998405030012371,
      "loss": 0.882,
      "step": 48
    },
    {
      "epoch": 0.007319988048999104,
      "grad_norm": 0.1419169157743454,
      "learning_rate": 0.0001999831998205521,
      "loss": 0.7255,
      "step": 49
    },
    {
      "epoch": 0.007469375560203167,
      "grad_norm": 0.16964636743068695,
      "learning_rate": 0.00019998232725310796,
      "loss": 1.0151,
      "step": 50
    },
    {
      "epoch": 0.007618763071407231,
      "grad_norm": 0.18110951781272888,
      "learning_rate": 0.0001999814325979841,
      "loss": 0.9705,
      "step": 51
    },
    {
      "epoch": 0.007768150582611294,
      "grad_norm": 0.13390210270881653,
      "learning_rate": 0.00019998051585537818,
      "loss": 0.6999,
      "step": 52
    },
    {
      "epoch": 0.007917538093815357,
      "grad_norm": 0.13011352717876434,
      "learning_rate": 0.00019997957702549269,
      "loss": 0.5496,
      "step": 53
    },
    {
      "epoch": 0.00806692560501942,
      "grad_norm": 0.30082619190216064,
      "learning_rate": 0.00019997861610853503,
      "loss": 1.1625,
      "step": 54
    },
    {
      "epoch": 0.008216313116223483,
      "grad_norm": 0.18591542541980743,
      "learning_rate": 0.00019997763310471752,
      "loss": 0.8686,
      "step": 55
    },
    {
      "epoch": 0.008365700627427548,
      "grad_norm": 0.20125310122966766,
      "learning_rate": 0.00019997662801425725,
      "loss": 0.7371,
      "step": 56
    },
    {
      "epoch": 0.00851508813863161,
      "grad_norm": 0.14571277797222137,
      "learning_rate": 0.00019997560083737632,
      "loss": 1.0109,
      "step": 57
    },
    {
      "epoch": 0.008664475649835674,
      "grad_norm": 0.1365530639886856,
      "learning_rate": 0.00019997455157430165,
      "loss": 0.7709,
      "step": 58
    },
    {
      "epoch": 0.008813863161039737,
      "grad_norm": 0.14306791126728058,
      "learning_rate": 0.000199973480225265,
      "loss": 0.7487,
      "step": 59
    },
    {
      "epoch": 0.0089632506722438,
      "grad_norm": 0.2880021929740906,
      "learning_rate": 0.00019997238679050308,
      "loss": 1.2318,
      "step": 60
    },
    {
      "epoch": 0.009112638183447864,
      "grad_norm": 0.1913180649280548,
      "learning_rate": 0.00019997127127025746,
      "loss": 0.8861,
      "step": 61
    },
    {
      "epoch": 0.009262025694651926,
      "grad_norm": 0.44290822744369507,
      "learning_rate": 0.00019997013366477453,
      "loss": 1.3866,
      "step": 62
    },
    {
      "epoch": 0.00941141320585599,
      "grad_norm": 0.12451759725809097,
      "learning_rate": 0.00019996897397430563,
      "loss": 0.855,
      "step": 63
    },
    {
      "epoch": 0.009560800717060053,
      "grad_norm": 0.2885231375694275,
      "learning_rate": 0.00019996779219910696,
      "loss": 1.147,
      "step": 64
    },
    {
      "epoch": 0.009710188228264117,
      "grad_norm": 0.14056627452373505,
      "learning_rate": 0.00019996658833943957,
      "loss": 0.7883,
      "step": 65
    },
    {
      "epoch": 0.00985957573946818,
      "grad_norm": 0.14305633306503296,
      "learning_rate": 0.00019996536239556942,
      "loss": 0.5779,
      "step": 66
    },
    {
      "epoch": 0.010008963250672244,
      "grad_norm": 0.17166224122047424,
      "learning_rate": 0.00019996411436776738,
      "loss": 0.9014,
      "step": 67
    },
    {
      "epoch": 0.010158350761876307,
      "grad_norm": 0.15985064208507538,
      "learning_rate": 0.0001999628442563091,
      "loss": 0.9639,
      "step": 68
    },
    {
      "epoch": 0.010307738273080371,
      "grad_norm": 0.21808995306491852,
      "learning_rate": 0.0001999615520614752,
      "loss": 0.7525,
      "step": 69
    },
    {
      "epoch": 0.010457125784284434,
      "grad_norm": 0.21660713851451874,
      "learning_rate": 0.00019996023778355113,
      "loss": 0.9379,
      "step": 70
    },
    {
      "epoch": 0.010606513295488498,
      "grad_norm": 0.20963691174983978,
      "learning_rate": 0.00019995890142282728,
      "loss": 0.6632,
      "step": 71
    },
    {
      "epoch": 0.01075590080669256,
      "grad_norm": 0.2161312997341156,
      "learning_rate": 0.00019995754297959882,
      "loss": 0.8932,
      "step": 72
    },
    {
      "epoch": 0.010905288317896623,
      "grad_norm": 0.17720285058021545,
      "learning_rate": 0.00019995616245416584,
      "loss": 0.5283,
      "step": 73
    },
    {
      "epoch": 0.011054675829100687,
      "grad_norm": 0.18077623844146729,
      "learning_rate": 0.0001999547598468334,
      "loss": 0.603,
      "step": 74
    },
    {
      "epoch": 0.01120406334030475,
      "grad_norm": 0.19094090163707733,
      "learning_rate": 0.00019995333515791125,
      "loss": 0.9712,
      "step": 75
    },
    {
      "epoch": 0.011353450851508814,
      "grad_norm": 0.2597976326942444,
      "learning_rate": 0.00019995188838771425,
      "loss": 0.7913,
      "step": 76
    },
    {
      "epoch": 0.011502838362712877,
      "grad_norm": 0.26235243678092957,
      "learning_rate": 0.00019995041953656194,
      "loss": 0.7578,
      "step": 77
    },
    {
      "epoch": 0.011652225873916941,
      "grad_norm": 0.1809563785791397,
      "learning_rate": 0.0001999489286047788,
      "loss": 0.9168,
      "step": 78
    },
    {
      "epoch": 0.011801613385121004,
      "grad_norm": 0.22458118200302124,
      "learning_rate": 0.0001999474155926942,
      "loss": 1.1456,
      "step": 79
    },
    {
      "epoch": 0.011951000896325068,
      "grad_norm": 0.49804338812828064,
      "learning_rate": 0.00019994588050064243,
      "loss": 1.1896,
      "step": 80
    },
    {
      "epoch": 0.01210038840752913,
      "grad_norm": 0.2247380167245865,
      "learning_rate": 0.00019994432332896258,
      "loss": 0.958,
      "step": 81
    },
    {
      "epoch": 0.012249775918733195,
      "grad_norm": 0.17649094760417938,
      "learning_rate": 0.00019994274407799872,
      "loss": 0.9417,
      "step": 82
    },
    {
      "epoch": 0.012399163429937257,
      "grad_norm": 0.24156750738620758,
      "learning_rate": 0.00019994114274809964,
      "loss": 0.9959,
      "step": 83
    },
    {
      "epoch": 0.01254855094114132,
      "grad_norm": 0.2841317355632782,
      "learning_rate": 0.00019993951933961913,
      "loss": 1.296,
      "step": 84
    },
    {
      "epoch": 0.012697938452345384,
      "grad_norm": 0.26792535185813904,
      "learning_rate": 0.00019993787385291588,
      "loss": 1.0296,
      "step": 85
    },
    {
      "epoch": 0.012847325963549447,
      "grad_norm": 0.11657895892858505,
      "learning_rate": 0.00019993620628835332,
      "loss": 0.6993,
      "step": 86
    },
    {
      "epoch": 0.01299671347475351,
      "grad_norm": 0.15463922917842865,
      "learning_rate": 0.0001999345166462999,
      "loss": 0.6925,
      "step": 87
    },
    {
      "epoch": 0.013146100985957573,
      "grad_norm": 0.1776818037033081,
      "learning_rate": 0.0001999328049271289,
      "loss": 0.7629,
      "step": 88
    },
    {
      "epoch": 0.013295488497161638,
      "grad_norm": 0.16185057163238525,
      "learning_rate": 0.00019993107113121844,
      "loss": 1.0736,
      "step": 89
    },
    {
      "epoch": 0.0134448760083657,
      "grad_norm": 0.3227018415927887,
      "learning_rate": 0.0001999293152589515,
      "loss": 0.8586,
      "step": 90
    },
    {
      "epoch": 0.013594263519569764,
      "grad_norm": 0.16486088931560516,
      "learning_rate": 0.00019992753731071602,
      "loss": 1.0614,
      "step": 91
    },
    {
      "epoch": 0.013743651030773827,
      "grad_norm": 0.1495029777288437,
      "learning_rate": 0.0001999257372869048,
      "loss": 0.8561,
      "step": 92
    },
    {
      "epoch": 0.013893038541977891,
      "grad_norm": 0.16044558584690094,
      "learning_rate": 0.00019992391518791546,
      "loss": 1.0294,
      "step": 93
    },
    {
      "epoch": 0.014042426053181954,
      "grad_norm": 0.16251197457313538,
      "learning_rate": 0.00019992207101415053,
      "loss": 0.838,
      "step": 94
    },
    {
      "epoch": 0.014191813564386018,
      "grad_norm": 0.47265177965164185,
      "learning_rate": 0.00019992020476601745,
      "loss": 1.4965,
      "step": 95
    },
    {
      "epoch": 0.01434120107559008,
      "grad_norm": 0.16032789647579193,
      "learning_rate": 0.00019991831644392848,
      "loss": 0.9541,
      "step": 96
    },
    {
      "epoch": 0.014490588586794143,
      "grad_norm": 0.1563224196434021,
      "learning_rate": 0.0001999164060483008,
      "loss": 0.6754,
      "step": 97
    },
    {
      "epoch": 0.014639976097998207,
      "grad_norm": 0.2469077855348587,
      "learning_rate": 0.00019991447357955639,
      "loss": 0.9911,
      "step": 98
    },
    {
      "epoch": 0.01478936360920227,
      "grad_norm": 0.13441061973571777,
      "learning_rate": 0.00019991251903812225,
      "loss": 0.7342,
      "step": 99
    },
    {
      "epoch": 0.014938751120406334,
      "grad_norm": 0.20193825662136078,
      "learning_rate": 0.00019991054242443008,
      "loss": 0.6834,
      "step": 100
    },
    {
      "epoch": 0.015088138631610397,
      "grad_norm": 0.1552024483680725,
      "learning_rate": 0.0001999085437389166,
      "loss": 0.8257,
      "step": 101
    },
    {
      "epoch": 0.015237526142814461,
      "grad_norm": 0.25607559084892273,
      "learning_rate": 0.00019990652298202335,
      "loss": 1.1036,
      "step": 102
    },
    {
      "epoch": 0.015386913654018524,
      "grad_norm": 0.11925558000802994,
      "learning_rate": 0.00019990448015419675,
      "loss": 0.6345,
      "step": 103
    },
    {
      "epoch": 0.015536301165222588,
      "grad_norm": 0.28443989157676697,
      "learning_rate": 0.00019990241525588804,
      "loss": 1.1217,
      "step": 104
    },
    {
      "epoch": 0.01568568867642665,
      "grad_norm": 0.15237168967723846,
      "learning_rate": 0.00019990032828755345,
      "loss": 0.7735,
      "step": 105
    },
    {
      "epoch": 0.015835076187630713,
      "grad_norm": 0.173554927110672,
      "learning_rate": 0.000199898219249654,
      "loss": 1.1093,
      "step": 106
    },
    {
      "epoch": 0.01598446369883478,
      "grad_norm": 0.14035388827323914,
      "learning_rate": 0.0001998960881426556,
      "loss": 0.8206,
      "step": 107
    },
    {
      "epoch": 0.01613385121003884,
      "grad_norm": 0.13699080049991608,
      "learning_rate": 0.00019989393496702907,
      "loss": 1.0426,
      "step": 108
    },
    {
      "epoch": 0.016283238721242904,
      "grad_norm": 0.1313813477754593,
      "learning_rate": 0.00019989175972325005,
      "loss": 0.7804,
      "step": 109
    },
    {
      "epoch": 0.016432626232446967,
      "grad_norm": 0.15459921956062317,
      "learning_rate": 0.00019988956241179912,
      "loss": 0.9949,
      "step": 110
    },
    {
      "epoch": 0.01658201374365103,
      "grad_norm": 0.20009522140026093,
      "learning_rate": 0.00019988734303316168,
      "loss": 1.0159,
      "step": 111
    },
    {
      "epoch": 0.016731401254855095,
      "grad_norm": 0.17909283936023712,
      "learning_rate": 0.00019988510158782804,
      "loss": 0.5281,
      "step": 112
    },
    {
      "epoch": 0.016880788766059158,
      "grad_norm": 0.6786065697669983,
      "learning_rate": 0.00019988283807629334,
      "loss": 1.2808,
      "step": 113
    },
    {
      "epoch": 0.01703017627726322,
      "grad_norm": 0.15410082042217255,
      "learning_rate": 0.00019988055249905767,
      "loss": 1.0755,
      "step": 114
    },
    {
      "epoch": 0.017179563788467283,
      "grad_norm": 0.19600564241409302,
      "learning_rate": 0.00019987824485662593,
      "loss": 1.2168,
      "step": 115
    },
    {
      "epoch": 0.01732895129967135,
      "grad_norm": 0.1903967261314392,
      "learning_rate": 0.00019987591514950787,
      "loss": 0.9987,
      "step": 116
    },
    {
      "epoch": 0.01747833881087541,
      "grad_norm": 0.13745927810668945,
      "learning_rate": 0.00019987356337821822,
      "loss": 0.9328,
      "step": 117
    },
    {
      "epoch": 0.017627726322079474,
      "grad_norm": 0.18400371074676514,
      "learning_rate": 0.00019987118954327654,
      "loss": 0.6686,
      "step": 118
    },
    {
      "epoch": 0.017777113833283537,
      "grad_norm": 0.29325294494628906,
      "learning_rate": 0.00019986879364520716,
      "loss": 1.173,
      "step": 119
    },
    {
      "epoch": 0.0179265013444876,
      "grad_norm": 0.15400448441505432,
      "learning_rate": 0.00019986637568453945,
      "loss": 0.897,
      "step": 120
    },
    {
      "epoch": 0.018075888855691665,
      "grad_norm": 0.2178712785243988,
      "learning_rate": 0.00019986393566180755,
      "loss": 1.2885,
      "step": 121
    },
    {
      "epoch": 0.018225276366895728,
      "grad_norm": 0.15217332541942596,
      "learning_rate": 0.00019986147357755048,
      "loss": 0.9847,
      "step": 122
    },
    {
      "epoch": 0.01837466387809979,
      "grad_norm": 0.35806745290756226,
      "learning_rate": 0.0001998589894323122,
      "loss": 1.0114,
      "step": 123
    },
    {
      "epoch": 0.018524051389303853,
      "grad_norm": 0.46978825330734253,
      "learning_rate": 0.00019985648322664145,
      "loss": 1.33,
      "step": 124
    },
    {
      "epoch": 0.01867343890050792,
      "grad_norm": 0.3191573917865753,
      "learning_rate": 0.00019985395496109192,
      "loss": 0.8691,
      "step": 125
    },
    {
      "epoch": 0.01882282641171198,
      "grad_norm": 0.6782920360565186,
      "learning_rate": 0.00019985140463622215,
      "loss": 1.7387,
      "step": 126
    },
    {
      "epoch": 0.018972213922916044,
      "grad_norm": 0.4556753933429718,
      "learning_rate": 0.0001998488322525955,
      "loss": 1.9508,
      "step": 127
    },
    {
      "epoch": 0.019121601434120106,
      "grad_norm": 0.1401677280664444,
      "learning_rate": 0.0001998462378107803,
      "loss": 0.8127,
      "step": 128
    },
    {
      "epoch": 0.019270988945324172,
      "grad_norm": 0.1187644675374031,
      "learning_rate": 0.00019984362131134968,
      "loss": 0.5421,
      "step": 129
    },
    {
      "epoch": 0.019420376456528235,
      "grad_norm": 0.17097711563110352,
      "learning_rate": 0.0001998409827548817,
      "loss": 0.7335,
      "step": 130
    },
    {
      "epoch": 0.019569763967732298,
      "grad_norm": 0.12986791133880615,
      "learning_rate": 0.00019983832214195917,
      "loss": 0.7151,
      "step": 131
    },
    {
      "epoch": 0.01971915147893636,
      "grad_norm": 0.14413940906524658,
      "learning_rate": 0.00019983563947316996,
      "loss": 0.6833,
      "step": 132
    },
    {
      "epoch": 0.019868538990140423,
      "grad_norm": 0.1424313336610794,
      "learning_rate": 0.00019983293474910667,
      "loss": 0.8199,
      "step": 133
    },
    {
      "epoch": 0.02001792650134449,
      "grad_norm": 0.11715007573366165,
      "learning_rate": 0.00019983020797036683,
      "loss": 0.6079,
      "step": 134
    },
    {
      "epoch": 0.02016731401254855,
      "grad_norm": 0.11654297262430191,
      "learning_rate": 0.00019982745913755282,
      "loss": 0.5302,
      "step": 135
    },
    {
      "epoch": 0.020316701523752614,
      "grad_norm": 0.1768021434545517,
      "learning_rate": 0.00019982468825127187,
      "loss": 0.5412,
      "step": 136
    },
    {
      "epoch": 0.020466089034956676,
      "grad_norm": 0.2620059549808502,
      "learning_rate": 0.00019982189531213618,
      "loss": 1.0321,
      "step": 137
    },
    {
      "epoch": 0.020615476546160742,
      "grad_norm": 0.1472969800233841,
      "learning_rate": 0.0001998190803207627,
      "loss": 1.0097,
      "step": 138
    },
    {
      "epoch": 0.020764864057364805,
      "grad_norm": 0.1889103651046753,
      "learning_rate": 0.00019981624327777332,
      "loss": 0.7423,
      "step": 139
    },
    {
      "epoch": 0.020914251568568867,
      "grad_norm": 0.20594674348831177,
      "learning_rate": 0.0001998133841837948,
      "loss": 1.3315,
      "step": 140
    },
    {
      "epoch": 0.02106363907977293,
      "grad_norm": 0.19783402979373932,
      "learning_rate": 0.00019981050303945877,
      "loss": 1.0435,
      "step": 141
    },
    {
      "epoch": 0.021213026590976996,
      "grad_norm": 0.12695230543613434,
      "learning_rate": 0.00019980759984540168,
      "loss": 0.8,
      "step": 142
    },
    {
      "epoch": 0.02136241410218106,
      "grad_norm": 0.18909701704978943,
      "learning_rate": 0.0001998046746022649,
      "loss": 0.7549,
      "step": 143
    },
    {
      "epoch": 0.02151180161338512,
      "grad_norm": 0.2600228488445282,
      "learning_rate": 0.0001998017273106947,
      "loss": 0.6657,
      "step": 144
    },
    {
      "epoch": 0.021661189124589184,
      "grad_norm": 0.2999661862850189,
      "learning_rate": 0.00019979875797134216,
      "loss": 0.9626,
      "step": 145
    },
    {
      "epoch": 0.021810576635793246,
      "grad_norm": 0.2698724567890167,
      "learning_rate": 0.00019979576658486325,
      "loss": 0.8856,
      "step": 146
    },
    {
      "epoch": 0.021959964146997312,
      "grad_norm": 0.14443442225456238,
      "learning_rate": 0.0001997927531519188,
      "loss": 0.9285,
      "step": 147
    },
    {
      "epoch": 0.022109351658201375,
      "grad_norm": 0.21873806416988373,
      "learning_rate": 0.00019978971767317457,
      "loss": 0.9565,
      "step": 148
    },
    {
      "epoch": 0.022258739169405437,
      "grad_norm": 0.2041054517030716,
      "learning_rate": 0.0001997866601493011,
      "loss": 0.9455,
      "step": 149
    },
    {
      "epoch": 0.0224081266806095,
      "grad_norm": 0.19350048899650574,
      "learning_rate": 0.00019978358058097388,
      "loss": 0.8047,
      "step": 150
    },
    {
      "epoch": 0.022557514191813566,
      "grad_norm": 0.25492972135543823,
      "learning_rate": 0.00019978047896887323,
      "loss": 1.234,
      "step": 151
    },
    {
      "epoch": 0.02270690170301763,
      "grad_norm": 0.1414925754070282,
      "learning_rate": 0.0001997773553136843,
      "loss": 0.9703,
      "step": 152
    },
    {
      "epoch": 0.02285628921422169,
      "grad_norm": 0.15996232628822327,
      "learning_rate": 0.00019977420961609721,
      "loss": 1.1475,
      "step": 153
    },
    {
      "epoch": 0.023005676725425753,
      "grad_norm": 0.1661243587732315,
      "learning_rate": 0.00019977104187680688,
      "loss": 0.8574,
      "step": 154
    },
    {
      "epoch": 0.02315506423662982,
      "grad_norm": 0.17797410488128662,
      "learning_rate": 0.00019976785209651309,
      "loss": 0.6742,
      "step": 155
    },
    {
      "epoch": 0.023304451747833882,
      "grad_norm": 0.1401919722557068,
      "learning_rate": 0.00019976464027592053,
      "loss": 0.7065,
      "step": 156
    },
    {
      "epoch": 0.023453839259037945,
      "grad_norm": 0.3743472695350647,
      "learning_rate": 0.00019976140641573875,
      "loss": 1.1391,
      "step": 157
    },
    {
      "epoch": 0.023603226770242007,
      "grad_norm": 0.12242946028709412,
      "learning_rate": 0.00019975815051668217,
      "loss": 0.7517,
      "step": 158
    },
    {
      "epoch": 0.02375261428144607,
      "grad_norm": 0.12851974368095398,
      "learning_rate": 0.00019975487257947004,
      "loss": 0.5795,
      "step": 159
    },
    {
      "epoch": 0.023902001792650136,
      "grad_norm": 0.15131576359272003,
      "learning_rate": 0.0001997515726048265,
      "loss": 0.5735,
      "step": 160
    },
    {
      "epoch": 0.024051389303854198,
      "grad_norm": 0.2606663703918457,
      "learning_rate": 0.00019974825059348062,
      "loss": 0.7677,
      "step": 161
    },
    {
      "epoch": 0.02420077681505826,
      "grad_norm": 0.3116661608219147,
      "learning_rate": 0.00019974490654616625,
      "loss": 1.1465,
      "step": 162
    },
    {
      "epoch": 0.024350164326262323,
      "grad_norm": 0.15296891331672668,
      "learning_rate": 0.00019974154046362212,
      "loss": 0.9154,
      "step": 163
    },
    {
      "epoch": 0.02449955183746639,
      "grad_norm": 0.12761889398097992,
      "learning_rate": 0.0001997381523465919,
      "loss": 0.7902,
      "step": 164
    },
    {
      "epoch": 0.024648939348670452,
      "grad_norm": 0.11566565185785294,
      "learning_rate": 0.00019973474219582405,
      "loss": 0.5558,
      "step": 165
    },
    {
      "epoch": 0.024798326859874514,
      "grad_norm": 0.12765946984291077,
      "learning_rate": 0.00019973131001207195,
      "loss": 0.7314,
      "step": 166
    },
    {
      "epoch": 0.024947714371078577,
      "grad_norm": 0.12568241357803345,
      "learning_rate": 0.00019972785579609376,
      "loss": 0.9476,
      "step": 167
    },
    {
      "epoch": 0.02509710188228264,
      "grad_norm": 0.21633781492710114,
      "learning_rate": 0.00019972437954865265,
      "loss": 1.3121,
      "step": 168
    },
    {
      "epoch": 0.025246489393486705,
      "grad_norm": 0.14913877844810486,
      "learning_rate": 0.00019972088127051657,
      "loss": 0.8247,
      "step": 169
    },
    {
      "epoch": 0.025395876904690768,
      "grad_norm": 0.16602329909801483,
      "learning_rate": 0.00019971736096245825,
      "loss": 1.064,
      "step": 170
    },
    {
      "epoch": 0.02554526441589483,
      "grad_norm": 0.28753912448883057,
      "learning_rate": 0.00019971381862525552,
      "loss": 1.4888,
      "step": 171
    },
    {
      "epoch": 0.025694651927098893,
      "grad_norm": 0.13800154626369476,
      "learning_rate": 0.00019971025425969083,
      "loss": 0.5945,
      "step": 172
    },
    {
      "epoch": 0.02584403943830296,
      "grad_norm": 0.22065165638923645,
      "learning_rate": 0.00019970666786655166,
      "loss": 0.8695,
      "step": 173
    },
    {
      "epoch": 0.02599342694950702,
      "grad_norm": 0.3128964900970459,
      "learning_rate": 0.0001997030594466303,
      "loss": 1.1673,
      "step": 174
    },
    {
      "epoch": 0.026142814460711084,
      "grad_norm": 0.20117323100566864,
      "learning_rate": 0.00019969942900072387,
      "loss": 1.0395,
      "step": 175
    },
    {
      "epoch": 0.026292201971915147,
      "grad_norm": 0.14214551448822021,
      "learning_rate": 0.00019969577652963444,
      "loss": 0.5135,
      "step": 176
    },
    {
      "epoch": 0.026441589483119213,
      "grad_norm": 0.20523157715797424,
      "learning_rate": 0.00019969210203416883,
      "loss": 1.0156,
      "step": 177
    },
    {
      "epoch": 0.026590976994323275,
      "grad_norm": 0.1385200172662735,
      "learning_rate": 0.0001996884055151389,
      "loss": 0.5202,
      "step": 178
    },
    {
      "epoch": 0.026740364505527338,
      "grad_norm": 0.14539465308189392,
      "learning_rate": 0.00019968468697336117,
      "loss": 0.6127,
      "step": 179
    },
    {
      "epoch": 0.0268897520167314,
      "grad_norm": 0.2886626720428467,
      "learning_rate": 0.00019968094640965717,
      "loss": 1.213,
      "step": 180
    },
    {
      "epoch": 0.027039139527935463,
      "grad_norm": 0.22695106267929077,
      "learning_rate": 0.00019967718382485323,
      "loss": 1.137,
      "step": 181
    },
    {
      "epoch": 0.02718852703913953,
      "grad_norm": 0.17426621913909912,
      "learning_rate": 0.00019967339921978062,
      "loss": 0.8978,
      "step": 182
    },
    {
      "epoch": 0.02733791455034359,
      "grad_norm": 0.2875833809375763,
      "learning_rate": 0.00019966959259527534,
      "loss": 0.8688,
      "step": 183
    },
    {
      "epoch": 0.027487302061547654,
      "grad_norm": 0.14763344824314117,
      "learning_rate": 0.00019966576395217837,
      "loss": 0.564,
      "step": 184
    },
    {
      "epoch": 0.027636689572751717,
      "grad_norm": 0.24988707900047302,
      "learning_rate": 0.00019966191329133555,
      "loss": 0.429,
      "step": 185
    },
    {
      "epoch": 0.027786077083955783,
      "grad_norm": 0.16043034195899963,
      "learning_rate": 0.0001996580406135975,
      "loss": 0.7251,
      "step": 186
    },
    {
      "epoch": 0.027935464595159845,
      "grad_norm": 0.2776719033718109,
      "learning_rate": 0.00019965414591981975,
      "loss": 1.3513,
      "step": 187
    },
    {
      "epoch": 0.028084852106363908,
      "grad_norm": 0.1749371886253357,
      "learning_rate": 0.00019965022921086275,
      "loss": 0.7383,
      "step": 188
    },
    {
      "epoch": 0.02823423961756797,
      "grad_norm": 0.2881135940551758,
      "learning_rate": 0.00019964629048759176,
      "loss": 1.0511,
      "step": 189
    },
    {
      "epoch": 0.028383627128772036,
      "grad_norm": 0.11646547168493271,
      "learning_rate": 0.00019964232975087687,
      "loss": 0.6575,
      "step": 190
    },
    {
      "epoch": 0.0285330146399761,
      "grad_norm": 0.1457265317440033,
      "learning_rate": 0.0001996383470015931,
      "loss": 0.7538,
      "step": 191
    },
    {
      "epoch": 0.02868240215118016,
      "grad_norm": 0.13400131464004517,
      "learning_rate": 0.00019963434224062025,
      "loss": 0.7773,
      "step": 192
    },
    {
      "epoch": 0.028831789662384224,
      "grad_norm": 0.23350481688976288,
      "learning_rate": 0.0001996303154688431,
      "loss": 1.2769,
      "step": 193
    },
    {
      "epoch": 0.028981177173588286,
      "grad_norm": 0.563207745552063,
      "learning_rate": 0.0001996262666871512,
      "loss": 1.5822,
      "step": 194
    },
    {
      "epoch": 0.029130564684792352,
      "grad_norm": 0.5377495884895325,
      "learning_rate": 0.00019962219589643898,
      "loss": 1.4911,
      "step": 195
    },
    {
      "epoch": 0.029279952195996415,
      "grad_norm": 0.15726317465305328,
      "learning_rate": 0.00019961810309760577,
      "loss": 0.7824,
      "step": 196
    },
    {
      "epoch": 0.029429339707200478,
      "grad_norm": 0.15040062367916107,
      "learning_rate": 0.00019961398829155568,
      "loss": 0.7541,
      "step": 197
    },
    {
      "epoch": 0.02957872721840454,
      "grad_norm": 0.12620890140533447,
      "learning_rate": 0.00019960985147919778,
      "loss": 0.932,
      "step": 198
    },
    {
      "epoch": 0.029728114729608606,
      "grad_norm": 0.15029945969581604,
      "learning_rate": 0.00019960569266144597,
      "loss": 0.9588,
      "step": 199
    },
    {
      "epoch": 0.02987750224081267,
      "grad_norm": 0.16770395636558533,
      "learning_rate": 0.00019960151183921897,
      "loss": 0.5964,
      "step": 200
    },
    {
      "epoch": 0.03002688975201673,
      "grad_norm": 0.12649193406105042,
      "learning_rate": 0.0001995973090134404,
      "loss": 0.8221,
      "step": 201
    },
    {
      "epoch": 0.030176277263220794,
      "grad_norm": 0.15091091394424438,
      "learning_rate": 0.00019959308418503877,
      "loss": 0.8811,
      "step": 202
    },
    {
      "epoch": 0.03032566477442486,
      "grad_norm": 0.12077690660953522,
      "learning_rate": 0.00019958883735494732,
      "loss": 0.6411,
      "step": 203
    },
    {
      "epoch": 0.030475052285628922,
      "grad_norm": 0.15071533620357513,
      "learning_rate": 0.00019958456852410433,
      "loss": 0.7742,
      "step": 204
    },
    {
      "epoch": 0.030624439796832985,
      "grad_norm": 0.1368575096130371,
      "learning_rate": 0.00019958027769345277,
      "loss": 1.1054,
      "step": 205
    },
    {
      "epoch": 0.030773827308037047,
      "grad_norm": 0.22218400239944458,
      "learning_rate": 0.0001995759648639406,
      "loss": 0.8127,
      "step": 206
    },
    {
      "epoch": 0.03092321481924111,
      "grad_norm": 0.12783220410346985,
      "learning_rate": 0.00019957163003652063,
      "loss": 0.6916,
      "step": 207
    },
    {
      "epoch": 0.031072602330445176,
      "grad_norm": 0.18626387417316437,
      "learning_rate": 0.00019956727321215044,
      "loss": 0.8217,
      "step": 208
    },
    {
      "epoch": 0.03122198984164924,
      "grad_norm": 0.12861424684524536,
      "learning_rate": 0.0001995628943917925,
      "loss": 1.0327,
      "step": 209
    },
    {
      "epoch": 0.0313713773528533,
      "grad_norm": 0.17585206031799316,
      "learning_rate": 0.00019955849357641424,
      "loss": 0.6836,
      "step": 210
    },
    {
      "epoch": 0.03152076486405737,
      "grad_norm": 0.1541229784488678,
      "learning_rate": 0.0001995540707669878,
      "loss": 1.1849,
      "step": 211
    },
    {
      "epoch": 0.031670152375261426,
      "grad_norm": 0.13117974996566772,
      "learning_rate": 0.00019954962596449024,
      "loss": 0.7779,
      "step": 212
    },
    {
      "epoch": 0.03181953988646549,
      "grad_norm": 0.3847130537033081,
      "learning_rate": 0.00019954515916990358,
      "loss": 1.4158,
      "step": 213
    },
    {
      "epoch": 0.03196892739766956,
      "grad_norm": 0.21019677817821503,
      "learning_rate": 0.0001995406703842145,
      "loss": 1.0807,
      "step": 214
    },
    {
      "epoch": 0.03211831490887362,
      "grad_norm": 0.10954124480485916,
      "learning_rate": 0.0001995361596084147,
      "loss": 0.7142,
      "step": 215
    },
    {
      "epoch": 0.03226770242007768,
      "grad_norm": 0.25598999857902527,
      "learning_rate": 0.0001995316268435007,
      "loss": 0.7528,
      "step": 216
    },
    {
      "epoch": 0.03241708993128174,
      "grad_norm": 0.16298946738243103,
      "learning_rate": 0.0001995270720904738,
      "loss": 0.6978,
      "step": 217
    },
    {
      "epoch": 0.03256647744248581,
      "grad_norm": 0.37723109126091003,
      "learning_rate": 0.00019952249535034025,
      "loss": 1.0603,
      "step": 218
    },
    {
      "epoch": 0.032715864953689874,
      "grad_norm": 0.1600012332201004,
      "learning_rate": 0.00019951789662411113,
      "loss": 0.7634,
      "step": 219
    },
    {
      "epoch": 0.03286525246489393,
      "grad_norm": 0.1618419587612152,
      "learning_rate": 0.00019951327591280236,
      "loss": 0.8068,
      "step": 220
    },
    {
      "epoch": 0.033014639976098,
      "grad_norm": 0.11850997805595398,
      "learning_rate": 0.00019950863321743475,
      "loss": 0.7883,
      "step": 221
    },
    {
      "epoch": 0.03316402748730206,
      "grad_norm": 0.18700255453586578,
      "learning_rate": 0.0001995039685390339,
      "loss": 0.5821,
      "step": 222
    },
    {
      "epoch": 0.033313414998506125,
      "grad_norm": 0.19264543056488037,
      "learning_rate": 0.00019949928187863036,
      "loss": 0.7578,
      "step": 223
    },
    {
      "epoch": 0.03346280250971019,
      "grad_norm": 0.23828521370887756,
      "learning_rate": 0.00019949457323725946,
      "loss": 1.1739,
      "step": 224
    },
    {
      "epoch": 0.03361219002091425,
      "grad_norm": 0.2178596705198288,
      "learning_rate": 0.0001994898426159614,
      "loss": 0.6075,
      "step": 225
    },
    {
      "epoch": 0.033761577532118316,
      "grad_norm": 0.13766655325889587,
      "learning_rate": 0.0001994850900157813,
      "loss": 0.6941,
      "step": 226
    },
    {
      "epoch": 0.03391096504332238,
      "grad_norm": 0.345782071352005,
      "learning_rate": 0.00019948031543776904,
      "loss": 1.0944,
      "step": 227
    },
    {
      "epoch": 0.03406035255452644,
      "grad_norm": 0.1483844369649887,
      "learning_rate": 0.0001994755188829794,
      "loss": 0.9752,
      "step": 228
    },
    {
      "epoch": 0.03420974006573051,
      "grad_norm": 0.16494938731193542,
      "learning_rate": 0.00019947070035247205,
      "loss": 0.7408,
      "step": 229
    },
    {
      "epoch": 0.034359127576934566,
      "grad_norm": 0.15683765709400177,
      "learning_rate": 0.00019946585984731142,
      "loss": 0.6498,
      "step": 230
    },
    {
      "epoch": 0.03450851508813863,
      "grad_norm": 0.17898918688297272,
      "learning_rate": 0.0001994609973685669,
      "loss": 0.7338,
      "step": 231
    },
    {
      "epoch": 0.0346579025993427,
      "grad_norm": 0.15439556539058685,
      "learning_rate": 0.00019945611291731274,
      "loss": 0.854,
      "step": 232
    },
    {
      "epoch": 0.03480729011054676,
      "grad_norm": 0.16742883622646332,
      "learning_rate": 0.0001994512064946279,
      "loss": 0.5851,
      "step": 233
    },
    {
      "epoch": 0.03495667762175082,
      "grad_norm": 0.17511384189128876,
      "learning_rate": 0.00019944627810159632,
      "loss": 0.569,
      "step": 234
    },
    {
      "epoch": 0.03510606513295488,
      "grad_norm": 0.26125359535217285,
      "learning_rate": 0.0001994413277393067,
      "loss": 0.862,
      "step": 235
    },
    {
      "epoch": 0.03525545264415895,
      "grad_norm": 0.1439584493637085,
      "learning_rate": 0.00019943635540885279,
      "loss": 1.1311,
      "step": 236
    },
    {
      "epoch": 0.035404840155363014,
      "grad_norm": 0.146185502409935,
      "learning_rate": 0.00019943136111133294,
      "loss": 0.9574,
      "step": 237
    },
    {
      "epoch": 0.03555422766656707,
      "grad_norm": 5.1866774559021,
      "learning_rate": 0.00019942634484785052,
      "loss": 3.2188,
      "step": 238
    },
    {
      "epoch": 0.03570361517777114,
      "grad_norm": 0.13537189364433289,
      "learning_rate": 0.00019942130661951372,
      "loss": 0.6154,
      "step": 239
    },
    {
      "epoch": 0.0358530026889752,
      "grad_norm": 0.13716812431812286,
      "learning_rate": 0.00019941624642743548,
      "loss": 0.7604,
      "step": 240
    },
    {
      "epoch": 0.036002390200179264,
      "grad_norm": 0.37018468976020813,
      "learning_rate": 0.0001994111642727338,
      "loss": 0.8598,
      "step": 241
    },
    {
      "epoch": 0.03615177771138333,
      "grad_norm": 0.1747826188802719,
      "learning_rate": 0.0001994060601565313,
      "loss": 0.6497,
      "step": 242
    },
    {
      "epoch": 0.03630116522258739,
      "grad_norm": 0.12225501239299774,
      "learning_rate": 0.0001994009340799556,
      "loss": 0.7278,
      "step": 243
    },
    {
      "epoch": 0.036450552733791455,
      "grad_norm": 0.17295411229133606,
      "learning_rate": 0.00019939578604413912,
      "loss": 0.8747,
      "step": 244
    },
    {
      "epoch": 0.03659994024499552,
      "grad_norm": 0.1815291941165924,
      "learning_rate": 0.00019939061605021917,
      "loss": 0.7242,
      "step": 245
    },
    {
      "epoch": 0.03674932775619958,
      "grad_norm": 0.25227025151252747,
      "learning_rate": 0.00019938542409933787,
      "loss": 0.7818,
      "step": 246
    },
    {
      "epoch": 0.036898715267403646,
      "grad_norm": 0.280819296836853,
      "learning_rate": 0.00019938021019264221,
      "loss": 0.7471,
      "step": 247
    },
    {
      "epoch": 0.037048102778607706,
      "grad_norm": 0.1746496856212616,
      "learning_rate": 0.000199374974331284,
      "loss": 0.9149,
      "step": 248
    },
    {
      "epoch": 0.03719749028981177,
      "grad_norm": 0.2506274878978729,
      "learning_rate": 0.00019936971651641995,
      "loss": 0.8666,
      "step": 249
    },
    {
      "epoch": 0.03734687780101584,
      "grad_norm": 0.12307952344417572,
      "learning_rate": 0.00019936443674921158,
      "loss": 0.7737,
      "step": 250
    },
    {
      "epoch": 0.0374962653122199,
      "grad_norm": 0.13615377247333527,
      "learning_rate": 0.0001993591350308253,
      "loss": 0.7592,
      "step": 251
    },
    {
      "epoch": 0.03764565282342396,
      "grad_norm": 0.16808447241783142,
      "learning_rate": 0.0001993538113624323,
      "loss": 0.8599,
      "step": 252
    },
    {
      "epoch": 0.03779504033462802,
      "grad_norm": 0.11544547975063324,
      "learning_rate": 0.00019934846574520872,
      "loss": 0.7348,
      "step": 253
    },
    {
      "epoch": 0.03794442784583209,
      "grad_norm": 0.20908010005950928,
      "learning_rate": 0.00019934309818033544,
      "loss": 0.6674,
      "step": 254
    },
    {
      "epoch": 0.038093815357036154,
      "grad_norm": 0.1379510909318924,
      "learning_rate": 0.00019933770866899825,
      "loss": 0.7295,
      "step": 255
    },
    {
      "epoch": 0.03824320286824021,
      "grad_norm": 0.15058402717113495,
      "learning_rate": 0.0001993322972123878,
      "loss": 1.1005,
      "step": 256
    },
    {
      "epoch": 0.03839259037944428,
      "grad_norm": 0.1941765695810318,
      "learning_rate": 0.00019932686381169955,
      "loss": 0.7658,
      "step": 257
    },
    {
      "epoch": 0.038541977890648345,
      "grad_norm": 0.1889600306749344,
      "learning_rate": 0.0001993214084681338,
      "loss": 0.9765,
      "step": 258
    },
    {
      "epoch": 0.038691365401852404,
      "grad_norm": 0.1466747522354126,
      "learning_rate": 0.00019931593118289578,
      "loss": 0.5899,
      "step": 259
    },
    {
      "epoch": 0.03884075291305647,
      "grad_norm": 0.2811049520969391,
      "learning_rate": 0.00019931043195719548,
      "loss": 0.8255,
      "step": 260
    },
    {
      "epoch": 0.03899014042426053,
      "grad_norm": 0.1345093995332718,
      "learning_rate": 0.00019930491079224772,
      "loss": 0.9015,
      "step": 261
    },
    {
      "epoch": 0.039139527935464595,
      "grad_norm": 0.9843081831932068,
      "learning_rate": 0.00019929936768927232,
      "loss": 1.5562,
      "step": 262
    },
    {
      "epoch": 0.03928891544666866,
      "grad_norm": 0.18343736231327057,
      "learning_rate": 0.00019929380264949376,
      "loss": 0.8783,
      "step": 263
    },
    {
      "epoch": 0.03943830295787272,
      "grad_norm": 0.29955071210861206,
      "learning_rate": 0.00019928821567414144,
      "loss": 1.0868,
      "step": 264
    },
    {
      "epoch": 0.039587690469076786,
      "grad_norm": 0.16872237622737885,
      "learning_rate": 0.00019928260676444965,
      "loss": 0.7375,
      "step": 265
    },
    {
      "epoch": 0.039737077980280845,
      "grad_norm": 0.1343865543603897,
      "learning_rate": 0.00019927697592165747,
      "loss": 1.0279,
      "step": 266
    },
    {
      "epoch": 0.03988646549148491,
      "grad_norm": 0.2587420642375946,
      "learning_rate": 0.00019927132314700885,
      "loss": 0.8529,
      "step": 267
    },
    {
      "epoch": 0.04003585300268898,
      "grad_norm": 0.23096689581871033,
      "learning_rate": 0.00019926564844175256,
      "loss": 0.8726,
      "step": 268
    },
    {
      "epoch": 0.040185240513893036,
      "grad_norm": 0.13639822602272034,
      "learning_rate": 0.00019925995180714224,
      "loss": 0.6957,
      "step": 269
    },
    {
      "epoch": 0.0403346280250971,
      "grad_norm": 0.13751177489757538,
      "learning_rate": 0.00019925423324443638,
      "loss": 0.7239,
      "step": 270
    },
    {
      "epoch": 0.04048401553630117,
      "grad_norm": 0.12963712215423584,
      "learning_rate": 0.0001992484927548983,
      "loss": 0.686,
      "step": 271
    },
    {
      "epoch": 0.04063340304750523,
      "grad_norm": 0.13808754086494446,
      "learning_rate": 0.00019924273033979613,
      "loss": 0.973,
      "step": 272
    },
    {
      "epoch": 0.04078279055870929,
      "grad_norm": 0.10808500647544861,
      "learning_rate": 0.0001992369460004029,
      "loss": 0.6725,
      "step": 273
    },
    {
      "epoch": 0.04093217806991335,
      "grad_norm": 0.14959432184696198,
      "learning_rate": 0.0001992311397379965,
      "loss": 0.9753,
      "step": 274
    },
    {
      "epoch": 0.04108156558111742,
      "grad_norm": 0.21491862833499908,
      "learning_rate": 0.00019922531155385954,
      "loss": 0.9506,
      "step": 275
    },
    {
      "epoch": 0.041230953092321485,
      "grad_norm": 0.16956715285778046,
      "learning_rate": 0.00019921946144927966,
      "loss": 1.0777,
      "step": 276
    },
    {
      "epoch": 0.041380340603525544,
      "grad_norm": 0.17658768594264984,
      "learning_rate": 0.00019921358942554917,
      "loss": 0.6994,
      "step": 277
    },
    {
      "epoch": 0.04152972811472961,
      "grad_norm": 0.20894332230091095,
      "learning_rate": 0.0001992076954839653,
      "loss": 1.0217,
      "step": 278
    },
    {
      "epoch": 0.04167911562593367,
      "grad_norm": 0.15689632296562195,
      "learning_rate": 0.00019920177962583015,
      "loss": 0.4844,
      "step": 279
    },
    {
      "epoch": 0.041828503137137735,
      "grad_norm": 0.24510236084461212,
      "learning_rate": 0.00019919584185245062,
      "loss": 0.8981,
      "step": 280
    },
    {
      "epoch": 0.0419778906483418,
      "grad_norm": 0.1385307013988495,
      "learning_rate": 0.00019918988216513844,
      "loss": 0.9278,
      "step": 281
    },
    {
      "epoch": 0.04212727815954586,
      "grad_norm": 0.15266434848308563,
      "learning_rate": 0.00019918390056521018,
      "loss": 0.7803,
      "step": 282
    },
    {
      "epoch": 0.042276665670749926,
      "grad_norm": 0.920637309551239,
      "learning_rate": 0.00019917789705398728,
      "loss": 2.0334,
      "step": 283
    },
    {
      "epoch": 0.04242605318195399,
      "grad_norm": 0.16596724092960358,
      "learning_rate": 0.00019917187163279605,
      "loss": 0.5195,
      "step": 284
    },
    {
      "epoch": 0.04257544069315805,
      "grad_norm": 0.21556362509727478,
      "learning_rate": 0.00019916582430296758,
      "loss": 1.0858,
      "step": 285
    },
    {
      "epoch": 0.04272482820436212,
      "grad_norm": 0.13357070088386536,
      "learning_rate": 0.00019915975506583778,
      "loss": 0.7042,
      "step": 286
    },
    {
      "epoch": 0.042874215715566176,
      "grad_norm": 0.13493870198726654,
      "learning_rate": 0.00019915366392274752,
      "loss": 0.7329,
      "step": 287
    },
    {
      "epoch": 0.04302360322677024,
      "grad_norm": 0.13088954985141754,
      "learning_rate": 0.00019914755087504236,
      "loss": 0.6911,
      "step": 288
    },
    {
      "epoch": 0.04317299073797431,
      "grad_norm": 0.12659801542758942,
      "learning_rate": 0.0001991414159240728,
      "loss": 0.9972,
      "step": 289
    },
    {
      "epoch": 0.04332237824917837,
      "grad_norm": 0.14888939261436462,
      "learning_rate": 0.00019913525907119418,
      "loss": 0.917,
      "step": 290
    },
    {
      "epoch": 0.04347176576038243,
      "grad_norm": 0.15505638718605042,
      "learning_rate": 0.00019912908031776655,
      "loss": 0.5189,
      "step": 291
    },
    {
      "epoch": 0.04362115327158649,
      "grad_norm": 0.1389274001121521,
      "learning_rate": 0.000199122879665155,
      "loss": 0.9571,
      "step": 292
    },
    {
      "epoch": 0.04377054078279056,
      "grad_norm": 0.5212099552154541,
      "learning_rate": 0.0001991166571147293,
      "loss": 1.7933,
      "step": 293
    },
    {
      "epoch": 0.043919928293994624,
      "grad_norm": 0.15124285221099854,
      "learning_rate": 0.0001991104126678641,
      "loss": 0.8451,
      "step": 294
    },
    {
      "epoch": 0.04406931580519868,
      "grad_norm": 0.12665098905563354,
      "learning_rate": 0.0001991041463259389,
      "loss": 0.8399,
      "step": 295
    },
    {
      "epoch": 0.04421870331640275,
      "grad_norm": 0.41863369941711426,
      "learning_rate": 0.00019909785809033806,
      "loss": 0.6476,
      "step": 296
    },
    {
      "epoch": 0.044368090827606815,
      "grad_norm": 0.12471672147512436,
      "learning_rate": 0.00019909154796245076,
      "loss": 0.8806,
      "step": 297
    },
    {
      "epoch": 0.044517478338810874,
      "grad_norm": 0.21696336567401886,
      "learning_rate": 0.00019908521594367098,
      "loss": 1.0237,
      "step": 298
    },
    {
      "epoch": 0.04466686585001494,
      "grad_norm": 0.10489077866077423,
      "learning_rate": 0.00019907886203539757,
      "loss": 0.5955,
      "step": 299
    },
    {
      "epoch": 0.044816253361219,
      "grad_norm": 0.17574696242809296,
      "learning_rate": 0.0001990724862390342,
      "loss": 0.669,
      "step": 300
    },
    {
      "epoch": 0.044965640872423066,
      "grad_norm": 0.16877828538417816,
      "learning_rate": 0.00019906608855598939,
      "loss": 0.8991,
      "step": 301
    },
    {
      "epoch": 0.04511502838362713,
      "grad_norm": 0.16386249661445618,
      "learning_rate": 0.0001990596689876765,
      "loss": 0.9378,
      "step": 302
    },
    {
      "epoch": 0.04526441589483119,
      "grad_norm": 0.1148744598031044,
      "learning_rate": 0.00019905322753551368,
      "loss": 0.5595,
      "step": 303
    },
    {
      "epoch": 0.04541380340603526,
      "grad_norm": 0.1777895838022232,
      "learning_rate": 0.00019904676420092404,
      "loss": 0.9161,
      "step": 304
    },
    {
      "epoch": 0.045563190917239316,
      "grad_norm": 0.14002953469753265,
      "learning_rate": 0.0001990402789853353,
      "loss": 0.6209,
      "step": 305
    },
    {
      "epoch": 0.04571257842844338,
      "grad_norm": 0.3308524489402771,
      "learning_rate": 0.00019903377189018024,
      "loss": 0.9947,
      "step": 306
    },
    {
      "epoch": 0.04586196593964745,
      "grad_norm": 0.16171899437904358,
      "learning_rate": 0.00019902724291689637,
      "loss": 0.9893,
      "step": 307
    },
    {
      "epoch": 0.04601135345085151,
      "grad_norm": 0.1441776603460312,
      "learning_rate": 0.000199020692066926,
      "loss": 0.8799,
      "step": 308
    },
    {
      "epoch": 0.04616074096205557,
      "grad_norm": 0.12858013808727264,
      "learning_rate": 0.00019901411934171638,
      "loss": 0.6444,
      "step": 309
    },
    {
      "epoch": 0.04631012847325964,
      "grad_norm": 0.14368069171905518,
      "learning_rate": 0.00019900752474271945,
      "loss": 0.4694,
      "step": 310
    },
    {
      "epoch": 0.0464595159844637,
      "grad_norm": 0.16599714756011963,
      "learning_rate": 0.00019900090827139214,
      "loss": 0.7362,
      "step": 311
    },
    {
      "epoch": 0.046608903495667764,
      "grad_norm": 0.18922938406467438,
      "learning_rate": 0.0001989942699291961,
      "loss": 0.9204,
      "step": 312
    },
    {
      "epoch": 0.04675829100687182,
      "grad_norm": 0.17513027787208557,
      "learning_rate": 0.00019898760971759783,
      "loss": 0.5341,
      "step": 313
    },
    {
      "epoch": 0.04690767851807589,
      "grad_norm": 0.19131267070770264,
      "learning_rate": 0.0001989809276380687,
      "loss": 1.0719,
      "step": 314
    },
    {
      "epoch": 0.047057066029279955,
      "grad_norm": 0.1646454632282257,
      "learning_rate": 0.00019897422369208488,
      "loss": 0.9124,
      "step": 315
    },
    {
      "epoch": 0.047206453540484014,
      "grad_norm": 0.17317859828472137,
      "learning_rate": 0.00019896749788112737,
      "loss": 0.7388,
      "step": 316
    },
    {
      "epoch": 0.04735584105168808,
      "grad_norm": 0.2774486839771271,
      "learning_rate": 0.00019896075020668202,
      "loss": 0.8183,
      "step": 317
    },
    {
      "epoch": 0.04750522856289214,
      "grad_norm": 0.14470195770263672,
      "learning_rate": 0.0001989539806702395,
      "loss": 0.8357,
      "step": 318
    },
    {
      "epoch": 0.047654616074096205,
      "grad_norm": 0.1452193409204483,
      "learning_rate": 0.00019894718927329524,
      "loss": 0.9126,
      "step": 319
    },
    {
      "epoch": 0.04780400358530027,
      "grad_norm": 0.15406997501850128,
      "learning_rate": 0.0001989403760173497,
      "loss": 0.6407,
      "step": 320
    },
    {
      "epoch": 0.04795339109650433,
      "grad_norm": 0.2753758728504181,
      "learning_rate": 0.00019893354090390791,
      "loss": 1.1197,
      "step": 321
    },
    {
      "epoch": 0.048102778607708396,
      "grad_norm": 0.14286638796329498,
      "learning_rate": 0.00019892668393447997,
      "loss": 0.7519,
      "step": 322
    },
    {
      "epoch": 0.04825216611891246,
      "grad_norm": 0.27783840894699097,
      "learning_rate": 0.0001989198051105806,
      "loss": 0.8173,
      "step": 323
    },
    {
      "epoch": 0.04840155363011652,
      "grad_norm": 0.14072780311107635,
      "learning_rate": 0.00019891290443372944,
      "loss": 0.7387,
      "step": 324
    },
    {
      "epoch": 0.04855094114132059,
      "grad_norm": 0.17845518887043,
      "learning_rate": 0.00019890598190545102,
      "loss": 0.9028,
      "step": 325
    },
    {
      "epoch": 0.048700328652524646,
      "grad_norm": 0.16201937198638916,
      "learning_rate": 0.0001988990375272746,
      "loss": 0.6775,
      "step": 326
    },
    {
      "epoch": 0.04884971616372871,
      "grad_norm": 0.3842596709728241,
      "learning_rate": 0.00019889207130073432,
      "loss": 1.166,
      "step": 327
    },
    {
      "epoch": 0.04899910367493278,
      "grad_norm": 0.1811336725950241,
      "learning_rate": 0.0001988850832273691,
      "loss": 0.5225,
      "step": 328
    },
    {
      "epoch": 0.04914849118613684,
      "grad_norm": 0.1348968744277954,
      "learning_rate": 0.0001988780733087228,
      "loss": 0.6588,
      "step": 329
    },
    {
      "epoch": 0.049297878697340904,
      "grad_norm": 0.15417031943798065,
      "learning_rate": 0.0001988710415463439,
      "loss": 1.0178,
      "step": 330
    },
    {
      "epoch": 0.04944726620854496,
      "grad_norm": 0.1732081174850464,
      "learning_rate": 0.0001988639879417859,
      "loss": 0.9023,
      "step": 331
    },
    {
      "epoch": 0.04959665371974903,
      "grad_norm": 0.20192794501781464,
      "learning_rate": 0.00019885691249660702,
      "loss": 0.9105,
      "step": 332
    },
    {
      "epoch": 0.049746041230953095,
      "grad_norm": 0.17755641043186188,
      "learning_rate": 0.0001988498152123704,
      "loss": 0.9222,
      "step": 333
    },
    {
      "epoch": 0.049895428742157154,
      "grad_norm": 0.10902781784534454,
      "learning_rate": 0.00019884269609064386,
      "loss": 0.6073,
      "step": 334
    },
    {
      "epoch": 0.05004481625336122,
      "grad_norm": 0.19006772339344025,
      "learning_rate": 0.00019883555513300019,
      "loss": 1.1671,
      "step": 335
    },
    {
      "epoch": 0.05019420376456528,
      "grad_norm": 0.1430109441280365,
      "learning_rate": 0.0001988283923410169,
      "loss": 0.9732,
      "step": 336
    },
    {
      "epoch": 0.050343591275769345,
      "grad_norm": 0.2234259843826294,
      "learning_rate": 0.00019882120771627638,
      "loss": 1.3184,
      "step": 337
    },
    {
      "epoch": 0.05049297878697341,
      "grad_norm": 0.1166071966290474,
      "learning_rate": 0.00019881400126036582,
      "loss": 0.6163,
      "step": 338
    },
    {
      "epoch": 0.05064236629817747,
      "grad_norm": 0.12911243736743927,
      "learning_rate": 0.0001988067729748773,
      "loss": 0.9934,
      "step": 339
    },
    {
      "epoch": 0.050791753809381536,
      "grad_norm": 0.14424997568130493,
      "learning_rate": 0.00019879952286140754,
      "loss": 0.6436,
      "step": 340
    },
    {
      "epoch": 0.0509411413205856,
      "grad_norm": 0.17660249769687653,
      "learning_rate": 0.00019879225092155834,
      "loss": 0.8772,
      "step": 341
    },
    {
      "epoch": 0.05109052883178966,
      "grad_norm": 0.126225546002388,
      "learning_rate": 0.0001987849571569361,
      "loss": 0.5238,
      "step": 342
    },
    {
      "epoch": 0.05123991634299373,
      "grad_norm": 0.1732265204191208,
      "learning_rate": 0.00019877764156915213,
      "loss": 0.7972,
      "step": 343
    },
    {
      "epoch": 0.051389303854197786,
      "grad_norm": 0.2713650166988373,
      "learning_rate": 0.0001987703041598226,
      "loss": 0.9757,
      "step": 344
    },
    {
      "epoch": 0.05153869136540185,
      "grad_norm": 0.1454247683286667,
      "learning_rate": 0.00019876294493056845,
      "loss": 0.9633,
      "step": 345
    },
    {
      "epoch": 0.05168807887660592,
      "grad_norm": 0.12023693323135376,
      "learning_rate": 0.00019875556388301543,
      "loss": 0.5894,
      "step": 346
    },
    {
      "epoch": 0.05183746638780998,
      "grad_norm": 0.15992896258831024,
      "learning_rate": 0.0001987481610187941,
      "loss": 0.7281,
      "step": 347
    },
    {
      "epoch": 0.05198685389901404,
      "grad_norm": 0.15995372831821442,
      "learning_rate": 0.00019874073633953997,
      "loss": 0.6077,
      "step": 348
    },
    {
      "epoch": 0.0521362414102181,
      "grad_norm": 0.12626518309116364,
      "learning_rate": 0.0001987332898468932,
      "loss": 0.8621,
      "step": 349
    },
    {
      "epoch": 0.05228562892142217,
      "grad_norm": 0.1364617496728897,
      "learning_rate": 0.00019872582154249884,
      "loss": 0.7432,
      "step": 350
    },
    {
      "epoch": 0.052435016432626234,
      "grad_norm": 0.17311328649520874,
      "learning_rate": 0.00019871833142800675,
      "loss": 0.7599,
      "step": 351
    },
    {
      "epoch": 0.052584403943830293,
      "grad_norm": 0.18369294703006744,
      "learning_rate": 0.00019871081950507163,
      "loss": 0.7234,
      "step": 352
    },
    {
      "epoch": 0.05273379145503436,
      "grad_norm": 0.12449700385332108,
      "learning_rate": 0.00019870328577535303,
      "loss": 0.8287,
      "step": 353
    },
    {
      "epoch": 0.052883178966238426,
      "grad_norm": 0.10272317379713058,
      "learning_rate": 0.00019869573024051517,
      "loss": 0.5652,
      "step": 354
    },
    {
      "epoch": 0.053032566477442485,
      "grad_norm": 0.16783328354358673,
      "learning_rate": 0.00019868815290222726,
      "loss": 0.5231,
      "step": 355
    },
    {
      "epoch": 0.05318195398864655,
      "grad_norm": 0.26311105489730835,
      "learning_rate": 0.00019868055376216323,
      "loss": 0.6964,
      "step": 356
    },
    {
      "epoch": 0.05333134149985061,
      "grad_norm": 0.32151147723197937,
      "learning_rate": 0.00019867293282200188,
      "loss": 1.0218,
      "step": 357
    },
    {
      "epoch": 0.053480729011054676,
      "grad_norm": 0.23724707961082458,
      "learning_rate": 0.00019866529008342673,
      "loss": 1.0303,
      "step": 358
    },
    {
      "epoch": 0.05363011652225874,
      "grad_norm": 0.1722903996706009,
      "learning_rate": 0.00019865762554812624,
      "loss": 0.9047,
      "step": 359
    },
    {
      "epoch": 0.0537795040334628,
      "grad_norm": 0.20011122524738312,
      "learning_rate": 0.00019864993921779361,
      "loss": 0.8151,
      "step": 360
    },
    {
      "epoch": 0.05392889154466687,
      "grad_norm": 0.13171933591365814,
      "learning_rate": 0.0001986422310941269,
      "loss": 0.7425,
      "step": 361
    },
    {
      "epoch": 0.054078279055870926,
      "grad_norm": 0.14532333612442017,
      "learning_rate": 0.0001986345011788289,
      "loss": 0.8868,
      "step": 362
    },
    {
      "epoch": 0.05422766656707499,
      "grad_norm": 0.17356187105178833,
      "learning_rate": 0.00019862674947360729,
      "loss": 0.4887,
      "step": 363
    },
    {
      "epoch": 0.05437705407827906,
      "grad_norm": 0.24108925461769104,
      "learning_rate": 0.00019861897598017457,
      "loss": 1.1633,
      "step": 364
    },
    {
      "epoch": 0.05452644158948312,
      "grad_norm": 0.16384924948215485,
      "learning_rate": 0.00019861118070024802,
      "loss": 0.8454,
      "step": 365
|
}, |
|
{ |
|
"epoch": 0.05467582910068718, |
|
"grad_norm": 0.1604813188314438, |
|
"learning_rate": 0.00019860336363554973, |
|
"loss": 0.6332, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.05482521661189125, |
|
"grad_norm": 0.19130012392997742, |
|
"learning_rate": 0.00019859552478780659, |
|
"loss": 0.9221, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.05497460412309531, |
|
"grad_norm": 0.22768980264663696, |
|
"learning_rate": 0.0001985876641587504, |
|
"loss": 0.9983, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.055123991634299374, |
|
"grad_norm": 0.13694654405117035, |
|
"learning_rate": 0.0001985797817501176, |
|
"loss": 0.633, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.05527337914550343, |
|
"grad_norm": 0.1287558525800705, |
|
"learning_rate": 0.00019857187756364958, |
|
"loss": 0.6729, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.0554227666567075, |
|
"grad_norm": 0.12864962220191956, |
|
"learning_rate": 0.00019856395160109256, |
|
"loss": 0.8324, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.055572154167911565, |
|
"grad_norm": 0.15732339024543762, |
|
"learning_rate": 0.00019855600386419744, |
|
"loss": 0.7524, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.055721541679115624, |
|
"grad_norm": 0.19376401603221893, |
|
"learning_rate": 0.00019854803435472, |
|
"loss": 0.9524, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.05587092919031969, |
|
"grad_norm": 0.17447201907634735, |
|
"learning_rate": 0.00019854004307442088, |
|
"loss": 0.9536, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.05602031670152375, |
|
"grad_norm": 0.1653999537229538, |
|
"learning_rate": 0.00019853203002506543, |
|
"loss": 0.6763, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.056169704212727815, |
|
"grad_norm": 0.21575714647769928, |
|
"learning_rate": 0.0001985239952084239, |
|
"loss": 1.2311, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.05631909172393188, |
|
"grad_norm": 0.18763162195682526, |
|
"learning_rate": 0.0001985159386262713, |
|
"loss": 0.7904, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.05646847923513594, |
|
"grad_norm": 0.14876295626163483, |
|
"learning_rate": 0.0001985078602803874, |
|
"loss": 0.7818, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.056617866746340006, |
|
"grad_norm": 0.15621663630008698, |
|
"learning_rate": 0.0001984997601725569, |
|
"loss": 0.8913, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.05676725425754407, |
|
"grad_norm": 0.154473677277565, |
|
"learning_rate": 0.00019849163830456922, |
|
"loss": 0.5824, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.05691664176874813, |
|
"grad_norm": 0.17189285159111023, |
|
"learning_rate": 0.00019848349467821864, |
|
"loss": 0.7574, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.0570660292799522, |
|
"grad_norm": 0.17504605650901794, |
|
"learning_rate": 0.00019847532929530415, |
|
"loss": 0.746, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.05721541679115626, |
|
"grad_norm": 0.14172236621379852, |
|
"learning_rate": 0.00019846714215762966, |
|
"loss": 0.9479, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.05736480430236032, |
|
"grad_norm": 0.18013326823711395, |
|
"learning_rate": 0.00019845893326700384, |
|
"loss": 0.7438, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.05751419181356439, |
|
"grad_norm": 0.12909965217113495, |
|
"learning_rate": 0.00019845070262524016, |
|
"loss": 0.6857, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.05766357932476845, |
|
"grad_norm": 0.15466244518756866, |
|
"learning_rate": 0.00019844245023415685, |
|
"loss": 0.9448, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.057812966835972514, |
|
"grad_norm": 0.6459704637527466, |
|
"learning_rate": 0.0001984341760955771, |
|
"loss": 1.7624, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.05796235434717657, |
|
"grad_norm": 0.12828199565410614, |
|
"learning_rate": 0.0001984258802113287, |
|
"loss": 0.6321, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.05811174185838064, |
|
"grad_norm": 0.19720108807086945, |
|
"learning_rate": 0.0001984175625832444, |
|
"loss": 0.742, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.058261129369584705, |
|
"grad_norm": 0.1498088240623474, |
|
"learning_rate": 0.0001984092232131616, |
|
"loss": 0.8326, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.058410516880788764, |
|
"grad_norm": 0.21480423212051392, |
|
"learning_rate": 0.0001984008621029227, |
|
"loss": 0.7597, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.05855990439199283, |
|
"grad_norm": 0.29573026299476624, |
|
"learning_rate": 0.0001983924792543748, |
|
"loss": 1.3511, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.058709291903196896, |
|
"grad_norm": 0.1259469836950302, |
|
"learning_rate": 0.0001983840746693698, |
|
"loss": 0.7294, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.058858679414400955, |
|
"grad_norm": 0.14026756584644318, |
|
"learning_rate": 0.00019837564834976432, |
|
"loss": 0.6073, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.05900806692560502, |
|
"grad_norm": 0.1396723836660385, |
|
"learning_rate": 0.00019836720029741995, |
|
"loss": 0.532, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.05915745443680908, |
|
"grad_norm": 0.33731967210769653, |
|
"learning_rate": 0.000198358730514203, |
|
"loss": 1.0597, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.059306841948013146, |
|
"grad_norm": 0.14965583384037018, |
|
"learning_rate": 0.00019835023900198454, |
|
"loss": 0.9816, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.05945622945921721, |
|
"grad_norm": 0.18267478048801422, |
|
"learning_rate": 0.0001983417257626405, |
|
"loss": 0.7945, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.05960561697042127, |
|
"grad_norm": 0.39048805832862854, |
|
"learning_rate": 0.0001983331907980516, |
|
"loss": 1.1221, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.05975500448162534, |
|
"grad_norm": 0.11438261717557907, |
|
"learning_rate": 0.00019832463411010331, |
|
"loss": 0.6491, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.059904391992829396, |
|
"grad_norm": 0.1477927714586258, |
|
"learning_rate": 0.00019831605570068596, |
|
"loss": 0.8062, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.06005377950403346, |
|
"grad_norm": 0.12166056036949158, |
|
"learning_rate": 0.0001983074555716947, |
|
"loss": 0.8609, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.06020316701523753, |
|
"grad_norm": 0.13299763202667236, |
|
"learning_rate": 0.00019829883372502935, |
|
"loss": 0.9124, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.06035255452644159, |
|
"grad_norm": 0.5741954445838928, |
|
"learning_rate": 0.00019829019016259468, |
|
"loss": 1.4774, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.060501942037645653, |
|
"grad_norm": 0.12422844022512436, |
|
"learning_rate": 0.00019828152488630016, |
|
"loss": 0.7628, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.06065132954884972, |
|
"grad_norm": 0.22834278643131256, |
|
"learning_rate": 0.00019827283789806011, |
|
"loss": 1.0135, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.06080071706005378, |
|
"grad_norm": 0.2153419405221939, |
|
"learning_rate": 0.00019826412919979358, |
|
"loss": 1.0723, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.060950104571257845, |
|
"grad_norm": 0.18992449343204498, |
|
"learning_rate": 0.0001982553987934245, |
|
"loss": 0.5903, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.061099492082461904, |
|
"grad_norm": 0.14883364737033844, |
|
"learning_rate": 0.00019824664668088155, |
|
"loss": 0.7192, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.06124887959366597, |
|
"grad_norm": 0.12347893416881561, |
|
"learning_rate": 0.0001982378728640982, |
|
"loss": 0.785, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.061398267104870036, |
|
"grad_norm": 0.14353862404823303, |
|
"learning_rate": 0.0001982290773450127, |
|
"loss": 0.6293, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.061547654616074095, |
|
"grad_norm": 0.13001669943332672, |
|
"learning_rate": 0.00019822026012556818, |
|
"loss": 0.7083, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.06169704212727816, |
|
"grad_norm": 0.15990330278873444, |
|
"learning_rate": 0.00019821142120771246, |
|
"loss": 0.6908, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.06184642963848222, |
|
"grad_norm": 0.27101776003837585, |
|
"learning_rate": 0.0001982025605933982, |
|
"loss": 0.7311, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.061995817149686286, |
|
"grad_norm": 0.3568134903907776, |
|
"learning_rate": 0.00019819367828458287, |
|
"loss": 1.0127, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.06214520466089035, |
|
"grad_norm": 0.13109736144542694, |
|
"learning_rate": 0.0001981847742832287, |
|
"loss": 0.8776, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.06229459217209441, |
|
"grad_norm": 0.1522371917963028, |
|
"learning_rate": 0.0001981758485913027, |
|
"loss": 0.9513, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.06244397968329848, |
|
"grad_norm": 0.15717531740665436, |
|
"learning_rate": 0.00019816690121077674, |
|
"loss": 1.084, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.06259336719450254, |
|
"grad_norm": 0.26732301712036133, |
|
"learning_rate": 0.00019815793214362742, |
|
"loss": 1.0219, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.0627427547057066, |
|
"grad_norm": 0.1595582216978073, |
|
"learning_rate": 0.00019814894139183614, |
|
"loss": 0.7484, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.06289214221691067, |
|
"grad_norm": 0.24443097412586212, |
|
"learning_rate": 0.00019813992895738908, |
|
"loss": 1.1702, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.06304152972811473, |
|
"grad_norm": 0.12789739668369293, |
|
"learning_rate": 0.00019813089484227732, |
|
"loss": 0.9188, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.06319091723931879, |
|
"grad_norm": 0.13589276373386383, |
|
"learning_rate": 0.00019812183904849653, |
|
"loss": 0.5864, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.06334030475052285, |
|
"grad_norm": 0.20833753049373627, |
|
"learning_rate": 0.00019811276157804733, |
|
"loss": 0.9182, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.06348969226172692, |
|
"grad_norm": 0.16936369240283966, |
|
"learning_rate": 0.0001981036624329351, |
|
"loss": 0.7968, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.06363907977293098, |
|
"grad_norm": 0.21851451694965363, |
|
"learning_rate": 0.00019809454161516993, |
|
"loss": 0.6956, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.06378846728413505, |
|
"grad_norm": 0.15087386965751648, |
|
"learning_rate": 0.0001980853991267668, |
|
"loss": 1.0246, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.06393785479533912, |
|
"grad_norm": 0.12114626169204712, |
|
"learning_rate": 0.00019807623496974537, |
|
"loss": 0.7678, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.06408724230654317, |
|
"grad_norm": 0.20948128402233124, |
|
"learning_rate": 0.00019806704914613024, |
|
"loss": 1.1529, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.06423662981774723, |
|
"grad_norm": 0.1780691146850586, |
|
"learning_rate": 0.0001980578416579506, |
|
"loss": 0.9683, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.0643860173289513, |
|
"grad_norm": 0.14259085059165955, |
|
"learning_rate": 0.00019804861250724063, |
|
"loss": 0.4694, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.06453540484015537, |
|
"grad_norm": 0.2003784030675888, |
|
"learning_rate": 0.00019803936169603912, |
|
"loss": 0.6101, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.06468479235135943, |
|
"grad_norm": 0.19409967958927155, |
|
"learning_rate": 0.00019803008922638976, |
|
"loss": 1.2219, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.06483417986256348, |
|
"grad_norm": 0.12308470904827118, |
|
"learning_rate": 0.00019802079510034096, |
|
"loss": 0.8568, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.06498356737376755, |
|
"grad_norm": 0.3875686228275299, |
|
"learning_rate": 0.00019801147931994596, |
|
"loss": 1.027, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.06513295488497162, |
|
"grad_norm": 0.21796062588691711, |
|
"learning_rate": 0.00019800214188726276, |
|
"loss": 1.2517, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.06528234239617568, |
|
"grad_norm": 0.14813588559627533, |
|
"learning_rate": 0.00019799278280435413, |
|
"loss": 0.8185, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.06543172990737975, |
|
"grad_norm": 0.215727299451828, |
|
"learning_rate": 0.00019798340207328766, |
|
"loss": 1.3439, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.0655811174185838, |
|
"grad_norm": 0.25380611419677734, |
|
"learning_rate": 0.0001979739996961357, |
|
"loss": 1.2215, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.06573050492978787, |
|
"grad_norm": 0.16782647371292114, |
|
"learning_rate": 0.00019796457567497537, |
|
"loss": 0.9321, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.06587989244099193, |
|
"grad_norm": 0.13334587216377258, |
|
"learning_rate": 0.0001979551300118886, |
|
"loss": 0.8722, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.066029279952196, |
|
"grad_norm": 0.27328580617904663, |
|
"learning_rate": 0.0001979456627089621, |
|
"loss": 0.9013, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.06617866746340006, |
|
"grad_norm": 0.10864201933145523, |
|
"learning_rate": 0.0001979361737682873, |
|
"loss": 0.6201, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.06632805497460412, |
|
"grad_norm": 0.1800958216190338, |
|
"learning_rate": 0.0001979266631919605, |
|
"loss": 0.727, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.06647744248580818, |
|
"grad_norm": 0.14912551641464233, |
|
"learning_rate": 0.00019791713098208272, |
|
"loss": 0.6877, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.06662682999701225, |
|
"grad_norm": 0.3135051727294922, |
|
"learning_rate": 0.00019790757714075979, |
|
"loss": 0.9517, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.06677621750821632, |
|
"grad_norm": 0.23976337909698486, |
|
"learning_rate": 0.0001978980016701023, |
|
"loss": 0.9952, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.06692560501942038, |
|
"grad_norm": 0.16540992259979248, |
|
"learning_rate": 0.00019788840457222556, |
|
"loss": 0.796, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.06707499253062443, |
|
"grad_norm": 0.19656099379062653, |
|
"learning_rate": 0.00019787878584924984, |
|
"loss": 0.7593, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.0672243800418285, |
|
"grad_norm": 0.12927167117595673, |
|
"learning_rate": 0.0001978691455033, |
|
"loss": 0.819, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.06737376755303257, |
|
"grad_norm": 0.19154123961925507, |
|
"learning_rate": 0.00019785948353650572, |
|
"loss": 0.9713, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.06752315506423663, |
|
"grad_norm": 0.12065298110246658, |
|
"learning_rate": 0.0001978497999510015, |
|
"loss": 0.7942, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.0676725425754407, |
|
"grad_norm": 0.1475251317024231, |
|
"learning_rate": 0.00019784009474892666, |
|
"loss": 0.8598, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.06782193008664476, |
|
"grad_norm": 0.14072950184345245, |
|
"learning_rate": 0.00019783036793242516, |
|
"loss": 0.8956, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.06797131759784882, |
|
"grad_norm": 0.20561757683753967, |
|
"learning_rate": 0.00019782061950364584, |
|
"loss": 0.9214, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.06812070510905288, |
|
"grad_norm": 0.1547221839427948, |
|
"learning_rate": 0.00019781084946474226, |
|
"loss": 0.9042, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.06827009262025695, |
|
"grad_norm": 0.233870729804039, |
|
"learning_rate": 0.0001978010578178728, |
|
"loss": 1.0727, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.06841948013146101, |
|
"grad_norm": 0.19565890729427338, |
|
"learning_rate": 0.00019779124456520056, |
|
"loss": 0.8425, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.06856886764266508, |
|
"grad_norm": 0.15683157742023468, |
|
"learning_rate": 0.00019778140970889348, |
|
"loss": 0.7989, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.06871825515386913, |
|
"grad_norm": 0.12861517071723938, |
|
"learning_rate": 0.0001977715532511242, |
|
"loss": 0.8849, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.0688676426650732, |
|
"grad_norm": 0.12733756005764008, |
|
"learning_rate": 0.00019776167519407022, |
|
"loss": 0.7984, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.06901703017627726, |
|
"grad_norm": 0.14494583010673523, |
|
"learning_rate": 0.0001977517755399137, |
|
"loss": 0.8234, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.06916641768748133, |
|
"grad_norm": 0.15694382786750793, |
|
"learning_rate": 0.00019774185429084166, |
|
"loss": 0.8476, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.0693158051986854, |
|
"grad_norm": 0.2537318766117096, |
|
"learning_rate": 0.00019773191144904586, |
|
"loss": 0.9928, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.06946519270988945, |
|
"grad_norm": 0.45019641518592834, |
|
"learning_rate": 0.0001977219470167228, |
|
"loss": 1.5181, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.06961458022109351, |
|
"grad_norm": 0.18655377626419067, |
|
"learning_rate": 0.00019771196099607386, |
|
"loss": 0.7463, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.06976396773229758, |
|
"grad_norm": 0.1978912502527237, |
|
"learning_rate": 0.00019770195338930503, |
|
"loss": 1.0358, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.06991335524350165, |
|
"grad_norm": 0.15748754143714905, |
|
"learning_rate": 0.00019769192419862716, |
|
"loss": 0.6154, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.07006274275470571, |
|
"grad_norm": 0.17752663791179657, |
|
"learning_rate": 0.00019768187342625592, |
|
"loss": 0.6289, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.07021213026590976, |
|
"grad_norm": 0.12329550832509995, |
|
"learning_rate": 0.0001976718010744116, |
|
"loss": 0.9018, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.07036151777711383, |
|
"grad_norm": 0.17692221701145172, |
|
"learning_rate": 0.00019766170714531937, |
|
"loss": 1.0348, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.0705109052883179, |
|
"grad_norm": 0.2912501096725464, |
|
"learning_rate": 0.00019765159164120916, |
|
"loss": 0.9097, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.07066029279952196, |
|
"grad_norm": 0.118867889046669, |
|
"learning_rate": 0.00019764145456431566, |
|
"loss": 0.6933, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.07080968031072603, |
|
"grad_norm": 0.15778154134750366, |
|
"learning_rate": 0.00019763129591687827, |
|
"loss": 0.5504, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.07095906782193008, |
|
"grad_norm": 0.14886140823364258, |
|
"learning_rate": 0.00019762111570114122, |
|
"loss": 0.8209, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.07110845533313415, |
|
"grad_norm": 0.10714101046323776, |
|
"learning_rate": 0.00019761091391935347, |
|
"loss": 0.6473, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.07125784284433821, |
|
"grad_norm": 0.15396277606487274, |
|
"learning_rate": 0.00019760069057376875, |
|
"loss": 0.8179, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.07140723035554228, |
|
"grad_norm": 0.1531289666891098, |
|
"learning_rate": 0.00019759044566664558, |
|
"loss": 0.9508, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.07155661786674634, |
|
"grad_norm": 0.11772772669792175, |
|
"learning_rate": 0.0001975801792002472, |
|
"loss": 0.6606, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.0717060053779504, |
|
"grad_norm": 0.16291484236717224, |
|
"learning_rate": 0.00019756989117684164, |
|
"loss": 0.6476, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.07185539288915446, |
|
"grad_norm": 0.15869931876659393, |
|
"learning_rate": 0.00019755958159870172, |
|
"loss": 0.923, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.07200478040035853, |
|
"grad_norm": 0.1591719686985016, |
|
"learning_rate": 0.00019754925046810493, |
|
"loss": 0.7225, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.0721541679115626, |
|
"grad_norm": 0.1619904637336731, |
|
"learning_rate": 0.00019753889778733363, |
|
"loss": 0.7185, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.07230355542276666, |
|
"grad_norm": 0.12438057363033295, |
|
"learning_rate": 0.00019752852355867486, |
|
"loss": 0.7659, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.07245294293397073, |
|
"grad_norm": 0.11964958906173706, |
|
"learning_rate": 0.00019751812778442046, |
|
"loss": 0.7564, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.07260233044517478, |
|
"grad_norm": 0.2836957573890686, |
|
"learning_rate": 0.00019750771046686704, |
|
"loss": 1.4225, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.07275171795637884, |
|
"grad_norm": 0.13266032934188843, |
|
"learning_rate": 0.00019749727160831593, |
|
"loss": 0.7952, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.07290110546758291, |
|
"grad_norm": 0.12412168830633163, |
|
"learning_rate": 0.00019748681121107325, |
|
"loss": 0.9047, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.07305049297878698, |
|
"grad_norm": 0.20590822398662567, |
|
"learning_rate": 0.00019747632927744982, |
|
"loss": 0.7496, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.07319988048999104, |
|
"grad_norm": 0.18052905797958374, |
|
"learning_rate": 0.00019746582580976136, |
|
"loss": 0.6724, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.0733492680011951, |
|
"grad_norm": 0.24314431846141815, |
|
"learning_rate": 0.0001974553008103282, |
|
"loss": 0.8552, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.07349865551239916, |
|
"grad_norm": 0.14932356774806976, |
|
"learning_rate": 0.00019744475428147546, |
|
"loss": 0.8878, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.07364804302360323, |
|
"grad_norm": 0.12878401577472687, |
|
"learning_rate": 0.00019743418622553303, |
|
"loss": 0.7097, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.07379743053480729, |
|
"grad_norm": 0.1838715523481369, |
|
"learning_rate": 0.00019742359664483563, |
|
"loss": 1.0581, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.07394681804601136, |
|
"grad_norm": 0.13782045245170593, |
|
"learning_rate": 0.0001974129855417226, |
|
"loss": 1.0439, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.07409620555721541, |
|
"grad_norm": 0.179475337266922, |
|
"learning_rate": 0.00019740235291853812, |
|
"loss": 0.7708, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.07424559306841948, |
|
"grad_norm": 0.1903340369462967, |
|
"learning_rate": 0.0001973916987776311, |
|
"loss": 1.3356, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.07439498057962354, |
|
"grad_norm": 0.13480018079280853, |
|
"learning_rate": 0.00019738102312135523, |
|
"loss": 0.5876, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.07454436809082761, |
|
"grad_norm": 0.10856112837791443, |
|
"learning_rate": 0.0001973703259520689, |
|
"loss": 0.6991, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.07469375560203168, |
|
"grad_norm": 0.3036371171474457, |
|
"learning_rate": 0.0001973596072721353, |
|
"loss": 1.0438, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.07484314311323573, |
|
"grad_norm": 0.12173090875148773, |
|
"learning_rate": 0.0001973488670839224, |
|
"loss": 0.5836, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.0749925306244398, |
|
"grad_norm": 0.2667033076286316, |
|
"learning_rate": 0.00019733810538980281, |
|
"loss": 0.49, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.07514191813564386, |
|
"grad_norm": 0.092626191675663, |
|
"learning_rate": 0.00019732732219215397, |
|
"loss": 0.3618, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.07529130564684793, |
|
"grad_norm": 0.12614378333091736, |
|
"learning_rate": 0.0001973165174933581, |
|
"loss": 0.8307, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.07544069315805199, |
|
"grad_norm": 0.14259332418441772, |
|
"learning_rate": 0.00019730569129580206, |
|
"loss": 0.8972, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.07559008066925604, |
|
"grad_norm": 0.12121133506298065, |
|
"learning_rate": 0.0001972948436018776, |
|
"loss": 0.8446, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.07573946818046011, |
|
"grad_norm": 0.16819995641708374, |
|
"learning_rate": 0.00019728397441398112, |
|
"loss": 0.8122, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.07588885569166418, |
|
"grad_norm": 0.14065992832183838, |
|
"learning_rate": 0.00019727308373451377, |
|
"loss": 0.457, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.07603824320286824, |
|
"grad_norm": 0.16299694776535034, |
|
"learning_rate": 0.0001972621715658815, |
|
"loss": 0.878, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.07618763071407231, |
|
"grad_norm": 0.1941678524017334, |
|
"learning_rate": 0.000197251237910495, |
|
"loss": 0.9534, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.07633701822527637, |
|
"grad_norm": 0.14302954077720642, |
|
"learning_rate": 0.00019724028277076964, |
|
"loss": 0.8577, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.07648640573648043, |
|
"grad_norm": 0.19309553503990173, |
|
"learning_rate": 0.00019722930614912563, |
|
"loss": 0.9864, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.07663579324768449, |
|
"grad_norm": 0.1626858413219452, |
|
"learning_rate": 0.00019721830804798787, |
|
"loss": 0.8104, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.07678518075888856, |
|
"grad_norm": 0.11680503189563751, |
|
"learning_rate": 0.00019720728846978598, |
|
"loss": 0.7225, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.07693456827009262, |
|
"grad_norm": 0.11943230032920837, |
|
"learning_rate": 0.0001971962474169544, |
|
"loss": 0.895, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.07708395578129669, |
|
"grad_norm": 0.2065763920545578, |
|
"learning_rate": 0.00019718518489193225, |
|
"loss": 0.7482, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.07723334329250074, |
|
"grad_norm": 0.13309244811534882, |
|
"learning_rate": 0.0001971741008971634, |
|
"loss": 0.5227, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.07738273080370481, |
|
"grad_norm": 0.13642869889736176, |
|
"learning_rate": 0.00019716299543509654, |
|
"loss": 0.6441, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.07753211831490887, |
|
"grad_norm": 0.18893510103225708, |
|
"learning_rate": 0.00019715186850818498, |
|
"loss": 0.7619, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.07768150582611294, |
|
"grad_norm": 0.3364134132862091, |
|
"learning_rate": 0.00019714072011888686, |
|
"loss": 1.0618, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.077830893337317, |
|
"grad_norm": 0.1486874371767044, |
|
"learning_rate": 0.00019712955026966506, |
|
"loss": 0.7846, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.07798028084852106, |
|
"grad_norm": 0.21529722213745117, |
|
"learning_rate": 0.00019711835896298713, |
|
"loss": 0.4896, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.07812966835972512, |
|
"grad_norm": 0.1832679957151413, |
|
"learning_rate": 0.00019710714620132546, |
|
"loss": 0.8258, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.07827905587092919, |
|
"grad_norm": 0.189329594373703, |
|
"learning_rate": 0.00019709591198715707, |
|
"loss": 0.9139, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.07842844338213326, |
|
"grad_norm": 0.13310536742210388, |
|
"learning_rate": 0.0001970846563229638, |
|
"loss": 0.9531, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.07857783089333732, |
|
"grad_norm": 0.5338667631149292, |
|
"learning_rate": 0.00019707337921123221, |
|
"loss": 1.0734, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.07872721840454137, |
|
"grad_norm": 0.6397843360900879, |
|
"learning_rate": 0.0001970620806544536, |
|
"loss": 1.4513, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.07887660591574544, |
|
"grad_norm": 0.14354926347732544, |
|
"learning_rate": 0.00019705076065512398, |
|
"loss": 0.96, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.0790259934269495, |
|
"grad_norm": 0.11550958454608917, |
|
"learning_rate": 0.00019703941921574413, |
|
"loss": 0.7829, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.07917538093815357, |
|
"grad_norm": 0.14045141637325287, |
|
"learning_rate": 0.00019702805633881957, |
|
"loss": 0.6976, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.07932476844935764, |
|
"grad_norm": 0.18493744730949402, |
|
"learning_rate": 0.00019701667202686048, |
|
"loss": 1.1065, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.07947415596056169, |
|
"grad_norm": 0.2148275524377823, |
|
"learning_rate": 0.0001970052662823819, |
|
"loss": 1.1823, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.07962354347176576, |
|
"grad_norm": 0.1770048886537552, |
|
"learning_rate": 0.0001969938391079035, |
|
"loss": 0.8534, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.07977293098296982, |
|
"grad_norm": 0.11045973002910614, |
|
"learning_rate": 0.00019698239050594977, |
|
"loss": 0.628, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.07992231849417389, |
|
"grad_norm": 0.113344706594944, |
|
"learning_rate": 0.0001969709204790498, |
|
"loss": 0.7655, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.08007170600537795, |
|
"grad_norm": 0.13493701815605164, |
|
"learning_rate": 0.0001969594290297376, |
|
"loss": 0.6173, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.08022109351658202, |
|
"grad_norm": 0.10995621234178543, |
|
"learning_rate": 0.00019694791616055177, |
|
"loss": 0.8091, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.08037048102778607, |
|
"grad_norm": 0.1691836416721344, |
|
"learning_rate": 0.00019693638187403563, |
|
"loss": 0.9478, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.08051986853899014, |
|
"grad_norm": 0.1570545732975006, |
|
"learning_rate": 0.0001969248261727374, |
|
"loss": 0.8514, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.0806692560501942, |
|
"grad_norm": 0.14363858103752136, |
|
"learning_rate": 0.00019691324905920984, |
|
"loss": 0.5037, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.08081864356139827, |
|
"grad_norm": 0.12334268540143967, |
|
"learning_rate": 0.00019690165053601056, |
|
"loss": 0.9084, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.08096803107260234, |
|
"grad_norm": 0.3128170073032379, |
|
"learning_rate": 0.0001968900306057018, |
|
"loss": 0.737, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.08111741858380639, |
|
"grad_norm": 0.16364172101020813, |
|
"learning_rate": 0.00019687838927085066, |
|
"loss": 0.4789, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.08126680609501045, |
|
"grad_norm": 0.15874239802360535, |
|
"learning_rate": 0.0001968667265340288, |
|
"loss": 1.0205, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.08141619360621452, |
|
"grad_norm": 0.13250084221363068, |
|
"learning_rate": 0.00019685504239781278, |
|
"loss": 0.8377, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.08156558111741859, |
|
"grad_norm": 0.13961811363697052, |
|
"learning_rate": 0.00019684333686478383, |
|
"loss": 0.857, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.08171496862862265, |
|
"grad_norm": 0.1638427972793579, |
|
"learning_rate": 0.0001968316099375278, |
|
"loss": 0.6918, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.0818643561398267, |
|
"grad_norm": 0.260032057762146, |
|
"learning_rate": 0.00019681986161863542, |
|
"loss": 0.965, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.08201374365103077, |
|
"grad_norm": 0.12642204761505127, |
|
"learning_rate": 0.00019680809191070203, |
|
"loss": 0.5236, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.08216313116223484, |
|
"grad_norm": 0.18395595252513885, |
|
"learning_rate": 0.00019679630081632782, |
|
"loss": 1.0722, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.0823125186734389, |
|
"grad_norm": 0.14553199708461761, |
|
"learning_rate": 0.0001967844883381176, |
|
"loss": 0.7844, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.08246190618464297, |
|
"grad_norm": 0.13094641268253326, |
|
"learning_rate": 0.00019677265447868086, |
|
"loss": 0.5937, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.08261129369584702, |
|
"grad_norm": 0.1621766984462738, |
|
"learning_rate": 0.00019676079924063196, |
|
"loss": 0.7064, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.08276068120705109, |
|
"grad_norm": 0.16490310430526733, |
|
"learning_rate": 0.0001967489226265899, |
|
"loss": 0.654, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.08291006871825515, |
|
"grad_norm": 0.1863924264907837, |
|
"learning_rate": 0.00019673702463917842, |
|
"loss": 0.8091, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.08305945622945922, |
|
"grad_norm": 0.1565409004688263, |
|
"learning_rate": 0.00019672510528102597, |
|
"loss": 0.583, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.08320884374066329, |
|
"grad_norm": 0.2025175541639328, |
|
"learning_rate": 0.0001967131645547657, |
|
"loss": 0.8749, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.08335823125186734, |
|
"grad_norm": 0.23709741234779358, |
|
"learning_rate": 0.0001967012024630355, |
|
"loss": 1.1301, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.0835076187630714, |
|
"grad_norm": 0.33181461691856384, |
|
"learning_rate": 0.00019668921900847805, |
|
"loss": 1.0527, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.08365700627427547, |
|
"grad_norm": 0.11158733814954758, |
|
"learning_rate": 0.00019667721419374065, |
|
"loss": 0.6433, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.08380639378547954, |
|
"grad_norm": 0.21710413694381714, |
|
"learning_rate": 0.00019666518802147534, |
|
"loss": 0.7117, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.0839557812966836, |
|
"grad_norm": 0.1480075567960739, |
|
"learning_rate": 0.00019665314049433888, |
|
"loss": 0.8019, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.08410516880788767, |
|
"grad_norm": 0.15463611483573914, |
|
"learning_rate": 0.00019664107161499277, |
|
"loss": 0.679, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.08425455631909172, |
|
"grad_norm": 0.13593655824661255, |
|
"learning_rate": 0.00019662898138610323, |
|
"loss": 0.7527, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.08440394383029579, |
|
"grad_norm": 0.1662757396697998, |
|
"learning_rate": 0.0001966168698103412, |
|
"loss": 0.7486, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.08455333134149985, |
|
"grad_norm": 0.10743851959705353, |
|
"learning_rate": 0.00019660473689038228, |
|
"loss": 0.6341, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.08470271885270392, |
|
"grad_norm": 0.15268459916114807, |
|
"learning_rate": 0.00019659258262890683, |
|
"loss": 0.8236, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.08485210636390798, |
|
"grad_norm": 0.14038819074630737, |
|
"learning_rate": 0.00019658040702859997, |
|
"loss": 0.8065, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.08500149387511204, |
|
"grad_norm": 0.15066532790660858, |
|
"learning_rate": 0.0001965682100921514, |
|
"loss": 0.9532, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.0851508813863161, |
|
"grad_norm": 0.1580052673816681, |
|
"learning_rate": 0.00019655599182225565, |
|
"loss": 1.0969, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.08530026889752017, |
|
"grad_norm": 0.16247299313545227, |
|
"learning_rate": 0.000196543752221612, |
|
"loss": 0.791, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.08544965640872423, |
|
"grad_norm": 0.1083194687962532, |
|
"learning_rate": 0.00019653149129292426, |
|
"loss": 0.6203, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.0855990439199283, |
|
"grad_norm": 0.19076959788799286, |
|
"learning_rate": 0.0001965192090389011, |
|
"loss": 0.6709, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.08574843143113235, |
|
"grad_norm": 0.15673895180225372, |
|
"learning_rate": 0.00019650690546225592, |
|
"loss": 0.7816, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.08589781894233642, |
|
"grad_norm": 0.13381899893283844, |
|
"learning_rate": 0.00019649458056570672, |
|
"loss": 0.8227, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.08604720645354048, |
|
"grad_norm": 0.10087165981531143, |
|
"learning_rate": 0.00019648223435197627, |
|
"loss": 0.5839, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.08619659396474455, |
|
"grad_norm": 0.17345178127288818, |
|
"learning_rate": 0.00019646986682379206, |
|
"loss": 0.8435, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.08634598147594862, |
|
"grad_norm": 0.16065159440040588, |
|
"learning_rate": 0.00019645747798388628, |
|
"loss": 0.8124, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.08649536898715267, |
|
"grad_norm": 0.32011693716049194, |
|
"learning_rate": 0.0001964450678349958, |
|
"loss": 0.8119, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.08664475649835673, |
|
"grad_norm": 0.13616526126861572, |
|
"learning_rate": 0.0001964326363798622, |
|
"loss": 0.8056, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.0867941440095608, |
|
"grad_norm": 0.28961271047592163, |
|
"learning_rate": 0.00019642018362123182, |
|
"loss": 1.0182, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.08694353152076487, |
|
"grad_norm": 0.16200341284275055, |
|
"learning_rate": 0.00019640770956185567, |
|
"loss": 0.9604, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.08709291903196893, |
|
"grad_norm": 0.1426459699869156, |
|
"learning_rate": 0.00019639521420448947, |
|
"loss": 0.6555, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.08724230654317298, |
|
"grad_norm": 0.11389955133199692, |
|
"learning_rate": 0.0001963826975518936, |
|
"loss": 0.6878, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.08739169405437705, |
|
"grad_norm": 0.12384461611509323, |
|
"learning_rate": 0.00019637015960683322, |
|
"loss": 0.612, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.08754108156558112, |
|
"grad_norm": 0.1436391919851303, |
|
"learning_rate": 0.00019635760037207817, |
|
"loss": 0.7069, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.08769046907678518, |
|
"grad_norm": 0.1199880838394165, |
|
"learning_rate": 0.00019634501985040296, |
|
"loss": 0.7547, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.08783985658798925, |
|
"grad_norm": 0.16157324612140656, |
|
"learning_rate": 0.00019633241804458687, |
|
"loss": 0.7839, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.0879892440991933, |
|
"grad_norm": 0.16362355649471283, |
|
"learning_rate": 0.00019631979495741378, |
|
"loss": 0.7197, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.08813863161039737, |
|
"grad_norm": 0.1794954091310501, |
|
"learning_rate": 0.00019630715059167238, |
|
"loss": 0.6936, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.08828801912160143, |
|
"grad_norm": 0.3147851228713989, |
|
"learning_rate": 0.00019629448495015597, |
|
"loss": 1.8882, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.0884374066328055, |
|
"grad_norm": 0.1880149394273758, |
|
"learning_rate": 0.0001962817980356626, |
|
"loss": 0.7354, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.08858679414400956, |
|
"grad_norm": 0.13019341230392456, |
|
"learning_rate": 0.00019626908985099503, |
|
"loss": 0.9714, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.08873618165521363, |
|
"grad_norm": 0.18782681226730347, |
|
"learning_rate": 0.0001962563603989607, |
|
"loss": 0.9334, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.08888556916641768, |
|
"grad_norm": 0.1618777960538864, |
|
"learning_rate": 0.00019624360968237172, |
|
"loss": 0.8143, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.08903495667762175, |
|
"grad_norm": 0.13201208412647247, |
|
"learning_rate": 0.00019623083770404492, |
|
"loss": 0.9077, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.08918434418882581, |
|
"grad_norm": 0.11902808398008347, |
|
"learning_rate": 0.0001962180444668019, |
|
"loss": 1.014, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.08933373170002988, |
|
"grad_norm": 0.17474393546581268, |
|
"learning_rate": 0.0001962052299734688, |
|
"loss": 0.8466, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.08948311921123395, |
|
"grad_norm": 0.2875848412513733, |
|
"learning_rate": 0.00019619239422687663, |
|
"loss": 0.8023, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.089632506722438, |
|
"grad_norm": 0.13769172132015228, |
|
"learning_rate": 0.00019617953722986096, |
|
"loss": 0.9026, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.08978189423364207, |
|
"grad_norm": 0.30760055780410767, |
|
"learning_rate": 0.00019616665898526206, |
|
"loss": 1.0103, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.08993128174484613, |
|
"grad_norm": 0.5774983763694763, |
|
"learning_rate": 0.00019615375949592504, |
|
"loss": 1.4516, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.0900806692560502, |
|
"grad_norm": 0.13711205124855042, |
|
"learning_rate": 0.00019614083876469954, |
|
"loss": 0.7543, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.09023005676725426, |
|
"grad_norm": 0.1261071413755417, |
|
"learning_rate": 0.00019612789679443997, |
|
"loss": 0.6952, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.09037944427845832, |
|
"grad_norm": 0.13197720050811768, |
|
"learning_rate": 0.00019611493358800538, |
|
"loss": 0.906, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.09052883178966238, |
|
"grad_norm": 0.12066524475812912, |
|
"learning_rate": 0.00019610194914825962, |
|
"loss": 0.7621, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.09067821930086645, |
|
"grad_norm": 0.1722782999277115, |
|
"learning_rate": 0.00019608894347807108, |
|
"loss": 0.8798, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.09082760681207051, |
|
"grad_norm": 0.1058247983455658, |
|
"learning_rate": 0.000196075916580313, |
|
"loss": 0.6246, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.09097699432327458, |
|
"grad_norm": 0.17862388491630554, |
|
"learning_rate": 0.00019606286845786315, |
|
"loss": 0.6607, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.09112638183447863, |
|
"grad_norm": 0.1704971194267273, |
|
"learning_rate": 0.0001960497991136041, |
|
"loss": 0.8648, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.0912757693456827, |
|
"grad_norm": 0.12244229018688202, |
|
"learning_rate": 0.00019603670855042308, |
|
"loss": 0.7546, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.09142515685688676, |
|
"grad_norm": 0.1924651712179184, |
|
"learning_rate": 0.00019602359677121199, |
|
"loss": 0.7931, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.09157454436809083, |
|
"grad_norm": 0.1722334921360016, |
|
"learning_rate": 0.00019601046377886746, |
|
"loss": 1.259, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.0917239318792949, |
|
"grad_norm": 0.11828108131885529, |
|
"learning_rate": 0.0001959973095762907, |
|
"loss": 0.8418, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.09187331939049895, |
|
"grad_norm": 0.10462518781423569, |
|
"learning_rate": 0.0001959841341663878, |
|
"loss": 0.5707, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.09202270690170301, |
|
"grad_norm": 0.14686742424964905, |
|
"learning_rate": 0.00019597093755206936, |
|
"loss": 1.0242, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.09217209441290708, |
|
"grad_norm": 0.1107390895485878, |
|
"learning_rate": 0.00019595771973625068, |
|
"loss": 0.6829, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.09232148192411115, |
|
"grad_norm": 0.18590034544467926, |
|
"learning_rate": 0.00019594448072185182, |
|
"loss": 0.7426, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.09247086943531521, |
|
"grad_norm": 0.12690337002277374, |
|
"learning_rate": 0.00019593122051179748, |
|
"loss": 0.978, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.09262025694651928, |
|
"grad_norm": 0.19712156057357788, |
|
"learning_rate": 0.00019591793910901707, |
|
"loss": 0.8153, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.09276964445772333, |
|
"grad_norm": 0.16727258265018463, |
|
"learning_rate": 0.00019590463651644464, |
|
"loss": 0.8827, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.0929190319689274, |
|
"grad_norm": 0.21075357496738434, |
|
"learning_rate": 0.00019589131273701894, |
|
"loss": 0.624, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.09306841948013146, |
|
"grad_norm": 0.5324035286903381, |
|
"learning_rate": 0.00019587796777368347, |
|
"loss": 1.808, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.09321780699133553, |
|
"grad_norm": 0.12788911163806915, |
|
"learning_rate": 0.00019586460162938622, |
|
"loss": 0.8374, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.0933671945025396, |
|
"grad_norm": 0.1606101393699646, |
|
"learning_rate": 0.00019585121430708012, |
|
"loss": 0.6427, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.09351658201374365, |
|
"grad_norm": 0.1087862104177475, |
|
"learning_rate": 0.00019583780580972253, |
|
"loss": 0.5977, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.09366596952494771, |
|
"grad_norm": 0.1577569991350174, |
|
"learning_rate": 0.00019582437614027565, |
|
"loss": 0.7819, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.09381535703615178, |
|
"grad_norm": 0.17475225031375885, |
|
"learning_rate": 0.00019581092530170633, |
|
"loss": 1.0605, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.09396474454735584, |
|
"grad_norm": 0.12946583330631256, |
|
"learning_rate": 0.000195797453296986, |
|
"loss": 0.832, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.09411413205855991, |
|
"grad_norm": 0.14866258203983307, |
|
"learning_rate": 0.00019578396012909092, |
|
"loss": 0.7355, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.09426351956976396, |
|
"grad_norm": 0.13466285169124603, |
|
"learning_rate": 0.00019577044580100189, |
|
"loss": 0.6529, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.09441290708096803, |
|
"grad_norm": 0.11548906564712524, |
|
"learning_rate": 0.00019575691031570446, |
|
"loss": 0.7818, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.0945622945921721, |
|
"grad_norm": 0.18528424203395844, |
|
"learning_rate": 0.00019574335367618883, |
|
"loss": 1.0789, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.09471168210337616, |
|
"grad_norm": 0.1468898057937622, |
|
"learning_rate": 0.00019572977588544986, |
|
"loss": 0.9756, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.09486106961458023, |
|
"grad_norm": 0.16121335327625275, |
|
"learning_rate": 0.00019571617694648713, |
|
"loss": 0.7288, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.09501045712578428, |
|
"grad_norm": 0.12135611474514008, |
|
"learning_rate": 0.00019570255686230485, |
|
"loss": 0.5934, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.09515984463698834, |
|
"grad_norm": 0.12789905071258545, |
|
"learning_rate": 0.0001956889156359119, |
|
"loss": 0.6069, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.09530923214819241, |
|
"grad_norm": 0.359639436006546, |
|
"learning_rate": 0.00019567525327032187, |
|
"loss": 1.3909, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.09545861965939648, |
|
"grad_norm": 0.18724483251571655, |
|
"learning_rate": 0.00019566156976855297, |
|
"loss": 0.6935, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.09560800717060054, |
|
"grad_norm": 0.19026364386081696, |
|
"learning_rate": 0.0001956478651336281, |
|
"loss": 0.7966, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.0957573946818046, |
|
"grad_norm": 0.26914387941360474, |
|
"learning_rate": 0.00019563413936857484, |
|
"loss": 0.8772, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.09590678219300866, |
|
"grad_norm": 0.1765991449356079, |
|
"learning_rate": 0.00019562039247642546, |
|
"loss": 0.7672, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.09605616970421273, |
|
"grad_norm": 0.2534937560558319, |
|
"learning_rate": 0.00019560662446021677, |
|
"loss": 1.2049, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.09620555721541679, |
|
"grad_norm": 0.14177022874355316, |
|
"learning_rate": 0.00019559283532299043, |
|
"loss": 0.5386, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.09635494472662086, |
|
"grad_norm": 0.1188812330365181, |
|
"learning_rate": 0.00019557902506779268, |
|
"loss": 0.6466, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.09650433223782492, |
|
"grad_norm": 0.12482491880655289, |
|
"learning_rate": 0.00019556519369767438, |
|
"loss": 0.6614, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.09665371974902898, |
|
"grad_norm": 0.1450030505657196, |
|
"learning_rate": 0.00019555134121569112, |
|
"loss": 0.7014, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.09680310726023304, |
|
"grad_norm": 0.12003795802593231, |
|
"learning_rate": 0.0001955374676249031, |
|
"loss": 0.8168, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.09695249477143711, |
|
"grad_norm": 0.1340424120426178, |
|
"learning_rate": 0.0001955235729283753, |
|
"loss": 0.9613, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.09710188228264117, |
|
"grad_norm": 0.19842645525932312, |
|
"learning_rate": 0.0001955096571291772, |
|
"loss": 0.8049, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.09725126979384524, |
|
"grad_norm": 0.256247341632843, |
|
"learning_rate": 0.00019549572023038305, |
|
"loss": 1.3097, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.09740065730504929, |
|
"grad_norm": 0.4698760509490967, |
|
"learning_rate": 0.0001954817622350717, |
|
"loss": 1.6247, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.09755004481625336, |
|
"grad_norm": 0.16017255187034607, |
|
"learning_rate": 0.00019546778314632674, |
|
"loss": 0.9872, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.09769943232745743, |
|
"grad_norm": 0.13327959179878235, |
|
"learning_rate": 0.00019545378296723635, |
|
"loss": 0.8846, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.09784881983866149, |
|
"grad_norm": 0.3522357642650604, |
|
"learning_rate": 0.0001954397617008934, |
|
"loss": 1.2639, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.09799820734986556, |
|
"grad_norm": 0.14365145564079285, |
|
"learning_rate": 0.0001954257193503954, |
|
"loss": 0.6805, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.09814759486106961, |
|
"grad_norm": 0.09565582871437073, |
|
"learning_rate": 0.00019541165591884454, |
|
"loss": 0.5522, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.09829698237227368, |
|
"grad_norm": 0.11068779975175858, |
|
"learning_rate": 0.0001953975714093476, |
|
"loss": 0.6697, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.09844636988347774, |
|
"grad_norm": 0.15267115831375122, |
|
"learning_rate": 0.00019538346582501616, |
|
"loss": 0.743, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.09859575739468181, |
|
"grad_norm": 0.13380476832389832, |
|
"learning_rate": 0.00019536933916896633, |
|
"loss": 0.7273, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.09874514490588587, |
|
"grad_norm": 0.37871164083480835, |
|
"learning_rate": 0.0001953551914443189, |
|
"loss": 0.9902, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.09889453241708993, |
|
"grad_norm": 0.16891950368881226, |
|
"learning_rate": 0.00019534102265419932, |
|
"loss": 1.0287, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.09904391992829399, |
|
"grad_norm": 0.15239520370960236, |
|
"learning_rate": 0.00019532683280173768, |
|
"loss": 0.9578, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.09919330743949806, |
|
"grad_norm": 0.1469845324754715, |
|
"learning_rate": 0.00019531262189006882, |
|
"loss": 0.8758, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.09934269495070212, |
|
"grad_norm": 0.15531109273433685, |
|
"learning_rate": 0.00019529838992233208, |
|
"loss": 1.0518, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.09949208246190619, |
|
"grad_norm": 0.19832894206047058, |
|
"learning_rate": 0.0001952841369016716, |
|
"loss": 0.6697, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.09964146997311024, |
|
"grad_norm": 0.12545038759708405, |
|
"learning_rate": 0.00019526986283123601, |
|
"loss": 0.9188, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.09979085748431431, |
|
"grad_norm": 0.15024740993976593, |
|
"learning_rate": 0.00019525556771417875, |
|
"loss": 0.9019, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.09994024499551837, |
|
"grad_norm": 0.17119628190994263, |
|
"learning_rate": 0.0001952412515536578, |
|
"loss": 0.7931, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.10008963250672244, |
|
"grad_norm": 0.2737690806388855, |
|
"learning_rate": 0.00019522691435283585, |
|
"loss": 1.1618, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.1002390200179265, |
|
"grad_norm": 0.16071948409080505, |
|
"learning_rate": 0.00019521255611488022, |
|
"loss": 0.4868, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.10038840752913056, |
|
"grad_norm": 0.14076560735702515, |
|
"learning_rate": 0.00019519817684296285, |
|
"loss": 0.6771, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.10053779504033462, |
|
"grad_norm": 0.1131826639175415, |
|
"learning_rate": 0.00019518377654026032, |
|
"loss": 0.768, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.10068718255153869, |
|
"grad_norm": 0.15727178752422333, |
|
"learning_rate": 0.00019516935520995393, |
|
"loss": 0.8779, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.10083657006274276, |
|
"grad_norm": 0.14138628542423248, |
|
"learning_rate": 0.00019515491285522957, |
|
"loss": 0.8227, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.10098595757394682, |
|
"grad_norm": 0.2561928331851959, |
|
"learning_rate": 0.0001951404494792778, |
|
"loss": 0.9701, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.10113534508515089, |
|
"grad_norm": 0.22002875804901123, |
|
"learning_rate": 0.00019512596508529378, |
|
"loss": 0.8702, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.10128473259635494, |
|
"grad_norm": 0.2049333155155182, |
|
"learning_rate": 0.00019511145967647737, |
|
"loss": 0.8744, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.101434120107559, |
|
"grad_norm": 0.19357016682624817, |
|
"learning_rate": 0.00019509693325603302, |
|
"loss": 0.6567, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.10158350761876307, |
|
"grad_norm": 0.14756688475608826, |
|
"learning_rate": 0.00019508238582716984, |
|
"loss": 0.7318, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.10173289512996714, |
|
"grad_norm": 0.16614054143428802, |
|
"learning_rate": 0.00019506781739310163, |
|
"loss": 0.9397, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.1018822826411712, |
|
"grad_norm": 0.11248838156461716, |
|
"learning_rate": 0.00019505322795704676, |
|
"loss": 0.7042, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.10203167015237526, |
|
"grad_norm": 0.14995478093624115, |
|
"learning_rate": 0.00019503861752222826, |
|
"loss": 0.7761, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.10218105766357932, |
|
"grad_norm": 0.15232588350772858, |
|
"learning_rate": 0.0001950239860918738, |
|
"loss": 0.7433, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.10233044517478339, |
|
"grad_norm": 0.17888033390045166, |
|
"learning_rate": 0.00019500933366921571, |
|
"loss": 1.0783, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.10247983268598745, |
|
"grad_norm": 0.16491816937923431, |
|
"learning_rate": 0.00019499466025749097, |
|
"loss": 0.6902, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.10262922019719152, |
|
"grad_norm": 0.29704946279525757, |
|
"learning_rate": 0.00019497996585994112, |
|
"loss": 0.8224, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.10277860770839557, |
|
"grad_norm": 0.13493022322654724, |
|
"learning_rate": 0.00019496525047981242, |
|
"loss": 0.6601, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.10292799521959964, |
|
"grad_norm": 0.24183045327663422, |
|
"learning_rate": 0.0001949505141203557, |
|
"loss": 1.0734, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.1030773827308037, |
|
"grad_norm": 0.41064098477363586, |
|
"learning_rate": 0.00019493575678482649, |
|
"loss": 1.1831, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.10322677024200777, |
|
"grad_norm": 0.21319490671157837, |
|
"learning_rate": 0.0001949209784764849, |
|
"loss": 0.9954, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.10337615775321184, |
|
"grad_norm": 0.15545840561389923, |
|
"learning_rate": 0.0001949061791985957, |
|
"loss": 0.831, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.10352554526441589, |
|
"grad_norm": 0.13938447833061218, |
|
"learning_rate": 0.00019489135895442826, |
|
"loss": 0.8778, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.10367493277561995, |
|
"grad_norm": 0.27140700817108154, |
|
"learning_rate": 0.00019487651774725663, |
|
"loss": 0.7168, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.10382432028682402, |
|
"grad_norm": 0.17713186144828796, |
|
"learning_rate": 0.00019486165558035948, |
|
"loss": 0.7476, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.10397370779802809, |
|
"grad_norm": 0.19945883750915527, |
|
"learning_rate": 0.00019484677245702004, |
|
"loss": 0.5964, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.10412309530923215, |
|
"grad_norm": 0.27336767315864563, |
|
"learning_rate": 0.0001948318683805263, |
|
"loss": 1.0354, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.1042724828204362, |
|
"grad_norm": 0.15666867792606354, |
|
"learning_rate": 0.0001948169433541708, |
|
"loss": 0.6364, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.10442187033164027, |
|
"grad_norm": 0.17508071660995483, |
|
"learning_rate": 0.00019480199738125068, |
|
"loss": 0.7329, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.10457125784284434, |
|
"grad_norm": 0.16016682982444763, |
|
"learning_rate": 0.00019478703046506773, |
|
"loss": 0.6733, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.1047206453540484, |
|
"grad_norm": 0.10869868099689484, |
|
"learning_rate": 0.0001947720426089284, |
|
"loss": 0.6218, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.10487003286525247, |
|
"grad_norm": 0.37434107065200806, |
|
"learning_rate": 0.00019475703381614375, |
|
"loss": 1.1511, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.10501942037645653, |
|
"grad_norm": 0.2148595005273819, |
|
"learning_rate": 0.00019474200409002945, |
|
"loss": 1.0165, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.10516880788766059, |
|
"grad_norm": 0.1994418203830719, |
|
"learning_rate": 0.00019472695343390585, |
|
"loss": 0.9581, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.10531819539886465, |
|
"grad_norm": 0.14765793085098267, |
|
"learning_rate": 0.0001947118818510978, |
|
"loss": 0.9704, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.10546758291006872, |
|
"grad_norm": 0.3004849851131439, |
|
"learning_rate": 0.00019469678934493488, |
|
"loss": 1.3147, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.10561697042127279, |
|
"grad_norm": 0.14334464073181152, |
|
"learning_rate": 0.0001946816759187513, |
|
"loss": 0.5802, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.10576635793247685, |
|
"grad_norm": 0.20262622833251953, |
|
"learning_rate": 0.0001946665415758858, |
|
"loss": 1.0572, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.1059157454436809, |
|
"grad_norm": 0.1506635844707489, |
|
"learning_rate": 0.00019465138631968184, |
|
"loss": 1.264, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.10606513295488497, |
|
"grad_norm": 0.1906273066997528, |
|
"learning_rate": 0.00019463621015348748, |
|
"loss": 0.9111, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.10621452046608904, |
|
"grad_norm": 0.14025966823101044, |
|
"learning_rate": 0.0001946210130806553, |
|
"loss": 0.7685, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.1063639079772931, |
|
"grad_norm": 0.13425306975841522, |
|
"learning_rate": 0.00019460579510454263, |
|
"loss": 0.8569, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.10651329548849717, |
|
"grad_norm": 0.2626805305480957, |
|
"learning_rate": 0.0001945905562285113, |
|
"loss": 1.1358, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.10666268299970122, |
|
"grad_norm": 0.12032942473888397, |
|
"learning_rate": 0.00019457529645592792, |
|
"loss": 0.9112, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.10681207051090529, |
|
"grad_norm": 0.16017121076583862, |
|
"learning_rate": 0.0001945600157901635, |
|
"loss": 0.4619, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.10696145802210935, |
|
"grad_norm": 0.1190473660826683, |
|
"learning_rate": 0.00019454471423459389, |
|
"loss": 0.7032, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.10711084553331342, |
|
"grad_norm": 0.15523146092891693, |
|
"learning_rate": 0.00019452939179259937, |
|
"loss": 0.6421, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.10726023304451748, |
|
"grad_norm": 0.27652865648269653, |
|
"learning_rate": 0.00019451404846756494, |
|
"loss": 0.8187, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.10740962055572154, |
|
"grad_norm": 0.2987957000732422, |
|
"learning_rate": 0.00019449868426288017, |
|
"loss": 0.8263, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.1075590080669256, |
|
"grad_norm": 0.143573597073555, |
|
"learning_rate": 0.00019448329918193927, |
|
"loss": 0.7988, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.10770839557812967, |
|
"grad_norm": 0.1809718906879425, |
|
"learning_rate": 0.00019446789322814106, |
|
"loss": 0.7172, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.10785778308933373, |
|
"grad_norm": 0.3178151845932007, |
|
"learning_rate": 0.00019445246640488893, |
|
"loss": 0.8902, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.1080071706005378, |
|
"grad_norm": 0.17539192736148834, |
|
"learning_rate": 0.00019443701871559092, |
|
"loss": 1.0598, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.10815655811174185, |
|
"grad_norm": 0.1401323676109314, |
|
"learning_rate": 0.00019442155016365965, |
|
"loss": 0.772, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.10830594562294592, |
|
"grad_norm": 0.14516496658325195, |
|
"learning_rate": 0.0001944060607525124, |
|
"loss": 1.0332, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.10845533313414998, |
|
"grad_norm": 0.12078884243965149, |
|
"learning_rate": 0.00019439055048557101, |
|
"loss": 0.4937, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.10860472064535405, |
|
"grad_norm": 0.12402236461639404, |
|
"learning_rate": 0.00019437501936626198, |
|
"loss": 0.8666, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.10875410815655812, |
|
"grad_norm": 0.2173183113336563, |
|
"learning_rate": 0.00019435946739801633, |
|
"loss": 0.7866, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.10890349566776218, |
|
"grad_norm": 0.18823803961277008, |
|
"learning_rate": 0.00019434389458426976, |
|
"loss": 0.6379, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.10905288317896623, |
|
"grad_norm": 0.1408422738313675, |
|
"learning_rate": 0.00019432830092846253, |
|
"loss": 0.7941, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.1092022706901703, |
|
"grad_norm": 0.11038701981306076, |
|
"learning_rate": 0.00019431268643403958, |
|
"loss": 0.7818, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.10935165820137437, |
|
"grad_norm": 0.2575507164001465, |
|
"learning_rate": 0.0001942970511044503, |
|
"loss": 0.8922, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.10950104571257843, |
|
"grad_norm": 0.17680765688419342, |
|
"learning_rate": 0.00019428139494314888, |
|
"loss": 0.9351, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.1096504332237825, |
|
"grad_norm": 0.13490507006645203, |
|
"learning_rate": 0.00019426571795359398, |
|
"loss": 0.8378, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.10979982073498655, |
|
"grad_norm": 0.28408023715019226, |
|
"learning_rate": 0.0001942500201392489, |
|
"loss": 0.7867, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.10994920824619062, |
|
"grad_norm": 0.16506126523017883, |
|
"learning_rate": 0.0001942343015035815, |
|
"loss": 1.1124, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.11009859575739468, |
|
"grad_norm": 0.2039964199066162, |
|
"learning_rate": 0.00019421856205006433, |
|
"loss": 0.9931, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.11024798326859875, |
|
"grad_norm": 0.14856329560279846, |
|
"learning_rate": 0.00019420280178217443, |
|
"loss": 0.7827, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.11039737077980281, |
|
"grad_norm": 0.1818045973777771, |
|
"learning_rate": 0.00019418702070339355, |
|
"loss": 0.5399, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.11054675829100687, |
|
"grad_norm": 0.2717982232570648, |
|
"learning_rate": 0.00019417121881720793, |
|
"loss": 1.2213, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.11069614580221093, |
|
"grad_norm": 0.12363403290510178, |
|
"learning_rate": 0.0001941553961271085, |
|
"loss": 0.7174, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.110845533313415, |
|
"grad_norm": 0.22691577672958374, |
|
"learning_rate": 0.0001941395526365907, |
|
"loss": 1.1225, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.11099492082461906, |
|
"grad_norm": 0.16864041984081268, |
|
"learning_rate": 0.00019412368834915462, |
|
"loss": 0.5674, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.11114430833582313, |
|
"grad_norm": 0.20602700114250183, |
|
"learning_rate": 0.00019410780326830498, |
|
"loss": 0.9028, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.11129369584702718, |
|
"grad_norm": 0.13901367783546448, |
|
"learning_rate": 0.00019409189739755096, |
|
"loss": 0.6309, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.11144308335823125, |
|
"grad_norm": 0.20725879073143005, |
|
"learning_rate": 0.0001940759707404065, |
|
"loss": 0.5478, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.11159247086943531, |
|
"grad_norm": 0.14670120179653168, |
|
"learning_rate": 0.00019406002330039001, |
|
"loss": 0.5592, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.11174185838063938, |
|
"grad_norm": 0.17827396094799042, |
|
"learning_rate": 0.00019404405508102455, |
|
"loss": 0.8181, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.11189124589184345, |
|
"grad_norm": 0.1741906702518463, |
|
"learning_rate": 0.00019402806608583774, |
|
"loss": 1.1636, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.1120406334030475, |
|
"grad_norm": 0.12306499481201172, |
|
"learning_rate": 0.00019401205631836178, |
|
"loss": 0.7564, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.11219002091425156, |
|
"grad_norm": 0.12863090634346008, |
|
"learning_rate": 0.00019399602578213353, |
|
"loss": 0.739, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.11233940842545563, |
|
"grad_norm": 0.11944539844989777, |
|
"learning_rate": 0.00019397997448069435, |
|
"loss": 0.5797, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.1124887959366597, |
|
"grad_norm": 0.11944981664419174, |
|
"learning_rate": 0.00019396390241759024, |
|
"loss": 0.7503, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.11263818344786376, |
|
"grad_norm": 0.12803085148334503, |
|
"learning_rate": 0.00019394780959637177, |
|
"loss": 0.984, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.11278757095906783, |
|
"grad_norm": 8.864607810974121, |
|
"learning_rate": 0.0001939316960205941, |
|
"loss": 4.3697, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.11293695847027188, |
|
"grad_norm": 0.15747593343257904, |
|
"learning_rate": 0.000193915561693817, |
|
"loss": 0.7344, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.11308634598147595, |
|
"grad_norm": 0.13283927738666534, |
|
"learning_rate": 0.00019389940661960478, |
|
"loss": 0.5302, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.11323573349268001, |
|
"grad_norm": 0.12874341011047363, |
|
"learning_rate": 0.00019388323080152633, |
|
"loss": 0.828, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.11338512100388408, |
|
"grad_norm": 0.4698888063430786, |
|
"learning_rate": 0.00019386703424315518, |
|
"loss": 1.5101, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.11353450851508815, |
|
"grad_norm": 0.12892121076583862, |
|
"learning_rate": 0.00019385081694806936, |
|
"loss": 0.7772, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.1136838960262922, |
|
"grad_norm": 0.28520870208740234, |
|
"learning_rate": 0.00019383457891985158, |
|
"loss": 0.928, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.11383328353749626, |
|
"grad_norm": 1.7955225706100464, |
|
"learning_rate": 0.00019381832016208904, |
|
"loss": 0.955, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.11398267104870033, |
|
"grad_norm": 0.14767761528491974, |
|
"learning_rate": 0.00019380204067837356, |
|
"loss": 0.6337, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.1141320585599044, |
|
"grad_norm": 0.171101912856102, |
|
"learning_rate": 0.0001937857404723016, |
|
"loss": 0.7905, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.11428144607110846, |
|
"grad_norm": 0.33381524682044983, |
|
"learning_rate": 0.000193769419547474, |
|
"loss": 0.9788, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.11443083358231251, |
|
"grad_norm": 0.1214352399110794, |
|
"learning_rate": 0.00019375307790749647, |
|
"loss": 0.9152, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.11458022109351658, |
|
"grad_norm": 0.3238249123096466, |
|
"learning_rate": 0.00019373671555597902, |
|
"loss": 0.9923, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.11472960860472065, |
|
"grad_norm": 0.14854952692985535, |
|
"learning_rate": 0.0001937203324965364, |
|
"loss": 0.7194, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.11487899611592471, |
|
"grad_norm": 0.13817191123962402, |
|
"learning_rate": 0.00019370392873278784, |
|
"loss": 0.7175, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.11502838362712878, |
|
"grad_norm": 0.24061405658721924, |
|
"learning_rate": 0.0001936875042683573, |
|
"loss": 0.9647, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.11517777113833283, |
|
"grad_norm": 0.1889171153306961, |
|
"learning_rate": 0.00019367105910687307, |
|
"loss": 0.9805, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.1153271586495369, |
|
"grad_norm": 0.2141283005475998, |
|
"learning_rate": 0.00019365459325196825, |
|
"loss": 0.7467, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.11547654616074096, |
|
"grad_norm": 0.20832307636737823, |
|
"learning_rate": 0.00019363810670728032, |
|
"loss": 0.8881, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.11562593367194503, |
|
"grad_norm": 0.19508862495422363, |
|
"learning_rate": 0.00019362159947645152, |
|
"loss": 0.9594, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.1157753211831491, |
|
"grad_norm": 0.16393844783306122, |
|
"learning_rate": 0.00019360507156312848, |
|
"loss": 0.8358, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.11592470869435315, |
|
"grad_norm": 0.17555637657642365, |
|
"learning_rate": 0.00019358852297096253, |
|
"loss": 0.7137, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.11607409620555721, |
|
"grad_norm": 0.16748878359794617, |
|
"learning_rate": 0.00019357195370360946, |
|
"loss": 0.8945, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.11622348371676128, |
|
"grad_norm": 0.11165950447320938, |
|
"learning_rate": 0.00019355536376472972, |
|
"loss": 0.5322, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.11637287122796534, |
|
"grad_norm": 0.12036951631307602, |
|
"learning_rate": 0.00019353875315798828, |
|
"loss": 0.824, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.11652225873916941, |
|
"grad_norm": 0.2941713333129883, |
|
"learning_rate": 0.0001935221218870547, |
|
"loss": 1.1819, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.11667164625037346, |
|
"grad_norm": 0.16016024351119995, |
|
"learning_rate": 0.0001935054699556031, |
|
"loss": 0.8635, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.11682103376157753, |
|
"grad_norm": 0.16375142335891724, |
|
"learning_rate": 0.0001934887973673121, |
|
"loss": 1.0148, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.1169704212727816, |
|
"grad_norm": 0.3563995659351349, |
|
"learning_rate": 0.000193472104125865, |
|
"loss": 0.9712, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.11711980878398566, |
|
"grad_norm": 0.19017907977104187, |
|
"learning_rate": 0.0001934553902349496, |
|
"loss": 0.9978, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.11726919629518973, |
|
"grad_norm": 0.14762148261070251, |
|
"learning_rate": 0.00019343865569825818, |
|
"loss": 0.8258, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.11741858380639379, |
|
"grad_norm": 0.19044718146324158, |
|
"learning_rate": 0.00019342190051948777, |
|
"loss": 1.0279, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.11756797131759784, |
|
"grad_norm": 0.15434981882572174, |
|
"learning_rate": 0.0001934051247023398, |
|
"loss": 0.6936, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.11771735882880191, |
|
"grad_norm": 0.21782897412776947, |
|
"learning_rate": 0.0001933883282505203, |
|
"loss": 0.8074, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.11786674634000598, |
|
"grad_norm": 0.1382855623960495, |
|
"learning_rate": 0.00019337151116773993, |
|
"loss": 0.76, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.11801613385121004, |
|
"grad_norm": 0.12988682091236115, |
|
"learning_rate": 0.00019335467345771377, |
|
"loss": 0.7619, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.11816552136241411, |
|
"grad_norm": 0.14587444067001343, |
|
"learning_rate": 0.0001933378151241616, |
|
"loss": 0.6618, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.11831490887361816, |
|
"grad_norm": 0.13079139590263367, |
|
"learning_rate": 0.0001933209361708077, |
|
"loss": 0.7573, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.11846429638482223, |
|
"grad_norm": 0.27695533633232117, |
|
"learning_rate": 0.00019330403660138085, |
|
"loss": 1.0731, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.11861368389602629, |
|
"grad_norm": 0.1361035704612732, |
|
"learning_rate": 0.00019328711641961445, |
|
"loss": 0.8251, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.11876307140723036, |
|
"grad_norm": 0.16837474703788757, |
|
"learning_rate": 0.00019327017562924644, |
|
"loss": 0.9535, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.11891245891843442, |
|
"grad_norm": 0.31326931715011597, |
|
"learning_rate": 0.00019325321423401933, |
|
"loss": 0.9828, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.11906184642963848, |
|
"grad_norm": 0.16937793791294098, |
|
"learning_rate": 0.00019323623223768015, |
|
"loss": 0.9445, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.11921123394084254, |
|
"grad_norm": 0.15119615197181702, |
|
"learning_rate": 0.00019321922964398046, |
|
"loss": 0.7483, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.11936062145204661, |
|
"grad_norm": 0.1750727891921997, |
|
"learning_rate": 0.00019320220645667645, |
|
"loss": 0.9364, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.11951000896325067, |
|
"grad_norm": 0.22479920089244843, |
|
"learning_rate": 0.00019318516267952874, |
|
"loss": 0.854, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.11965939647445474, |
|
"grad_norm": 0.12636275589466095, |
|
"learning_rate": 0.00019316809831630265, |
|
"loss": 0.8127, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.11980878398565879, |
|
"grad_norm": 0.1225171908736229, |
|
"learning_rate": 0.00019315101337076792, |
|
"loss": 0.7809, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.11995817149686286, |
|
"grad_norm": 0.24531179666519165, |
|
"learning_rate": 0.00019313390784669895, |
|
"loss": 1.0615, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.12010755900806692, |
|
"grad_norm": 0.15142390131950378, |
|
"learning_rate": 0.0001931167817478745, |
|
"loss": 0.913, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.12025694651927099, |
|
"grad_norm": 0.13152721524238586, |
|
"learning_rate": 0.00019309963507807811, |
|
"loss": 0.6284, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.12040633403047506, |
|
"grad_norm": 0.10537417232990265, |
|
"learning_rate": 0.0001930824678410977, |
|
"loss": 0.6071, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.12055572154167911, |
|
"grad_norm": 0.13866813480854034, |
|
"learning_rate": 0.0001930652800407258, |
|
"loss": 0.7461, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.12070510905288317, |
|
"grad_norm": 0.16239172220230103, |
|
"learning_rate": 0.00019304807168075944, |
|
"loss": 0.8089, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.12085449656408724, |
|
"grad_norm": 0.12834633886814117, |
|
"learning_rate": 0.00019303084276500027, |
|
"loss": 0.8459, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.12100388407529131, |
|
"grad_norm": 0.17036044597625732, |
|
"learning_rate": 0.00019301359329725436, |
|
"loss": 0.8131, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.12115327158649537, |
|
"grad_norm": 0.14274178445339203, |
|
"learning_rate": 0.00019299632328133247, |
|
"loss": 0.9325, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.12130265909769944, |
|
"grad_norm": 0.16712737083435059, |
|
"learning_rate": 0.00019297903272104977, |
|
"loss": 0.88, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.12145204660890349, |
|
"grad_norm": 0.1521551012992859, |
|
"learning_rate": 0.00019296172162022604, |
|
"loss": 0.9838, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.12160143412010756, |
|
"grad_norm": 0.1645859032869339, |
|
"learning_rate": 0.00019294438998268554, |
|
"loss": 0.6344, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.12175082163131162, |
|
"grad_norm": 0.15957693755626678, |
|
"learning_rate": 0.00019292703781225717, |
|
"loss": 0.926, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.12190020914251569, |
|
"grad_norm": 0.13982924818992615, |
|
"learning_rate": 0.00019290966511277422, |
|
"loss": 0.849, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.12204959665371976, |
|
"grad_norm": 0.16864308714866638, |
|
"learning_rate": 0.00019289227188807467, |
|
"loss": 0.6237, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.12219898416492381, |
|
"grad_norm": 0.14285649359226227, |
|
"learning_rate": 0.00019287485814200087, |
|
"loss": 0.6809, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.12234837167612787, |
|
"grad_norm": 0.6627392768859863, |
|
"learning_rate": 0.00019285742387839988, |
|
"loss": 1.9625, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.12249775918733194, |
|
"grad_norm": 0.13897117972373962, |
|
"learning_rate": 0.00019283996910112318, |
|
"loss": 1.0312, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.122647146698536, |
|
"grad_norm": 0.15520478785037994, |
|
"learning_rate": 0.00019282249381402677, |
|
"loss": 0.8191, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.12279653420974007, |
|
"grad_norm": 0.24286359548568726, |
|
"learning_rate": 0.00019280499802097126, |
|
"loss": 1.1401, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.12294592172094412, |
|
"grad_norm": 0.15701743960380554, |
|
"learning_rate": 0.00019278748172582173, |
|
"loss": 0.7389, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.12309530923214819, |
|
"grad_norm": 0.16320520639419556, |
|
"learning_rate": 0.0001927699449324478, |
|
"loss": 0.9232, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.12324469674335226, |
|
"grad_norm": 0.13166747987270355, |
|
"learning_rate": 0.00019275238764472364, |
|
"loss": 0.648, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.12339408425455632, |
|
"grad_norm": 0.1490585058927536, |
|
"learning_rate": 0.00019273480986652794, |
|
"loss": 0.8485, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.12354347176576039, |
|
"grad_norm": 0.1607678383588791, |
|
"learning_rate": 0.00019271721160174388, |
|
"loss": 0.7415, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.12369285927696444, |
|
"grad_norm": 0.13538645207881927, |
|
"learning_rate": 0.0001926995928542592, |
|
"loss": 0.702, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.1238422467881685, |
|
"grad_norm": 0.17759506404399872, |
|
"learning_rate": 0.00019268195362796622, |
|
"loss": 0.819, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.12399163429937257, |
|
"grad_norm": 0.14529088139533997, |
|
"learning_rate": 0.00019266429392676164, |
|
"loss": 0.5262, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.12414102181057664, |
|
"grad_norm": 0.16848066449165344, |
|
"learning_rate": 0.0001926466137545468, |
|
"loss": 0.6761, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.1242904093217807, |
|
"grad_norm": 0.33541253209114075, |
|
"learning_rate": 0.00019262891311522755, |
|
"loss": 0.9293, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.12443979683298476, |
|
"grad_norm": 0.2218908667564392, |
|
"learning_rate": 0.00019261119201271422, |
|
"loss": 1.0644, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.12458918434418882, |
|
"grad_norm": 0.1724868267774582, |
|
"learning_rate": 0.0001925934504509217, |
|
"loss": 0.8411, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.12473857185539289, |
|
"grad_norm": 0.12873922288417816, |
|
"learning_rate": 0.00019257568843376936, |
|
"loss": 0.7466, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.12488795936659695, |
|
"grad_norm": 0.13556797802448273, |
|
"learning_rate": 0.00019255790596518112, |
|
"loss": 0.675, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.12503734687780102, |
|
"grad_norm": 0.12460656464099884, |
|
"learning_rate": 0.00019254010304908543, |
|
"loss": 0.6401, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.12518673438900507, |
|
"grad_norm": 0.16490042209625244, |
|
"learning_rate": 0.00019252227968941522, |
|
"loss": 0.8934, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.12533612190020915, |
|
"grad_norm": 0.13448311388492584, |
|
"learning_rate": 0.00019250443589010792, |
|
"loss": 0.7896, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.1254855094114132, |
|
"grad_norm": 0.16792979836463928, |
|
"learning_rate": 0.00019248657165510556, |
|
"loss": 0.74, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.12563489692261726, |
|
"grad_norm": 0.1711784452199936, |
|
"learning_rate": 0.00019246868698835458, |
|
"loss": 0.5992, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.12578428443382134, |
|
"grad_norm": 0.12499507516622543, |
|
"learning_rate": 0.00019245078189380604, |
|
"loss": 0.8517, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.1259336719450254, |
|
"grad_norm": 0.09736009687185287, |
|
"learning_rate": 0.00019243285637541544, |
|
"loss": 0.6209, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.12608305945622947, |
|
"grad_norm": 0.21913480758666992, |
|
"learning_rate": 0.0001924149104371428, |
|
"loss": 0.9499, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.12623244696743352, |
|
"grad_norm": 0.23238122463226318, |
|
"learning_rate": 0.00019239694408295266, |
|
"loss": 1.1666, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.12638183447863757, |
|
"grad_norm": 0.24529285728931427, |
|
"learning_rate": 0.00019237895731681408, |
|
"loss": 0.734, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.12653122198984165, |
|
"grad_norm": 0.16577094793319702, |
|
"learning_rate": 0.00019236095014270064, |
|
"loss": 1.0808, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.1266806095010457, |
|
"grad_norm": 0.08479466289281845, |
|
"learning_rate": 0.0001923429225645904, |
|
"loss": 0.4277, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.12682999701224978, |
|
"grad_norm": 0.15633448958396912, |
|
"learning_rate": 0.00019232487458646587, |
|
"loss": 0.8547, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.12697938452345384, |
|
"grad_norm": 0.2966431975364685, |
|
"learning_rate": 0.00019230680621231425, |
|
"loss": 0.7691, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.12712877203465792, |
|
"grad_norm": 0.21817810833454132, |
|
"learning_rate": 0.00019228871744612704, |
|
"loss": 0.9089, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.12727815954586197, |
|
"grad_norm": 0.1577853262424469, |
|
"learning_rate": 0.0001922706082919004, |
|
"loss": 0.7345, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.12742754705706602, |
|
"grad_norm": 0.14371559023857117, |
|
"learning_rate": 0.00019225247875363487, |
|
"loss": 0.9727, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.1275769345682701, |
|
"grad_norm": 0.10418539494276047, |
|
"learning_rate": 0.0001922343288353356, |
|
"loss": 0.6287, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.12772632207947415, |
|
"grad_norm": 0.14414125680923462, |
|
"learning_rate": 0.00019221615854101215, |
|
"loss": 1.0211, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.12787570959067823, |
|
"grad_norm": 0.14058874547481537, |
|
"learning_rate": 0.00019219796787467867, |
|
"loss": 0.9388, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.12802509710188228, |
|
"grad_norm": 0.24741698801517487, |
|
"learning_rate": 0.00019217975684035374, |
|
"loss": 0.8997, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.12817448461308634, |
|
"grad_norm": 0.1672530174255371, |
|
"learning_rate": 0.00019216152544206049, |
|
"loss": 0.9332, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.12832387212429042, |
|
"grad_norm": 0.30283141136169434, |
|
"learning_rate": 0.00019214327368382645, |
|
"loss": 1.1119, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.12847325963549447, |
|
"grad_norm": 0.41509029269218445, |
|
"learning_rate": 0.00019212500156968383, |
|
"loss": 1.0261, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.12862264714669855, |
|
"grad_norm": 0.1252591907978058, |
|
"learning_rate": 0.00019210670910366917, |
|
"loss": 0.7836, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.1287720346579026, |
|
"grad_norm": 0.1359296441078186, |
|
"learning_rate": 0.00019208839628982358, |
|
"loss": 0.9255, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.12892142216910665, |
|
"grad_norm": 0.22711493074893951, |
|
"learning_rate": 0.0001920700631321926, |
|
"loss": 1.1905, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.12907080968031073, |
|
"grad_norm": 0.13732872903347015, |
|
"learning_rate": 0.00019205170963482643, |
|
"loss": 0.7808, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.12922019719151479, |
|
"grad_norm": 0.3582336902618408, |
|
"learning_rate": 0.00019203333580177954, |
|
"loss": 0.7557, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.12936958470271887, |
|
"grad_norm": 0.14303429424762726, |
|
"learning_rate": 0.00019201494163711104, |
|
"loss": 0.898, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.12951897221392292, |
|
"grad_norm": 0.11724136024713516, |
|
"learning_rate": 0.00019199652714488446, |
|
"loss": 0.6867, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.12966835972512697, |
|
"grad_norm": 0.14605864882469177, |
|
"learning_rate": 0.00019197809232916795, |
|
"loss": 0.7361, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.12981774723633105, |
|
"grad_norm": 0.15446977317333221, |
|
"learning_rate": 0.00019195963719403393, |
|
"loss": 0.7941, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.1299671347475351, |
|
"grad_norm": 0.17148366570472717, |
|
"learning_rate": 0.00019194116174355954, |
|
"loss": 0.7493, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.13011652225873918, |
|
"grad_norm": 0.12092535197734833, |
|
"learning_rate": 0.0001919226659818262, |
|
"loss": 0.8044, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.13026590976994323, |
|
"grad_norm": 0.174177348613739, |
|
"learning_rate": 0.00019190414991291998, |
|
"loss": 0.8565, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.13041529728114729, |
|
"grad_norm": 0.14217911660671234, |
|
"learning_rate": 0.0001918856135409314, |
|
"loss": 0.7967, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.13056468479235137, |
|
"grad_norm": 0.2161906510591507, |
|
"learning_rate": 0.00019186705686995533, |
|
"loss": 1.0536, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.13071407230355542, |
|
"grad_norm": 0.11044277995824814, |
|
"learning_rate": 0.00019184847990409134, |
|
"loss": 0.7443, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.1308634598147595, |
|
"grad_norm": 0.4287470877170563, |
|
"learning_rate": 0.0001918298826474433, |
|
"loss": 1.1078, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.13101284732596355, |
|
"grad_norm": 0.313516229391098, |
|
"learning_rate": 0.00019181126510411974, |
|
"loss": 1.1343, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.1311622348371676, |
|
"grad_norm": 0.15673229098320007, |
|
"learning_rate": 0.0001917926272782334, |
|
"loss": 1.0282, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.13131162234837168, |
|
"grad_norm": 0.14207200706005096, |
|
"learning_rate": 0.00019177396917390187, |
|
"loss": 0.7487, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.13146100985957573, |
|
"grad_norm": 0.21555255353450775, |
|
"learning_rate": 0.00019175529079524687, |
|
"loss": 0.8816, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.1316103973707798, |
|
"grad_norm": 0.10295391082763672, |
|
"learning_rate": 0.00019173659214639482, |
|
"loss": 0.7433, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.13175978488198387, |
|
"grad_norm": 0.12901347875595093, |
|
"learning_rate": 0.00019171787323147654, |
|
"loss": 0.828, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.13190917239318792, |
|
"grad_norm": 0.1917589008808136, |
|
"learning_rate": 0.00019169913405462733, |
|
"loss": 0.8845, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.132058559904392, |
|
"grad_norm": 0.14522455632686615, |
|
"learning_rate": 0.00019168037461998695, |
|
"loss": 0.8876, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.13220794741559605, |
|
"grad_norm": 0.33967098593711853, |
|
"learning_rate": 0.0001916615949316997, |
|
"loss": 0.93, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.13235733492680013, |
|
"grad_norm": 0.12859250605106354, |
|
"learning_rate": 0.00019164279499391427, |
|
"loss": 0.8351, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.13250672243800418, |
|
"grad_norm": 0.11414708197116852, |
|
"learning_rate": 0.00019162397481078386, |
|
"loss": 0.5253, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.13265610994920823, |
|
"grad_norm": 0.6967369914054871, |
|
"learning_rate": 0.00019160513438646617, |
|
"loss": 1.0658, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.13280549746041231, |
|
"grad_norm": 0.2920320928096771, |
|
"learning_rate": 0.00019158627372512337, |
|
"loss": 1.0202, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.13295488497161637, |
|
"grad_norm": 0.1285315304994583, |
|
"learning_rate": 0.00019156739283092205, |
|
"loss": 0.5447, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.13310427248282045, |
|
"grad_norm": 0.6523221135139465, |
|
"learning_rate": 0.00019154849170803327, |
|
"loss": 1.2813, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.1332536599940245, |
|
"grad_norm": 0.11954151839017868, |
|
"learning_rate": 0.00019152957036063265, |
|
"loss": 0.7564, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.13340304750522855, |
|
"grad_norm": 0.10571929812431335, |
|
"learning_rate": 0.0001915106287929002, |
|
"loss": 0.4487, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.13355243501643263, |
|
"grad_norm": 0.24934682250022888, |
|
"learning_rate": 0.00019149166700902032, |
|
"loss": 0.7512, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.13370182252763668, |
|
"grad_norm": 0.1705646514892578, |
|
"learning_rate": 0.00019147268501318212, |
|
"loss": 0.6647, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.13385121003884076, |
|
"grad_norm": 0.1977798193693161, |
|
"learning_rate": 0.0001914536828095789, |
|
"loss": 1.0058, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.13400059755004481, |
|
"grad_norm": 0.16323387622833252, |
|
"learning_rate": 0.00019143466040240863, |
|
"loss": 1.0301, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.13414998506124887, |
|
"grad_norm": 0.15375447273254395, |
|
"learning_rate": 0.0001914156177958736, |
|
"loss": 0.5928, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.13429937257245295, |
|
"grad_norm": 0.2192535251379013, |
|
"learning_rate": 0.00019139655499418067, |
|
"loss": 0.6105, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.134448760083657, |
|
"grad_norm": 0.35383331775665283, |
|
"learning_rate": 0.0001913774720015411, |
|
"loss": 0.9193, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.13459814759486108, |
|
"grad_norm": 0.2478116750717163, |
|
"learning_rate": 0.0001913583688221706, |
|
"loss": 1.0428, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.13474753510606513, |
|
"grad_norm": 0.14307573437690735, |
|
"learning_rate": 0.00019133924546028942, |
|
"loss": 0.8198, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.13489692261726918, |
|
"grad_norm": 0.27853766083717346, |
|
"learning_rate": 0.00019132010192012214, |
|
"loss": 0.9841, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.13504631012847326, |
|
"grad_norm": 0.13049887120723724, |
|
"learning_rate": 0.00019130093820589791, |
|
"loss": 0.5867, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.13519569763967731, |
|
"grad_norm": 0.1877085268497467, |
|
"learning_rate": 0.0001912817543218503, |
|
"loss": 1.0579, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.1353450851508814, |
|
"grad_norm": 0.18851879239082336, |
|
"learning_rate": 0.00019126255027221735, |
|
"loss": 0.5173, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.13549447266208545, |
|
"grad_norm": 0.1649446338415146, |
|
"learning_rate": 0.00019124332606124152, |
|
"loss": 0.8469, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.13564386017328953, |
|
"grad_norm": 0.1373070925474167, |
|
"learning_rate": 0.00019122408169316976, |
|
"loss": 0.7754, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.13579324768449358, |
|
"grad_norm": 0.13052485883235931, |
|
"learning_rate": 0.00019120481717225342, |
|
"loss": 0.9105, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.13594263519569763, |
|
"grad_norm": 0.15370683372020721, |
|
"learning_rate": 0.00019118553250274832, |
|
"loss": 0.6646, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.1360920227069017, |
|
"grad_norm": 0.1601782590150833, |
|
"learning_rate": 0.00019116622768891483, |
|
"loss": 0.7487, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.13624141021810576, |
|
"grad_norm": 0.19794587790966034, |
|
"learning_rate": 0.00019114690273501765, |
|
"loss": 0.5437, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.13639079772930984, |
|
"grad_norm": 0.12566497921943665, |
|
"learning_rate": 0.00019112755764532594, |
|
"loss": 0.8602, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.1365401852405139, |
|
"grad_norm": 0.5696573257446289, |
|
"learning_rate": 0.00019110819242411337, |
|
"loss": 1.1799, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.13668957275171795, |
|
"grad_norm": 0.11676638573408127, |
|
"learning_rate": 0.00019108880707565802, |
|
"loss": 0.6958, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.13683896026292203, |
|
"grad_norm": 0.12003248929977417, |
|
"learning_rate": 0.00019106940160424244, |
|
"loss": 0.5425, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.13698834777412608, |
|
"grad_norm": 0.265358030796051, |
|
"learning_rate": 0.00019104997601415352, |
|
"loss": 0.673, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.13713773528533016, |
|
"grad_norm": 0.17708387970924377, |
|
"learning_rate": 0.0001910305303096828, |
|
"loss": 0.7885, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.1372871227965342, |
|
"grad_norm": 0.17588114738464355, |
|
"learning_rate": 0.00019101106449512605, |
|
"loss": 0.8977, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.13743651030773826, |
|
"grad_norm": 0.20153087377548218, |
|
"learning_rate": 0.0001909915785747836, |
|
"loss": 0.6529, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.13758589781894234, |
|
"grad_norm": 0.14944609999656677, |
|
"learning_rate": 0.00019097207255296022, |
|
"loss": 0.7525, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.1377352853301464, |
|
"grad_norm": 0.29275017976760864, |
|
"learning_rate": 0.00019095254643396512, |
|
"loss": 1.0195, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.13788467284135048, |
|
"grad_norm": 0.26294517517089844, |
|
"learning_rate": 0.00019093300022211186, |
|
"loss": 1.2764, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.13803406035255453, |
|
"grad_norm": 0.10997014492750168, |
|
"learning_rate": 0.0001909134339217186, |
|
"loss": 0.5097, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.13818344786375858, |
|
"grad_norm": 0.25661003589630127, |
|
"learning_rate": 0.00019089384753710772, |
|
"loss": 1.1546, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.13833283537496266, |
|
"grad_norm": 0.14332082867622375, |
|
"learning_rate": 0.00019087424107260627, |
|
"loss": 0.6839, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.1384822228861667, |
|
"grad_norm": 0.29279524087905884, |
|
"learning_rate": 0.0001908546145325456, |
|
"loss": 0.8088, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.1386316103973708, |
|
"grad_norm": 0.2230231910943985, |
|
"learning_rate": 0.00019083496792126153, |
|
"loss": 0.7403, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.13878099790857484, |
|
"grad_norm": 0.20737332105636597, |
|
"learning_rate": 0.00019081530124309427, |
|
"loss": 1.1846, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.1389303854197789, |
|
"grad_norm": 0.2875371277332306, |
|
"learning_rate": 0.00019079561450238854, |
|
"loss": 0.9049, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.13907977293098298, |
|
"grad_norm": 0.12367036938667297, |
|
"learning_rate": 0.00019077590770349344, |
|
"loss": 0.8605, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.13922916044218703, |
|
"grad_norm": 0.34429073333740234, |
|
"learning_rate": 0.00019075618085076247, |
|
"loss": 1.0233, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.1393785479533911, |
|
"grad_norm": 0.15258093178272247, |
|
"learning_rate": 0.00019073643394855368, |
|
"loss": 0.8381, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.13952793546459516, |
|
"grad_norm": 0.15117698907852173, |
|
"learning_rate": 0.00019071666700122946, |
|
"loss": 0.6532, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.1396773229757992, |
|
"grad_norm": 0.11969941109418869, |
|
"learning_rate": 0.0001906968800131566, |
|
"loss": 0.7025, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.1398267104870033, |
|
"grad_norm": 0.12973757088184357, |
|
"learning_rate": 0.00019067707298870638, |
|
"loss": 0.6276, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.13997609799820734, |
|
"grad_norm": 0.24265740811824799, |
|
"learning_rate": 0.0001906572459322545, |
|
"loss": 0.8548, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.14012548550941142, |
|
"grad_norm": 0.18727362155914307, |
|
"learning_rate": 0.00019063739884818103, |
|
"loss": 0.5687, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.14027487302061548, |
|
"grad_norm": 0.13480857014656067, |
|
"learning_rate": 0.00019061753174087054, |
|
"loss": 0.9462, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.14042426053181953, |
|
"grad_norm": 0.2221670150756836, |
|
"learning_rate": 0.000190597644614712, |
|
"loss": 0.6429, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.1405736480430236, |
|
"grad_norm": 0.13278596103191376, |
|
"learning_rate": 0.00019057773747409879, |
|
"loss": 0.839, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.14072303555422766, |
|
"grad_norm": 0.12343950569629669, |
|
"learning_rate": 0.00019055781032342864, |
|
"loss": 0.8807, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.14087242306543174, |
|
"grad_norm": 0.1782051920890808, |
|
"learning_rate": 0.00019053786316710386, |
|
"loss": 1.0982, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.1410218105766358, |
|
"grad_norm": 0.1475050151348114, |
|
"learning_rate": 0.00019051789600953102, |
|
"loss": 1.0571, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.14117119808783984, |
|
"grad_norm": 0.12406841665506363, |
|
"learning_rate": 0.00019049790885512126, |
|
"loss": 0.641, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.14132058559904392, |
|
"grad_norm": 0.13010482490062714, |
|
"learning_rate": 0.00019047790170829003, |
|
"loss": 0.7616, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.14146997311024798, |
|
"grad_norm": 0.14790335297584534, |
|
"learning_rate": 0.00019045787457345722, |
|
"loss": 0.6587, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.14161936062145206, |
|
"grad_norm": 0.11543125659227371, |
|
"learning_rate": 0.00019043782745504711, |
|
"loss": 0.7014, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.1417687481326561, |
|
"grad_norm": 0.12451034784317017, |
|
"learning_rate": 0.00019041776035748847, |
|
"loss": 0.5931, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.14191813564386016, |
|
"grad_norm": 0.1254526674747467, |
|
"learning_rate": 0.00019039767328521442, |
|
"loss": 0.7772, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.14206752315506424, |
|
"grad_norm": 0.21236388385295868, |
|
"learning_rate": 0.00019037756624266252, |
|
"loss": 0.6836, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.1422169106662683, |
|
"grad_norm": 0.1704844981431961, |
|
"learning_rate": 0.0001903574392342747, |
|
"loss": 0.5561, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.14236629817747237, |
|
"grad_norm": 0.1470273733139038, |
|
"learning_rate": 0.0001903372922644974, |
|
"loss": 0.7572, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.14251568568867642, |
|
"grad_norm": 0.1643514782190323, |
|
"learning_rate": 0.00019031712533778137, |
|
"loss": 0.8037, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.14266507319988048, |
|
"grad_norm": 0.1775134652853012, |
|
"learning_rate": 0.0001902969384585818, |
|
"loss": 0.6325, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.14281446071108456, |
|
"grad_norm": 0.16977538168430328, |
|
"learning_rate": 0.00019027673163135827, |
|
"loss": 0.4522, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.1429638482222886, |
|
"grad_norm": 0.2153913825750351, |
|
"learning_rate": 0.00019025650486057484, |
|
"loss": 0.6189, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.1431132357334927, |
|
"grad_norm": 0.12824203073978424, |
|
"learning_rate": 0.00019023625815069989, |
|
"loss": 0.5429, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.14326262324469674, |
|
"grad_norm": 0.13086850941181183, |
|
"learning_rate": 0.00019021599150620625, |
|
"loss": 0.854, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.1434120107559008, |
|
"grad_norm": 0.13822171092033386, |
|
"learning_rate": 0.00019019570493157114, |
|
"loss": 0.7447, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.14356139826710487, |
|
"grad_norm": 0.13974325358867645, |
|
"learning_rate": 0.00019017539843127617, |
|
"loss": 0.9451, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.14371078577830892, |
|
"grad_norm": 0.17371952533721924, |
|
"learning_rate": 0.0001901550720098074, |
|
"loss": 0.9114, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.143860173289513, |
|
"grad_norm": 0.2538747191429138, |
|
"learning_rate": 0.00019013472567165523, |
|
"loss": 1.1961, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.14400956080071706, |
|
"grad_norm": 0.110201895236969, |
|
"learning_rate": 0.00019011435942131448, |
|
"loss": 0.4677, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.14415894831192114, |
|
"grad_norm": 0.11606165021657944, |
|
"learning_rate": 0.00019009397326328443, |
|
"loss": 0.7407, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.1443083358231252, |
|
"grad_norm": 0.1762661188840866, |
|
"learning_rate": 0.00019007356720206865, |
|
"loss": 0.7177, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.14445772333432924, |
|
"grad_norm": 0.16712108254432678, |
|
"learning_rate": 0.0001900531412421752, |
|
"loss": 0.858, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.14460711084553332, |
|
"grad_norm": 0.17348961532115936, |
|
"learning_rate": 0.00019003269538811647, |
|
"loss": 1.1409, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.14475649835673737, |
|
"grad_norm": 0.13761204481124878, |
|
"learning_rate": 0.0001900122296444093, |
|
"loss": 0.6666, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.14490588586794145, |
|
"grad_norm": 0.20189805328845978, |
|
"learning_rate": 0.00018999174401557488, |
|
"loss": 0.736, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.1450552733791455, |
|
"grad_norm": 0.15712390840053558, |
|
"learning_rate": 0.0001899712385061388, |
|
"loss": 0.7429, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.14520466089034956, |
|
"grad_norm": 0.12312103062868118, |
|
"learning_rate": 0.00018995071312063105, |
|
"loss": 0.8887, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.14535404840155364, |
|
"grad_norm": 0.16373762488365173, |
|
"learning_rate": 0.00018993016786358603, |
|
"loss": 1.0923, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.1455034359127577, |
|
"grad_norm": 0.1286284178495407, |
|
"learning_rate": 0.00018990960273954254, |
|
"loss": 0.5527, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.14565282342396177, |
|
"grad_norm": 0.1538635641336441, |
|
"learning_rate": 0.0001898890177530437, |
|
"loss": 0.8092, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.14580221093516582, |
|
"grad_norm": 0.14671294391155243, |
|
"learning_rate": 0.00018986841290863704, |
|
"loss": 0.7165, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.14595159844636987, |
|
"grad_norm": 0.1533249318599701, |
|
"learning_rate": 0.00018984778821087454, |
|
"loss": 0.6822, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.14610098595757395, |
|
"grad_norm": 0.1579664945602417, |
|
"learning_rate": 0.0001898271436643125, |
|
"loss": 0.8128, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.146250373468778, |
|
"grad_norm": 0.1557191014289856, |
|
"learning_rate": 0.00018980647927351166, |
|
"loss": 0.8699, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.14639976097998209, |
|
"grad_norm": 0.1736837476491928, |
|
"learning_rate": 0.00018978579504303706, |
|
"loss": 1.2032, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.14654914849118614, |
|
"grad_norm": 0.17181052267551422, |
|
"learning_rate": 0.00018976509097745826, |
|
"loss": 0.7804, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.1466985360023902, |
|
"grad_norm": 0.20371553301811218, |
|
"learning_rate": 0.000189744367081349, |
|
"loss": 0.9971, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.14684792351359427, |
|
"grad_norm": 0.11608695983886719, |
|
"learning_rate": 0.00018972362335928757, |
|
"loss": 0.7474, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.14699731102479832, |
|
"grad_norm": 0.23304304480552673, |
|
"learning_rate": 0.00018970285981585662, |
|
"loss": 1.0192, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.1471466985360024, |
|
"grad_norm": 0.18036411702632904, |
|
"learning_rate": 0.0001896820764556431, |
|
"loss": 1.1699, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.14729608604720645, |
|
"grad_norm": 0.45688414573669434, |
|
"learning_rate": 0.00018966127328323842, |
|
"loss": 1.5615, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.1474454735584105, |
|
"grad_norm": 0.11910564452409744, |
|
"learning_rate": 0.00018964045030323828, |
|
"loss": 0.5669, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.14759486106961459, |
|
"grad_norm": 0.15722838044166565, |
|
"learning_rate": 0.00018961960752024288, |
|
"loss": 0.7162, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.14774424858081864, |
|
"grad_norm": 0.13349586725234985, |
|
"learning_rate": 0.00018959874493885666, |
|
"loss": 0.8232, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.14789363609202272, |
|
"grad_norm": 0.2732388973236084, |
|
"learning_rate": 0.0001895778625636885, |
|
"loss": 0.9254, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.14804302360322677, |
|
"grad_norm": 0.12720026075839996, |
|
"learning_rate": 0.00018955696039935167, |
|
"loss": 0.6276, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.14819241111443082, |
|
"grad_norm": 0.12968410551548004, |
|
"learning_rate": 0.00018953603845046378, |
|
"loss": 0.7688, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.1483417986256349, |
|
"grad_norm": 0.2596571445465088, |
|
"learning_rate": 0.0001895150967216468, |
|
"loss": 1.0049, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.14849118613683895, |
|
"grad_norm": 0.12322080880403519, |
|
"learning_rate": 0.00018949413521752713, |
|
"loss": 0.7845, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.14864057364804303, |
|
"grad_norm": 0.12585557997226715, |
|
"learning_rate": 0.00018947315394273546, |
|
"loss": 0.5276, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.14878996115924709, |
|
"grad_norm": 0.16490721702575684, |
|
"learning_rate": 0.00018945215290190693, |
|
"loss": 0.5467, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.14893934867045114, |
|
"grad_norm": 0.20895147323608398, |
|
"learning_rate": 0.00018943113209968094, |
|
"loss": 0.6746, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.14908873618165522, |
|
"grad_norm": 0.15641289949417114, |
|
"learning_rate": 0.00018941009154070136, |
|
"loss": 0.5641, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.14923812369285927, |
|
"grad_norm": 0.12262630462646484, |
|
"learning_rate": 0.0001893890312296164, |
|
"loss": 0.5929, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.14938751120406335, |
|
"grad_norm": 0.15614943206310272, |
|
"learning_rate": 0.00018936795117107855, |
|
"loss": 0.7035, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.1495368987152674, |
|
"grad_norm": 0.13168151676654816, |
|
"learning_rate": 0.00018934685136974482, |
|
"loss": 0.7546, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.14968628622647145, |
|
"grad_norm": 0.12817221879959106, |
|
"learning_rate": 0.0001893257318302764, |
|
"loss": 0.6542, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.14983567373767553, |
|
"grad_norm": 0.22590519487857819, |
|
"learning_rate": 0.000189304592557339, |
|
"loss": 0.9049, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.1499850612488796, |
|
"grad_norm": 0.1316809505224228, |
|
"learning_rate": 0.00018928343355560258, |
|
"loss": 0.8921, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.15013444876008367, |
|
"grad_norm": 0.16760273277759552, |
|
"learning_rate": 0.00018926225482974153, |
|
"loss": 0.5918, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.15028383627128772, |
|
"grad_norm": 0.1197691559791565, |
|
"learning_rate": 0.00018924105638443452, |
|
"loss": 0.6532, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.15043322378249177, |
|
"grad_norm": 0.1392420530319214, |
|
"learning_rate": 0.00018921983822436467, |
|
"loss": 0.9638, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.15058261129369585, |
|
"grad_norm": 0.17294703423976898, |
|
"learning_rate": 0.0001891986003542194, |
|
"loss": 0.7818, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.1507319988048999, |
|
"grad_norm": 0.2133634239435196, |
|
"learning_rate": 0.00018917734277869042, |
|
"loss": 0.8933, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.15088138631610398, |
|
"grad_norm": 0.13073426485061646, |
|
"learning_rate": 0.00018915606550247397, |
|
"loss": 0.7142, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.15103077382730803, |
|
"grad_norm": 0.2918628454208374, |
|
"learning_rate": 0.0001891347685302705, |
|
"loss": 0.9469, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.1511801613385121, |
|
"grad_norm": 0.12934882938861847, |
|
"learning_rate": 0.0001891134518667848, |
|
"loss": 0.8446, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.15132954884971617, |
|
"grad_norm": 0.12790237367153168, |
|
"learning_rate": 0.00018909211551672615, |
|
"loss": 0.5309, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.15147893636092022, |
|
"grad_norm": 0.2520146071910858, |
|
"learning_rate": 0.000189070759484808, |
|
"loss": 0.8729, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.1516283238721243, |
|
"grad_norm": 0.1819266974925995, |
|
"learning_rate": 0.00018904938377574827, |
|
"loss": 0.8364, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.15177771138332835, |
|
"grad_norm": 0.13670526444911957, |
|
"learning_rate": 0.0001890279883942692, |
|
"loss": 1.178, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.15192709889453243, |
|
"grad_norm": 0.15499313175678253, |
|
"learning_rate": 0.00018900657334509736, |
|
"loss": 0.6144, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.15207648640573648, |
|
"grad_norm": 0.13412916660308838, |
|
"learning_rate": 0.0001889851386329637, |
|
"loss": 0.7415, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.15222587391694054, |
|
"grad_norm": 0.17868481576442719, |
|
"learning_rate": 0.00018896368426260345, |
|
"loss": 1.0732, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.15237526142814461, |
|
"grad_norm": 0.197604700922966, |
|
"learning_rate": 0.00018894221023875622, |
|
"loss": 0.8012, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.15252464893934867, |
|
"grad_norm": 0.13587865233421326, |
|
"learning_rate": 0.000188920716566166, |
|
"loss": 0.5255, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.15267403645055275, |
|
"grad_norm": 0.13020993769168854, |
|
"learning_rate": 0.00018889920324958106, |
|
"loss": 0.5617, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.1528234239617568, |
|
"grad_norm": 0.19373324513435364, |
|
"learning_rate": 0.00018887767029375401, |
|
"loss": 1.085, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.15297281147296085, |
|
"grad_norm": 0.11815980821847916, |
|
"learning_rate": 0.00018885611770344185, |
|
"loss": 0.7142, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.15312219898416493, |
|
"grad_norm": 0.21390031278133392, |
|
"learning_rate": 0.00018883454548340587, |
|
"loss": 1.0385, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.15327158649536898, |
|
"grad_norm": 0.19770096242427826, |
|
"learning_rate": 0.00018881295363841174, |
|
"loss": 0.7931, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.15342097400657306, |
|
"grad_norm": 0.1958816796541214, |
|
"learning_rate": 0.00018879134217322942, |
|
"loss": 0.8291, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.15357036151777712, |
|
"grad_norm": 0.11775219440460205, |
|
"learning_rate": 0.00018876971109263324, |
|
"loss": 0.6206, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.15371974902898117, |
|
"grad_norm": 0.11675494909286499, |
|
"learning_rate": 0.00018874806040140185, |
|
"loss": 0.5483, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.15386913654018525, |
|
"grad_norm": 0.33525991439819336, |
|
"learning_rate": 0.00018872639010431822, |
|
"loss": 1.6754, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.1540185240513893, |
|
"grad_norm": 0.15912093222141266, |
|
"learning_rate": 0.00018870470020616967, |
|
"loss": 0.7152, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.15416791156259338, |
|
"grad_norm": 0.1797519475221634, |
|
"learning_rate": 0.0001886829907117478, |
|
"loss": 0.5595, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.15431729907379743, |
|
"grad_norm": 0.163869708776474, |
|
"learning_rate": 0.0001886612616258486, |
|
"loss": 0.6443, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.15446668658500148, |
|
"grad_norm": 0.21159547567367554, |
|
"learning_rate": 0.00018863951295327244, |
|
"loss": 0.8727, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.15461607409620556, |
|
"grad_norm": 0.17695502936840057, |
|
"learning_rate": 0.00018861774469882385, |
|
"loss": 1.269, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.15476546160740962, |
|
"grad_norm": 0.23800405859947205, |
|
"learning_rate": 0.00018859595686731187, |
|
"loss": 1.2047, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.1549148491186137, |
|
"grad_norm": 0.11121930181980133, |
|
"learning_rate": 0.0001885741494635497, |
|
"loss": 0.6537, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.15506423662981775, |
|
"grad_norm": 0.10217446833848953, |
|
"learning_rate": 0.00018855232249235498, |
|
"loss": 0.4512, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.1552136241410218, |
|
"grad_norm": 0.2477329820394516, |
|
"learning_rate": 0.0001885304759585496, |
|
"loss": 0.8929, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.15536301165222588, |
|
"grad_norm": 0.13516084849834442, |
|
"learning_rate": 0.00018850860986695985, |
|
"loss": 0.6883, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.15551239916342993, |
|
"grad_norm": 0.14974398910999298, |
|
"learning_rate": 0.0001884867242224163, |
|
"loss": 0.6658, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.155661786674634, |
|
"grad_norm": 0.12428376823663712, |
|
"learning_rate": 0.00018846481902975377, |
|
"loss": 0.8312, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.15581117418583806, |
|
"grad_norm": 0.10979831218719482, |
|
"learning_rate": 0.0001884428942938115, |
|
"loss": 0.6737, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.15596056169704212, |
|
"grad_norm": 0.18722845613956451, |
|
"learning_rate": 0.00018842095001943306, |
|
"loss": 1.0435, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.1561099492082462, |
|
"grad_norm": 0.1588689237833023, |
|
"learning_rate": 0.00018839898621146625, |
|
"loss": 0.792, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.15625933671945025, |
|
"grad_norm": 0.10412801802158356, |
|
"learning_rate": 0.00018837700287476316, |
|
"loss": 0.4995, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.15640872423065433, |
|
"grad_norm": 0.1294378787279129, |
|
"learning_rate": 0.00018835500001418036, |
|
"loss": 0.8798, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.15655811174185838, |
|
"grad_norm": 0.16280725598335266, |
|
"learning_rate": 0.00018833297763457858, |
|
"loss": 0.6571, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.15670749925306243, |
|
"grad_norm": 0.12636205554008484, |
|
"learning_rate": 0.0001883109357408229, |
|
"loss": 0.5629, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.1568568867642665, |
|
"grad_norm": 0.22952638566493988, |
|
"learning_rate": 0.00018828887433778278, |
|
"loss": 0.9414, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.15700627427547056, |
|
"grad_norm": 0.18174390494823456, |
|
"learning_rate": 0.00018826679343033186, |
|
"loss": 0.6405, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.15715566178667464, |
|
"grad_norm": 0.4626804292201996, |
|
"learning_rate": 0.00018824469302334822, |
|
"loss": 1.1709, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.1573050492978787, |
|
"grad_norm": 0.17580588161945343, |
|
"learning_rate": 0.00018822257312171416, |
|
"loss": 0.7962, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.15745443680908275, |
|
"grad_norm": 0.1333022564649582, |
|
"learning_rate": 0.0001882004337303163, |
|
"loss": 0.7548, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.15760382432028683, |
|
"grad_norm": 1.2796801328659058, |
|
"learning_rate": 0.00018817827485404564, |
|
"loss": 1.9406, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.15775321183149088, |
|
"grad_norm": 0.17332683503627777, |
|
"learning_rate": 0.0001881560964977974, |
|
"loss": 0.8372, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.15790259934269496, |
|
"grad_norm": 0.15442058444023132, |
|
"learning_rate": 0.0001881338986664711, |
|
"loss": 0.8606, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.158051986853899, |
|
"grad_norm": 0.14709503948688507, |
|
"learning_rate": 0.0001881116813649706, |
|
"loss": 0.8004, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.15820137436510306, |
|
"grad_norm": 0.18778932094573975, |
|
"learning_rate": 0.00018808944459820407, |
|
"loss": 0.9717, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.15835076187630714, |
|
"grad_norm": 0.1773632913827896, |
|
"learning_rate": 0.00018806718837108402, |
|
"loss": 0.981, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.1585001493875112, |
|
"grad_norm": 0.13269007205963135, |
|
"learning_rate": 0.00018804491268852708, |
|
"loss": 0.8844, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.15864953689871528, |
|
"grad_norm": 0.17859001457691193, |
|
"learning_rate": 0.00018802261755545443, |
|
"loss": 0.5105, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.15879892440991933, |
|
"grad_norm": 0.1438104659318924, |
|
"learning_rate": 0.0001880003029767913, |
|
"loss": 0.7643, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.15894831192112338, |
|
"grad_norm": 0.13282090425491333, |
|
"learning_rate": 0.0001879779689574674, |
|
"loss": 0.821, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.15909769943232746, |
|
"grad_norm": 0.24478429555892944, |
|
"learning_rate": 0.0001879556155024167, |
|
"loss": 0.6762, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.1592470869435315, |
|
"grad_norm": 0.11228256672620773, |
|
"learning_rate": 0.00018793324261657737, |
|
"loss": 0.8163, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.1593964744547356, |
|
"grad_norm": 0.10839138180017471, |
|
"learning_rate": 0.00018791085030489194, |
|
"loss": 0.6536, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.15954586196593964, |
|
"grad_norm": 0.14781688153743744, |
|
"learning_rate": 0.00018788843857230726, |
|
"loss": 0.7572, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.1596952494771437, |
|
"grad_norm": 0.18171058595180511, |
|
"learning_rate": 0.00018786600742377437, |
|
"loss": 0.9202, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.15984463698834778, |
|
"grad_norm": 0.11682315170764923, |
|
"learning_rate": 0.00018784355686424876, |
|
"loss": 0.7, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.15999402449955183, |
|
"grad_norm": 0.09597928076982498, |
|
"learning_rate": 0.00018782108689869006, |
|
"loss": 0.621, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.1601434120107559, |
|
"grad_norm": 0.15037846565246582, |
|
"learning_rate": 0.00018779859753206225, |
|
"loss": 0.7951, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.16029279952195996, |
|
"grad_norm": 0.11888540536165237, |
|
"learning_rate": 0.00018777608876933358, |
|
"loss": 0.7986, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.16044218703316404, |
|
"grad_norm": 0.4484766721725464, |
|
"learning_rate": 0.00018775356061547662, |
|
"loss": 1.231, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.1605915745443681, |
|
"grad_norm": 0.13222385942935944, |
|
"learning_rate": 0.00018773101307546815, |
|
"loss": 0.7957, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.16074096205557215, |
|
"grad_norm": 0.18500378727912903, |
|
"learning_rate": 0.00018770844615428932, |
|
"loss": 0.695, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.16089034956677623, |
|
"grad_norm": 0.10332745313644409, |
|
"learning_rate": 0.0001876858598569255, |
|
"loss": 0.556, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.16103973707798028, |
|
"grad_norm": 0.0973210334777832, |
|
"learning_rate": 0.00018766325418836637, |
|
"loss": 0.5246, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.16118912458918436, |
|
"grad_norm": 0.15679813921451569, |
|
"learning_rate": 0.00018764062915360588, |
|
"loss": 0.7182, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.1613385121003884, |
|
"grad_norm": 0.12232697755098343, |
|
"learning_rate": 0.00018761798475764224, |
|
"loss": 0.7503, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.16148789961159246, |
|
"grad_norm": 0.13506333529949188, |
|
"learning_rate": 0.00018759532100547799, |
|
"loss": 0.6708, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.16163728712279654, |
|
"grad_norm": 0.1789216846227646, |
|
"learning_rate": 0.00018757263790211988, |
|
"loss": 1.0662, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.1617866746340006, |
|
"grad_norm": 0.1154765784740448, |
|
"learning_rate": 0.00018754993545257894, |
|
"loss": 0.6931, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.16193606214520467, |
|
"grad_norm": 0.11015768349170685, |
|
"learning_rate": 0.0001875272136618706, |
|
"loss": 0.8244, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.16208544965640873, |
|
"grad_norm": 0.14176665246486664, |
|
"learning_rate": 0.00018750447253501436, |
|
"loss": 0.8434, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.16223483716761278, |
|
"grad_norm": 0.13398586213588715, |
|
"learning_rate": 0.00018748171207703417, |
|
"loss": 0.8307, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.16238422467881686, |
|
"grad_norm": 0.18871843814849854, |
|
"learning_rate": 0.00018745893229295813, |
|
"loss": 0.7193, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.1625336121900209, |
|
"grad_norm": 0.1976425051689148, |
|
"learning_rate": 0.00018743613318781868, |
|
"loss": 0.9341, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.162682999701225, |
|
"grad_norm": 0.12541085481643677, |
|
"learning_rate": 0.00018741331476665246, |
|
"loss": 0.7364, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.16283238721242904, |
|
"grad_norm": 0.3148552179336548, |
|
"learning_rate": 0.00018739047703450048, |
|
"loss": 1.0103, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.1629817747236331, |
|
"grad_norm": 0.24794772267341614, |
|
"learning_rate": 0.00018736761999640792, |
|
"loss": 0.7772, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.16313116223483717, |
|
"grad_norm": 0.12505854666233063, |
|
"learning_rate": 0.00018734474365742428, |
|
"loss": 0.7458, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.16328054974604123, |
|
"grad_norm": 0.13425281643867493, |
|
"learning_rate": 0.0001873218480226033, |
|
"loss": 0.6537, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.1634299372572453, |
|
"grad_norm": 0.29929280281066895, |
|
"learning_rate": 0.00018729893309700295, |
|
"loss": 1.2375, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.16357932476844936, |
|
"grad_norm": 0.13596223294734955, |
|
"learning_rate": 0.00018727599888568555, |
|
"loss": 0.956, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.1637287122796534, |
|
"grad_norm": 0.13480113446712494, |
|
"learning_rate": 0.0001872530453937176, |
|
"loss": 0.8181, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.1638780997908575, |
|
"grad_norm": 0.14178095757961273, |
|
"learning_rate": 0.00018723007262616998, |
|
"loss": 0.7811, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.16402748730206154, |
|
"grad_norm": 0.11537758260965347, |
|
"learning_rate": 0.0001872070805881176, |
|
"loss": 0.7824, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.16417687481326562, |
|
"grad_norm": 0.15772311389446259, |
|
"learning_rate": 0.00018718406928463986, |
|
"loss": 0.679, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.16432626232446967, |
|
"grad_norm": 0.22873377799987793, |
|
"learning_rate": 0.00018716103872082026, |
|
"loss": 0.4674, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.16447564983567373, |
|
"grad_norm": 0.14255951344966888, |
|
"learning_rate": 0.00018713798890174666, |
|
"loss": 0.7014, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.1646250373468778, |
|
"grad_norm": 0.15469695627689362, |
|
"learning_rate": 0.00018711491983251113, |
|
"loss": 0.8959, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.16477442485808186, |
|
"grad_norm": 0.10443451255559921, |
|
"learning_rate": 0.00018709183151820996, |
|
"loss": 0.548, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.16492381236928594, |
|
"grad_norm": 0.10996398329734802, |
|
"learning_rate": 0.00018706872396394376, |
|
"loss": 0.6857, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.16507319988049, |
|
"grad_norm": 0.15236075222492218, |
|
"learning_rate": 0.00018704559717481732, |
|
"loss": 0.796, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.16522258739169404, |
|
"grad_norm": 0.19824904203414917, |
|
"learning_rate": 0.00018702245115593974, |
|
"loss": 0.6438, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.16537197490289812, |
|
"grad_norm": 0.17726042866706848, |
|
"learning_rate": 0.00018699928591242438, |
|
"loss": 0.9564, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.16552136241410217, |
|
"grad_norm": 0.12704910337924957, |
|
"learning_rate": 0.0001869761014493887, |
|
"loss": 0.5796, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.16567074992530625, |
|
"grad_norm": 0.22029517590999603, |
|
"learning_rate": 0.00018695289777195462, |
|
"loss": 0.7932, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.1658201374365103, |
|
"grad_norm": 0.1251213401556015, |
|
"learning_rate": 0.00018692967488524812, |
|
"loss": 0.8287, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.16596952494771436, |
|
"grad_norm": 0.16409267485141754, |
|
"learning_rate": 0.00018690643279439958, |
|
"loss": 0.6792, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.16611891245891844, |
|
"grad_norm": 0.11526962369680405, |
|
"learning_rate": 0.0001868831715045435, |
|
"loss": 0.5228, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.1662682999701225, |
|
"grad_norm": 0.14684948325157166, |
|
"learning_rate": 0.00018685989102081867, |
|
"loss": 0.6461, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.16641768748132657, |
|
"grad_norm": 0.18166372179985046, |
|
"learning_rate": 0.00018683659134836813, |
|
"loss": 0.8579, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.16656707499253062, |
|
"grad_norm": 0.12297386676073074, |
|
"learning_rate": 0.00018681327249233913, |
|
"loss": 0.8628, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.16671646250373467, |
|
"grad_norm": 0.1687268614768982, |
|
"learning_rate": 0.00018678993445788323, |
|
"loss": 0.9859, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.16686585001493875, |
|
"grad_norm": 0.10087472945451736, |
|
"learning_rate": 0.00018676657725015606, |
|
"loss": 0.6073, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.1670152375261428, |
|
"grad_norm": 0.12146350741386414, |
|
"learning_rate": 0.00018674320087431768, |
|
"loss": 0.68, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.1671646250373469, |
|
"grad_norm": 0.13258694112300873, |
|
"learning_rate": 0.0001867198053355323, |
|
"loss": 0.8528, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.16731401254855094, |
|
"grad_norm": 0.15267039835453033, |
|
"learning_rate": 0.00018669639063896836, |
|
"loss": 0.7095, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.167463400059755, |
|
"grad_norm": 0.12942974269390106, |
|
"learning_rate": 0.00018667295678979852, |
|
"loss": 0.8282, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.16761278757095907, |
|
"grad_norm": 0.1521858274936676, |
|
"learning_rate": 0.0001866495037931997, |
|
"loss": 0.8557, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.16776217508216312, |
|
"grad_norm": 0.1155681312084198, |
|
"learning_rate": 0.000186626031654353, |
|
"loss": 0.6572, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.1679115625933672, |
|
"grad_norm": 0.13675054907798767, |
|
"learning_rate": 0.00018660254037844388, |
|
"loss": 0.7218, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.16806095010457126, |
|
"grad_norm": 0.13993573188781738, |
|
"learning_rate": 0.00018657902997066183, |
|
"loss": 0.6353, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.16821033761577533, |
|
"grad_norm": 0.16351041197776794, |
|
"learning_rate": 0.00018655550043620073, |
|
"loss": 1.027, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.1683597251269794, |
|
"grad_norm": 0.2799745500087738, |
|
"learning_rate": 0.00018653195178025864, |
|
"loss": 0.7067, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.16850911263818344, |
|
"grad_norm": 0.19105161726474762, |
|
"learning_rate": 0.0001865083840080378, |
|
"loss": 0.7683, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.16865850014938752, |
|
"grad_norm": 0.22695805132389069, |
|
"learning_rate": 0.00018648479712474468, |
|
"loss": 1.0468, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.16880788766059157, |
|
"grad_norm": 0.17338265478610992, |
|
"learning_rate": 0.00018646119113559006, |
|
"loss": 0.8898, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.16895727517179565, |
|
"grad_norm": 0.12882818281650543, |
|
"learning_rate": 0.00018643756604578885, |
|
"loss": 0.8652, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.1691066626829997, |
|
"grad_norm": 0.2222001552581787, |
|
"learning_rate": 0.00018641392186056016, |
|
"loss": 0.7363, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.16925605019420376, |
|
"grad_norm": 0.17244432866573334, |
|
"learning_rate": 0.00018639025858512741, |
|
"loss": 0.5881, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.16940543770540784, |
|
"grad_norm": 0.1207844614982605, |
|
"learning_rate": 0.0001863665762247182, |
|
"loss": 0.832, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.1695548252166119, |
|
"grad_norm": 0.20044654607772827, |
|
"learning_rate": 0.00018634287478456432, |
|
"loss": 0.9488, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.16970421272781597, |
|
"grad_norm": 0.13231255114078522, |
|
"learning_rate": 0.00018631915426990184, |
|
"loss": 0.5556, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.16985360023902002, |
|
"grad_norm": 0.444367378950119, |
|
"learning_rate": 0.00018629541468597092, |
|
"loss": 1.1058, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.17000298775022407, |
|
"grad_norm": 0.1951618492603302, |
|
"learning_rate": 0.00018627165603801605, |
|
"loss": 0.9483, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.17015237526142815, |
|
"grad_norm": 0.2736690044403076, |
|
"learning_rate": 0.00018624787833128588, |
|
"loss": 0.8482, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.1703017627726322, |
|
"grad_norm": 0.15237697958946228, |
|
"learning_rate": 0.0001862240815710333, |
|
"loss": 0.6764, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.17045115028383628, |
|
"grad_norm": 0.14255677163600922, |
|
"learning_rate": 0.0001862002657625154, |
|
"loss": 0.7176, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.17060053779504034, |
|
"grad_norm": 0.20565101504325867, |
|
"learning_rate": 0.0001861764309109934, |
|
"loss": 1.1075, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.1707499253062444, |
|
"grad_norm": 0.11338746547698975, |
|
"learning_rate": 0.0001861525770217329, |
|
"loss": 0.8231, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.17089931281744847, |
|
"grad_norm": 0.34042519330978394, |
|
"learning_rate": 0.00018612870410000354, |
|
"loss": 1.1565, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.17104870032865252, |
|
"grad_norm": 0.12565311789512634, |
|
"learning_rate": 0.00018610481215107925, |
|
"loss": 0.6115, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.1711980878398566, |
|
"grad_norm": 0.1262931525707245, |
|
"learning_rate": 0.00018608090118023808, |
|
"loss": 0.6307, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.17134747535106065, |
|
"grad_norm": 0.4232707917690277, |
|
"learning_rate": 0.00018605697119276242, |
|
"loss": 0.8655, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.1714968628622647, |
|
"grad_norm": 0.16684143245220184, |
|
"learning_rate": 0.00018603302219393874, |
|
"loss": 0.678, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.17164625037346878, |
|
"grad_norm": 0.17487527430057526, |
|
"learning_rate": 0.00018600905418905776, |
|
"loss": 0.4725, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.17179563788467284, |
|
"grad_norm": 0.1387338787317276, |
|
"learning_rate": 0.0001859850671834144, |
|
"loss": 0.8142, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.17194502539587692, |
|
"grad_norm": 0.15392735600471497, |
|
"learning_rate": 0.00018596106118230779, |
|
"loss": 0.7672, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.17209441290708097, |
|
"grad_norm": 0.251715749502182, |
|
"learning_rate": 0.0001859370361910412, |
|
"loss": 0.742, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.17224380041828502, |
|
"grad_norm": 0.18483877182006836, |
|
"learning_rate": 0.00018591299221492214, |
|
"loss": 0.8683, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.1723931879294891, |
|
"grad_norm": 0.14035139977931976, |
|
"learning_rate": 0.00018588892925926228, |
|
"loss": 0.6233, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.17254257544069315, |
|
"grad_norm": 0.43573567271232605, |
|
"learning_rate": 0.00018586484732937758, |
|
"loss": 0.9892, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.17269196295189723, |
|
"grad_norm": 0.11906327307224274, |
|
"learning_rate": 0.00018584074643058807, |
|
"loss": 0.7038, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.17284135046310128, |
|
"grad_norm": 0.19945280253887177, |
|
"learning_rate": 0.00018581662656821802, |
|
"loss": 0.6789, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.17299073797430534, |
|
"grad_norm": 0.25069642066955566, |
|
"learning_rate": 0.00018579248774759586, |
|
"loss": 0.9036, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.17314012548550942, |
|
"grad_norm": 0.1946730762720108, |
|
"learning_rate": 0.00018576832997405432, |
|
"loss": 0.9371, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.17328951299671347, |
|
"grad_norm": 0.16489769518375397, |
|
"learning_rate": 0.00018574415325293018, |
|
"loss": 0.5388, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.17343890050791755, |
|
"grad_norm": 0.13044323027133942, |
|
"learning_rate": 0.00018571995758956446, |
|
"loss": 0.6756, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.1735882880191216, |
|
"grad_norm": 0.13318844139575958, |
|
"learning_rate": 0.00018569574298930237, |
|
"loss": 0.9302, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.17373767553032565, |
|
"grad_norm": 0.1677417755126953, |
|
"learning_rate": 0.0001856715094574933, |
|
"loss": 0.9213, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.17388706304152973, |
|
"grad_norm": 0.11865703761577606, |
|
"learning_rate": 0.00018564725699949083, |
|
"loss": 0.8194, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.17403645055273378, |
|
"grad_norm": 0.16482964158058167, |
|
"learning_rate": 0.0001856229856206527, |
|
"loss": 0.7889, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.17418583806393786, |
|
"grad_norm": 0.13027256727218628, |
|
"learning_rate": 0.0001855986953263409, |
|
"loss": 0.7222, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.17433522557514192, |
|
"grad_norm": 0.15511631965637207, |
|
"learning_rate": 0.00018557438612192142, |
|
"loss": 1.2247, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.17448461308634597, |
|
"grad_norm": 0.16678637266159058, |
|
"learning_rate": 0.00018555005801276463, |
|
"loss": 0.5846, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.17463400059755005, |
|
"grad_norm": 0.15397848188877106, |
|
"learning_rate": 0.00018552571100424503, |
|
"loss": 0.7163, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.1747833881087541, |
|
"grad_norm": 0.3140554130077362, |
|
"learning_rate": 0.00018550134510174115, |
|
"loss": 1.1924, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.17493277561995818, |
|
"grad_norm": 0.14024198055267334, |
|
"learning_rate": 0.0001854769603106359, |
|
"loss": 0.9647, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.17508216313116223, |
|
"grad_norm": 0.2069367915391922, |
|
"learning_rate": 0.0001854525566363162, |
|
"loss": 0.9767, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.17523155064236628, |
|
"grad_norm": 0.13816118240356445, |
|
"learning_rate": 0.0001854281340841733, |
|
"loss": 0.542, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.17538093815357036, |
|
"grad_norm": 0.14822708070278168, |
|
"learning_rate": 0.00018540369265960242, |
|
"loss": 0.9574, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.17553032566477442, |
|
"grad_norm": 0.1753738522529602, |
|
"learning_rate": 0.00018537923236800315, |
|
"loss": 0.8379, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.1756797131759785, |
|
"grad_norm": 0.13829240202903748, |
|
"learning_rate": 0.00018535475321477906, |
|
"loss": 0.7342, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.17582910068718255, |
|
"grad_norm": 0.15101328492164612, |
|
"learning_rate": 0.00018533025520533805, |
|
"loss": 0.8258, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.1759784881983866, |
|
"grad_norm": 0.4592054784297943, |
|
"learning_rate": 0.00018530573834509215, |
|
"loss": 1.2556, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.17612787570959068, |
|
"grad_norm": 0.1719399243593216, |
|
"learning_rate": 0.00018528120263945744, |
|
"loss": 0.9381, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.17627726322079473, |
|
"grad_norm": 0.11263933032751083, |
|
"learning_rate": 0.0001852566480938543, |
|
"loss": 0.7793, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.1764266507319988, |
|
"grad_norm": 0.16535364091396332, |
|
"learning_rate": 0.00018523207471370716, |
|
"loss": 0.6528, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.17657603824320287, |
|
"grad_norm": 0.1505178064107895, |
|
"learning_rate": 0.00018520748250444474, |
|
"loss": 0.6552, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.17672542575440695, |
|
"grad_norm": 0.12604665756225586, |
|
"learning_rate": 0.00018518287147149983, |
|
"loss": 0.6862, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.176874813265611, |
|
"grad_norm": 0.12405974417924881, |
|
"learning_rate": 0.00018515824162030934, |
|
"loss": 0.7987, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.17702420077681505, |
|
"grad_norm": 0.15863847732543945, |
|
"learning_rate": 0.00018513359295631448, |
|
"loss": 0.7515, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.17717358828801913, |
|
"grad_norm": 0.16431525349617004, |
|
"learning_rate": 0.00018510892548496047, |
|
"loss": 0.6625, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.17732297579922318, |
|
"grad_norm": 0.2458639144897461, |
|
"learning_rate": 0.00018508423921169675, |
|
"loss": 1.3889, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.17747236331042726, |
|
"grad_norm": 0.1566503345966339, |
|
"learning_rate": 0.00018505953414197696, |
|
"loss": 0.8438, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.1776217508216313, |
|
"grad_norm": 0.12780135869979858, |
|
"learning_rate": 0.00018503481028125877, |
|
"loss": 0.9441, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.17777113833283537, |
|
"grad_norm": 0.2947339117527008, |
|
"learning_rate": 0.00018501006763500414, |
|
"loss": 1.2509, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.17792052584403945, |
|
"grad_norm": 0.23375345766544342, |
|
"learning_rate": 0.00018498530620867908, |
|
"loss": 0.6705, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.1780699133552435, |
|
"grad_norm": 0.2791805863380432, |
|
"learning_rate": 0.00018496052600775376, |
|
"loss": 0.9555, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.17821930086644758, |
|
"grad_norm": 0.14220461249351501, |
|
"learning_rate": 0.00018493572703770253, |
|
"loss": 0.8023, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.17836868837765163, |
|
"grad_norm": 0.10689699649810791, |
|
"learning_rate": 0.0001849109093040039, |
|
"loss": 0.3987, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.17851807588885568, |
|
"grad_norm": 0.11420352756977081, |
|
"learning_rate": 0.0001848860728121405, |
|
"loss": 0.6444, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.17866746340005976, |
|
"grad_norm": 0.1247362345457077, |
|
"learning_rate": 0.00018486121756759906, |
|
"loss": 0.8246, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.1788168509112638, |
|
"grad_norm": 0.15353257954120636, |
|
"learning_rate": 0.00018483634357587057, |
|
"loss": 0.5427, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.1789662384224679, |
|
"grad_norm": 0.11908576637506485, |
|
"learning_rate": 0.00018481145084245002, |
|
"loss": 0.671, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.17911562593367195, |
|
"grad_norm": 0.1491960883140564, |
|
"learning_rate": 0.0001847865393728366, |
|
"loss": 0.9276, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.179265013444876, |
|
"grad_norm": 0.16004832088947296, |
|
"learning_rate": 0.00018476160917253373, |
|
"loss": 0.9802, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.17941440095608008, |
|
"grad_norm": 0.20184099674224854, |
|
"learning_rate": 0.00018473666024704883, |
|
"loss": 0.8308, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.17956378846728413, |
|
"grad_norm": 0.16696742177009583, |
|
"learning_rate": 0.0001847116926018935, |
|
"loss": 1.1185, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.1797131759784882, |
|
"grad_norm": 0.1517256200313568, |
|
"learning_rate": 0.00018468670624258353, |
|
"loss": 0.7536, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.17986256348969226, |
|
"grad_norm": 0.16679219901561737, |
|
"learning_rate": 0.0001846617011746388, |
|
"loss": 0.6424, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.18001195100089631, |
|
"grad_norm": 0.15376229584217072, |
|
"learning_rate": 0.0001846366774035833, |
|
"loss": 0.9011, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.1801613385121004, |
|
"grad_norm": 0.13043151795864105, |
|
"learning_rate": 0.00018461163493494517, |
|
"loss": 0.9194, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.18031072602330445, |
|
"grad_norm": 0.15427030622959137, |
|
"learning_rate": 0.0001845865737742567, |
|
"loss": 0.8592, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.18046011353450853, |
|
"grad_norm": 0.4822232127189636, |
|
"learning_rate": 0.0001845614939270543, |
|
"loss": 1.6313, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.18060950104571258, |
|
"grad_norm": 0.15565815567970276, |
|
"learning_rate": 0.00018453639539887853, |
|
"loss": 0.7739, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.18075888855691663, |
|
"grad_norm": 0.11118531227111816, |
|
"learning_rate": 0.00018451127819527402, |
|
"loss": 0.6064, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.1809082760681207, |
|
"grad_norm": 0.14477503299713135, |
|
"learning_rate": 0.0001844861423217896, |
|
"loss": 0.9683, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.18105766357932476, |
|
"grad_norm": 0.1929170936346054, |
|
"learning_rate": 0.00018446098778397807, |
|
"loss": 0.6135, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.18120705109052884, |
|
"grad_norm": 0.13953596353530884, |
|
"learning_rate": 0.0001844358145873966, |
|
"loss": 1.0582, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.1813564386017329, |
|
"grad_norm": 0.1348966360092163, |
|
"learning_rate": 0.00018441062273760628, |
|
"loss": 0.4698, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.18150582611293695, |
|
"grad_norm": 0.1345159113407135, |
|
"learning_rate": 0.0001843854122401724, |
|
"loss": 0.848, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.18165521362414103, |
|
"grad_norm": 0.11462259292602539, |
|
"learning_rate": 0.00018436018310066435, |
|
"loss": 0.6769, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.18180460113534508, |
|
"grad_norm": 0.14895157516002655, |
|
"learning_rate": 0.0001843349353246557, |
|
"loss": 0.5784, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.18195398864654916, |
|
"grad_norm": 0.13859333097934723, |
|
"learning_rate": 0.000184309668917724, |
|
"loss": 0.903, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.1821033761577532, |
|
"grad_norm": 0.11198470741510391, |
|
"learning_rate": 0.00018428438388545107, |
|
"loss": 0.6247, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.18225276366895726, |
|
"grad_norm": 0.17095430195331573, |
|
"learning_rate": 0.0001842590802334227, |
|
"loss": 0.5969, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.18240215118016134, |
|
"grad_norm": 0.33381956815719604, |
|
"learning_rate": 0.00018423375796722895, |
|
"loss": 1.1353, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.1825515386913654, |
|
"grad_norm": 0.11893988400697708, |
|
"learning_rate": 0.00018420841709246383, |
|
"loss": 0.9319, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.18270092620256947, |
|
"grad_norm": 0.11547727882862091, |
|
"learning_rate": 0.00018418305761472566, |
|
"loss": 0.5685, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.18285031371377353, |
|
"grad_norm": 0.18776455521583557, |
|
"learning_rate": 0.0001841576795396166, |
|
"loss": 0.7411, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.18299970122497758, |
|
"grad_norm": 0.28690654039382935, |
|
"learning_rate": 0.00018413228287274315, |
|
"loss": 0.965, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.18314908873618166, |
|
"grad_norm": 0.12270329147577286, |
|
"learning_rate": 0.00018410686761971586, |
|
"loss": 0.6724, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.1832984762473857, |
|
"grad_norm": 0.19342897832393646, |
|
"learning_rate": 0.0001840814337861493, |
|
"loss": 0.8486, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.1834478637585898, |
|
"grad_norm": 0.12105520814657211, |
|
"learning_rate": 0.00018405598137766224, |
|
"loss": 0.9189, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.18359725126979384, |
|
"grad_norm": 0.14361943304538727, |
|
"learning_rate": 0.00018403051039987754, |
|
"loss": 0.4516, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.1837466387809979, |
|
"grad_norm": 0.23831139504909515, |
|
"learning_rate": 0.00018400502085842208, |
|
"loss": 1.2297, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.18389602629220198, |
|
"grad_norm": 0.19276589155197144, |
|
"learning_rate": 0.00018397951275892695, |
|
"loss": 0.6379, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.18404541380340603, |
|
"grad_norm": 0.32586970925331116, |
|
"learning_rate": 0.00018395398610702733, |
|
"loss": 1.0752, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.1841948013146101, |
|
"grad_norm": 0.1523769348859787, |
|
"learning_rate": 0.00018392844090836237, |
|
"loss": 1.0371, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.18434418882581416, |
|
"grad_norm": 0.1339939385652542, |
|
"learning_rate": 0.00018390287716857546, |
|
"loss": 0.5838, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.18449357633701824, |
|
"grad_norm": 0.17885640263557434, |
|
"learning_rate": 0.00018387729489331402, |
|
"loss": 1.0965, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.1846429638482223, |
|
"grad_norm": 0.2260708212852478, |
|
"learning_rate": 0.00018385169408822964, |
|
"loss": 1.0085, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.18479235135942634, |
|
"grad_norm": 0.20628871023654938, |
|
"learning_rate": 0.00018382607475897787, |
|
"loss": 0.4401, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.18494173887063042, |
|
"grad_norm": 0.17405936121940613, |
|
"learning_rate": 0.0001838004369112184, |
|
"loss": 0.5253, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.18509112638183448, |
|
"grad_norm": 0.11227359622716904, |
|
"learning_rate": 0.00018377478055061516, |
|
"loss": 0.6422, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.18524051389303856, |
|
"grad_norm": 0.18113653361797333, |
|
"learning_rate": 0.00018374910568283594, |
|
"loss": 0.5326, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.1853899014042426, |
|
"grad_norm": 0.12269013375043869, |
|
"learning_rate": 0.00018372341231355274, |
|
"loss": 0.6607, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.18553928891544666, |
|
"grad_norm": 0.31763309240341187, |
|
"learning_rate": 0.00018369770044844168, |
|
"loss": 0.8666, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.18568867642665074, |
|
"grad_norm": 0.17516912519931793, |
|
"learning_rate": 0.00018367197009318286, |
|
"loss": 1.0452, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.1858380639378548, |
|
"grad_norm": 0.11332941800355911, |
|
"learning_rate": 0.00018364622125346055, |
|
"loss": 0.4925, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.18598745144905887, |
|
"grad_norm": 0.1446741372346878, |
|
"learning_rate": 0.0001836204539349631, |
|
"loss": 0.6967, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.18613683896026292, |
|
"grad_norm": 0.177836611866951, |
|
"learning_rate": 0.0001835946681433829, |
|
"loss": 0.8671, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.18628622647146698, |
|
"grad_norm": 0.1338125616312027, |
|
"learning_rate": 0.00018356886388441645, |
|
"loss": 0.9787, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.18643561398267106, |
|
"grad_norm": 0.1504998356103897, |
|
"learning_rate": 0.00018354304116376425, |
|
"loss": 0.7123, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.1865850014938751, |
|
"grad_norm": 0.11992106586694717, |
|
"learning_rate": 0.00018351719998713106, |
|
"loss": 0.8016, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.1867343890050792, |
|
"grad_norm": 0.13362877070903778, |
|
"learning_rate": 0.0001834913403602255, |
|
"loss": 0.6951, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.18688377651628324, |
|
"grad_norm": 0.19289879500865936, |
|
"learning_rate": 0.00018346546228876047, |
|
"loss": 0.9931, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.1870331640274873, |
|
"grad_norm": 0.19565518200397491, |
|
"learning_rate": 0.00018343956577845276, |
|
"loss": 1.0326, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.18718255153869137, |
|
"grad_norm": 0.2017303854227066, |
|
"learning_rate": 0.00018341365083502335, |
|
"loss": 0.8296, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.18733193904989542, |
|
"grad_norm": 0.14418242871761322, |
|
"learning_rate": 0.00018338771746419726, |
|
"loss": 0.9156, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.1874813265610995, |
|
"grad_norm": 0.30200129747390747, |
|
"learning_rate": 0.00018336176567170363, |
|
"loss": 0.8995, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.18763071407230356, |
|
"grad_norm": 0.2137260138988495, |
|
"learning_rate": 0.00018333579546327556, |
|
"loss": 0.9876, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.1877801015835076, |
|
"grad_norm": 0.1436002552509308, |
|
"learning_rate": 0.0001833098068446503, |
|
"loss": 0.7803, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.1879294890947117, |
|
"grad_norm": 0.213914155960083, |
|
"learning_rate": 0.00018328379982156915, |
|
"loss": 0.8788, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.18807887660591574, |
|
"grad_norm": 0.18235763907432556, |
|
"learning_rate": 0.00018325777439977747, |
|
"loss": 0.6727, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.18822826411711982, |
|
"grad_norm": 0.184591144323349, |
|
"learning_rate": 0.00018323173058502472, |
|
"loss": 0.7196, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.18837765162832387, |
|
"grad_norm": 0.1435452103614807, |
|
"learning_rate": 0.00018320566838306431, |
|
"loss": 0.9443, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.18852703913952792, |
|
"grad_norm": 0.1862800419330597, |
|
"learning_rate": 0.00018317958779965387, |
|
"loss": 0.704, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.188676426650732, |
|
"grad_norm": 0.1972978413105011, |
|
"learning_rate": 0.000183153488840555, |
|
"loss": 0.7077, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.18882581416193606, |
|
"grad_norm": 0.13390903174877167, |
|
"learning_rate": 0.00018312737151153334, |
|
"loss": 0.658, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.18897520167314014, |
|
"grad_norm": 0.3871189057826996, |
|
"learning_rate": 0.00018310123581835868, |
|
"loss": 1.2051, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.1891245891843442, |
|
"grad_norm": 0.15519294142723083, |
|
"learning_rate": 0.00018307508176680472, |
|
"loss": 0.6735, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.18927397669554824, |
|
"grad_norm": 0.13292644917964935, |
|
"learning_rate": 0.0001830489093626494, |
|
"loss": 0.5655, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.18942336420675232, |
|
"grad_norm": 0.12040957808494568, |
|
"learning_rate": 0.00018302271861167456, |
|
"loss": 0.6574, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.18957275171795637, |
|
"grad_norm": 0.17542392015457153, |
|
"learning_rate": 0.00018299650951966614, |
|
"loss": 0.7045, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.18972213922916045, |
|
"grad_norm": 0.12977173924446106, |
|
"learning_rate": 0.0001829702820924142, |
|
"loss": 0.8453, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.1898715267403645, |
|
"grad_norm": 0.3114042282104492, |
|
"learning_rate": 0.00018294403633571275, |
|
"loss": 0.9561, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.19002091425156856, |
|
"grad_norm": 0.1601576954126358, |
|
"learning_rate": 0.00018291777225535994, |
|
"loss": 0.8407, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.19017030176277264, |
|
"grad_norm": 0.12280754745006561, |
|
"learning_rate": 0.00018289148985715784, |
|
"loss": 0.5095, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.1903196892739767, |
|
"grad_norm": 0.12627717852592468, |
|
"learning_rate": 0.00018286518914691272, |
|
"loss": 0.5383, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.19046907678518077, |
|
"grad_norm": 0.13007701933383942, |
|
"learning_rate": 0.0001828388701304348, |
|
"loss": 0.8532, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.19061846429638482, |
|
"grad_norm": 0.13100433349609375, |
|
"learning_rate": 0.00018281253281353838, |
|
"loss": 0.822, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.19076785180758887, |
|
"grad_norm": 0.1324629783630371, |
|
"learning_rate": 0.0001827861772020418, |
|
"loss": 0.7904, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.19091723931879295, |
|
"grad_norm": 0.13711746037006378, |
|
"learning_rate": 0.00018275980330176737, |
|
"loss": 0.498, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.191066626829997, |
|
"grad_norm": 0.12584207952022552, |
|
"learning_rate": 0.00018273341111854153, |
|
"loss": 1.0128, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.19121601434120108, |
|
"grad_norm": 0.20175109803676605, |
|
"learning_rate": 0.00018270700065819477, |
|
"loss": 0.6671, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.19136540185240514, |
|
"grad_norm": 0.17598234117031097, |
|
"learning_rate": 0.00018268057192656156, |
|
"loss": 1.0968, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.1915147893636092, |
|
"grad_norm": 0.13300736248493195, |
|
"learning_rate": 0.00018265412492948042, |
|
"loss": 0.7158, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.19166417687481327, |
|
"grad_norm": 0.1788729578256607, |
|
"learning_rate": 0.00018262765967279386, |
|
"loss": 0.6676, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.19181356438601732, |
|
"grad_norm": 0.23207572102546692, |
|
"learning_rate": 0.0001826011761623486, |
|
"loss": 0.9164, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.1919629518972214, |
|
"grad_norm": 0.1788550764322281, |
|
"learning_rate": 0.00018257467440399515, |
|
"loss": 1.1415, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.19211233940842545, |
|
"grad_norm": 0.10894573479890823, |
|
"learning_rate": 0.0001825481544035882, |
|
"loss": 0.716, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.1922617269196295, |
|
"grad_norm": 0.12275734543800354, |
|
"learning_rate": 0.00018252161616698646, |
|
"loss": 0.6821, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.19241111443083359, |
|
"grad_norm": 0.12295856326818466, |
|
"learning_rate": 0.00018249505970005262, |
|
"loss": 0.6685, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.19256050194203764, |
|
"grad_norm": 0.25377437472343445, |
|
"learning_rate": 0.00018246848500865347, |
|
"loss": 0.9663, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.19270988945324172, |
|
"grad_norm": 0.12609486281871796, |
|
"learning_rate": 0.00018244189209865974, |
|
"loss": 0.8103, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.19285927696444577, |
|
"grad_norm": 0.18513751029968262, |
|
"learning_rate": 0.0001824152809759462, |
|
"loss": 1.1248, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.19300866447564985, |
|
"grad_norm": 0.13006910681724548, |
|
"learning_rate": 0.00018238865164639173, |
|
"loss": 0.6911, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.1931580519868539, |
|
"grad_norm": 0.14160241186618805, |
|
"learning_rate": 0.00018236200411587915, |
|
"loss": 0.5689, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.19330743949805795, |
|
"grad_norm": 0.17542794346809387, |
|
"learning_rate": 0.0001823353383902953, |
|
"loss": 0.6207, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.19345682700926203, |
|
"grad_norm": 0.251878559589386, |
|
"learning_rate": 0.00018230865447553107, |
|
"loss": 0.9893, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.19360621452046609, |
|
"grad_norm": 0.14598213136196136, |
|
"learning_rate": 0.0001822819523774814, |
|
"loss": 0.823, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.19375560203167017, |
|
"grad_norm": 0.14267736673355103, |
|
"learning_rate": 0.0001822552321020451, |
|
"loss": 0.7624, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.19390498954287422, |
|
"grad_norm": 0.2022227942943573, |
|
"learning_rate": 0.00018222849365512523, |
|
"loss": 0.8711, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.19405437705407827, |
|
"grad_norm": 0.29249998927116394, |
|
"learning_rate": 0.0001822017370426287, |
|
"loss": 0.9054, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.19420376456528235, |
|
"grad_norm": 0.12678225338459015, |
|
"learning_rate": 0.0001821749622704664, |
|
"loss": 0.6689, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.1943531520764864, |
|
"grad_norm": 0.1714026778936386, |
|
"learning_rate": 0.00018214816934455333, |
|
"loss": 0.7199, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.19450253958769048, |
|
"grad_norm": 0.18611545860767365, |
|
"learning_rate": 0.00018212135827080857, |
|
"loss": 0.9119, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.19465192709889453, |
|
"grad_norm": 0.12778407335281372, |
|
"learning_rate": 0.00018209452905515496, |
|
"loss": 0.435, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.19480131461009859, |
|
"grad_norm": 0.3280845880508423, |
|
"learning_rate": 0.00018206768170351962, |
|
"loss": 0.9022, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.19495070212130267, |
|
"grad_norm": 0.13167747855186462, |
|
"learning_rate": 0.0001820408162218335, |
|
"loss": 0.8323, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.19510008963250672, |
|
"grad_norm": 0.14176693558692932, |
|
"learning_rate": 0.0001820139326160316, |
|
"loss": 0.5333, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.1952494771437108, |
|
"grad_norm": 0.20718522369861603, |
|
"learning_rate": 0.00018198703089205293, |
|
"loss": 0.8403, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.19539886465491485, |
|
"grad_norm": 0.15032266080379486, |
|
"learning_rate": 0.00018196011105584058, |
|
"loss": 1.1825, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.1955482521661189, |
|
"grad_norm": 0.13833296298980713, |
|
"learning_rate": 0.0001819331731133415, |
|
"loss": 1.0256, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.19569763967732298, |
|
"grad_norm": 0.15338090062141418, |
|
"learning_rate": 0.00018190621707050671, |
|
"loss": 0.7619, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.19584702718852703, |
|
"grad_norm": 0.15736734867095947, |
|
"learning_rate": 0.00018187924293329124, |
|
"loss": 0.9195, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.19599641469973111, |
|
"grad_norm": 0.1757577806711197, |
|
"learning_rate": 0.0001818522507076541, |
|
"loss": 0.8208, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.19614580221093517, |
|
"grad_norm": 0.16938672959804535, |
|
"learning_rate": 0.00018182524039955832, |
|
"loss": 0.6394, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.19629518972213922, |
|
"grad_norm": 0.11935129016637802, |
|
"learning_rate": 0.00018179821201497092, |
|
"loss": 0.8837, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.1964445772333433, |
|
"grad_norm": 0.1422419250011444, |
|
"learning_rate": 0.00018177116555986283, |
|
"loss": 0.8579, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.19659396474454735, |
|
"grad_norm": 0.16648200154304504, |
|
"learning_rate": 0.0001817441010402091, |
|
"loss": 0.4853, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.19674335225575143, |
|
"grad_norm": 0.16787053644657135, |
|
"learning_rate": 0.00018171701846198866, |
|
"loss": 0.5068, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.19689273976695548, |
|
"grad_norm": 0.13628660142421722, |
|
"learning_rate": 0.00018168991783118452, |
|
"loss": 0.7062, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.19704212727815953, |
|
"grad_norm": 0.11356477439403534, |
|
"learning_rate": 0.00018166279915378364, |
|
"loss": 0.5145, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.19719151478936361, |
|
"grad_norm": 0.1292952299118042, |
|
"learning_rate": 0.00018163566243577697, |
|
"loss": 0.7452, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.19734090230056767, |
|
"grad_norm": 0.17471875250339508, |
|
"learning_rate": 0.00018160850768315941, |
|
"loss": 0.9093, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.19749028981177175, |
|
"grad_norm": 0.1547754555940628, |
|
"learning_rate": 0.0001815813349019299, |
|
"loss": 0.6851, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.1976396773229758, |
|
"grad_norm": 0.11787121742963791, |
|
"learning_rate": 0.00018155414409809132, |
|
"loss": 0.8243, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.19778906483417985, |
|
"grad_norm": 0.13274557888507843, |
|
"learning_rate": 0.00018152693527765057, |
|
"loss": 0.7042, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.19793845234538393, |
|
"grad_norm": 0.1521092653274536, |
|
"learning_rate": 0.00018149970844661849, |
|
"loss": 0.5151, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.19808783985658798, |
|
"grad_norm": 0.13765467703342438, |
|
"learning_rate": 0.0001814724636110099, |
|
"loss": 0.8489, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.19823722736779206, |
|
"grad_norm": 0.26832038164138794, |
|
"learning_rate": 0.00018144520077684369, |
|
"loss": 0.7142, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.19838661487899611, |
|
"grad_norm": 0.10923701524734497, |
|
"learning_rate": 0.00018141791995014255, |
|
"loss": 0.6142, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.19853600239020017, |
|
"grad_norm": 0.1848011165857315, |
|
"learning_rate": 0.00018139062113693333, |
|
"loss": 0.8898, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.19868538990140425, |
|
"grad_norm": 0.12784364819526672, |
|
"learning_rate": 0.00018136330434324674, |
|
"loss": 0.8846, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.1988347774126083, |
|
"grad_norm": 0.13698415458202362, |
|
"learning_rate": 0.00018133596957511748, |
|
"loss": 0.7992, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.19898416492381238, |
|
"grad_norm": 0.16566555202007294, |
|
"learning_rate": 0.00018130861683858426, |
|
"loss": 0.8587, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.19913355243501643, |
|
"grad_norm": 0.12000430375337601, |
|
"learning_rate": 0.0001812812461396897, |
|
"loss": 0.815, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.19928293994622048, |
|
"grad_norm": 0.11932047456502914, |
|
"learning_rate": 0.00018125385748448048, |
|
"loss": 0.8236, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.19943232745742456, |
|
"grad_norm": 0.14856404066085815, |
|
"learning_rate": 0.00018122645087900708, |
|
"loss": 0.5428, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.19958171496862862, |
|
"grad_norm": 0.22421959042549133, |
|
"learning_rate": 0.00018119902632932416, |
|
"loss": 1.4216, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.1997311024798327, |
|
"grad_norm": 0.1441725790500641, |
|
"learning_rate": 0.00018117158384149023, |
|
"loss": 1.0382, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.19988048999103675, |
|
"grad_norm": 0.14890751242637634, |
|
"learning_rate": 0.0001811441234215677, |
|
"loss": 0.6485, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.2000298775022408, |
|
"grad_norm": 0.24321304261684418, |
|
"learning_rate": 0.00018111664507562304, |
|
"loss": 0.8343, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.20017926501344488, |
|
"grad_norm": 0.20097461342811584, |
|
"learning_rate": 0.0001810891488097267, |
|
"loss": 0.9692, |
|
"step": 1340 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 6694, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 670, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.0585302765876019e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|